Bug 650161 - Update pointers to relocated objects r=terrence
backed out by 71a2c9d84759
author      Jon Coppeard <jcoppeard@mozilla.com>
Tue, 05 Jun 2012 11:47:42 -0700
changeset 221135 3adf62f886d9015e01bf33b65c8f81b05906209b
parent 221134 a6b5fcc90664de6e84e9d95ea547be28c20211fa
child 221136 d4fca818c9a8e19c27c2a7db4954e09273e0a1a8
push id     3979
push user   raliiev@mozilla.com
push date   Mon, 13 Oct 2014 16:35:44 +0000
treeherder  mozilla-beta@30f2cc610691
reviewers   terrence
bugs        650161
milestone   34.0a1
Bug 650161 - Update pointers to relocated objects r=terrence
js/public/HashTable.h
js/src/builtin/TypedObject.cpp
js/src/builtin/TypedObject.h
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.h
js/src/jsinfer.cpp
js/src/jsiter.cpp
js/src/jspropertytree.cpp
js/src/jspropertytree.h
js/src/jsproxy.cpp
js/src/jsscript.cpp
js/src/jswrapper.cpp
js/src/vm/ArrayBufferObject.cpp
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/Runtime.h
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/Shape-inl.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
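
Compacting GC relocates tenured cells and leaves a forwarding pointer behind at each cell's old address. Until every reference has been fixed up, code that can run during the compacting phase must check for such forwarding pointers before dereferencing, which is what the IsForwarded()/Forwarded()/MaybeForwarded() helpers and the various fixup*AfterMovingGC() methods in this patch do. The following is a minimal sketch of that pattern; the Cell layout and the UpdatePointerIfRelocated() helper are simplified illustrations for this note, not the actual SpiderMonkey definitions (the real helpers live in js/src/jsgc.h and use the GC's relocation machinery):

    // Sketch only: simplified stand-ins for the forwarding helpers this patch
    // adds to js/src/jsgc.h. A relocated cell really records its new address in
    // GC-internal state; a plain member is used here purely for illustration.
    struct Cell {
        Cell *relocatedTo = nullptr;   // non-null once the cell has been moved
        bool isForwarded() const { return relocatedTo != nullptr; }
    };

    template <typename T> inline bool IsForwarded(T *t) { return t->isForwarded(); }
    template <typename T> inline T *Forwarded(T *t) { return static_cast<T *>(t->relocatedTo); }
    template <typename T> inline T *MaybeForwarded(T *t) { return IsForwarded(t) ? Forwarded(t) : t; }

    // Updating a raw pointer field during the compacting phase: follow the
    // forwarding pointer if (and only if) the referent has been relocated.
    inline void UpdatePointerIfRelocated(Cell **thingp) {
        if (*thingp && IsForwarded(*thingp))
            *thingp = Forwarded(*thingp);
    }

Hash tables whose keys may have moved need the same check but must also re-insert the entry under its new address: the rekeyFront() enumerator method (see the HashTable.h hunk below) is what the new fixupNewTypeObjectTable(), fixupInitialShapeTable() and Shape::fixupShapeTreeAfterMovingGC() methods use to rekey each forwarded entry in place.
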
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -954,16 +954,17 @@ class HashTable : private AllocPolicy
             this->mutationCount = table_.mutationCount;
 #endif
         }
 
         // Removes the |front()| element and re-inserts it into the table with
         // a new key at the new Lookup position.  |front()| is invalid after
         // this operation until the next call to |popFront()|.
         void rekeyFront(const Lookup &l, const Key &k) {
+            JS_ASSERT(&k != &HashPolicy::getKey(this->cur->get()));
             Ptr p(*this->cur, table_);
             table_.rekeyWithoutRehash(p, l, k);
             rekeyed = true;
 #ifdef JS_DEBUG
             this->validEntry = false;
             this->mutationCount = table_.mutationCount;
 #endif
         }
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -1121,16 +1121,24 @@ StructMetaTypeDescr::construct(JSContext
 }
 
 size_t
 StructTypeDescr::fieldCount() const
 {
     return getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_NAMES).toObject().getDenseInitializedLength();
 }
 
+size_t
+StructTypeDescr::maybeForwardedFieldCount() const
+{
+    JSObject *fieldNames =
+        MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_NAMES).toObject());
+    return fieldNames->getDenseInitializedLength();
+}
+
 bool
 StructTypeDescr::fieldIndex(jsid id, size_t *out) const
 {
     JSObject &fieldNames = getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_NAMES).toObject();
     size_t l = fieldNames.getDenseInitializedLength();
     for (size_t i = 0; i < l; i++) {
         JSAtom &a = fieldNames.getDenseElement(i).toString()->asAtom();
         if (JSID_IS_ATOM(id, &a)) {
@@ -1152,25 +1160,43 @@ size_t
 StructTypeDescr::fieldOffset(size_t index) const
 {
     JSObject &fieldOffsets =
         getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_OFFSETS).toObject();
     JS_ASSERT(index < fieldOffsets.getDenseInitializedLength());
     return SafeCast<size_t>(fieldOffsets.getDenseElement(index).toInt32());
 }
 
+size_t
+StructTypeDescr::maybeForwardedFieldOffset(size_t index) const
+{
+    JSObject &fieldOffsets =
+        *MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_OFFSETS).toObject());
+    JS_ASSERT(index < fieldOffsets.getDenseInitializedLength());
+    return SafeCast<size_t>(fieldOffsets.getDenseElement(index).toInt32());
+}
+
 SizedTypeDescr&
 StructTypeDescr::fieldDescr(size_t index) const
 {
     JSObject &fieldDescrs =
         getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_TYPES).toObject();
     JS_ASSERT(index < fieldDescrs.getDenseInitializedLength());
     return fieldDescrs.getDenseElement(index).toObject().as<SizedTypeDescr>();
 }
 
+SizedTypeDescr&
+StructTypeDescr::maybeForwardedFieldDescr(size_t index) const
+{
+    JSObject &fieldDescrs =
+        *MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_TYPES).toObject());
+    JS_ASSERT(index < fieldDescrs.getDenseInitializedLength());
+    return fieldDescrs.getDenseElement(index).toObject().as<SizedTypeDescr>();
+}
+
 /******************************************************************************
  * Creating the TypedObject "module"
  *
  * We create one global, `TypedObject`, which contains the following
  * members:
  *
  * 1. uint8, uint16, etc
  * 2. ArrayType
@@ -1625,17 +1651,21 @@ ReportTypedObjTypeError(JSContext *cx,
 
 /*static*/ void
 TypedObject::obj_trace(JSTracer *trace, JSObject *object)
 {
     ArrayBufferViewObject::trace(trace, object);
 
     JS_ASSERT(object->is<TypedObject>());
     TypedObject &typedObj = object->as<TypedObject>();
-    TypeDescr &descr = typedObj.typeDescr();
+
+    // When this is called for compacting GC, the related objects we touch here
+    // may not have had their slots updated yet.
+    TypeDescr &descr = typedObj.maybeForwardedTypeDescr();
+
     if (descr.opaque()) {
         uint8_t *mem = typedObj.typedMem();
         if (!mem)
             return; // partially constructed
 
         if (typedObj.owner().isNeutered())
             return;
 
@@ -3087,35 +3117,35 @@ visitReferences(SizedTypeDescr &descr,
 
       case type::Reference:
         visitor.visitReference(descr.as<ReferenceTypeDescr>(), mem);
         return;
 
       case type::SizedArray:
       {
         SizedArrayTypeDescr &arrayDescr = descr.as<SizedArrayTypeDescr>();
-        SizedTypeDescr &elementDescr = arrayDescr.elementType();
+        SizedTypeDescr &elementDescr = arrayDescr.maybeForwardedElementType();
         for (int32_t i = 0; i < arrayDescr.length(); i++) {
             visitReferences(elementDescr, mem, visitor);
             mem += elementDescr.size();
         }
         return;
       }
 
       case type::UnsizedArray:
       {
         MOZ_CRASH("Only Sized Type representations");
       }
 
       case type::Struct:
       {
         StructTypeDescr &structDescr = descr.as<StructTypeDescr>();
-        for (size_t i = 0; i < structDescr.fieldCount(); i++) {
-            SizedTypeDescr &descr = structDescr.fieldDescr(i);
-            size_t offset = structDescr.fieldOffset(i);
+        for (size_t i = 0; i < structDescr.maybeForwardedFieldCount(); i++) {
+            SizedTypeDescr &descr = structDescr.maybeForwardedFieldDescr(i);
+            size_t offset = structDescr.maybeForwardedFieldOffset(i);
             visitReferences(descr, mem + offset, visitor);
         }
         return;
       }
     }
 
     MOZ_CRASH("Invalid type repr kind");
 }
--- a/js/src/builtin/TypedObject.h
+++ b/js/src/builtin/TypedObject.h
@@ -164,16 +164,20 @@ class TypedProto : public JSObject
     static const Class class_;
 
     inline void initTypeDescrSlot(TypeDescr &descr);
 
     TypeDescr &typeDescr() const {
         return getReservedSlot(JS_TYPROTO_SLOT_DESCR).toObject().as<TypeDescr>();
     }
 
+    TypeDescr &maybeForwardedTypeDescr() const {
+        return MaybeForwarded(&getReservedSlot(JS_TYPROTO_SLOT_DESCR).toObject())->as<TypeDescr>();
+    }
+
     inline type::Kind kind() const;
 };
 
 class TypeDescr : public JSObject
 {
   public:
     // This is *intentionally* not defined so as to produce link
     // errors if a is<FooTypeDescr>() etc goes wrong. Otherwise, the
@@ -448,16 +452,21 @@ class SizedArrayTypeDescr : public Compl
   public:
     static const Class class_;
     static const type::Kind Kind = type::SizedArray;
 
     SizedTypeDescr &elementType() const {
         return getReservedSlot(JS_DESCR_SLOT_ARRAY_ELEM_TYPE).toObject().as<SizedTypeDescr>();
     }
 
+    SizedTypeDescr &maybeForwardedElementType() const {
+        JSObject *elemType = &getReservedSlot(JS_DESCR_SLOT_ARRAY_ELEM_TYPE).toObject();
+        return MaybeForwarded(elemType)->as<SizedTypeDescr>();
+    }
+
     int32_t length() const {
         return getReservedSlot(JS_DESCR_SLOT_SIZED_ARRAY_LENGTH).toInt32();
     }
 };
 
 /*
  * Properties and methods of the `StructType` meta type object. There
  * is no `class_` field because `StructType` is just a native
@@ -487,29 +496,32 @@ class StructMetaTypeDescr : public JSObj
 
 class StructTypeDescr : public ComplexTypeDescr
 {
   public:
     static const Class class_;
 
     // Returns the number of fields defined in this struct.
     size_t fieldCount() const;
+    size_t maybeForwardedFieldCount() const;
 
     // Set `*out` to the index of the field named `id` and returns true,
     // or return false if no such field exists.
     bool fieldIndex(jsid id, size_t *out) const;
 
     // Return the name of the field at index `index`.
     JSAtom &fieldName(size_t index) const;
 
     // Return the type descr of the field at index `index`.
     SizedTypeDescr &fieldDescr(size_t index) const;
+    SizedTypeDescr &maybeForwardedFieldDescr(size_t index) const;
 
     // Return the offset of the field at index `index`.
     size_t fieldOffset(size_t index) const;
+    size_t maybeForwardedFieldOffset(size_t index) const;
 };
 
 typedef Handle<StructTypeDescr*> HandleStructTypeDescr;
 
 /*
  * This object exists in order to encapsulate the typed object types
  * somewhat, rather than sticking them all into the global object.
  * Eventually it will go away and become a module.
@@ -673,20 +685,28 @@ class TypedObject : public ArrayBufferVi
     ArrayBufferObject &owner() const {
         return getReservedSlot(JS_BUFVIEW_SLOT_OWNER).toObject().as<ArrayBufferObject>();
     }
 
     TypedProto &typedProto() const {
         return getProto()->as<TypedProto>();
     }
 
+    TypedProto &maybeForwardedTypedProto() const {
+        return MaybeForwarded(getProto())->as<TypedProto>();
+    }
+
     TypeDescr &typeDescr() const {
         return typedProto().typeDescr();
     }
 
+    TypeDescr &maybeForwardedTypeDescr() const {
+        return maybeForwardedTypedProto().maybeForwardedTypeDescr();
+    }
+
     uint8_t *typedMem() const {
         return (uint8_t*) getPrivate();
     }
 
     int32_t length() const {
         return getReservedSlot(JS_BUFVIEW_SLOT_LENGTH).toInt32();
     }
 
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -257,16 +257,21 @@ class GCRuntime
 
     void setParameter(JSGCParamKey key, uint32_t value);
     uint32_t getParameter(JSGCParamKey key);
 
     bool isHeapBusy() { return heapState != js::Idle; }
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
     bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
+#ifdef JSGC_COMPACTING
+    bool isHeapCompacting() { return isHeapMajorCollecting() && state() == COMPACT; }
+#else
+    bool isHeapCompacting() { return false; }
+#endif
 
     // Performance note: if isFJMinorCollecting turns out to be slow because
     // reading the counter is slow then we may be able to augment the counter
     // with a volatile flag that is set iff the counter is greater than
     // zero. (It will require some care to make sure the two variables stay in
     // sync.)
     bool isFJMinorCollecting() { return fjCollectionCounter > 0; }
     void incFJMinorCollecting() { fjCollectionCounter++; }
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -159,16 +159,20 @@ CheckMarkedThing(JSTracer *trc, T **thin
 {
 #ifdef DEBUG
     JS_ASSERT(trc);
     JS_ASSERT(thingp);
 
     T *thing = *thingp;
     JS_ASSERT(*thingp);
 
+#ifdef JSGC_COMPACTING
+    thing = MaybeForwarded(thing);
+#endif
+
 # ifdef JSGC_FJGENERATIONAL
     /*
      * The code below (runtimeFromMainThread(), etc) makes assumptions
      * not valid for the ForkJoin worker threads during ForkJoin GGC,
      * so just bail.
      */
     if (ForkJoinContext::current())
         return;
@@ -437,16 +441,20 @@ IsMarked(T **thingp)
             Nursery &nursery = rt->gc.nursery;
             return nursery.getForwardedPointer(thingp);
         }
     }
 #endif  // JSGC_GENERATIONAL
     Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
+#ifdef JSGC_COMPACTING
+    if (zone->isGCCompacting() && IsForwarded(*thingp))
+        *thingp = Forwarded(*thingp);
+#endif
     return (*thingp)->isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
@@ -475,51 +483,70 @@ IsAboutToBeFinalized(T **thingp)
         if (rt->isHeapMinorCollecting()) {
             if (IsInsideNursery(thing))
                 return !nursery.getForwardedPointer(thingp);
             return false;
         }
     }
 #endif  // JSGC_GENERATIONAL
 
-    if (!thing->tenuredZone()->isGCSweeping())
-        return false;
+    Zone *zone = thing->tenuredZone();
+    if (zone->isGCSweeping()) {
+        /*
+         * We should return false for things that have been allocated during
+         * incremental sweeping, but this possibility doesn't occur at the moment
+         * because this function is only called at the very start of the sweeping a
+         * compartment group and during minor gc. Rather than do the extra check,
+         * we just assert that it's not necessary.
+         */
+        JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
 
-    /*
-     * We should return false for things that have been allocated during
-     * incremental sweeping, but this possibility doesn't occur at the moment
-     * because this function is only called at the very start of the sweeping a
-     * compartment group and during minor gc. Rather than do the extra check,
-     * we just assert that it's not necessary.
-     */
-    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
+        return !thing->isMarked();
+    }
+#ifdef JSGC_COMPACTING
+    else if (zone->isGCCompacting() && IsForwarded(thing)) {
+        *thingp = Forwarded(thing);
+        return false;
+    }
+#endif
 
-    return !thing->isMarked();
+    return false;
 }
 
 template <typename T>
 T *
 UpdateIfRelocated(JSRuntime *rt, T **thingp)
 {
     JS_ASSERT(thingp);
+    if (!*thingp)
+        return nullptr;
+
 #ifdef JSGC_GENERATIONAL
+
 #ifdef JSGC_FJGENERATIONAL
-    if (*thingp && rt->isFJMinorCollecting()) {
+    if (rt->isFJMinorCollecting()) {
         ForkJoinContext *ctx = ForkJoinContext::current();
         ForkJoinNursery &nursery = ctx->nursery();
         if (nursery.isInsideFromspace(*thingp))
             nursery.getForwardedPointer(thingp);
+        return *thingp;
     }
-    else
 #endif
-    {
-        if (*thingp && rt->isHeapMinorCollecting() && IsInsideNursery(*thingp))
-            rt->gc.nursery.getForwardedPointer(thingp);
+
+    if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
+        rt->gc.nursery.getForwardedPointer(thingp);
+        return *thingp;
     }
 #endif  // JSGC_GENERATIONAL
+
+#ifdef JSGC_COMPACTING
+    Zone *zone = (*thingp)->tenuredZone();
+    if (zone->isGCCompacting() && IsForwarded(*thingp))
+        *thingp = Forwarded(*thingp);
+#endif
     return *thingp;
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, BarrieredBase<type*> *thing, const char *name)                          \
 {                                                                                                 \
     Mark<type>(trc, thing, name);                                                                 \
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -130,35 +130,36 @@ Zone::sweepBreakpoints(FreeOp *fop)
     /*
      * Sweep all compartments in a zone at the same time, since there is no way
      * to iterate over the scripts belonging to a single compartment in a zone.
      */
 
     gcstats::AutoPhase ap1(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES);
     gcstats::AutoPhase ap2(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
 
-    JS_ASSERT(isGCSweeping());
+    JS_ASSERT(isGCSweepingOrCompacting());
     for (ZoneCellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        JS_ASSERT(script->zone()->isGCSweeping());
+        JS_ASSERT_IF(isGCSweeping(), script->zone()->isGCSweeping());
         if (!script->hasAnyBreakpointsOrStepMode())
             continue;
 
         bool scriptGone = IsScriptAboutToBeFinalized(&script);
         JS_ASSERT(script == i.get<JSScript>());
         for (unsigned i = 0; i < script->length(); i++) {
             BreakpointSite *site = script->getBreakpointSite(script->offsetToPC(i));
             if (!site)
                 continue;
 
             Breakpoint *nextbp;
             for (Breakpoint *bp = site->firstBreakpoint(); bp; bp = nextbp) {
                 nextbp = bp->nextInSite();
                 HeapPtrObject &dbgobj = bp->debugger->toJSObjectRef();
-                JS_ASSERT_IF(dbgobj->zone()->isCollecting(), dbgobj->zone()->isGCSweeping());
+                JS_ASSERT_IF(isGCSweeping() && dbgobj->zone()->isCollecting(),
+                             dbgobj->zone()->isGCSweeping());
                 bool dying = scriptGone || IsObjectAboutToBeFinalized(&dbgobj);
                 JS_ASSERT_IF(!dying, !IsAboutToBeFinalized(&bp->getHandlerRef()));
                 if (dying)
                     bp->destroy(fop);
             }
         }
     }
 }
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -170,17 +170,18 @@ struct Zone : public JS::shadow::Zone,
 
     bool canCollect();
 
     enum GCState {
         NoGC,
         Mark,
         MarkGray,
         Sweep,
-        Finished
+        Finished,
+        Compact
     };
     void setGCState(GCState state) {
         JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
         JS_ASSERT_IF(state != NoGC, canCollect());
         gcState_ = state;
     }
 
     bool isCollecting() const {
@@ -188,31 +189,34 @@ struct Zone : public JS::shadow::Zone,
             return gcState_ != NoGC;
         else
             return needsIncrementalBarrier();
     }
 
     // If this returns true, all object tracing must be done with a GC marking
     // tracer.
     bool requireGCTracer() const {
-        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState_ != NoGC;
+        JSRuntime *rt = runtimeFromMainThread();
+        return rt->isHeapMajorCollecting() && !rt->isHeapCompacting() && gcState_ != NoGC;
     }
 
     bool isGCMarking() {
         if (runtimeFromMainThread()->isHeapCollecting())
             return gcState_ == Mark || gcState_ == MarkGray;
         else
             return needsIncrementalBarrier();
     }
 
     bool wasGCStarted() const { return gcState_ != NoGC; }
     bool isGCMarkingBlack() { return gcState_ == Mark; }
     bool isGCMarkingGray() { return gcState_ == MarkGray; }
     bool isGCSweeping() { return gcState_ == Sweep; }
     bool isGCFinished() { return gcState_ == Finished; }
+    bool isGCCompacting() { return gcState_ == Compact; }
+    bool isGCSweepingOrCompacting() { return gcState_ == Sweep || gcState_ == Compact; }
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
     uint64_t gcNumber();
 
     bool compileBarriers() const { return compileBarriers(needsIncrementalBarrier()); }
     bool compileBarriers(bool needsIncrementalBarrier) const {
         return needsIncrementalBarrier ||
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -82,32 +82,37 @@ js::AutoCycleDetector::~AutoCycleDetecto
             cx->cycleDetectorSet.remove(obj);
     }
 }
 
 void
 js::TraceCycleDetectionSet(JSTracer *trc, js::ObjectSet &set)
 {
     for (js::ObjectSet::Enum e(set); !e.empty(); e.popFront()) {
-        JSObject *prior = e.front();
-        MarkObjectRoot(trc, const_cast<JSObject **>(&e.front()), "cycle detector table entry");
-        if (prior != e.front())
-            e.rekeyFront(e.front());
+        JSObject *key = e.front();
+        trc->setTracingLocation((void *)&e.front());
+        MarkObjectRoot(trc, &key, "cycle detector table entry");
+        if (key != e.front())
+            e.rekeyFront(key);
     }
 }
 
 void
 JSCompartment::sweepCallsiteClones()
 {
     if (callsiteClones.initialized()) {
         for (CallsiteCloneTable::Enum e(callsiteClones); !e.empty(); e.popFront()) {
             CallsiteCloneKey key = e.front().key();
-            JSFunction *fun = e.front().value();
-            if (!IsScriptMarked(&key.script) || !IsObjectMarked(&fun))
+            if (IsObjectAboutToBeFinalized(&key.original) || IsScriptAboutToBeFinalized(&key.script) ||
+                IsObjectAboutToBeFinalized(e.front().value().unsafeGet()))
+            {
                 e.removeFront();
+            } else if (key != e.front().key()) {
+                e.rekeyFront(key);
+            }
         }
     }
 }
 
 JSFunction *
 js::ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
                                     JSScript *script, jsbytecode *pc)
 {
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -44,25 +44,39 @@ struct CallsiteCloneKey {
     /* The script of the call. */
     JSScript *script;
 
     /* The offset of the call. */
     uint32_t offset;
 
     CallsiteCloneKey(JSFunction *f, JSScript *s, uint32_t o) : original(f), script(s), offset(o) {}
 
+    bool operator==(const CallsiteCloneKey& other) {
+        return original == other.original && script == other.script && offset == other.offset;
+    }
+
+    bool operator!=(const CallsiteCloneKey& other) {
+        return !(*this == other);
+    }
+
     typedef CallsiteCloneKey Lookup;
 
     static inline uint32_t hash(CallsiteCloneKey key) {
         return uint32_t(size_t(key.script->offsetToPC(key.offset)) ^ size_t(key.original));
     }
 
     static inline bool match(const CallsiteCloneKey &a, const CallsiteCloneKey &b) {
         return a.script == b.script && a.offset == b.offset && a.original == b.original;
     }
+
+    static void rekey(CallsiteCloneKey &k, const CallsiteCloneKey &newKey) {
+        k.original = newKey.original;
+        k.script = newKey.script;
+        k.offset = newKey.offset;
+    }
 };
 
 typedef HashMap<CallsiteCloneKey,
                 ReadBarrieredFunction,
                 CallsiteCloneKey,
                 SystemAllocPolicy> CallsiteCloneTable;
 
 JSFunction *
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -580,18 +580,22 @@ JSCompartment::sweep(FreeOp *fop, bool r
 
     {
         gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES);
 
         /* Remove dead references held weakly by the compartment. */
 
         sweepBaseShapeTable();
         sweepInitialShapeTable();
-        sweepNewTypeObjectTable(newTypeObjects);
-        sweepNewTypeObjectTable(lazyTypeObjects);
+        {
+            gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
+                                  gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
+            sweepNewTypeObjectTable(newTypeObjects);
+            sweepNewTypeObjectTable(lazyTypeObjects);
+        }
         sweepCallsiteClones();
         savedStacks_.sweep(rt);
 
         if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
             global_.set(nullptr);
 
         if (selfHostingScriptSource &&
             IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
@@ -651,16 +655,69 @@ JSCompartment::sweepCrossCompartmentWrap
         } else if (key.wrapped != e.front().key().wrapped ||
                    key.debugger != e.front().key().debugger)
         {
             e.rekeyFront(key);
         }
     }
 }
 
+#ifdef JSGC_COMPACTING
+
+/*
+ * Fixup wrappers with moved keys or values.
+ */
+void
+JSCompartment::fixupCrossCompartmentWrappers(JSTracer *trc)
+{
+    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
+        Value val = e.front().value();
+        if (IsForwarded(val)) {
+            val = Forwarded(val);
+            e.front().value().set(val);
+        }
+
+        // CrossCompartmentKey's hash does not depend on the debugger object,
+        // so update it but do not rekey if it changes
+        CrossCompartmentKey key = e.front().key();
+        if (key.debugger)
+            key.debugger = MaybeForwarded(key.debugger);
+        if (key.wrapped && IsForwarded(key.wrapped)) {
+            key.wrapped = Forwarded(key.wrapped);
+            e.rekeyFront(key, key);
+        }
+
+        if (!zone()->isCollecting() && val.isObject()) {
+            // Call the trace hook to update any pointers to relocated things.
+            JSObject *obj = &val.toObject();
+            const Class *clasp = obj->getClass();
+            if (clasp->trace)
+                clasp->trace(trc, obj);
+        }
+    }
+}
+
+void JSCompartment::fixupAfterMovingGC()
+{
+    fixupGlobal();
+    fixupNewTypeObjectTable(newTypeObjects);
+    fixupNewTypeObjectTable(lazyTypeObjects);
+    fixupInitialShapeTable();
+}
+
+void
+JSCompartment::fixupGlobal()
+{
+    GlobalObject *global = *global_.unsafeGet();
+    if (global)
+        global_.set(MaybeForwarded(global));
+}
+
+#endif // JSGC_COMPACTING
+
 void
 JSCompartment::purge()
 {
     dtoaCache.purge();
 }
 
 void
 JSCompartment::clearTables()
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -342,16 +342,24 @@ struct JSCompartment
     void trace(JSTracer *trc);
     void markRoots(JSTracer *trc);
     bool isDiscardingJitCode(JSTracer *trc);
     void sweep(js::FreeOp *fop, bool releaseTypes);
     void sweepCrossCompartmentWrappers();
     void purge();
     void clearTables();
 
+#ifdef JSGC_COMPACTING
+    void fixupInitialShapeTable();
+    void fixupNewTypeObjectTable(js::types::TypeObjectWithNewScriptSet &table);
+    void fixupCrossCompartmentWrappers(JSTracer *trc);
+    void fixupAfterMovingGC();
+    void fixupGlobal();
+#endif
+
     bool hasObjectMetadataCallback() const { return objectMetadataCallback; }
     void setObjectMetadataCallback(js::ObjectMetadataCallback callback);
     void forgetObjectMetadataCallback() {
         objectMetadataCallback = nullptr;
     }
     bool callObjectMetadataCallback(JSContext *cx, JSObject **obj) const {
         return objectMetadataCallback(cx, obj);
     }
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -42,17 +42,19 @@ namespace jit {
 
 namespace gc {
 
 enum State {
     NO_INCREMENTAL,
     MARK_ROOTS,
     MARK,
     SWEEP,
-    INVALID
+#ifdef JSGC_COMPACTING
+    COMPACT
+#endif
 };
 
 static inline JSGCTraceKind
 MapAllocToTraceKind(AllocKind kind)
 {
     static const JSGCTraceKind map[] = {
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
@@ -1270,16 +1272,17 @@ inline T
 MaybeForwarded(T t)
 {
     return IsForwarded(t) ? Forwarded(t) : t;
 }
 
 #else
 
 template <typename T> inline bool IsForwarded(T t) { return false; }
+template <typename T> inline T Forwarded(T t) { return t; }
 template <typename T> inline T MaybeForwarded(T t) { return t; }
 
 #endif // JSGC_COMPACTING
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 template <typename T>
 inline void
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -3435,22 +3435,23 @@ types::TypeMonitorCallSlow(JSContext *cx
         TypeScript::SetArgument(cx, script, arg, args[arg]);
 
     /* Watch for fewer actuals than formals to the call. */
     for (; arg < nargs; arg++)
         TypeScript::SetArgument(cx, script, arg, UndefinedValue());
 }
 
 static inline bool
-IsAboutToBeFinalized(TypeObjectKey *key)
+IsAboutToBeFinalized(TypeObjectKey **keyp)
 {
     /* Mask out the low bit indicating whether this is a type or JS object. */
-    gc::Cell *tmp = reinterpret_cast<gc::Cell *>(uintptr_t(key) & ~1);
+    uintptr_t flagBit = uintptr_t(*keyp) & 1;
+    gc::Cell *tmp = reinterpret_cast<gc::Cell *>(uintptr_t(*keyp) & ~1);
     bool isAboutToBeFinalized = IsCellAboutToBeFinalized(&tmp);
-    JS_ASSERT(tmp == reinterpret_cast<gc::Cell *>(uintptr_t(key) & ~1));
+    *keyp = reinterpret_cast<TypeObjectKey *>(uintptr_t(tmp) | flagBit);
     return isAboutToBeFinalized;
 }
 
 void
 types::FillBytecodeTypeMap(JSScript *script, uint32_t *bytecodeMap)
 {
     uint32_t added = 0;
     for (jsbytecode *pc = script->code(); pc < script->codeEnd(); pc += GetBytecodeLength(pc)) {
@@ -3938,17 +3939,17 @@ ConstraintTypeSet::sweep(Zone *zone, boo
     if (objectCount >= 2) {
         unsigned oldCapacity = HashSetCapacity(objectCount);
         TypeObjectKey **oldArray = objectSet;
 
         clearObjects();
         objectCount = 0;
         for (unsigned i = 0; i < oldCapacity; i++) {
             TypeObjectKey *object = oldArray[i];
-            if (object && !IsAboutToBeFinalized(object)) {
+            if (object && !IsAboutToBeFinalized(&object)) {
                 TypeObjectKey **pentry =
                     HashSetInsert<TypeObjectKey *,TypeObjectKey,TypeObjectKey>
                         (zone->types.typeLifoAlloc, objectSet, objectCount, object);
                 if (pentry) {
                     *pentry = object;
                 } else {
                     *oom = true;
                     flags |= TYPE_FLAG_ANYOBJECT;
@@ -3956,19 +3957,21 @@ ConstraintTypeSet::sweep(Zone *zone, boo
                     objectCount = 0;
                     break;
                 }
             }
         }
         setBaseObjectCount(objectCount);
     } else if (objectCount == 1) {
         TypeObjectKey *object = (TypeObjectKey *) objectSet;
-        if (IsAboutToBeFinalized(object)) {
+        if (IsAboutToBeFinalized(&object)) {
             objectSet = nullptr;
             setBaseObjectCount(0);
+        } else {
+            objectSet = reinterpret_cast<TypeObjectKey **>(object);
         }
     }
 
     /*
      * Type constraints only hold weak references. Copy constraints referring
      * to data that is still live into the zone's new arena.
      */
     TypeConstraint *constraint = constraintList;
@@ -4172,36 +4175,69 @@ TypeCompartment::sweep(FreeOp *fop)
                 e.rekeyFront(key);
         }
     }
 }
 
 void
 JSCompartment::sweepNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
 {
-    gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
-                          gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
-
-    JS_ASSERT(zone()->isGCSweeping());
+    JS_ASSERT(zone()->isCollecting());
     if (table.initialized()) {
         for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObjectWithNewScriptEntry entry = e.front();
-            if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet())) {
-                e.removeFront();
-            } else if (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)) {
+            if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet()) ||
+                (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)))
+            {
                 e.removeFront();
-            } else if (entry.object.unbarrieredGet() != e.front().object.unbarrieredGet()) {
+            } else {
+                /* Any rekeying necessary is handled by fixupNewTypeObjectTable() below. */
+                JS_ASSERT(entry.object == e.front().object);
+                JS_ASSERT(entry.newFunction == e.front().newFunction);
+            }
+        }
+    }
+}
+
+#ifdef JSGC_COMPACTING
+void
+JSCompartment::fixupNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
+{
+    /*
+     * Each entry's hash depends on the object's prototype and we can't tell
+     * whether that has been moved or not in sweepNewTypeObjectTable().
+     */
+    JS_ASSERT(zone()->isCollecting());
+    if (table.initialized()) {
+        for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
+            TypeObjectWithNewScriptEntry entry = e.front();
+            bool needRekey = false;
+            if (IsForwarded(entry.object.get())) {
+                entry.object.set(Forwarded(entry.object.get()));
+                needRekey = true;
+            }
+            TaggedProto proto = entry.object->proto();
+            if (proto.isObject() && IsForwarded(proto.toObject())) {
+                proto = TaggedProto(Forwarded(proto.toObject()));
+                needRekey = true;
+            }
+            if (entry.newFunction && IsForwarded(entry.newFunction)) {
+                entry.newFunction = Forwarded(entry.newFunction);
+                needRekey = true;
+            }
+            if (needRekey) {
                 TypeObjectWithNewScriptSet::Lookup lookup(entry.object->clasp(),
-                                                          entry.object->proto(),
+                                                          proto,
                                                           entry.newFunction);
                 e.rekeyFront(lookup, entry);
             }
         }
     }
 }
+#endif
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 void
 JSCompartment::checkNewTypeObjectTablesAfterMovingGC()
 {
     checkNewTypeObjectTableAfterMovingGC(newTypeObjects);
     checkNewTypeObjectTableAfterMovingGC(lazyTypeObjects);
@@ -4240,17 +4276,17 @@ TypeCompartment::~TypeCompartment()
     js_delete(objectTypeTable);
     js_delete(allocationSiteTable);
 }
 
 /* static */ void
 TypeScript::Sweep(FreeOp *fop, JSScript *script, bool *oom)
 {
     JSCompartment *compartment = script->compartment();
-    JS_ASSERT(compartment->zone()->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweepingOrCompacting());
 
     unsigned num = NumTypeSets(script);
     StackTypeSet *typeArray = script->types->typeArray();
 
     /* Remove constraints and references to dead objects from the persistent type sets. */
     for (unsigned i = 0; i < num; i++)
         typeArray[i].sweep(compartment->zone(), oom);
 }
@@ -4319,17 +4355,17 @@ TypeZone::~TypeZone()
 {
     js_delete(compilerOutputs);
     js_delete(pendingRecompiles);
 }
 
 void
 TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
 {
-    JS_ASSERT(zone()->isGCSweeping());
+    JS_ASSERT(zone()->isGCSweepingOrCompacting());
 
     JSRuntime *rt = fop->runtime();
 
     /*
      * Clear the analysis pool, but don't release its data yet. While
      * sweeping types any live data will be allocated into the pool.
      */
     LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1566,16 +1566,18 @@ FinalizeGenerator(FreeOp *fop, JSObject 
         JS_POISON(gen->fp, JS_SWEPT_FRAME_PATTERN, sizeof(InterpreterFrame));
     JS_POISON(gen, JS_SWEPT_FRAME_PATTERN, sizeof(JSGenerator));
     fop->free_(gen);
 }
 
 static void
 MarkGeneratorFrame(JSTracer *trc, JSGenerator *gen)
 {
+    gen->obj = MaybeForwarded(gen->obj.get());
+    MarkObject(trc, &gen->obj, "Generator Object");
     MarkValueRange(trc,
                    HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
                    HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
                    "Generator Floating Args");
     gen->fp->mark(trc);
     MarkValueRange(trc,
                    HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
                    HeapValueify(gen->regs.sp),
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -12,16 +12,17 @@
 
 #include "vm/Shape.h"
 
 #include "jsgcinlines.h"
 
 #include "vm/Shape-inl.h"
 
 using namespace js;
+using namespace js::gc;
 
 inline HashNumber
 ShapeHasher::hash(const Lookup &l)
 {
     return l.hash();
 }
 
 inline bool
@@ -263,16 +264,86 @@ Shape::sweep()
 
 void
 Shape::finalize(FreeOp *fop)
 {
     if (!inDictionary() && kids.isHash())
         fop->delete_(kids.toHash());
 }
 
+#ifdef JSGC_COMPACTING
+
+void
+Shape::fixupDictionaryShapeAfterMovingGC()
+{
+    if (!listp)
+        return;
+
+    JS_ASSERT(!IsInsideNursery(reinterpret_cast<Cell *>(listp)));
+    AllocKind kind = reinterpret_cast<Cell *>(listp)->tenuredGetAllocKind();
+    JS_ASSERT(kind == FINALIZE_SHAPE || kind <= FINALIZE_OBJECT_LAST);
+    if (kind == FINALIZE_SHAPE) {
+        // listp points to the parent field of the next shape.
+        Shape *next = reinterpret_cast<Shape *>(uintptr_t(listp) -
+                                                offsetof(Shape, parent));
+        listp = &gc::MaybeForwarded(next)->parent;
+    } else {
+        // listp points to the shape_ field of an object.
+        JSObject *last = reinterpret_cast<JSObject *>(uintptr_t(listp) -
+                                                      offsetof(JSObject, shape_));
+        listp = &gc::MaybeForwarded(last)->shape_;
+    }
+}
+
+void
+Shape::fixupShapeTreeAfterMovingGC()
+{
+    if (kids.isNull())
+        return;
+
+    if (kids.isShape()) {
+        if (gc::IsForwarded(kids.toShape()))
+            kids.setShape(gc::Forwarded(kids.toShape()));
+        return;
+    }
+
+    JS_ASSERT(kids.isHash());
+    KidsHash *kh = kids.toHash();
+    for (KidsHash::Enum e(*kh); !e.empty(); e.popFront()) {
+        Shape *key = e.front();
+        if (!IsForwarded(key))
+            continue;
+
+        key = Forwarded(key);
+        BaseShape *base = key->base();
+        if (IsForwarded(base))
+            base = Forwarded(base);
+        UnownedBaseShape *unowned = base->unowned();
+        if (IsForwarded(unowned))
+            unowned = Forwarded(unowned);
+        StackShape lookup(unowned,
+                          const_cast<Shape *>(key)->propidRef(),
+                          key->slotInfo & Shape::SLOT_MASK,
+                          key->attrs,
+                          key->flags);
+        e.rekeyFront(lookup, key);
+    }
+}
+
+void
+Shape::fixupAfterMovingGC()
+{
+    if (inDictionary())
+        fixupDictionaryShapeAfterMovingGC();
+    else
+        fixupShapeTreeAfterMovingGC();
+}
+
+#endif // JSGC_COMPACTING
+
 #ifdef DEBUG
 
 void
 KidsPointer::checkConsistency(Shape *aKid) const
 {
     if (isShape()) {
         JS_ASSERT(toShape() == aKid);
     } else {
--- a/js/src/jspropertytree.h
+++ b/js/src/jspropertytree.h
@@ -12,17 +12,17 @@
 
 #include "js/HashTable.h"
 
 namespace js {
 
 class Shape;
 struct StackShape;
 
-struct ShapeHasher {
+struct ShapeHasher : public DefaultHasher<Shape *> {
     typedef Shape *Key;
     typedef StackShape Lookup;
 
     static inline HashNumber hash(const Lookup &l);
     static inline bool match(Key k, const Lookup &l);
 };
 
 typedef HashSet<Shape *, ShapeHasher, SystemAllocPolicy> KidsHash;
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -2829,24 +2829,25 @@ js::proxy_Trace(JSTracer *trc, JSObject 
 
 /* static */ void
 ProxyObject::trace(JSTracer *trc, JSObject *obj)
 {
     ProxyObject *proxy = &obj->as<ProxyObject>();
 
 #ifdef DEBUG
     if (trc->runtime()->gc.isStrictProxyCheckingEnabled() && proxy->is<WrapperObject>()) {
-        JSObject *referent = &proxy->private_().toObject();
+        JSObject *referent = MaybeForwarded(&proxy->private_().toObject());
         if (referent->compartment() != proxy->compartment()) {
             /*
              * Assert that this proxy is tracked in the wrapper map. We maintain
              * the invariant that the wrapped object is the key in the wrapper map.
              */
             Value key = ObjectValue(*referent);
             WrapperMap::Ptr p = proxy->compartment()->lookupWrapper(key);
+            JS_ASSERT(p);
             JS_ASSERT(*p->value().unsafeGet() == ObjectValue(*proxy));
         }
     }
 #endif
 
     // Note: If you add new slots here, make sure to change
     // nuke() to cope.
     MarkCrossCompartmentSlot(trc, obj, proxy->slotOfPrivate(), "private");
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -3334,17 +3334,17 @@ JSScript::markChildren(JSTracer *trc)
     }
 
     if (hasConsts()) {
         ConstArray *constarray = consts();
         MarkValueRange(trc, constarray->length, constarray->vector, "consts");
     }
 
     if (sourceObject()) {
-        JS_ASSERT(sourceObject()->compartment() == compartment());
+        JS_ASSERT(MaybeForwarded(sourceObject())->compartment() == compartment());
         MarkObject(trc, &sourceObject_, "sourceObject");
     }
 
     if (functionNonDelazifying())
         MarkObject(trc, &function_, "function");
 
     if (enclosingScopeOrOriginalFunction_)
         MarkObject(trc, &enclosingScopeOrOriginalFunction_, "enclosing");
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -83,16 +83,21 @@ js::UncheckedUnwrap(JSObject *wrapped, b
     while (true) {
         if (!wrapped->is<WrapperObject>() ||
             MOZ_UNLIKELY(stopAtOuter && wrapped->getClass()->ext.innerObject))
         {
             break;
         }
         flags |= Wrapper::wrapperHandler(wrapped)->flags();
         wrapped = wrapped->as<ProxyObject>().private_().toObjectOrNull();
+
+        // This can be called from DirectProxyHandler::weakmapKeyDelegate() on a
+        // wrapper whose referent has been moved while it is still unmarked.
+        if (wrapped)
+            wrapped = MaybeForwarded(wrapped);
     }
     if (flagsp)
         *flagsp = flags;
     return wrapped;
 }
 
 JS_FRIEND_API(JSObject *)
 js::CheckedUnwrap(JSObject *obj, bool stopAtOuter)
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -821,19 +821,20 @@ ArrayBufferObject::finalize(FreeOp *fop,
 
     if (buffer.ownsData())
         buffer.releaseData(fop);
 }
 
 /* static */ void
 ArrayBufferObject::obj_trace(JSTracer *trc, JSObject *obj)
 {
-    if (!IS_GC_MARKING_TRACER(trc) && !trc->runtime()->isHeapMinorCollecting()
+    JSRuntime *rt = trc->runtime();
+    if (!IS_GC_MARKING_TRACER(trc) && !rt->isHeapMinorCollecting() && !rt->isHeapCompacting()
 #ifdef JSGC_FJGENERATIONAL
-        && !trc->runtime()->isFJMinorCollecting()
+        && !rt->isFJMinorCollecting()
 #endif
         )
     {
         return;
     }
 
     // ArrayBufferObjects need to maintain a list of possibly-weak pointers to
     // their views. The straightforward way to update the weak pointers would
@@ -847,25 +848,26 @@ ArrayBufferObject::obj_trace(JSTracer *t
     // multiple views are collected into a linked list during collection, and
     // then swept to prune out their dead views.
 
     ArrayBufferObject &buffer = AsArrayBuffer(obj);
     ArrayBufferViewObject *viewsHead = buffer.viewList();
     if (!viewsHead)
         return;
 
-    buffer.setViewList(UpdateObjectIfRelocated(trc->runtime(), &viewsHead));
+    ArrayBufferViewObject *tmp = viewsHead;
+    buffer.setViewList(UpdateObjectIfRelocated(rt, &tmp));
 
-    if (viewsHead->nextView() == nullptr) {
+    if (tmp->nextView() == nullptr) {
         // Single view: mark it, but only if we're actually doing a GC pass
         // right now. Otherwise, the tracing pass for barrier verification will
         // fail if we add another view and the pointer becomes weak.
         MarkObjectUnbarriered(trc, &viewsHead, "arraybuffer.singleview");
         buffer.setViewListNoBarrier(viewsHead);
-    } else {
+    } else if (!rt->isHeapCompacting()) {
         // Multiple views: do not mark, but append buffer to list.
         ArrayBufferVector &gcLiveArrayBuffers = buffer.compartment()->gcLiveArrayBuffers;
 
         // obj_trace may be called multiple times before sweep(), so avoid
         // adding this buffer to the list multiple times.
         if (buffer.inLiveList()) {
 #ifdef DEBUG
             bool found = false;
@@ -873,16 +875,29 @@ ArrayBufferObject::obj_trace(JSTracer *t
                 found |= gcLiveArrayBuffers[i] == &buffer;
             JS_ASSERT(found);
 #endif
         } else if (gcLiveArrayBuffers.append(&buffer)) {
             buffer.setInLiveList(true);
         } else {
             CrashAtUnhandlableOOM("OOM while updating live array buffers");
         }
+    } else {
+        // If we're fixing up pointers after compacting then trace everything.
+        ArrayBufferViewObject *prev = nullptr;
+        ArrayBufferViewObject *view = viewsHead;
+        while (view) {
+            JS_ASSERT(buffer.compartment() == MaybeForwarded(view)->compartment());
+            MarkObjectUnbarriered(trc, &view, "arraybuffer.singleview");
+            if (prev)
+                prev->setNextView(view);
+            else
+                buffer.setViewListNoBarrier(view);
+            view = view->nextView();
+        }
     }
 }
 
 /* static */ void
 ArrayBufferObject::sweep(JSCompartment *compartment)
 {
     JSRuntime *rt = compartment->runtimeFromMainThread();
     ArrayBufferVector &gcLiveArrayBuffers = compartment->gcLiveArrayBuffers;
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1702,17 +1702,17 @@ Debugger::trace(JSTracer *trc)
      * corresponding JS frames are still on the stack.
      *
      * (Once we support generator frames properly, we will need
      * weakly-referenced Debugger.Frame objects as well, for suspended generator
      * frames.)
      */
     for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
         RelocatablePtrObject &frameobj = r.front().value();
-        JS_ASSERT(frameobj->getPrivate());
+        JS_ASSERT(MaybeForwarded(frameobj.get())->getPrivate());
         MarkObject(trc, &frameobj, "live Debugger.Frame");
     }
 
     /*
      * Mark every allocation site in our allocation log.
      */
     for (AllocationSite *s = allocationsLog.getFirst(); s; s = s->getNext())
         MarkObject(trc, &s->frame, "allocation log SavedFrame");
@@ -1754,19 +1754,19 @@ Debugger::sweepAll(FreeOp *fop)
     for (gc::GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
         /* For each debuggee being GC'd, detach it from all its debuggers. */
         GlobalObjectSet &debuggees = comp->getDebuggees();
         for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
             GlobalObject *global = e.front();
             if (IsObjectAboutToBeFinalized(&global)) {
                 // See infallibility note above.
                 detachAllDebuggersFromGlobal(fop, global, &e);
+            } else if (global != e.front()) {
+                e.rekeyFront(global);
             }
-            else if (global != e.front())
-                e.rekeyFront(global);
         }
     }
 }
 
 void
 Debugger::detachAllDebuggersFromGlobal(FreeOp *fop, GlobalObject *global,
                                        GlobalObjectSet::Enum *compartmentEnum)
 {
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -122,16 +122,19 @@ class DebuggerWeakMap : private WeakMap<
   private:
     /* Override sweep method to also update our edge cache. */
     void sweep() {
         for (Enum e(*static_cast<Base *>(this)); !e.empty(); e.popFront()) {
             Key k(e.front().key());
             if (gc::IsAboutToBeFinalized(&k)) {
                 e.removeFront();
                 decZoneCount(k->zone());
+            } else {
+                // markKeys() should have done any necessary relocation.
+                JS_ASSERT(k == e.front().key());
             }
         }
         Base::assertEntriesNotAboutToBeFinalized();
     }
 
     bool incZoneCount(JS::Zone *zone) {
         CountMap::Ptr p = zoneCounts.lookupWithDefault(zone, 0);
         if (!p)
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -932,23 +932,24 @@ struct JSRuntime : public JS::shadow::Ru
      * duty (in debug builds) to verify that it matches the cx being used.
      */
     JSContext          *activeContext;
 #endif
 
     /* Garbage collector state, used by jsgc.c. */
     js::gc::GCRuntime   gc;
 
-    /* Garbase collector state has been sucessfully initialized. */
+    /* Garbage collector state has been sucessfully initialized. */
     bool                gcInitialized;
 
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
+    bool isHeapCompacting() { return gc.isHeapCompacting(); }
 
     bool isFJMinorCollecting() { return gc.isFJMinorCollecting(); }
 
     int gcZeal() { return gc.zeal(); }
 
     void lockGC() {
         assertCanLock(js::GCLock);
         gc.lockGC();
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -19,16 +19,17 @@
 
 #include "jsatominlines.h"
 #include "jsobjinlines.h"
 #include "jsscriptinlines.h"
 
 #include "vm/Stack-inl.h"
 
 using namespace js;
+using namespace js::gc;
 using namespace js::types;
 
 using mozilla::PodZero;
 
 typedef Rooted<ArgumentsObject *> RootedArgumentsObject;
 typedef MutableHandle<ArgumentsObject *> MutableHandleArgumentsObject;
 
 /*****************************************************************************/
@@ -1779,30 +1780,43 @@ DebugScopes::sweep(JSRuntime *rt)
              * means that marks are only conservative approximations of
              * liveness; we should assume that anything could be marked.
              *
              * Thus, we must explicitly remove the entries from both liveScopes
              * and missingScopes here.
              */
             liveScopes.remove(&(*debugScope)->scope());
             e.removeFront();
+        } else {
+            ScopeIterKey key = e.front().key();
+            bool needsUpdate = false;
+            if (IsForwarded(key.cur())) {
+                key.updateCur(js::gc::Forwarded(key.cur()));
+                needsUpdate = true;
+            }
+            if (IsForwarded(key.staticScope())) {
+                key.updateStaticScope(Forwarded(key.staticScope()));
+                needsUpdate = true;
+            }
+            if (needsUpdate)
+                e.rekeyFront(key);
         }
     }
 
     for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
         ScopeObject *scope = e.front().key();
 
         /*
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
          * no longer reachable via its DebugScopeObject.
          */
-        if (IsObjectAboutToBeFinalized(&scope)) {
+        if (IsObjectAboutToBeFinalized(&scope))
             e.removeFront();
-            continue;
-        }
+        else if (scope != e.front().key())
+            e.rekeyFront(scope);
     }
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
 DebugScopes::checkHashTablesAfterMovingGC(JSRuntime *runtime)
 {
     /*
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -699,16 +699,19 @@ class ScopeIterKey
     AbstractFramePtr frame() const { return frame_; }
     JSObject *cur() const { return cur_; }
     NestedScopeObject *staticScope() const { return staticScope_; }
     ScopeIter::Type type() const { return type_; }
     bool hasScopeObject() const { return hasScopeObject_; }
     JSObject *enclosingScope() const { return cur_; }
     JSObject *&enclosingScope() { return cur_; }
 
+    void updateCur(JSObject *obj) { cur_ = obj; }
+    void updateStaticScope(NestedScopeObject *obj) { staticScope_ = obj; }
+
     /* For use as hash policy */
     typedef ScopeIterKey Lookup;
     static HashNumber hash(ScopeIterKey si);
     static bool match(ScopeIterKey si1, ScopeIterKey si2);
     bool operator!=(const ScopeIterKey &other) const {
         return frame_ != other.frame_ ||
                cur_ != other.cur_ ||
                staticScope_ != other.staticScope_ ||
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -215,11 +215,20 @@ GetShapeAttributes(JSObject *obj, Shape 
         if (obj->is<TypedArrayObject>())
             return JSPROP_ENUMERATE | JSPROP_PERMANENT;
         return JSPROP_ENUMERATE;
     }
 
     return shape->attributes();
 }
 
+#ifdef JSGC_COMPACTING
+inline void
+BaseShape::fixupAfterMovingGC()
+{
+    if (hasTable())
+        table().fixupAfterMovingGC();
+}
+#endif
+
 } /* namespace js */
 
 #endif /* vm_Shape_inl_h */
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -243,16 +243,30 @@ ShapeTable::search(jsid id, bool adding)
 #endif
         }
     }
 
     /* NOTREACHED */
     return nullptr;
 }
 
+#ifdef JSGC_COMPACTING
+void
+ShapeTable::fixupAfterMovingGC()
+{
+    int log2 = HASH_BITS - hashShift;
+    uint32_t size = JS_BIT(log2);
+    for (HashNumber i = 0; i < size; i++) {
+        Shape *shape = SHAPE_FETCH(&entries[i]);
+        if (shape && IsForwarded(shape))
+            SHAPE_STORE_PRESERVING_COLLISION(&entries[i], Forwarded(shape));
+    }
+}
+#endif
+
 bool
 ShapeTable::change(int log2Delta, ThreadSafeContext *cx)
 {
     JS_ASSERT(entries);
 
     /*
      * Grow, shrink, or compress by changing this->entries.
      */
@@ -424,16 +438,21 @@ JSObject::lookupChildProperty(ThreadSafe
     return shape;
 }
 
 bool
 js::ObjectImpl::toDictionaryMode(ThreadSafeContext *cx)
 {
     JS_ASSERT(!inDictionaryMode());
 
+#ifdef JSGC_COMPACTING
+    // TODO: This crashes if we run a compacting GC here.
+    js::gc::AutoSuppressGC nogc(zone()->runtimeFromAnyThread());
+#endif
+
     /* We allocate the shapes from cx->compartment(), so make sure it's right. */
     JS_ASSERT(cx->isInsideCurrentCompartment(this));
 
     /*
      * This function is thread safe as long as the object is thread local. It
      * does not modify the shared shapes, and only allocates newly allocated
      * (and thus also thread local) shapes.
      */
@@ -1524,18 +1543,23 @@ void
 JSCompartment::sweepBaseShapeTable()
 {
     gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
                           gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
 
     if (baseShapes.initialized()) {
         for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
             UnownedBaseShape *base = e.front().unbarrieredGet();
-            if (IsBaseShapeAboutToBeFinalized(&base))
+            if (IsBaseShapeAboutToBeFinalized(&base)) {
                 e.removeFront();
+            } else if (base != e.front()) {
+                StackBaseShape sbase(base);
+                ReadBarriered<UnownedBaseShape *> b(base);
+                e.rekeyFront(&sbase, b);
+            }
         }
     }
 }
 
 void
 BaseShape::finalize(FreeOp *fop)
 {
     if (table_) {
@@ -1818,17 +1842,19 @@ JSCompartment::sweepInitialShapeTable()
     gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
                           gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
 
     if (initialShapes.initialized()) {
         for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
             const InitialShapeEntry &entry = e.front();
             Shape *shape = entry.shape.unbarrieredGet();
             JSObject *proto = entry.proto.raw();
-            if (IsShapeAboutToBeFinalized(&shape) || (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto))) {
+            if (IsShapeAboutToBeFinalized(&shape) ||
+                (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto)))
+            {
                 e.removeFront();
             } else {
 #ifdef DEBUG
                 DebugOnly<JSObject *> parent = shape->getObjectParent();
                 JS_ASSERT(!parent || !IsObjectAboutToBeFinalized(&parent));
                 JS_ASSERT(parent == shape->getObjectParent());
 #endif
                 if (shape != entry.shape.unbarrieredGet() || proto != entry.proto.raw()) {
@@ -1836,16 +1862,57 @@ JSCompartment::sweepInitialShapeTable()
                     InitialShapeEntry newKey(readBarrieredShape, TaggedProto(proto));
                     e.rekeyFront(newKey.getLookup(), newKey);
                 }
             }
         }
     }
 }
 
+#ifdef JSGC_COMPACTING
+void
+JSCompartment::fixupInitialShapeTable()
+{
+    if (!initialShapes.initialized())
+        return;
+
+    for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
+        InitialShapeEntry entry = e.front();
+        bool needRekey = false;
+        if (IsForwarded(entry.shape.get())) {
+            entry.shape.set(Forwarded(entry.shape.get()));
+            needRekey = true;
+        }
+        if (entry.proto.isObject() && IsForwarded(entry.proto.toObject())) {
+            entry.proto = TaggedProto(Forwarded(entry.proto.toObject()));
+            needRekey = true;
+        }
+        JSObject *parent = entry.shape->getObjectParent();
+        if (parent) {
+            parent = MaybeForwarded(parent);
+            needRekey = true;
+        }
+        JSObject *metadata = entry.shape->getObjectMetadata();
+        if (metadata) {
+            metadata = MaybeForwarded(metadata);
+            needRekey = true;
+        }
+        if (needRekey) {
+            InitialShapeEntry::Lookup relookup(entry.shape->getObjectClass(),
+                                               entry.proto,
+                                               parent,
+                                               metadata,
+                                               entry.shape->numFixedSlots(),
+                                               entry.shape->getObjectFlags());
+            e.rekeyFront(relookup, entry);
+        }
+    }
+}
+#endif // JSGC_COMPACTING
+
 void
 AutoRooterGetterSetter::Inner::trace(JSTracer *trc)
 {
     if ((attrs & JSPROP_GETTER) && *pgetter)
         gc::MarkObjectRoot(trc, (JSObject**) pgetter, "AutoRooterGetterSetter getter");
     if ((attrs & JSPROP_SETTER) && *psetter)
         gc::MarkObjectRoot(trc, (JSObject**) psetter, "AutoRooterGetterSetter setter");
 }
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -185,16 +185,21 @@ struct ShapeTable {
     /*
      * NB: init and change are fallible but do not report OOM, so callers can
      * cope or ignore. They do however use the context's calloc method in
      * order to update the malloc counter on success.
      */
     bool            init(ThreadSafeContext *cx, Shape *lastProp);
     bool            change(int log2Delta, ThreadSafeContext *cx);
     Shape           **search(jsid id, bool adding);
+
+#ifdef JSGC_COMPACTING
+    /* Update entries whose shapes have been moved */
+    void            fixupAfterMovingGC();
+#endif
 };
 
 /*
  * Reuse the API-only JSPROP_INDEX attribute to mean shadowability.
  */
 #define JSPROP_SHADOWABLE       JSPROP_INDEX
 
 /*
@@ -500,16 +505,20 @@ class BaseShape : public gc::BarrieredCe
 
         if (parent)
             gc::MarkObject(trc, &parent, "parent");
 
         if (metadata)
             gc::MarkObject(trc, &metadata, "metadata");
     }
 
+#ifdef JSGC_COMPACTING
+    void fixupAfterMovingGC();
+#endif
+
   private:
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(BaseShape, clasp_) == offsetof(js::shadow::BaseShape, clasp_));
     }
 };
 
 class UnownedBaseShape : public BaseShape {};
 
@@ -545,17 +554,17 @@ BaseShape::toUnowned()
 
 UnownedBaseShape*
 BaseShape::baseUnowned()
 {
     JS_ASSERT(isOwned() && unowned_); return unowned_;
 }
 
 /* Entries for the per-compartment baseShapes set of unowned base shapes. */
-struct StackBaseShape
+struct StackBaseShape : public DefaultHasher<ReadBarrieredUnownedBaseShape>
 {
     typedef const StackBaseShape *Lookup;
 
     uint32_t flags;
     const Class *clasp;
     JSObject *parent;
     JSObject *metadata;
     PropertyOp rawGetter;
@@ -1023,20 +1032,29 @@ class Shape : public gc::BarrieredCell<S
         gc::MarkId(trc, &propidRef(), "propid");
         if (parent)
             MarkShape(trc, &parent, "parent");
     }
 
     inline Shape *search(ExclusiveContext *cx, jsid id);
     inline Shape *searchLinear(jsid id);
 
+#ifdef JSGC_COMPACTING
+    void fixupAfterMovingGC();
+#endif
+
     /* For JIT usage */
     static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
 
   private:
+#ifdef JSGC_COMPACTING
+    void fixupDictionaryShapeAfterMovingGC();
+    void fixupShapeTreeAfterMovingGC();
+#endif
+
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));
         JS_STATIC_ASSERT(offsetof(Shape, slotInfo) == offsetof(js::shadow::Shape, slotInfo));
         JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Shape::FIXED_SLOTS_SHIFT);
         static_assert(js::shadow::Object::MAX_FIXED_SLOTS <= FIXED_SLOTS_MAX,
                       "verify numFixedSlots() bitfield is big enough");
     }
 };