Bug 641027 - Add snapshot-at-the-beginning write barriers for incremental GC (r=luke,bhackett)
author: Bill McCloskey <wmccloskey@mozilla.com>
Tue, 25 Oct 2011 16:07:42 -0700
changeset 80153 d4bd0f9bece82af8a3026e35bb5676ea7c2d9fb5
parent 80152 0b200d3bd4082e97ed9c0cf7415400b34a5dce7b
child 80154 65dcb557b3f6752817db6e7e9d50043e4afb5ace
push id: 323
push user: rcampbell@mozilla.com
push date: Tue, 15 Nov 2011 21:58:36 +0000
treeherder: fx-team@3ea216303184
reviewers: luke, bhackett
bugs: 641027
milestone: 11.0a1
configure.in
content/html/content/src/nsHTMLCanvasElement.cpp
js/public/HashTable.h
js/src/Makefile.in
js/src/configure.in
js/src/frontend/BytecodeEmitter-inl.h
js/src/frontend/BytecodeEmitter.cpp
js/src/frontend/BytecodeEmitter.h
js/src/frontend/Parser.cpp
js/src/frontend/SemanticAnalysis.cpp
js/src/gc/Barrier-inl.h
js/src/gc/Barrier.h
js/src/jsapi-tests/testArgumentsObject.cpp
js/src/jsapi-tests/testBug604087.cpp
js/src/jsapi-tests/testConservativeGC.cpp
js/src/jsapi-tests/testIndexToString.cpp
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jsarray.cpp
js/src/jsarray.h
js/src/jsarrayinlines.h
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jscell.h
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsexn.cpp
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsfuninlines.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsgcmark.cpp
js/src/jsgcmark.h
js/src/jsgcstats.cpp
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsinferinlines.h
js/src/jsinterp.cpp
js/src/jsiter.cpp
js/src/jsiter.h
js/src/jslock.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jsprobes.cpp
js/src/jspropertytree.cpp
js/src/jsproxy.cpp
js/src/jsprvtd.h
js/src/jsscope.cpp
js/src/jsscope.h
js/src/jsscopeinlines.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsscriptinlines.h
js/src/jstracer.cpp
js/src/jstracer.h
js/src/jstypedarray.cpp
js/src/jstypedarray.h
js/src/jswatchpoint.cpp
js/src/jswatchpoint.h
js/src/jsweakmap.cpp
js/src/jsweakmap.h
js/src/jswrapper.cpp
js/src/jsxdrapi.cpp
js/src/jsxml.cpp
js/src/jsxml.h
js/src/methodjit/BaseAssembler.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastBuiltins.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.h
js/src/methodjit/LoopState.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/NunboxAssembler.h
js/src/methodjit/PolyIC.cpp
js/src/methodjit/PolyIC.h
js/src/methodjit/PunboxAssembler.h
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCalls.h
js/src/shell/jsheaptools.cpp
js/src/tracejit/Writer.cpp
js/src/tracejit/Writer.h
js/src/vm/ArgumentsObject-inl.h
js/src/vm/ArgumentsObject.h
js/src/vm/CallObject-inl.h
js/src/vm/CallObject.cpp
js/src/vm/CallObject.h
js/src/vm/Debugger.cpp
js/src/vm/Debugger.h
js/src/vm/GlobalObject-inl.h
js/src/vm/GlobalObject.cpp
js/src/vm/GlobalObject.h
js/src/vm/RegExpObject-inl.h
js/src/vm/RegExpObject.cpp
js/src/vm/RegExpObject.h
js/src/vm/RegExpStatics-inl.h
js/src/vm/RegExpStatics.cpp
js/src/vm/RegExpStatics.h
js/src/vm/String-inl.h
js/src/vm/String.cpp
js/src/vm/String.h
js/xpconnect/src/XPCWrappedNative.cpp
js/xpconnect/src/XPCWrappedNativeProto.cpp
js/xpconnect/src/XPCWrappedNativeScope.cpp
js/xpconnect/src/dombindings.cpp
js/xpconnect/src/xpcprivate.h
--- a/configure.in
+++ b/configure.in
@@ -7223,16 +7223,27 @@ MOZ_ARG_ENABLE_BOOL(tracevis,
 [  --enable-tracevis       Enable TraceVis tracing tool (default=no)],
     MOZ_TRACEVIS=1,
     MOZ_TRACEVIS= )
 if test -n "$MOZ_TRACEVIS"; then
     AC_DEFINE(MOZ_TRACEVIS)
 fi
 
 dnl ========================================================
+dnl = Use incremental GC
+dnl ========================================================
+JSGC_INCREMENTAL=1
+MOZ_ARG_DISABLE_BOOL(gcincremental,
+[  --disable-gcincremental Disable incremental GC],
+    JSGC_INCREMENTAL= )
+if test -n "$JSGC_INCREMENTAL"; then
+    AC_DEFINE(JSGC_INCREMENTAL)
+fi
+
+dnl ========================================================
 dnl ETW - Event Tracing for Windows
 dnl ========================================================
 MOZ_ARG_ENABLE_BOOL(ETW,
 [  --enable-ETW            Enable ETW (Event Tracing for Windows) event reporting
                           (needs Windows Vista+ SDK)],
     MOZ_ETW=1,
     MOZ_ETW= )
 if test -n "$MOZ_ETW"; then
--- a/content/html/content/src/nsHTMLCanvasElement.cpp
+++ b/content/html/content/src/nsHTMLCanvasElement.cpp
@@ -496,18 +496,18 @@ nsHTMLCanvasElement::GetContext(const ns
       // note: if any contexts end up supporting something other
       // than objects, e.g. plain strings, then we'll need to expand
       // this to know how to create nsISupportsStrings etc.
       if (JSVAL_IS_OBJECT(aContextOptions)) {
         contextProps = do_CreateInstance("@mozilla.org/hash-property-bag;1");
 
         JSObject *opts = JSVAL_TO_OBJECT(aContextOptions);
         JSIdArray *props = JS_Enumerate(cx, opts);
-        for (int i = 0; props && i < props->length; ++i) {
-          jsid propid = props->vector[i];
+        for (int i = 0; props && i < JS_IdArrayLength(cx, props); ++i) {
+          jsid propid = JS_IdArrayGet(cx, props, i);
           jsval propname, propval;
           if (!JS_IdToValue(cx, propid, &propname) ||
               !JS_GetPropertyById(cx, opts, propid, &propval))
           {
             continue;
           }
 
           JSString *propnameString = JS_ValueToString(cx, propname);
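
The hunk above is the consumer side of the new opaque JSIdArray API: with the id vector now barriered, direct access to props->length and props->vector is replaced by the JS_IdArrayLength/JS_IdArrayGet accessors added to jsapi.h later in this patch. A minimal sketch of the idiom, with error handling elided:

    JSIdArray *ida = JS_Enumerate(cx, obj);
    if (ida) {
        for (jsint i = 0; i < JS_IdArrayLength(cx, ida); ++i) {
            jsid id = JS_IdArrayGet(cx, ida, i);
            /* ... use id ... */
        }
        JS_DestroyIdArray(cx, ida);
    }
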
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -907,17 +907,20 @@ class HashMapEntry
     template <class> friend class detail::HashTableEntry;
     void operator=(const HashMapEntry &rhs) {
         const_cast<Key &>(key) = rhs.key;
         value = rhs.value;
     }
 
   public:
     HashMapEntry() : key(), value() {}
-    HashMapEntry(const Key &k, const Value &v) : key(k), value(v) {}
+
+    template<typename KeyInput, typename ValueInput>
+    HashMapEntry(const KeyInput &k, const ValueInput &v) : key(k), value(v) {}
+
     HashMapEntry(MoveRef<HashMapEntry> rhs) 
       : key(Move(rhs->key)), value(Move(rhs->value)) { }
     void operator=(MoveRef<HashMapEntry> rhs) {
         const_cast<Key &>(key) = Move(rhs->key);
         value = Move(rhs->value);
     }
 
     const Key key;
@@ -1043,17 +1046,18 @@ class HashMap
      *    assert(p->key == 3);
      *    char val = p->value;
      */
     typedef typename Impl::AddPtr AddPtr;
     AddPtr lookupForAdd(const Lookup &l) const {
         return impl.lookupForAdd(l);
     }
 
-    bool add(AddPtr &p, const Key &k, const Value &v) {
+    template<typename KeyInput, typename ValueInput>
+    bool add(AddPtr &p, const KeyInput &k, const ValueInput &v) {
         Entry *pentry;
         if (!impl.add(p, &pentry))
             return false;
         const_cast<Key &>(pentry->key) = k;
         pentry->value = v;
         return true;
     }
 
@@ -1069,17 +1073,18 @@ class HashMap
     bool add(AddPtr &p, const Key &k) {
         Entry *pentry;
         if (!impl.add(p, &pentry))
             return false;
         const_cast<Key &>(pentry->key) = k;
         return true;
     }
 
-    bool relookupOrAdd(AddPtr &p, const Key &k, const Value &v) {
+    template<typename KeyInput, typename ValueInput>
+    bool relookupOrAdd(AddPtr &p, const KeyInput &k, const ValueInput &v) {
         return impl.relookupOrAdd(p, k, Entry(k, v));
     }
 
     /*
      * |all()| returns a Range containing |count()| elements. E.g.:
      *
      *   typedef HashMap<int,char> HM;
      *   HM h;
@@ -1132,17 +1137,18 @@ class HashMap
 
     /* Shorthand operations: */
 
     bool has(const Lookup &l) const {
         return impl.lookup(l) != NULL;
     }
 
     /* Overwrite existing value with v. Return NULL on oom. */
-    Entry *put(const Key &k, const Value &v) {
+    template<typename KeyInput, typename ValueInput>
+    Entry *put(const KeyInput &k, const ValueInput &v) {
         AddPtr p = lookupForAdd(k);
         if (p) {
             p->value = v;
             return &*p;
         }
         return add(p, k, v) ? &*p : NULL;
     }
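
The KeyInput/ValueInput templates above let callers pass raw pointers and plain Values into maps whose entries store the barriered wrapper types introduced in gc/Barrier.h below; the conversion, and therefore the write barrier, happens once inside the entry via HeapPtr/HeapValue assignment. A sketch under that assumption (hypothetical map; construction and init() details elided):

    typedef js::HashMap<js::HeapPtrObject, js::HeapPtrScript> ObjectScriptMap;

    static bool
    remember(ObjectScriptMap &map, JSObject *obj, JSScript *script)
    {
        /* No explicit HeapPtr temporaries are needed at the call site. */
        return !!map.put(obj, script);
    }
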
 
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -245,16 +245,17 @@ EXPORTS_vm = \
 
 EXPORTS_ds = \
 		LifoAlloc.h \
 		BitArray.h \
 		$(NULL)
 
 EXPORTS_gc = \
 		Statistics.h \
+		Barrier.h \
 		$(NULL)
 
 ######################################################
 # BEGIN include exported headers from the JS engine
 #
 #       Ultimately, after cleansing INSTALLED_HEADERS,
 #       these will be the ONLY headers exported by
 #       the js engine
--- a/js/src/configure.in
+++ b/js/src/configure.in
@@ -4438,16 +4438,27 @@ MOZ_ARG_ENABLE_BOOL(tracevis,
 if test -n "$MOZ_TRACEVIS"; then
     AC_DEFINE(MOZ_TRACEVIS)
     if test -z "$ENABLE_TRACEJIT"; then
        AC_MSG_ERROR([--enable-tracevis is incompatible with --disable-tracejit])
     fi
 fi
 
 dnl ========================================================
+dnl = Use incremental GC
+dnl ========================================================
+JSGC_INCREMENTAL=1
+MOZ_ARG_DISABLE_BOOL(gcincremental,
+[  --disable-gcincremental Disable incremental GC],
+    JSGC_INCREMENTAL= )
+if test -n "$JSGC_INCREMENTAL"; then
+    AC_DEFINE(JSGC_INCREMENTAL)
+fi
+
+dnl ========================================================
 dnl = Use Valgrind
 dnl ========================================================
 MOZ_ARG_ENABLE_BOOL(valgrind,
 [  --enable-valgrind       Enable Valgrind integration hooks (default=no)],
     MOZ_VALGRIND=1,
     MOZ_VALGRIND= )
 if test -n "$MOZ_VALGRIND"; then
     AC_CHECK_HEADER([valgrind/valgrind.h], [],
new file mode 100644
--- /dev/null
+++ b/js/src/frontend/BytecodeEmitter-inl.h
@@ -0,0 +1,73 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=99:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Mozilla Communicator client code, released
+ * March 31, 1998.
+ *
+ * The Initial Developer of the Original Code is
+ * Netscape Communications Corporation.
+ * Portions created by the Initial Developer are Copyright (C) 1998
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either of the GNU General Public License Version 2 or later (the "GPL"),
+ * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef BytecodeEmitter_inl_h__
+#define BytecodeEmitter_inl_h__
+
+#include "frontend/ParseNode.h"
+#include "frontend/TokenStream.h"
+
+namespace js {
+
+inline
+TreeContext::TreeContext(Parser *prs)
+  : flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
+    topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
+    decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
+    lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
+    innermostWith(NULL), bindings(prs->context), sharpSlotBase(-1)
+{
+    prs->tc = this;
+}
+
+/*
+ * For functions the tree context is constructed and destructed a second
+ * time during code generation. To avoid a redundant stats update in such
+ * cases, we store uint16(-1) in maxScopeDepth.
+ */
+inline
+TreeContext::~TreeContext()
+{
+    parser->tc = this->parent;
+}
+
+} /* namespace js */
+
+#endif /* BytecodeEmitter_inl_h__ */
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -66,20 +66,20 @@
 #include "ds/LifoAlloc.h"
 #include "frontend/BytecodeCompiler.h"
 #include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 #include "frontend/TokenStream.h"
 #include "vm/RegExpObject.h"
 
 #include "jsatominlines.h"
-#include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 
+#include "frontend/BytecodeEmitter-inl.h"
 #include "frontend/ParseMaps-inl.h"
 
 /* Allocation chunk counts, must be powers of two in general. */
 #define BYTECODE_CHUNK_LENGTH  1024    /* initial bytecode chunk length */
 #define SRCNOTE_CHUNK_LENGTH   1024    /* initial srcnote chunk length */
 
 /* Macros to compute byte sizes from typed element counts. */
 #define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
@@ -7772,32 +7772,32 @@ CGObjectList::index(ObjectBox *objbox)
 }
 
 void
 CGObjectList::finish(JSObjectArray *array)
 {
     JS_ASSERT(length <= INDEX_LIMIT);
     JS_ASSERT(length == array->length);
 
-    JSObject **cursor = array->vector + array->length;
+    js::HeapPtrObject *cursor = array->vector + array->length;
     ObjectBox *objbox = lastbox;
     do {
         --cursor;
         JS_ASSERT(!*cursor);
         *cursor = objbox->object;
     } while ((objbox = objbox->emitLink) != NULL);
     JS_ASSERT(cursor == array->vector);
 }
 
 void
 GCConstList::finish(JSConstArray *array)
 {
     JS_ASSERT(array->length == list.length());
     Value *src = list.begin(), *srcend = list.end();
-    Value *dst = array->vector;
+    HeapValue *dst = array->vector;
     for (; src != srcend; ++src, ++dst)
         *dst = *src;
 }
 
 /*
  * We should try to get rid of offsetBias (always 0 or 1, where 1 is
  * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
  */
--- a/js/src/frontend/BytecodeEmitter.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -354,34 +354,18 @@ struct TreeContext {                /* t
 
     ParseNode       *innermostWith; /* innermost WITH parse node */
 
     Bindings        bindings;       /* bindings in this code, including
                                        arguments if we're compiling a function */
 
     void trace(JSTracer *trc);
 
-    TreeContext(Parser *prs)
-      : flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
-        topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
-        decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
-        lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
-        innermostWith(NULL), bindings(prs->context), sharpSlotBase(-1)
-    {
-        prs->tc = this;
-    }
-
-    /*
-     * For functions the tree context is constructed and destructed a second
-     * time during code generation. To avoid a redundant stats update in such
-     * cases, we store uint16(-1) in maxScopeDepth.
-     */
-    ~TreeContext() {
-        parser->tc = this->parent;
-    }
+    inline TreeContext(Parser *prs);
+    inline ~TreeContext();
 
     /*
      * js::BytecodeEmitter derives from js::TreeContext; however, only the
      * top-level BytecodeEmitters are actually used as full-fledged tree contexts
      * (to hold decls and lexdeps). We can avoid allocation overhead by making
      * this distinction explicit.
      */
     enum InitBehavior {
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -87,19 +87,19 @@
 #include "jsxml.h"
 #endif
 
 #if JS_HAS_DESTRUCTURING
 #include "jsdhash.h"
 #endif
 
 #include "jsatominlines.h"
-#include "jsobjinlines.h"
 #include "jsscriptinlines.h"
 
+#include "frontend/BytecodeEmitter-inl.h"
 #include "frontend/ParseMaps-inl.h"
 #include "frontend/ParseNode-inl.h"
 #include "vm/RegExpObject-inl.h"
 
 using namespace js;
 using namespace js::gc;
 using namespace js::frontend;
 
@@ -241,17 +241,17 @@ Parser::newFunctionBox(JSObject *obj, Pa
     return funbox;
 }
 
 void
 Parser::trace(JSTracer *trc)
 {
     ObjectBox *objbox = traceListHead;
     while (objbox) {
-        MarkObject(trc, *objbox->object, "parser.object");
+        MarkRoot(trc, objbox->object, "parser.object");
         if (objbox->isFunctionBox)
             static_cast<FunctionBox *>(objbox)->bindings.trace(trc);
         objbox = objbox->traceLink;
     }
 
     for (TreeContext *tc = this->tc; tc; tc = tc->parent)
         tc->trace(trc);
 }
--- a/js/src/frontend/SemanticAnalysis.cpp
+++ b/js/src/frontend/SemanticAnalysis.cpp
@@ -40,16 +40,17 @@
 
 #include "frontend/SemanticAnalysis.h"
 
 #include "jsfun.h"
 
 #include "frontend/BytecodeEmitter.h"
 #include "frontend/Parser.h"
 
+#include "jsobjinlines.h"
 #include "jsfuninlines.h"
 
 using namespace js;
 using namespace js::frontend;
 
 /*
  * Walk the function box list at |*funboxHead|, removing boxes for deleted
  * functions and cleaning up method lists. We do this once, before
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Barrier-inl.h
@@ -0,0 +1,259 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=78:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey global object code.
+ *
+ * The Initial Developer of the Original Code is
+ * the Mozilla Foundation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either of the GNU General Public License Version 2 or later (the "GPL"),
+ * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#include "jsgcmark.h"
+
+#include "gc/Barrier.h"
+
+#ifndef jsgc_barrier_inl_h___
+#define jsgc_barrier_inl_h___
+
+namespace js {
+
+static JS_ALWAYS_INLINE void
+ClearValueRange(JSCompartment *comp, HeapValue *vec, uintN len, bool useHoles)
+{
+    if (useHoles) {
+        for (uintN i = 0; i < len; i++)
+            vec[i].set(comp, MagicValue(JS_ARRAY_HOLE));
+    } else {
+        for (uintN i = 0; i < len; i++)
+            vec[i].set(comp, UndefinedValue());
+    }
+}
+
+static JS_ALWAYS_INLINE void
+InitValueRange(HeapValue *vec, uintN len, bool useHoles)
+{
+    if (useHoles) {
+        for (uintN i = 0; i < len; i++)
+            vec[i].init(MagicValue(JS_ARRAY_HOLE));
+    } else {
+        for (uintN i = 0; i < len; i++)
+            vec[i].init(UndefinedValue());
+    }
+}
+
+static JS_ALWAYS_INLINE void
+DestroyValueRange(HeapValue *vec, uintN len)
+{
+    for (uintN i = 0; i < len; i++)
+        vec[i].~HeapValue();
+}
+
+inline
+HeapValue::HeapValue(const Value &v)
+    : value(v)
+{
+    post();
+}
+
+inline
+HeapValue::HeapValue(const HeapValue &v)
+    : value(v.value)
+{
+    post();
+}
+
+inline
+HeapValue::~HeapValue()
+{
+    pre();
+}
+
+inline void
+HeapValue::init(const Value &v)
+{
+    value = v;
+    post();
+}
+
+inline void
+HeapValue::writeBarrierPre(const Value &value)
+{
+#ifdef JSGC_INCREMENTAL
+    if (value.isMarkable()) {
+        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
+        writeBarrierPre(cell->compartment(), value);
+    }
+#endif
+}
+
+inline void
+HeapValue::writeBarrierPost(const Value &value, void *addr)
+{
+}
+
+inline void
+HeapValue::writeBarrierPre(JSCompartment *comp, const Value &value)
+{
+#ifdef JSGC_INCREMENTAL
+    if (comp->needsBarrier())
+        js::gc::MarkValueUnbarriered(comp->barrierTracer(), value, "write barrier");
+#endif
+}
+
+inline void
+HeapValue::writeBarrierPost(JSCompartment *comp, const Value &value, void *addr)
+{
+}
+
+inline void
+HeapValue::pre()
+{
+    writeBarrierPre(value);
+}
+
+inline void
+HeapValue::post()
+{
+}
+
+inline void
+HeapValue::pre(JSCompartment *comp)
+{
+    writeBarrierPre(comp, value);
+}
+
+inline void
+HeapValue::post(JSCompartment *comp)
+{
+}
+
+inline HeapValue &
+HeapValue::operator=(const Value &v)
+{
+    pre();
+    value = v;
+    post();
+    return *this;
+}
+
+inline HeapValue &
+HeapValue::operator=(const HeapValue &v)
+{
+    pre();
+    value = v.value;
+    post();
+    return *this;
+}
+
+inline void
+HeapValue::set(JSCompartment *comp, const Value &v)
+{
+#ifdef DEBUG
+    if (value.isMarkable()) {
+        js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
+        JS_ASSERT(cell->compartment() == comp ||
+                  cell->compartment() == comp->rt->atomsCompartment);
+    }
+#endif
+
+    pre(comp);
+    value = v;
+    post(comp);
+}
+
+inline void
+HeapValue::boxNonDoubleFrom(JSValueType type, uint64 *out)
+{
+    pre();
+    value.boxNonDoubleFrom(type, out);
+    post();
+}
+
+inline
+HeapId::HeapId(jsid id)
+    : value(id)
+{
+    post();
+}
+
+inline
+HeapId::~HeapId()
+{
+    pre();
+}
+
+inline void
+HeapId::init(jsid id)
+{
+    value = id;
+    post();
+}
+
+inline void
+HeapId::pre()
+{
+#ifdef JSGC_INCREMENTAL
+    if (JS_UNLIKELY(JSID_IS_OBJECT(value))) {
+        JSObject *obj = JSID_TO_OBJECT(value);
+        JSCompartment *comp = obj->compartment();
+        if (comp->needsBarrier())
+            js::gc::MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
+    }
+#endif
+}
+
+inline void
+HeapId::post()
+{
+}
+
+inline HeapId &
+HeapId::operator=(jsid id)
+{
+    pre();
+    value = id;
+    post();
+    return *this;
+}
+
+inline HeapId &
+HeapId::operator=(const HeapId &v)
+{
+    pre();
+    value = v.value;
+    post();
+    return *this;
+}
+
+} /* namespace js */
+
+#endif /* jsgc_barrier_inl_h___ */
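
The range helpers at the top of this file encode the init-versus-assign distinction: InitValueRange writes through init(), since freshly allocated slots hold no prior value that could be part of the GC's snapshot, while ClearValueRange writes through set() and pre-barriers each old value. A hedged sketch of a call site (function and variable names hypothetical):

    static void
    ResetSlots(JSCompartment *comp, js::HeapValue *freshSlots,
               js::HeapValue *liveSlots, uintN count)
    {
        /* Freshly allocated storage: no barrier needed. */
        js::InitValueRange(freshSlots, count, /* useHoles = */ false);

        /* Live, traced storage: each overwritten value is pre-barriered. */
        js::ClearValueRange(comp, liveSlots, count, /* useHoles = */ false);
    }
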
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Barrier.h
@@ -0,0 +1,450 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=78:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is SpiderMonkey global object code.
+ *
+ * The Initial Developer of the Original Code is
+ * the Mozilla Foundation.
+ * Portions created by the Initial Developer are Copyright (C) 2011
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either of the GNU General Public License Version 2 or later (the "GPL"),
+ * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef jsgc_barrier_h___
+#define jsgc_barrier_h___
+
+#include "jsapi.h"
+#include "jscell.h"
+
+#include "js/HashTable.h"
+
+/*
+ * A write barrier is a mechanism used by incremental or generational GCs to
+ * ensure that every value that needs to be marked is marked. In general, the
+ * write barrier should be invoked whenever a write can cause the set of things
+ * traced through by the GC to change. This includes:
+ *   - writes to object properties
+ *   - writes to array slots
+ *   - writes to fields like JSObject::lastProp that we trace through
+ *   - writes to fields in private data, like JSGenerator::obj
+ *   - writes to non-markable fields like JSObject::private that point to
+ *     markable data
+ * The last category is the trickiest. Even though the private pointer does not
+ * point to a GC thing, changing the private pointer may change the set of
+ * objects that are traced by the GC. Therefore it needs a write barrier.
+ *
+ * Every barriered write should have the following form:
+ *   <pre-barrier>
+ *   obj->field = value; // do the actual write
+ *   <post-barrier>
+ * The pre-barrier is used for incremental GC and the post-barrier is for
+ * generational GC.
+ *
+ *                               PRE-BARRIER
+ *
+ * To understand the pre-barrier, let's consider how incremental GC works. The
+ * GC itself is divided into "slices". Between each slice, JS code is allowed to
+ * run. Each slice should be short so that the user doesn't notice the
+ * interruptions. In our GC, the structure of the slices is as follows:
+ *
+ * 1. ... JS work, which leads to a request to do GC ...
+ * 2. [first GC slice, which performs all root marking and possibly more marking]
+ * 3. ... more JS work is allowed to run ...
+ * 4. [GC mark slice, which runs entirely in drainMarkStack]
+ * 5. ... more JS work ...
+ * 6. [GC mark slice, which runs entirely in drainMarkStack]
+ * 7. ... more JS work ...
+ * 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
+ * 9. ... JS continues uninterrupted now that GC is finished ...
+ *
+ * Of course, there may be a different number of slices depending on how much
+ * marking is to be done.
+ *
+ * The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
+ * might change the heap in a way that causes the GC to collect an object that
+ * is actually reachable. The write barrier prevents this from happening. We use
+ * a variant of incremental GC called "snapshot at the beginning." This approach
+ * guarantees the invariant that if an object is reachable in step 2, then we
+ * will mark it eventually. The name comes from the idea that we take a
+ * theoretical "snapshot" of all reachable objects in step 2; all objects in
+ * that snapshot should eventually be marked. (Note that the write barrier
+ * verifier code takes an actual snapshot.)
+ *
+ * The basic correctness invariant of a snapshot-at-the-beginning collector is
+ * that any object reachable at the end of the GC (step 9) must either:
+ *   (1) have been reachable at the beginning (step 2) and thus in the snapshot
+ *   (2) or must have been newly allocated, in steps 3, 5, or 7.
+ * To deal with case (2), any objects allocated during an incremental GC are
+ * automatically marked black.
+ *
+ * This strategy is actually somewhat conservative: if an object becomes
+ * unreachable between steps 2 and 8, it would be safe to collect it. We won't,
+ * mainly for simplicity. (Also, note that the snapshot is entirely
+ * theoretical. We don't actually do anything special in step 2 that we wouldn't
+ * do in a non-incremental GC.)
+ *
+ * It's the pre-barrier's job to maintain the snapshot invariant. Consider the
+ * write "obj->field = value". Let the prior value of obj->field be
+ * value0. Since it's possible that value0 may have been what obj->field
+ * contained in step 2, when the snapshot was taken, the barrier marks
+ * value0. Note that it only does this if we're in the middle of an incremental
+ * GC. Since this is rare, the cost of the write barrier is usually just an
+ * extra branch.
+ *
+ * In practice, we implement the pre-barrier differently based on the type of
+ * value0. E.g., see JSObject::writeBarrierPre, which is used if obj->field is
+ * a JSObject*. It takes value0 as a parameter.
+ *
+ *                                POST-BARRIER
+ *
+ * These are not yet implemented. Once we get generational GC, they will allow
+ * us to keep track of pointers from non-nursery space into the nursery.
+ *
+ *                            IMPLEMENTATION DETAILS
+ *
+ * Since it would be awkward to change every write to memory into a function
+ * call, this file contains a bunch of C++ classes and templates that use
+ * operator overloading to take care of barriers automatically. In many cases,
+ * all that's necessary to make some field be barriered is to replace
+ *     Type *field;
+ * with
+ *     HeapPtr<Type> field;
+ * There are also special classes HeapValue and HeapId, which barrier js::Value
+ * and jsid, respectively.
+ *
+ * One additional note: not all object writes need to be barriered. Writes to
+ * newly allocated objects do not need a barrier as long as the GC is not
+ * allowed to run in between the allocation and the write. In these cases, we
+ * use the "obj->field.init(value)" method instead of "obj->field = value".
+ * We use the init naming idiom in many places to signify that a field is being
+ * assigned for the first time, and that no GCs have taken place between the
+ * object allocation and the assignment.
+ */
+
+namespace js {
+
+/*
+ * Ideally, we would like to make the argument to functions like MarkShape be a
+ * HeapPtr<const js::Shape>. That would ensure that we don't forget to
+ * barrier any fields that we mark through. However, that would prohibit us from
+ * passing in a derived class like HeapPtr<js::EmptyShape>.
+ *
+ * To overcome the problem, we make the argument to MarkShape be a
+ * MarkablePtr<const js::Shape>. And we allow conversions from HeapPtr<T>
+ * to MarkablePtr<U> as long as T can be converted to U.
+ */
+template<class T>
+class MarkablePtr
+{
+  public:
+    T *value;
+
+    explicit MarkablePtr(T *value) : value(value) {}
+};
+
+template<class T, typename Unioned = uintptr_t>
+class HeapPtr
+{
+    union {
+        T *value;
+        Unioned other;
+    };
+
+  public:
+    HeapPtr() : value(NULL) {}
+    explicit HeapPtr(T *v) : value(v) { post(); }
+    explicit HeapPtr(const HeapPtr<T> &v) : value(v.value) { post(); }
+
+    ~HeapPtr() { pre(); }
+
+    /* Use this to install a ptr into a newly allocated object. */
+    void init(T *v) {
+        value = v;
+        post();
+    }
+
+    /* Use to set the pointer to NULL. */
+    void clear() {
+        pre();
+        value = NULL;
+    }
+
+    /* Use this if the automatic coercion to T* isn't working. */
+    T *get() const { return value; }
+
+    /*
+     * Use these if you want to change the value without invoking the barrier.
+     * Obviously this is dangerous unless you know the barrier is not needed.
+     */
+    T **unsafeGet() { return &value; }
+    void unsafeSet(T *v) { value = v; }
+
+    Unioned *unsafeGetUnioned() { return &other; }
+
+    HeapPtr<T, Unioned> &operator=(T *v) {
+        pre();
+        value = v;
+        post();
+        return *this;
+    }
+
+    HeapPtr<T, Unioned> &operator=(const HeapPtr<T> &v) {
+        pre();
+        value = v.value;
+        post();
+        return *this;
+    }
+
+    T &operator*() const { return *value; }
+    T *operator->() const { return value; }
+
+    operator T*() const { return value; }
+
+    /*
+     * This coerces to MarkablePtr<U> as long as T can coerce to U. See the
+     * comment for MarkablePtr above.
+     */
+    template<class U>
+    operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
+
+  private:
+    void pre() { T::writeBarrierPre(value); }
+    void post() { T::writeBarrierPost(value, (void *)&value); }
+
+    /* Make this friend so it can access pre() and post(). */
+    template<class T1, class T2>
+    friend inline void
+    BarrieredSetPair(JSCompartment *comp,
+                     HeapPtr<T1> &v1, T1 *val1,
+                     HeapPtr<T2> &v2, T2 *val2);
+};
+
+/*
+ * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
+ * barriers with only one branch to check if we're in an incremental GC.
+ */
+template<class T1, class T2>
+static inline void
+BarrieredSetPair(JSCompartment *comp,
+                 HeapPtr<T1> &v1, T1 *val1,
+                 HeapPtr<T2> &v2, T2 *val2)
+{
+    if (T1::needWriteBarrierPre(comp)) {
+        v1.pre();
+        v2.pre();
+    }
+    v1.unsafeSet(val1);
+    v2.unsafeSet(val2);
+    v1.post();
+    v2.post();
+}
+
+typedef HeapPtr<JSObject> HeapPtrObject;
+typedef HeapPtr<JSFunction> HeapPtrFunction;
+typedef HeapPtr<JSString> HeapPtrString;
+typedef HeapPtr<JSScript> HeapPtrScript;
+typedef HeapPtr<Shape> HeapPtrShape;
+typedef HeapPtr<const Shape> HeapPtrConstShape;
+typedef HeapPtr<JSXML> HeapPtrXML;
+
+/* Useful for hashtables with a HeapPtr as key. */
+template<class T>
+struct HeapPtrHasher
+{
+    typedef HeapPtr<T> Key;
+    typedef T *Lookup;
+
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static bool match(const Key &k, Lookup l) { return k.get() == l; }
+};
+
+/* Specialized hashing policy for HeapPtrs. */
+template <class T>
+struct DefaultHasher< HeapPtr<T> >: HeapPtrHasher<T> { };
+
+class HeapValue
+{
+    Value value;
+
+  public:
+    explicit HeapValue() : value(UndefinedValue()) {}
+    explicit inline HeapValue(const Value &v);
+    explicit inline HeapValue(const HeapValue &v);
+
+    inline ~HeapValue();
+
+    inline void init(const Value &v);
+
+    inline HeapValue &operator=(const Value &v);
+    inline HeapValue &operator=(const HeapValue &v);
+
+    /*
+     * This is a faster version of operator=. Normally, operator= has to
+     * determine the compartment of the value before it can decide whether to do
+     * the barrier. If you already know the compartment, it's faster to pass it
+     * in.
+     */
+    inline void set(JSCompartment *comp, const Value &v);
+
+    const Value &get() const { return value; }
+    operator const Value &() const { return value; }
+
+    bool isMarkable() const { return value.isMarkable(); }
+    bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
+    bool isUndefined() const { return value.isUndefined(); }
+    bool isObject() const { return value.isObject(); }
+    bool isGCThing() const { return value.isGCThing(); }
+    bool isTrue() const { return value.isTrue(); }
+    bool isFalse() const { return value.isFalse(); }
+    bool isInt32() const { return value.isInt32(); }
+    bool isNull() const { return value.isNull(); }
+
+    JSObject &toObject() const { return value.toObject(); }
+    JSObject *toObjectOrNull() const { return value.toObjectOrNull(); }
+    void *toGCThing() const { return value.toGCThing(); }
+    double toDouble() const { return value.toDouble(); }
+    int32 toInt32() const { return value.toInt32(); }
+    JSString *toString() const { return value.toString(); }
+    bool toBoolean() const { return value.toBoolean(); }
+    double toNumber() const { return value.toNumber(); }
+
+    unsigned gcKind() const { return value.gcKind(); }
+
+    inline void boxNonDoubleFrom(JSValueType type, uint64 *out);
+
+    uint64 asRawBits() const { return value.asRawBits(); }
+
+#ifdef DEBUG
+    JSWhyMagic whyMagic() const { return value.whyMagic(); }
+#endif
+
+    static inline void writeBarrierPre(const Value &v);
+    static inline void writeBarrierPost(const Value &v, void *addr);
+
+    static inline void writeBarrierPre(JSCompartment *comp, const Value &v);
+    static inline void writeBarrierPost(JSCompartment *comp, const Value &v, void *addr);
+
+  private:
+    inline void pre();
+    inline void post();
+
+    inline void pre(JSCompartment *comp);
+    inline void post(JSCompartment *comp);
+};
+
+static inline const Value *
+Valueify(const HeapValue *array)
+{
+    JS_ASSERT(sizeof(HeapValue) == sizeof(Value));
+    return (const Value *)array;
+}
+
+class HeapValueArray
+{
+    HeapValue *array;
+
+  public:
+    HeapValueArray(HeapValue *array) : array(array) {}
+
+    operator const Value *() const { return Valueify(array); }
+    operator HeapValue *() const { return array; }
+
+    HeapValueArray operator +(int offset) const { return HeapValueArray(array + offset); }
+    HeapValueArray operator +(uint32 offset) const { return HeapValueArray(array + offset); }
+};
+
+class HeapId
+{
+    jsid value;
+
+  public:
+    explicit HeapId() : value(JSID_VOID) {}
+    explicit inline HeapId(jsid id);
+
+    inline ~HeapId();
+
+    inline void init(jsid id);
+
+    inline HeapId &operator=(jsid id);
+    inline HeapId &operator=(const HeapId &v);
+
+    bool operator==(jsid id) const { return value == id; }
+    bool operator!=(jsid id) const { return value != id; }
+
+    jsid get() const { return value; }
+    operator jsid() const { return value; }
+
+  private:
+    inline void pre();
+    inline void post();
+
+    HeapId(const HeapId &v);
+};
+
+/*
+ * Incremental GC requires that weak pointers have read barriers. This is mostly
+ * an issue for empty shapes stored in JSCompartment. The problem happens when,
+ * during an incremental GC, some JS code stores one of the compartment's empty
+ * shapes into an object already marked black. Normally, this would not be a
+ * problem, because the empty shape would have been part of the initial snapshot
+ * when the GC started. However, since this is a weak pointer, it isn't. So we
+ * may collect the empty shape even though a live object points to it. To fix
+ * this, we mark these empty shapes black whenever they get read out.
+ */
+template<class T>
+class ReadBarriered
+{
+    T *value;
+
+  public:
+    ReadBarriered(T *value) : value(value) {}
+
+    T *get() const {
+        if (!value)
+            return NULL;
+        T::readBarrier(value);
+        return value;
+    }
+
+    operator T*() const { return get(); }
+
+    T *unsafeGet() { return value; }
+
+    void set(T *v) { value = v; }
+
+    operator bool() { return !!value; }
+
+    template<class U>
+    operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
+};
+
+}
+
+#endif /* jsgc_barrier_h___ */
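
Taken together, converting a traced field to use this header is mostly a type change plus attention to the init idiom described in the comment above. A minimal sketch with a hypothetical traced structure:

    struct Thing
    {
        js::HeapPtrObject owner;    /* was: JSObject *owner */
        js::HeapValue     cached;   /* was: js::Value cached */

        void construct(JSObject *obj) {
            /* Freshly allocated, no GC can have run: skip the barrier. */
            owner.init(obj);
            cached.init(js::UndefinedValue());
        }

        void update(JSObject *obj, const js::Value &v) {
            /* Ordinary writes: operator= pre-barriers the old values. */
            owner = obj;
            cached = v;
        }
    };
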
--- a/js/src/jsapi-tests/testArgumentsObject.cpp
+++ b/js/src/jsapi-tests/testArgumentsObject.cpp
@@ -1,16 +1,18 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sw=4 et tw=99:
  */
 
 #include "tests.h"
 
 #include "vm/Stack-inl.h"
 
+#include "jsobjinlines.h"
+
 using namespace js;
 
 static const char NORMAL_ZERO[] =
     "function f() { return arguments; }";
 static const char NORMAL_ONE[] =
     "function f(a) { return arguments; }";
 static const char NORMAL_TWO[] =
     "function f(a, b) { return arguments; }";
--- a/js/src/jsapi-tests/testBug604087.cpp
+++ b/js/src/jsapi-tests/testBug604087.cpp
@@ -3,16 +3,18 @@
  *
  * Tests JS_TransplantObject
  */
 
 #include "tests.h"
 #include "jsobj.h"
 #include "jswrapper.h"
 
+#include "jsobjinlines.h"
+
 struct OuterWrapper : js::Wrapper
 {
     OuterWrapper() : Wrapper(0) {}
 
     virtual bool isOuterWindow() {
         return true;
     }
 
--- a/js/src/jsapi-tests/testConservativeGC.cpp
+++ b/js/src/jsapi-tests/testConservativeGC.cpp
@@ -2,28 +2,30 @@
 #include "jsobj.h"
 #include "vm/String.h"
 
 BEGIN_TEST(testConservativeGC)
 {
     jsval v2;
     EVAL("({foo: 'bar'});", &v2);
     CHECK(JSVAL_IS_OBJECT(v2));
-    JSObject objCopy = *JSVAL_TO_OBJECT(v2);
+    char objCopy[sizeof(JSObject)];
+    memcpy(&objCopy, JSVAL_TO_OBJECT(v2), sizeof(JSObject));
 
     jsval v3;
     EVAL("String(Math.PI);", &v3);
     CHECK(JSVAL_IS_STRING(v3));
     JSString strCopy = *JSVAL_TO_STRING(v3);
 
     jsval tmp;
     EVAL("({foo2: 'bar2'});", &tmp);
     CHECK(JSVAL_IS_OBJECT(tmp));
     JSObject *obj2 = JSVAL_TO_OBJECT(tmp);
-    JSObject obj2Copy = *obj2;
+    char obj2Copy[sizeof(JSObject)];
+    memcpy(&obj2Copy, obj2, sizeof(JSObject));
 
     EVAL("String(Math.sqrt(3));", &tmp);
     CHECK(JSVAL_IS_STRING(tmp));
     JSString *str2 = JSVAL_TO_STRING(tmp);
     JSString str2Copy = *str2;
 
     tmp = JSVAL_NULL;
 
@@ -31,20 +33,20 @@ BEGIN_TEST(testConservativeGC)
 
     EVAL("var a = [];\n"
          "for (var i = 0; i != 10000; ++i) {\n"
          "a.push(i + 0.1, [1, 2], String(Math.sqrt(i)), {a: i});\n"
          "}", &tmp);
 
     JS_GC(cx);
 
-    checkObjectFields(&objCopy, JSVAL_TO_OBJECT(v2));
+    checkObjectFields((JSObject *)objCopy, JSVAL_TO_OBJECT(v2));
     CHECK(!memcmp(&strCopy, JSVAL_TO_STRING(v3), sizeof(strCopy)));
 
-    checkObjectFields(&obj2Copy, obj2);
+    checkObjectFields((JSObject *)obj2Copy, obj2);
     CHECK(!memcmp(&str2Copy, str2, sizeof(str2Copy)));
 
     return true;
 }
 
 bool checkObjectFields(JSObject *savedCopy, JSObject *obj)
 {
     /* Ignore fields which are unstable across GCs. */
--- a/js/src/jsapi-tests/testIndexToString.cpp
+++ b/js/src/jsapi-tests/testIndexToString.cpp
@@ -4,16 +4,18 @@
 
 #include "tests.h"
 
 #include "jscntxt.h"
 #include "jscompartment.h"
 #include "jsnum.h"
 #include "jsstr.h"
 
+#include "jsobjinlines.h"
+
 #include "vm/String-inl.h"
 
 using namespace mozilla;
 
 template<size_t N> JSFlatString *
 NewString(JSContext *cx, const jschar (&chars)[N])
 {
     return js_NewStringCopyN(cx, chars, N);
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -91,16 +91,17 @@
 
 #include "jsatominlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 
 #include "vm/RegExpObject-inl.h"
+#include "vm/RegExpStatics-inl.h"
 #include "vm/Stack-inl.h"
 #include "vm/String-inl.h"
 
 #if ENABLE_YARR_JIT
 #include "assembler/jit/ExecutableAllocator.h"
 #include "methodjit/Logging.h"
 #endif
 
@@ -652,17 +653,17 @@ JSRuntime::JSRuntime()
     gcKeepAtoms(0),
     gcBytes(0),
     gcTriggerBytes(0),
     gcLastBytes(0),
     gcMaxBytes(0),
     gcMaxMallocBytes(0),
     gcEmptyArenaPoolLifespan(0),
     gcNumber(0),
-    gcMarkingTracer(NULL),
+    gcIncrementalTracer(NULL),
     gcChunkAllocationSinceLastGC(false),
     gcNextFullGCTime(0),
     gcJitReleaseTime(0),
     gcMode(JSGC_MODE_GLOBAL),
     gcIsNeeded(0),
     gcWeakMapList(NULL),
     gcStats(thisFromCtor()),
     gcTriggerCompartment(NULL),
@@ -1322,17 +1323,17 @@ Class dummy_class = {
 
 } /*namespace js */
 
 JS_PUBLIC_API(JSCrossCompartmentCall *)
 JS_EnterCrossCompartmentCallScript(JSContext *cx, JSScript *target)
 {
     CHECK_REQUEST(cx);
     JS_ASSERT(!target->isCachedEval);
-    GlobalObject *global = target->u.globalObject;
+    GlobalObject *global = target->globalObject;
     if (!global) {
         SwitchToCompartment sc(cx, target->compartment());
         global = GlobalObject::create(cx, &dummy_class);
         if (!global)
             return NULL;
     }
     return JS_EnterCrossCompartmentCall(cx, global);
 }
@@ -1947,32 +1948,28 @@ JS_EnumerateStandardClasses(JSContext *c
         {
                 return JS_FALSE;
         }
     }
 
     return JS_TRUE;
 }
 
-namespace js {
-
-JSIdArray *
+static JSIdArray *
 NewIdArray(JSContext *cx, jsint length)
 {
     JSIdArray *ida;
 
     ida = (JSIdArray *)
         cx->calloc_(offsetof(JSIdArray, vector) + length * sizeof(jsval));
     if (ida)
         ida->length = length;
     return ida;
 }
 
-}
-
 /*
  * Unlike realloc(3), this function frees ida on failure.
  */
 static JSIdArray *
 SetIdArrayLength(JSContext *cx, JSIdArray *ida, jsint length)
 {
     JSIdArray *rida;
 
@@ -1995,17 +1992,17 @@ AddAtomToArray(JSContext *cx, JSAtom *at
     i = *ip;
     length = ida->length;
     if (i >= length) {
         ida = SetIdArrayLength(cx, ida, JS_MAX(length * 2, 8));
         if (!ida)
             return NULL;
         JS_ASSERT(i < ida->length);
     }
-    ida->vector[i] = ATOM_TO_JSID(atom);
+    ida->vector[i].init(ATOM_TO_JSID(atom));
     *ip = i + 1;
     return ida;
 }
 
 static JSIdArray *
 EnumerateIfResolved(JSContext *cx, JSObject *obj, JSAtom *atom, JSIdArray *ida,
                     jsint *ip, JSBool *foundp)
 {
@@ -2326,20 +2323,25 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, 
 
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer *trc)
 {
     TraceRuntime(trc);
 }
 
 JS_PUBLIC_API(void)
+JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
+{
+    js::TraceChildren(trc, thing, kind);
+}
+
+JS_PUBLIC_API(void)
 JS_CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
-    JS_ASSERT(thing);
-    MarkKind(trc, thing, kind);
+    js::CallTracer(trc, thing, kind);
 }
 
 #ifdef DEBUG
 
 #ifdef HAVE_XPCONNECT
 #include "dump_xpc.h"
 #endif
 
@@ -2777,18 +2779,18 @@ JS_SetGCCallbackRT(JSRuntime *rt, JSGCCa
     rt->gcCallback = cb;
     return oldcb;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsAboutToBeFinalized(JSContext *cx, void *thing)
 {
     JS_ASSERT(thing);
-    JS_ASSERT(!cx->runtime->gcMarkingTracer);
-    return IsAboutToBeFinalized(cx, thing);
+    JS_ASSERT(!cx->runtime->gcIncrementalTracer);
+    return IsAboutToBeFinalized(cx, (gc::Cell *)thing);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32 value)
 {
     switch (key) {
       case JSGC_MAX_BYTES:
         rt->gcMaxBytes = value;
@@ -2937,16 +2939,29 @@ JS_SetNativeStackQuota(JSContext *cx, si
         JS_ASSERT(stackBase >= stackSize);
         cx->stackLimit = stackBase - (stackSize - 1);
     }
 #endif
 }
 
 /************************************************************************/
 
+JS_PUBLIC_API(jsint)
+JS_IdArrayLength(JSContext *cx, JSIdArray *ida)
+{
+    return ida->length;
+}
+
+JS_PUBLIC_API(jsid)
+JS_IdArrayGet(JSContext *cx, JSIdArray *ida, jsint index)
+{
+    JS_ASSERT(index >= 0 && index < ida->length);
+    return ida->vector[index];
+}
+
 JS_PUBLIC_API(void)
 JS_DestroyIdArray(JSContext *cx, JSIdArray *ida)
 {
     cx->free_(ida);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_ValueToId(JSContext *cx, jsval v, jsid *idp)
@@ -4100,22 +4115,26 @@ prop_iter_finalize(JSContext *cx, JSObje
 static void
 prop_iter_trace(JSTracer *trc, JSObject *obj)
 {
     void *pdata = obj->getPrivate();
     if (!pdata)
         return;
 
     if (obj->getSlot(JSSLOT_ITER_INDEX).toInt32() < 0) {
-        /* Native case: just mark the next property to visit. */
-        MarkShape(trc, (Shape *)pdata, "prop iter shape");
+        /*
+         * Native case: just mark the next property to visit. We don't need a
+         * barrier here because the pointer is updated via setPrivate, which
+         * always takes a barrier.
+         */
+        MarkShapeUnbarriered(trc, (Shape *)pdata, "prop iter shape");
     } else {
         /* Non-native case: mark each id in the JSIdArray private. */
         JSIdArray *ida = (JSIdArray *) pdata;
-        MarkIdRange(trc, ida->length, ida->vector, "prop iter");
+        MarkIdRange(trc, ida->vector, ida->vector + ida->length, "prop iter");
     }
 }
 
 static Class prop_iter_class = {
     "PropertyIterator",
     JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
@@ -4133,29 +4152,29 @@ static Class prop_iter_class = {
     NULL,           /* hasInstance */
     prop_iter_trace
 };
 
 JS_PUBLIC_API(JSObject *)
 JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
 {
     JSObject *iterobj;
-    const void *pdata;
+    void *pdata;
     jsint index;
     JSIdArray *ida;
 
     CHECK_REQUEST(cx);
     assertSameCompartment(cx, obj);
     iterobj = NewNonFunction<WithProto::Class>(cx, &prop_iter_class, NULL, obj);
     if (!iterobj)
         return NULL;
 
     if (obj->isNative()) {
         /* Native case: start with the last property in obj. */
-        pdata = obj->lastProperty();
+        pdata = (void *)obj->lastProperty();
         index = -1;
     } else {
         /*
          * Non-native case: enumerate a JSIdArray and keep it via private.
          *
          * Note: we have to make sure that we root obj around the call to
          * JS_Enumerate to protect against multiple allocations under it.
          */
@@ -4163,17 +4182,17 @@ JS_NewPropertyIterator(JSContext *cx, JS
         ida = JS_Enumerate(cx, obj);
         if (!ida)
             return NULL;
         pdata = ida;
         index = ida->length;
     }
 
     /* iterobj cannot escape to other threads here. */
-    iterobj->setPrivate(const_cast<void *>(pdata));
+    iterobj->setPrivate(pdata);
     iterobj->setSlot(JSSLOT_ITER_INDEX, Int32Value(index));
     return iterobj;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
 {
     jsint i;
@@ -4426,17 +4445,17 @@ JS_CloneFunctionObject(JSContext *cx, JS
                 return NULL;
             }
             obj = obj->getParent();
         }
 
         Value v;
         if (!obj->getGeneric(cx, r.front().propid, &v))
             return NULL;
-        clone->getFlatClosureUpvars()[i] = v;
+        clone->setFlatClosureUpvar(i, v);
     }
 
     return clone;
 }
 
 JS_PUBLIC_API(JSObject *)
 JS_GetFunctionObject(JSFunction *fun)
 {
@@ -4877,19 +4896,19 @@ JS_CompileFileHandle(JSContext *cx, JSOb
     JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
     return JS_CompileFileHandleForPrincipals(cx, obj, filename, file, NULL);
 }
 
 JS_PUBLIC_API(JSObject *)
 JS_GetGlobalFromScript(JSScript *script)
 {
     JS_ASSERT(!script->isCachedEval);
-    JS_ASSERT(script->u.globalObject);
-
-    return script->u.globalObject;
+    JS_ASSERT(script->globalObject);
+
+    return script->globalObject;
 }
 
 static JSFunction *
 CompileUCFunctionForPrincipalsCommon(JSContext *cx, JSObject *obj,
                                      JSPrincipals *principals, const char *name,
                                      uintN nargs, const char **argnames,
                                      const jschar *chars, size_t length,
                                      const char *filename, uintN lineno, JSVersion version)
@@ -6362,16 +6381,73 @@ JS_ScheduleGC(JSContext *cx, uint32 coun
 JS_FRIEND_API(void *)
 js_GetCompartmentPrivate(JSCompartment *compartment)
 {
     return compartment->data;
 }
 
 /************************************************************************/
 
+JS_PUBLIC_API(void)
+JS_RegisterReference(void **ref)
+{
+}
+
+JS_PUBLIC_API(void)
+JS_ModifyReference(void **ref, void *newval)
+{
+    // XPConnect uses the lower bits of its JSObject refs for evil purposes,
+    // so we need to fix this.
+    void *thing = *ref;
+    *ref = newval;
+    thing = (void *)((uintptr_t)thing & ~7);
+    if (!thing)
+        return;
+    uint32 kind = GetGCThingTraceKind(thing);
+    if (kind == JSTRACE_OBJECT)
+        JSObject::writeBarrierPre((JSObject *) thing);
+    else if (kind == JSTRACE_STRING)
+        JSString::writeBarrierPre((JSString *) thing);
+    else
+        JS_NOT_REACHED("invalid trace kind");
+}
+
+JS_PUBLIC_API(void)
+JS_UnregisterReference(void **ref)
+{
+    // For now we just want to trigger a write barrier.
+    JS_ModifyReference(ref, NULL);
+}
+
+JS_PUBLIC_API(void)
+JS_RegisterValue(jsval *val)
+{
+}
+
+JS_PUBLIC_API(void)
+JS_ModifyValue(jsval *val, jsval newval)
+{
+    HeapValue::writeBarrierPre(*val);
+    *val = newval;
+}
+
+JS_PUBLIC_API(void)
+JS_UnregisterValue(jsval *val)
+{
+    JS_ModifyValue(val, JSVAL_VOID);
+}
+
+JS_PUBLIC_API(JSTracer *)
+JS_GetIncrementalGCTracer(JSRuntime *rt)
+{
+    return rt->gcIncrementalTracer;
+}
+
+/************************************************************************/
+
 #if !defined(STATIC_EXPORTABLE_JS_API) && !defined(STATIC_JS_API) && defined(XP_WIN)
 
 #include "jswin.h"
 
 /*
  * Initialization routine for the JS DLL.
  */
 BOOL WINAPI DllMain (HINSTANCE hDLL, DWORD dwReason, LPVOID lpReserved)
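
The registration functions above give C embedders a way to satisfy the pre-barrier for GC pointers kept in malloc'ed memory; rooting remains the embedder's responsibility, as the jsapi.h comment below stresses. A minimal sketch of the intended call sequence (call site hypothetical):

    static void
    HoldInMallocHeap(jsval someValue)
    {
        jsval *slot = (jsval *) malloc(sizeof(jsval));
        *slot = JSVAL_VOID;
        JS_RegisterValue(slot);             /* currently a no-op, kept for symmetry */
        JS_ModifyValue(slot, someValue);    /* pre-barriers the old value, then stores */
        JS_UnregisterValue(slot);           /* barriers and voids *slot */
        free(slot);
    }
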
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -2693,16 +2693,17 @@ JSVAL_TRACE_KIND(jsval v)
  * If eagerlyTraceWeakMaps is true, when we trace a WeakMap visit all
  * of its mappings.  This should be used in cases where the tracer
  * wants to use the existing liveness of entries.
  */
 typedef void
 (* JSTraceCallback)(JSTracer *trc, void *thing, JSGCTraceKind kind);
 
 struct JSTracer {
+    JSRuntime           *runtime;
     JSContext           *context;
     JSTraceCallback     callback;
     JSTraceNamePrinter  debugPrinter;
     const void          *debugPrintArg;
     size_t              debugPrintIndex;
     JSBool              eagerlyTraceWeakMaps;
 };
 
@@ -2793,16 +2794,17 @@ JS_CallTracer(JSTracer *trc, void *thing
         JS_CALL_TRACER((trc), str_, JSTRACE_STRING, name);                    \
     JS_END_MACRO
 
 /*
  * API for JSTraceCallback implementations.
  */
 # define JS_TRACER_INIT(trc, cx_, callback_)                                  \
     JS_BEGIN_MACRO                                                            \
+        (trc)->runtime = (cx_)->runtime;                                      \
         (trc)->context = (cx_);                                               \
         (trc)->callback = (callback_);                                        \
         (trc)->debugPrinter = NULL;                                           \
         (trc)->debugPrintArg = NULL;                                          \
         (trc)->debugPrintIndex = (size_t)-1;                                  \
         (trc)->eagerlyTraceWeakMaps = JS_TRUE;                                \
     JS_END_MACRO
 
@@ -2838,16 +2840,98 @@ JS_GetTraceEdgeName(JSTracer *trc, char 
  */
 extern JS_PUBLIC_API(JSBool)
 JS_DumpHeap(JSContext *cx, FILE *fp, void* startThing, JSGCTraceKind kind,
             void *thingToFind, size_t maxDepth, void *thingToIgnore);
 
 #endif
 
 /*
+ * Write barrier API.
+ *
+ * This API is used to inform SpiderMonkey of pointers to JS GC things in the
+ * malloc heap. There is no need to use this API unless incremental GC is
+ * enabled. When it is, the requirements for using the API are as follows:
+ *
+ * All pointers to JS GC things from the malloc heap must be registered and
+ * unregistered with the API functions below. This is *in addition* to the
+ * normal rooting and tracing--these functions will
+ * not take care of rooting for you.
+ *
+ * Besides registration, the JS_ModifyReference function must be called to
+ * change the value of these references. You should not change them using
+ * assignment.
+ *
+ * To avoid the headache of using these API functions, the JS::HeapPtrObject
+ * C++ class is provided--simply replace your JSObject* with a
+ * JS::HeapPtrObject. It will take care of calling the registration and
+ * modification APIs.
+ *
+ * For more explanation, see the comment in gc/Barrier.h.
+ */
+
+/* These functions are to be used for objects and strings. */
+extern JS_PUBLIC_API(void)
+JS_RegisterReference(void **ref);
+
+extern JS_PUBLIC_API(void)
+JS_ModifyReference(void **ref, void *newval);
+
+extern JS_PUBLIC_API(void)
+JS_UnregisterReference(void **ref);
+
+/* These functions are for values. */
+extern JS_PUBLIC_API(void)
+JS_RegisterValue(jsval *val);
+
+extern JS_PUBLIC_API(void)
+JS_ModifyValue(jsval *val, jsval newval);
+
+extern JS_PUBLIC_API(void)
+JS_UnregisterValue(jsval *val);
+
+extern JS_PUBLIC_API(JSTracer *)
+JS_GetIncrementalGCTracer(JSRuntime *rt);
+
+#ifdef __cplusplus
+JS_END_EXTERN_C
+
+namespace JS {
+
+class HeapPtrObject
+{
+    JSObject *value;
+
+  public:
+    HeapPtrObject() : value(NULL) { JS_RegisterReference((void **) &value); }
+
+    HeapPtrObject(JSObject *obj) : value(obj) { JS_RegisterReference((void **) &value); }
+
+    ~HeapPtrObject() { JS_UnregisterReference((void **) &value); }
+
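+    /* Set the initial value only; bypasses JS_ModifyReference, so no barrier fires. */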
+    void init(JSObject *obj) { value = obj; }
+
+    JSObject *get() const { return value; }
+
+    HeapPtrObject &operator=(JSObject *obj) {
+        JS_ModifyReference((void **) &value, obj);
+        return *this;
+    }
+
+    JSObject &operator*() const { return *value; }
+    JSObject *operator->() const { return value; }
+    operator JSObject *() const { return value; }
+};
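+
+/*
+ * Example usage (a sketch; obj1 and obj2 stand for live JSObject pointers):
+ *
+ *   JS::HeapPtrObject ptr(obj1);  // constructor registers the reference
+ *   ptr = obj2;                   // operator= goes through JS_ModifyReference
+ *   ...                           // destructor unregisters on scope exit
+ */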
+
+} /* namespace JS */
+
+JS_BEGIN_EXTERN_C
+#endif
+
+/*
  * Garbage collector API.
  */
 extern JS_PUBLIC_API(void)
 JS_GC(JSContext *cx);
 
 extern JS_PUBLIC_API(void)
 JS_CompartmentGC(JSContext *cx, JSCompartment *comp);
 
@@ -3110,20 +3194,21 @@ struct JSClass {
                                          (((clasp)->flags                     \
                                            >> JSCLASS_CACHED_PROTO_SHIFT)     \
                                           & JSCLASS_CACHED_PROTO_MASK))
 
 /* Initializer for unused members of statically initialized JSClass structs. */
 #define JSCLASS_NO_INTERNAL_MEMBERS     0,{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
 #define JSCLASS_NO_OPTIONAL_MEMBERS     0,0,0,0,0,0,0,JSCLASS_NO_INTERNAL_MEMBERS
 
-struct JSIdArray {
-    jsint length;
-    jsid  vector[1];    /* actually, length jsid words */
-};
+extern JS_PUBLIC_API(jsint)
+JS_IdArrayLength(JSContext *cx, JSIdArray *ida);
+
+extern JS_PUBLIC_API(jsid)
+JS_IdArrayGet(JSContext *cx, JSIdArray *ida, jsint index);
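+
+/*
+ * With JSIdArray's layout no longer public, iterate via the accessors, e.g.
+ * (a sketch):
+ *
+ *   for (jsint i = 0; i < JS_IdArrayLength(cx, ida); i++) {
+ *       jsid id = JS_IdArrayGet(cx, ida, i);
+ *       ...
+ *   }
+ */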
 
 extern JS_PUBLIC_API(void)
 JS_DestroyIdArray(JSContext *cx, JSIdArray *ida);
 
 extern JS_PUBLIC_API(JSBool)
 JS_ValueToId(JSContext *cx, jsval v, jsid *idp);
 
 extern JS_PUBLIC_API(JSBool)
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -644,24 +644,23 @@ array_length_setter(JSContext *cx, JSObj
     if (obj->isDenseArray()) {
         /*
          * Don't reallocate if we're not actually shrinking our slots. If we do
          * shrink slots here, shrink the initialized length too. This permits
          * us to disregard length when reading from arrays as long as we are
          * within the initialized capacity.
          */
         jsuint oldcap = obj->getDenseArrayCapacity();
+        jsuint oldinit = obj->getDenseArrayInitializedLength();
+        if (oldinit > newlen)
+            obj->setDenseArrayInitializedLength(newlen);
         if (oldcap > newlen)
             obj->shrinkDenseArrayElements(cx, newlen);
-        jsuint oldinit = obj->getDenseArrayInitializedLength();
-        if (oldinit > newlen) {
-            obj->setDenseArrayInitializedLength(newlen);
-            if (!cx->typeInferenceEnabled())
-                obj->backfillDenseArrayHoles(cx);
-        }
+        if (oldinit > newlen && !cx->typeInferenceEnabled())
+            obj->backfillDenseArrayHoles(cx);
     } else if (oldlen - newlen < (1 << 24)) {
         do {
             --oldlen;
             if (!JS_CHECK_OPERATION_LIMIT(cx)) {
                 obj->setArrayLength(cx, oldlen + 1);
                 return false;
             }
             int deletion = DeleteArrayElement(cx, obj, oldlen, strict);
@@ -1335,18 +1334,21 @@ JSObject::makeDenseArraySlow(JSContext *
      * Save old map now, before calling InitScopeForObject. We'll have to undo
      * on error. This is gross, but a better way is not obvious. Note: the
      * exact contents of the array are not preserved on error.
      */
     js::Shape *oldMap = lastProp;
 
     /* Create a native scope. */
     gc::AllocKind kind = getAllocKind();
-    if (!InitScopeForObject(cx, this, &SlowArrayClass, getProto()->getNewType(cx), kind))
+    js::EmptyShape *empty = InitScopeForObject(cx, this, &SlowArrayClass,
+                                               getProto()->getNewType(cx), kind);
+    if (!empty)
         return false;
+    setMap(empty);
 
     backfillDenseArrayHoles(cx);
 
     uint32 arrayCapacity = getDenseArrayCapacity();
     uint32 arrayInitialized = getDenseArrayInitializedLength();
 
     /*
      * Adjust the slots to account for the different layout between dense
@@ -1362,30 +1364,30 @@ JSObject::makeDenseArraySlow(JSContext *
     }
     capacity = numFixedSlots() + arrayCapacity;
     clasp = &SlowArrayClass;
 
     /*
      * Root all values in the array during conversion, as SlowArrayClass only
      * protects up to its slot span.
      */
-    AutoValueArray autoArray(cx, slots, arrayInitialized);
+    AutoValueArray autoArray(cx, Valueify(slots), arrayInitialized);
 
     /* The initialized length is used iff this is a dense array. */
-    initializedLength = 0;
+    initializedLength() = 0;
     JS_ASSERT(newType == NULL);
 
     /*
      * Begin with the length property to share more of the property tree.
      * The getter/setter here will directly access the object's private value.
      */
     if (!AddLengthProperty(cx, this)) {
         setMap(oldMap);
         capacity = arrayCapacity;
-        initializedLength = arrayInitialized;
+        initializedLength() = arrayInitialized;
         clasp = &ArrayClass;
         return false;
     }
 
     /*
      * Create new properties pointing to existing elements. Pack the array to
      * remove holes, so that shapes use successive slots (as for other objects).
      */
@@ -1398,17 +1400,17 @@ JSObject::makeDenseArraySlow(JSContext *
         if (slots[i].isMagic(JS_ARRAY_HOLE))
             continue;
 
         setSlot(next, slots[i]);
 
         if (!addDataProperty(cx, id, next, JSPROP_ENUMERATE)) {
             setMap(oldMap);
             capacity = arrayCapacity;
-            initializedLength = arrayInitialized;
+            initializedLength() = arrayInitialized;
             clasp = &ArrayClass;
             return false;
         }
 
         next++;
     }
 
     clearSlotRange(next, capacity - next);
@@ -2492,17 +2494,17 @@ NewbornArrayPushImpl(JSContext *cx, JSOb
     JS_ASSERT(length <= obj->getDenseArrayCapacity());
 
     if (length == obj->getDenseArrayCapacity() && !obj->ensureSlots(cx, length + 1))
         return false;
 
     if (cx->typeInferenceEnabled())
         obj->setDenseArrayInitializedLength(length + 1);
     obj->setDenseArrayLength(length + 1);
-    obj->setDenseArrayElementWithType(cx, length, v);
+    obj->initDenseArrayElementWithType(cx, length, v);
     return true;
 }
 
 JSBool
 js_NewbornArrayPush(JSContext *cx, JSObject *obj, const Value &vp)
 {
     return NewbornArrayPushImpl(cx, obj, vp);
 }
@@ -2888,26 +2890,26 @@ array_splice(JSContext *cx, uintN argc, 
         uint32 sourceIndex = actualStart + actualDeleteCount;
         uint32 targetIndex = actualStart + itemCount;
         uint32 finalLength = len - actualDeleteCount + itemCount;
 
         if (CanOptimizeForDenseStorage(obj, 0, len, cx)) {
             /* Steps 12(a)-(b). */
             obj->moveDenseArrayElements(targetIndex, sourceIndex, len - sourceIndex);
 
-            /* Steps 12(c)-(d). */
-            obj->shrinkDenseArrayElements(cx, finalLength);
-
             /*
-             * The array's initialized length is now out of sync with the array
-             * elements: resynchronize it.
+             * Update the initialized length. Do so before shrinking so that we
+             * can apply the write barrier to the old slots.
              */
             if (cx->typeInferenceEnabled())
                 obj->setDenseArrayInitializedLength(finalLength);
 
+            /* Steps 12(c)-(d). */
+            obj->shrinkDenseArrayElements(cx, finalLength);
+
             /* Fix running enumerators for the deleted items. */
             if (!js_SuppressDeletedElements(cx, obj, finalLength, len))
                 return false;
         } else {
             /*
              * This is all very slow if the length is very large. We don't yet
              * have the ability to iterate in sorted order, so we just do the
              * pessimistic thing and let JS_CHECK_OPERATION_LIMIT handle the
@@ -3014,20 +3016,22 @@ mjit::stubs::ArrayConcatTwoArrays(VMFram
     JS_ASSERT(initlen2 == obj2->getArrayLength());
 
     /* No overflow here due to nslots limit. */
     uint32 len = initlen1 + initlen2;
 
     if (!result->ensureSlots(f.cx, len))
         THROW();
 
-    result->copyDenseArrayElements(0, obj1->getDenseArrayElements(), initlen1);
-    result->copyDenseArrayElements(initlen1, obj2->getDenseArrayElements(), initlen2);
-
+    JS_ASSERT(!result->getDenseArrayInitializedLength());
     result->setDenseArrayInitializedLength(len);
+
+    result->initDenseArrayElements(0, obj1->getDenseArrayElements(), initlen1);
+    result->initDenseArrayElements(initlen1, obj2->getDenseArrayElements(), initlen2);
+
     result->setDenseArrayLength(len);
 }
 #endif /* JS_METHODJIT */
 
 /*
  * Python-esque sequence operations.
  */
 JSBool
@@ -3915,17 +3919,17 @@ NewDenseCopiedArray(JSContext *cx, uint3
         return NULL;
 
     JS_ASSERT(obj->getDenseArrayCapacity() >= length);
 
     if (cx->typeInferenceEnabled())
         obj->setDenseArrayInitializedLength(vp ? length : 0);
 
     if (vp)
-        obj->copyDenseArrayElements(0, vp, length);
+        obj->initDenseArrayElements(0, vp, length);
 
     return obj;
 }
 
 #ifdef JS_TRACER
 JS_DEFINE_CALLINFO_2(extern, OBJECT, NewDenseEmptyArray, CONTEXT, OBJECT, 0,
                      nanojit::ACCSET_STORE_ANY)
 JS_DEFINE_CALLINFO_3(extern, OBJECT, NewDenseAllocatedArray, CONTEXT, UINT32, OBJECT, 0,
--- a/js/src/jsarray.h
+++ b/js/src/jsarray.h
@@ -50,17 +50,17 @@
 
 /* Small arrays are dense, no matter what. */
 const uintN MIN_SPARSE_INDEX = 256;
 
 inline uint32
 JSObject::getDenseArrayInitializedLength()
 {
     JS_ASSERT(isDenseArray());
-    return initializedLength;
+    return initializedLength();
 }
 
 inline bool
 JSObject::isPackedDenseArray()
 {
     JS_ASSERT(isDenseArray());
     return flags & PACKED_ARRAY;
 }
--- a/js/src/jsarrayinlines.h
+++ b/js/src/jsarrayinlines.h
@@ -43,17 +43,19 @@
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 
 inline void
 JSObject::setDenseArrayInitializedLength(uint32 length)
 {
     JS_ASSERT(isDenseArray());
     JS_ASSERT(length <= getDenseArrayCapacity());
-    initializedLength = length;
+    uint32 cur = initializedLength();
+    prepareSlotRangeForOverwrite(length, cur);
+    initializedLength() = length;
 }
 
 inline void
 JSObject::markDenseArrayNotPacked(JSContext *cx)
 {
     JS_ASSERT(isDenseArray());
     if (flags & PACKED_ARRAY) {
         flags ^= PACKED_ARRAY;
@@ -72,22 +74,23 @@ inline void
 JSObject::ensureDenseArrayInitializedLength(JSContext *cx, uint32 index, uint32 extra)
 {
     /*
      * Ensure that the array's contents have been initialized up to index, and
      * mark the elements through 'index + extra' as initialized in preparation
      * for a write.
      */
     JS_ASSERT(index + extra <= capacity);
-    if (initializedLength < index) {
+    if (initializedLength() < index)
         markDenseArrayNotPacked(cx);
-        js::ClearValueRange(slots + initializedLength, index - initializedLength, true);
+
+    if (initializedLength() < index + extra) {
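+        /*
+         * These slots were never exposed, so InitValueRange can write the
+         * holes without firing pre-barriers.
+         */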
+        js::InitValueRange(slots + initializedLength(), index + extra - initializedLength(), true);
+        initializedLength() = index + extra;
     }
-    if (initializedLength < index + extra)
-        initializedLength = index + extra;
 }
 
 inline JSObject::EnsureDenseResult
 JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
 {
     JS_ASSERT(isDenseArray());
 
     uintN currentCapacity = numSlots();
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -384,36 +384,36 @@ js_FinishCommonAtoms(JSContext *cx)
 {
     cx->runtime->emptyString = NULL;
     cx->runtime->atomState.junkAtoms();
 }
 
 void
 js_TraceAtomState(JSTracer *trc)
 {
-    JSRuntime *rt = trc->context->runtime;
+    JSRuntime *rt = trc->runtime;
     JSAtomState *state = &rt->atomState;
 
 #ifdef DEBUG
     size_t number = 0;
 #endif
 
     if (rt->gcKeepAtoms) {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             JS_SET_TRACING_INDEX(trc, "locked_atom", number++);
-            MarkString(trc, r.front().asPtr());
+            MarkAtom(trc, r.front().asPtr());
         }
     } else {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             AtomStateEntry entry = r.front();
             if (!entry.isTagged())
                 continue;
 
             JS_SET_TRACING_INDEX(trc, "interned_atom", number++);
-            MarkString(trc, entry.asPtr());
+            MarkAtom(trc, entry.asPtr());
         }
     }
 }
 
 void
 js_SweepAtomState(JSContext *cx)
 {
     JSAtomState *state = &cx->runtime->atomState;
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -44,19 +44,25 @@
 #include "jsversion.h"
 #include "jsalloc.h"
 #include "jsapi.h"
 #include "jsprvtd.h"
 #include "jshash.h"
 #include "jspubtd.h"
 #include "jslock.h"
 
+#include "gc/Barrier.h"
 #include "js/HashTable.h"
 #include "vm/String.h"
 
+struct JSIdArray {
+    jsint length;
+    js::HeapId vector[1];    /* actually, length jsid words */
+};
+
 /* Engine-internal extensions of jsid */
 
 static JS_ALWAYS_INLINE jsid
 JSID_FROM_BITS(size_t bits)
 {
     jsid id;
     JSID_BITS(id) = bits;
     return id;
--- a/js/src/jscell.h
+++ b/js/src/jscell.h
@@ -61,30 +61,30 @@ enum AllocKind {
     FINALIZE_OBJECT8,
     FINALIZE_OBJECT8_BACKGROUND,
     FINALIZE_OBJECT12,
     FINALIZE_OBJECT12_BACKGROUND,
     FINALIZE_OBJECT16,
     FINALIZE_OBJECT16_BACKGROUND,
     FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
     FINALIZE_FUNCTION,
-    FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
     FINALIZE_SCRIPT,
     FINALIZE_SHAPE,
     FINALIZE_TYPE_OBJECT,
 #if JS_HAS_XML_SUPPORT
     FINALIZE_XML,
 #endif
     FINALIZE_SHORT_STRING,
     FINALIZE_STRING,
-    FINALIZE_EXTERNAL_STRING,
-    FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
+    FINALIZE_EXTERNAL_STRING
 };
 
-const size_t FINALIZE_LIMIT = FINALIZE_LAST + 1;
+static const unsigned FINALIZE_LIMIT = FINALIZE_EXTERNAL_STRING + 1;
+static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT16_BACKGROUND + 1;
+static const unsigned FINALIZE_FUNCTION_AND_OBJECT_LIMIT = FINALIZE_FUNCTION + 1;
 
 /*
  * Live objects are marked black. How many other additional colors are available
  * depends on the size of the GCThing. Objects marked gray are eligible for
  * cycle collection.
  */
 static const uint32 BLACK = 0;
 static const uint32 GRAY = 1;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -455,21 +455,22 @@ struct JSRuntime
     uint32              gcTriggerBytes;
     size_t              gcLastBytes;
     size_t              gcMaxBytes;
     size_t              gcMaxMallocBytes;
     uint32              gcEmptyArenaPoolLifespan;
     /* We access this without the GC lock, however a race will not affect correctness */
     volatile uint32     gcNumFreeArenas;
     uint32              gcNumber;
-    js::GCMarker        *gcMarkingTracer;
+    js::GCMarker        *gcIncrementalTracer;
     bool                gcChunkAllocationSinceLastGC;
     int64               gcNextFullGCTime;
     int64               gcJitReleaseTime;
     JSGCMode            gcMode;
+    volatile jsuword    gcBarrierFailed;
     volatile jsuword    gcIsNeeded;
     js::WeakMapBase     *gcWeakMapList;
     js::gcstats::Statistics gcStats;
 
     /* The reason that an interrupt-triggered GC should be called. */
     js::gcstats::Reason gcTriggerReason;
 
     /* Pre-allocated space for the GC mark stacks. Pointer type ensures alignment. */
@@ -2211,28 +2212,16 @@ js_RegenerateShapeForGC(JSRuntime *rt)
     return shape;
 }
 
 namespace js {
 
 /************************************************************************/
 
 static JS_ALWAYS_INLINE void
-ClearValueRange(Value *vec, uintN len, bool useHoles)
-{
-    if (useHoles) {
-        for (uintN i = 0; i < len; i++)
-            vec[i].setMagic(JS_ARRAY_HOLE);
-    } else {
-        for (uintN i = 0; i < len; i++)
-            vec[i].setUndefined();
-    }
-}
-
-static JS_ALWAYS_INLINE void
 MakeRangeGCSafe(Value *vec, size_t len)
 {
     PodZero(vec, len);
 }
 
 static JS_ALWAYS_INLINE void
 MakeRangeGCSafe(Value *beg, Value *end)
 {
@@ -2411,36 +2400,33 @@ class AutoShapeVector : public AutoVecto
         JS_GUARD_OBJECT_NOTIFIER_INIT;
     }
 
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 class AutoValueArray : public AutoGCRooter
 {
-    js::Value *start_;
+    const js::Value *start_;
     unsigned length_;
 
   public:
-    AutoValueArray(JSContext *cx, js::Value *start, unsigned length
+    AutoValueArray(JSContext *cx, const js::Value *start, unsigned length
                    JS_GUARD_OBJECT_NOTIFIER_PARAM)
         : AutoGCRooter(cx, VALARRAY), start_(start), length_(length)
     {
         JS_GUARD_OBJECT_NOTIFIER_INIT;
     }
 
-    Value *start() const { return start_; }
+    const Value *start() const { return start_; }
     unsigned length() const { return length_; }
 
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
-JSIdArray *
-NewIdArray(JSContext *cx, jsint length);
-
 /*
  * Allocation policy that uses JSRuntime::malloc_ and friends, so that
  * memory pressure is properly accounted for. This is suitable for
  * long-lived objects owned by the JSRuntime.
  *
  * Since it doesn't hold a JSContext (those may not live long enough), it
  * can't report out-of-memory conditions itself; the caller must check for
  * OOM and take the appropriate action.
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -110,17 +110,17 @@ class AutoNamespaceArray : protected Aut
         array.finish(context);
     }
 
     uint32 length() const { return array.length; }
 
   public:
     friend void AutoGCRooter::trace(JSTracer *trc);
 
-    JSXMLArray array;
+    JSXMLArray<JSObject> array;
 };
 
 #ifdef DEBUG
 class CompartmentChecker
 {
   private:
     JSContext *context;
     JSCompartment *compartment;
@@ -203,18 +203,18 @@ class CompartmentChecker
                     check(ida->vector[i]);
             }
         }
     }
 
     void check(JSScript *script) {
         if (script) {
             check(script->compartment());
-            if (!script->isCachedEval && script->u.globalObject)
-                check(script->u.globalObject);
+            if (!script->isCachedEval && script->globalObject)
+                check(script->globalObject);
         }
     }
 
     void check(StackFrame *fp) {
         check(&fp->scopeChain());
     }
 };
 
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -53,29 +53,32 @@
 #include "assembler/jit/ExecutableAllocator.h"
 #include "yarr/BumpPointerAllocator.h"
 #include "methodjit/MethodJIT.h"
 #include "methodjit/PolyIC.h"
 #include "methodjit/MonoIC.h"
 #include "vm/Debugger.h"
 
 #include "jsgcinlines.h"
+#include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 
 #if ENABLE_YARR_JIT
 #include "assembler/jit/ExecutableAllocator.h"
 #endif
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
   : rt(rt),
     principals(NULL),
+    needsBarrier_(false),
+    gcIncrementalTracer(NULL),
     gcBytes(0),
     gcTriggerBytes(0),
     gcLastBytes(0),
     hold(false),
     typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
 #ifdef JS_TRACER
     traceMonitor_(NULL),
 #endif
@@ -344,16 +347,26 @@ JSCompartment::wrap(JSContext *cx, JSStr
     AutoValueRooter tvr(cx, StringValue(*strp));
     if (!wrap(cx, tvr.addr()))
         return false;
     *strp = tvr.value().toString();
     return true;
 }
 
 bool
+JSCompartment::wrap(JSContext *cx, HeapPtrString *strp)
+{
+    AutoValueRooter tvr(cx, StringValue(*strp));
+    if (!wrap(cx, tvr.addr()))
+        return false;
+    *strp = tvr.value().toString();
+    return true;
+}
+
+bool
 JSCompartment::wrap(JSContext *cx, JSObject **objp)
 {
     if (!*objp)
         return true;
     AutoValueRooter tvr(cx, ObjectValue(**objp));
     if (!wrap(cx, tvr.addr()))
         return false;
     *objp = &tvr.value().toObject();
@@ -415,61 +428,61 @@ JSCompartment::wrap(JSContext *cx, AutoI
 /*
  * This method marks pointers that cross compartment boundaries. It should be
  * called only for per-compartment GCs, since full GCs naturally follow pointers
  * across compartments.
  */
 void
 JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
 {
-    JS_ASSERT(trc->context->runtime->gcCurrentCompartment);
+    JS_ASSERT(trc->runtime->gcCurrentCompartment);
 
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
-        MarkValue(trc, e.front().key, "cross-compartment wrapper");
+        MarkRoot(trc, e.front().key, "cross-compartment wrapper");
 }
 
 void
 JSCompartment::markTypes(JSTracer *trc)
 {
     /*
      * Mark all scripts, type objects and singleton JS objects in the
      * compartment. These can be referred to directly by type sets, which we
      * cannot modify while code that depends on these type sets is active.
      */
     JS_ASSERT(activeAnalysis);
 
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        MarkScript(trc, script, "mark_types_script");
+        MarkRoot(trc, script, "mark_types_script");
     }
 
     for (size_t thingKind = FINALIZE_OBJECT0;
-         thingKind <= FINALIZE_FUNCTION_AND_OBJECT_LAST;
+         thingKind < FINALIZE_FUNCTION_AND_OBJECT_LIMIT;
          thingKind++) {
         for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
             JSObject *object = i.get<JSObject>();
             if (!object->isNewborn() && object->hasSingletonType())
-                MarkObject(trc, *object, "mark_types_singleton");
+                MarkRoot(trc, object, "mark_types_singleton");
         }
     }
 
     for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
-        MarkTypeObject(trc, i.get<types::TypeObject>(), "mark_types_scan");
+        MarkRoot(trc, i.get<types::TypeObject>(), "mark_types_scan");
 }
 
 void
 JSCompartment::sweep(JSContext *cx, bool releaseTypes)
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
-        JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key.toGCThing()) &&
-                     !IsAboutToBeFinalized(cx, e.front().value.toGCThing()),
+        JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key) &&
+                     !IsAboutToBeFinalized(cx, e.front().value),
                      e.front().key.isString());
-        if (IsAboutToBeFinalized(cx, e.front().key.toGCThing()) ||
-            IsAboutToBeFinalized(cx, e.front().value.toGCThing())) {
+        if (IsAboutToBeFinalized(cx, e.front().key) ||
+            IsAboutToBeFinalized(cx, e.front().value)) {
             e.removeFront();
         }
     }
 
     /* Remove dead empty shapes. */
     if (emptyArgumentsShape && IsAboutToBeFinalized(cx, emptyArgumentsShape))
         emptyArgumentsShape = NULL;
     if (emptyBlockShape && IsAboutToBeFinalized(cx, emptyBlockShape))
@@ -579,17 +592,17 @@ JSCompartment::purge(JSContext *cx)
      * not null when we have script owned by an object and not from the eval
      * cache.
      */
     for (size_t i = 0; i < ArrayLength(evalCache); ++i) {
         for (JSScript **listHeadp = &evalCache[i]; *listHeadp; ) {
             JSScript *script = *listHeadp;
             JS_ASSERT(GetGCThingTraceKind(script) == JSTRACE_SCRIPT);
             *listHeadp = NULL;
-            listHeadp = &script->u.evalHashLink;
+            listHeadp = &script->evalHashLink();
         }
     }
 
     nativeIterCache.purge();
     toSourceCache.destroyIfConstructed();
 
 #ifdef JS_TRACER
     /*
@@ -822,17 +835,17 @@ JSCompartment::markTrapClosuresIterative
     bool markedAny = false;
     JSContext *cx = trc->context;
     for (BreakpointSiteMap::Range r = breakpointSites.all(); !r.empty(); r.popFront()) {
         BreakpointSite *site = r.front().value;
 
         // Put off marking trap state until we know the script is live.
         if (site->trapHandler && !IsAboutToBeFinalized(cx, site->script)) {
             if (site->trapClosure.isMarkable() &&
-                IsAboutToBeFinalized(cx, site->trapClosure.toGCThing()))
+                IsAboutToBeFinalized(cx, site->trapClosure))
             {
                 markedAny = true;
             }
             MarkValue(trc, site->trapClosure, "trap closure");
         }
     }
     return markedAny;
 }
@@ -853,8 +866,15 @@ JSCompartment::sweepBreakpoints(JSContex
             if (scriptGone || IsAboutToBeFinalized(cx, bp->debugger->toJSObject()))
                 bp->destroy(cx, &e);
         }
         
         if (clearTrap)
             site->clearTrap(cx, &e);
     }
 }
+
+GCMarker *
+JSCompartment::createBarrierTracer()
+{
+    JS_ASSERT(!gcIncrementalTracer);
+    return NULL;
+}
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -392,16 +392,30 @@ typedef HashSet<ScriptFilenameEntry *,
 } /* namespace js */
 
 struct JS_FRIEND_API(JSCompartment) {
     JSRuntime                    *rt;
     JSPrincipals                 *principals;
 
     js::gc::ArenaLists           arenas;
 
+    bool                         needsBarrier_;
+    js::GCMarker                 *gcIncrementalTracer;
+
+    bool needsBarrier() {
+        return needsBarrier_;
+    }
+
+    js::GCMarker *barrierTracer() {
+        JS_ASSERT(needsBarrier_);
+        if (gcIncrementalTracer)
+            return gcIncrementalTracer;
+        return createBarrierTracer();
+    }
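+
+    /*
+     * Sketch of a typical pre-barrier site (the real code lives in
+     * gc/Barrier.h): the snapshot-at-the-beginning invariant is kept by
+     * marking the *old* value before it is overwritten:
+     *
+     *   if (comp->needsBarrier())
+     *       MarkValueUnbarriered(comp->barrierTracer(), oldval, "write barrier");
+     */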
+
     uint32                       gcBytes;
     uint32                       gcTriggerBytes;
     size_t                       gcLastBytes;
 
     bool                         hold;
     bool                         isSystemCompartment;
 
     /*
@@ -469,44 +483,47 @@ struct JS_FRIEND_API(JSCompartment) {
 #ifdef DEBUG
     /* Property metering. */
     jsrefcount                   livePropTreeNodes;
     jsrefcount                   totalPropTreeNodes;
     jsrefcount                   propTreeKidsChunks;
     jsrefcount                   liveDictModeNodes;
 #endif
 
+    typedef js::ReadBarriered<js::EmptyShape> BarrieredEmptyShape;
+    typedef js::ReadBarriered<const js::Shape> BarrieredShape;
+
     /*
      * Runtime-shared empty scopes for well-known built-in objects that lack
      * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
      */
-    js::EmptyShape               *emptyArgumentsShape;
-    js::EmptyShape               *emptyBlockShape;
-    js::EmptyShape               *emptyCallShape;
-    js::EmptyShape               *emptyDeclEnvShape;
-    js::EmptyShape               *emptyEnumeratorShape;
-    js::EmptyShape               *emptyWithShape;
+    BarrieredEmptyShape          emptyArgumentsShape;
+    BarrieredEmptyShape          emptyBlockShape;
+    BarrieredEmptyShape          emptyCallShape;
+    BarrieredEmptyShape          emptyDeclEnvShape;
+    BarrieredEmptyShape          emptyEnumeratorShape;
+    BarrieredEmptyShape          emptyWithShape;
 
     typedef js::HashSet<js::EmptyShape *,
                         js::DefaultHasher<js::EmptyShape *>,
                         js::SystemAllocPolicy> EmptyShapeSet;
 
     EmptyShapeSet                emptyShapes;
 
     /*
      * Initial shapes given to RegExp and String objects, encoding the initial
      * sets of built-in instance properties and the fixed slots where they must
      * be stored (see JSObject::JSSLOT_(REGEXP|STRING)_*). Later property
      * additions may cause these shapes to not be used by a RegExp or String
      * (even along the entire shape parent chain, should the object go into
      * dictionary mode). But because all the initial properties are
      * non-configurable, they will always map to fixed slots.
      */
-    const js::Shape              *initialRegExpShape;
-    const js::Shape              *initialStringShape;
+    BarrieredShape               initialRegExpShape;
+    BarrieredShape               initialStringShape;
 
   private:
     enum { DebugFromC = 1, DebugFromJS = 2 };
 
     uintN                        debugModeBits;  // see debugMode() below
 
   public:
     js::NativeIterCache          nativeIterCache;
@@ -521,16 +538,17 @@ struct JS_FRIEND_API(JSCompartment) {
 
     bool init(JSContext *cx);
 
     /* Mark cross-compartment wrappers. */
     void markCrossCompartmentWrappers(JSTracer *trc);
 
     bool wrap(JSContext *cx, js::Value *vp);
     bool wrap(JSContext *cx, JSString **strp);
+    bool wrap(JSContext *cx, js::HeapPtrString *strp);
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
     void markTypes(JSTracer *trc);
@@ -619,16 +637,18 @@ struct JS_FRIEND_API(JSCompartment) {
                                                   js::GlobalObject *scriptGlobal);
     void clearBreakpointsIn(JSContext *cx, js::Debugger *dbg, JSScript *script, JSObject *handler);
     void clearTraps(JSContext *cx, JSScript *script);
     bool markTrapClosuresIteratively(JSTracer *trc);
 
   private:
     void sweepBreakpoints(JSContext *cx);
 
+    js::GCMarker *createBarrierTracer();
+
   public:
     js::WatchpointMap *watchpointMap;
 };
 
 #define JS_PROPERTY_TREE(cx)    ((cx)->compartment->propertyTree)
 
 /*
  * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -64,18 +64,20 @@
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jswrapper.h"
 
 #include "vm/GlobalObject.h"
 
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
+#include "jsstrinlines.h"
 
 #include "vm/Stack-inl.h"
+#include "vm/String-inl.h"
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 using namespace js::types;
 
 /* Forward declarations for ErrorClass's initializer. */
 static JSBool
@@ -108,27 +110,27 @@ Class js::ErrorClass = {
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
     exn_trace
 };
 
 typedef struct JSStackTraceElem {
-    JSString            *funName;
+    js::HeapPtrString   funName;
     size_t              argc;
     const char          *filename;
     uintN               ulineno;
 } JSStackTraceElem;
 
 typedef struct JSExnPrivate {
     /* A copy of the JSErrorReport originally generated. */
     JSErrorReport       *errorReport;
-    JSString            *message;
-    JSString            *filename;
+    js::HeapPtrString   message;
+    js::HeapPtrString   filename;
     uintN               lineno;
     size_t              stackDepth;
     intN                exnType;
     JSStackTraceElem    stackElems[1];
 } JSExnPrivate;
 
 static JSString *
 StackTraceToString(JSContext *cx, JSExnPrivate *priv);
@@ -323,22 +325,22 @@ InitExnPrivate(JSContext *cx, JSObject *
                 if (!checkAccess(cx, &fp->callee(), callerid, JSACC_READ, &v))
                     break;
             }
 
             if (!frames.growBy(1))
                 return false;
             JSStackTraceElem &frame = frames.back();
             if (fp->isNonEvalFunctionFrame()) {
-                frame.funName = fp->fun()->atom ? fp->fun()->atom : cx->runtime->emptyString;
+                frame.funName.init(fp->fun()->atom ? fp->fun()->atom : cx->runtime->emptyString);
                 frame.argc = fp->numActualArgs();
                 if (!fp->forEachCanonicalActualArg(AppendArg(values)))
                     return false;
             } else {
-                frame.funName = NULL;
+                frame.funName.init(NULL);
                 frame.argc = 0;
             }
             if (fp->isScriptFrame()) {
                 frame.filename = fp->script()->filename;
                 frame.ulineno = js_FramePCToLineNumber(cx, fp, i.pc());
             } else {
                 frame.ulineno = 0;
                 frame.filename = NULL;
@@ -352,34 +354,37 @@ InitExnPrivate(JSContext *cx, JSObject *
     size_t nbytes = offsetof(JSExnPrivate, stackElems) +
                     frames.length() * sizeof(JSStackTraceElem) +
                     values.length() * sizeof(Value);
 
     JSExnPrivate *priv = (JSExnPrivate *)cx->malloc_(nbytes);
     if (!priv)
         return false;
 
+    /* Zero everything first so that write barriers never see uninitialized values. */
+    memset(priv, 0, nbytes);
+
     if (report) {
         /*
          * Construct a new copy of the error report struct. We can't use the
          * error report struct that was passed in, because it's allocated on
          * the stack, and also because it may point to transient data in the
          * TokenStream.
          */
         priv->errorReport = CopyErrorReport(cx, report);
         if (!priv->errorReport) {
             cx->free_(priv);
             return false;
         }
     } else {
         priv->errorReport = NULL;
     }
 
-    priv->message = message;
-    priv->filename = filename;
+    priv->message.init(message);
+    priv->filename.init(filename);
     priv->lineno = lineno;
     priv->stackDepth = frames.length();
     priv->exnType = exnType;
 
     JSStackTraceElem *framesDest = priv->stackElems;
     Value *valuesDest = reinterpret_cast<Value *>(framesDest + frames.length());
     JS_ASSERT(valuesDest == GetStackTraceValueBuffer(priv));
 
@@ -417,18 +422,19 @@ exn_trace(JSTracer *trc, JSObject *obj)
             if (elem->funName)
                 MarkString(trc, elem->funName, "stack trace function name");
             if (IS_GC_MARKING_TRACER(trc) && elem->filename)
                 js_MarkScriptFilename(elem->filename);
             vcount += elem->argc;
         }
         vp = GetStackTraceValueBuffer(priv);
         for (i = 0; i != vcount; ++i, ++vp) {
+            /* This value is read-only, so it's okay for it to be Unbarriered. */
             v = *vp;
-            JS_CALL_VALUE_TRACER(trc, v, "stack trace argument");
+            MarkValueUnbarriered(trc, v, "stack trace argument");
         }
     }
 }
 
 static void
 exn_finalize(JSContext *cx, JSObject *obj)
 {
     if (JSExnPrivate *priv = GetExnPrivate(obj)) {
@@ -489,18 +495,16 @@ exn_resolve(JSContext *cx, JSObject *obj
         }
 
         atom = cx->runtime->atomState.stackAtom;
         if (str == atom) {
             stack = StackTraceToString(cx, priv);
             if (!stack)
                 return false;
 
-            /* Allow to GC all things that were used to build stack trace. */
-            priv->stackDepth = 0;
             prop = js_stack_str;
             v = STRING_TO_JSVAL(stack);
             attrs = JSPROP_ENUMERATE;
             goto define;
         }
     }
     return true;
 
@@ -1337,21 +1341,21 @@ js_CopyErrorObject(JSContext *cx, JSObje
     // Copy each field. Don't bother copying the stack elements.
     if (priv->errorReport) {
         copy->errorReport = CopyErrorReport(cx, priv->errorReport);
         if (!copy->errorReport)
             return NULL;
     } else {
         copy->errorReport = NULL;
     }
-    copy->message = priv->message;
+    copy->message.init(priv->message);
     if (!cx->compartment->wrap(cx, &copy->message))
         return NULL;
     JS::Anchor<JSString *> messageAnchor(copy->message);
-    copy->filename = priv->filename;
+    copy->filename.init(priv->filename);
     if (!cx->compartment->wrap(cx, &copy->filename))
         return NULL;
     JS::Anchor<JSString *> filenameAnchor(copy->filename);
     copy->lineno = priv->lineno;
     copy->stackDepth = 0;
     copy->exnType = priv->exnType;
 
     // Create the Error object.
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -146,42 +146,45 @@ ArgumentsObject::create(JSContext *cx, u
     EmptyShape *emptyArgumentsShape = EmptyShape::getEmptyArgumentsShape(cx);
     if (!emptyArgumentsShape)
         return NULL;
 
     ArgumentsData *data = (ArgumentsData *)
         cx->malloc_(offsetof(ArgumentsData, slots) + argc * sizeof(Value));
     if (!data)
         return NULL;
-    SetValueRangeToUndefined(data->slots, argc);
+
+    data->callee.init(ObjectValue(callee));
+    InitValueRange(data->slots, argc, false);
 
     /* Can't fail from here on, so initialize everything in argsobj. */
     obj->init(cx, callee.getFunctionPrivate()->inStrictMode()
               ? &StrictArgumentsObjectClass
               : &NormalArgumentsObjectClass,
               type, proto->getParent(), NULL, false);
-    obj->setMap(emptyArgumentsShape);
+    obj->initMap(emptyArgumentsShape);
 
     ArgumentsObject *argsobj = obj->asArguments();
 
     JS_ASSERT(UINT32_MAX > (uint64(argc) << PACKED_BITS_COUNT));
-    argsobj->setInitialLength(argc);
-
-    argsobj->setCalleeAndData(callee, data);
+    argsobj->initInitialLength(argc);
+    argsobj->initData(data);
 
     return argsobj;
 }
 
 struct STATIC_SKIP_INFERENCE PutArg
 {
-    PutArg(Value *dst) : dst(dst) {}
-    Value *dst;
+    PutArg(JSCompartment *comp, HeapValue *dst) : dst(dst), compartment(comp) {}
+    HeapValue *dst;
+    JSCompartment *compartment;
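+    /*
+     * Writes go through HeapValue::set(comp, v), which fires the pre-barrier
+     * on the old value; passing the compartment in avoids re-deriving it for
+     * every element.
+     */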
     bool operator()(uintN, Value *src) {
+        JS_ASSERT(dst->isMagic(JS_ARGS_HOLE) || dst->isUndefined());
         if (!dst->isMagic(JS_ARGS_HOLE))
-            *dst = *src;
+            dst->set(compartment, *src);
         ++dst;
         return true;
     }
 };
 
 JSObject *
 js_GetArgsObject(JSContext *cx, StackFrame *fp)
 {
@@ -215,31 +218,32 @@ js_GetArgsObject(JSContext *cx, StackFra
      * actual parameter values.  It is the caller's responsibility to get the
      * arguments object before any parameters are modified!  (The emitter
      * ensures this by synthesizing an arguments access at the start of any
      * strict mode function that contains an assignment to a parameter, or
      * that calls eval.)  Non-strict mode arguments use the frame pointer to
      * retrieve up-to-date parameter values.
      */
     if (argsobj->isStrictArguments())
-        fp->forEachCanonicalActualArg(PutArg(argsobj->data()->slots));
+        fp->forEachCanonicalActualArg(PutArg(cx->compartment, argsobj->data()->slots));
     else
         argsobj->setStackFrame(fp);
 
     fp->setArgsObj(*argsobj);
     return argsobj;
 }
 
 void
 js_PutArgsObject(StackFrame *fp)
 {
     ArgumentsObject &argsobj = fp->argsObj();
     if (argsobj.isNormalArguments()) {
         JS_ASSERT(argsobj.maybeStackFrame() == fp);
-        fp->forEachCanonicalActualArg(PutArg(argsobj.data()->slots));
+        JSCompartment *comp = fp->scopeChain().compartment();
+        fp->forEachCanonicalActualArg(PutArg(comp, argsobj.data()->slots));
         argsobj.setStackFrame(NULL);
     } else {
         JS_ASSERT(!argsobj.maybeStackFrame());
     }
 }
 
 #ifdef JS_TRACER
 
@@ -277,20 +281,21 @@ js_PutArgumentsOnTrace(JSContext *cx, JS
     JS_ASSERT(argsobj->onTrace());
 
     /*
      * TraceRecorder::putActivationObjects builds a single, contiguous array of
      * the arguments, regardless of whether #actuals > #formals so there is no
      * need to worry about actual vs. formal arguments.
      */
     Value *srcend = argv + argsobj->initialLength();
-    Value *dst = argsobj->data()->slots;
+    HeapValue *dst = argsobj->data()->slots;
+    JSCompartment *comp = cx->compartment;
     for (Value *src = argv; src < srcend; ++src, ++dst) {
         if (!dst->isMagic(JS_ARGS_HOLE))
-            *dst = *src;
+            dst->set(comp, *src);
     }
 
     argsobj->clearOnTrace();
     return true;
 }
 JS_DEFINE_CALLINFO_3(extern, BOOL, js_PutArgumentsOnTrace, CONTEXT, OBJECT, VALUEPTR, 0,
                      nanojit::ACCSET_STORE_ANY)
 
@@ -596,35 +601,32 @@ args_finalize(JSContext *cx, JSObject *o
  * generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only
  * set on the StackFrame kept in the generator object's JSGenerator.
  */
 static inline void
 MaybeMarkGenerator(JSTracer *trc, JSObject *obj)
 {
 #if JS_HAS_GENERATORS
     StackFrame *fp = (StackFrame *) obj->getPrivate();
-    if (fp && fp->isFloatingGenerator()) {
-        JSObject *genobj = js_FloatingFrameToGenerator(fp)->obj;
-        MarkObject(trc, *genobj, "generator object");
-    }
+    if (fp && fp->isFloatingGenerator())
+        MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 static void
 args_trace(JSTracer *trc, JSObject *obj)
 {
     ArgumentsObject *argsobj = obj->asArguments();
     if (argsobj->onTrace()) {
         JS_ASSERT(!argsobj->isStrictArguments());
         return;
     }
 
     ArgumentsData *data = argsobj->data();
-    if (data->callee.isObject())
-        MarkObject(trc, data->callee.toObject(), js_callee_str);
+    MarkValue(trc, data->callee, js_callee_str);
     MarkValueRange(trc, argsobj->initialLength(), data->slots, js_arguments_str);
 
     MaybeMarkGenerator(trc, argsobj);
 }
 
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
@@ -702,17 +704,17 @@ NewDeclEnvObject(JSContext *cx, StackFra
     JSObject *envobj = js_NewGCObject(cx, FINALIZE_OBJECT2);
     if (!envobj)
         return NULL;
 
     EmptyShape *emptyDeclEnvShape = EmptyShape::getEmptyDeclEnvShape(cx);
     if (!emptyDeclEnvShape)
         return NULL;
     envobj->init(cx, &DeclEnvClass, &emptyTypeObject, &fp->scopeChain(), fp, false);
-    envobj->setMap(emptyDeclEnvShape);
+    envobj->initMap(emptyDeclEnvShape);
 
     return envobj;
 }
 
 namespace js {
 
 CallObject *
 CreateFunCallObject(JSContext *cx, StackFrame *fp)
@@ -781,17 +783,17 @@ js_PutCallObject(StackFrame *fp)
     CallObject &callobj = fp->callObj().asCall();
     JS_ASSERT(callobj.maybeStackFrame() == fp);
     JS_ASSERT_IF(fp->isEvalFrame(), fp->isStrictEvalFrame());
     JS_ASSERT(fp->isEvalFrame() == callobj.isForEval());
 
     /* Get the arguments object to snapshot fp's actual argument values. */
     if (fp->hasArgsObj()) {
         if (!fp->hasOverriddenArgs())
-            callobj.setArguments(ObjectValue(fp->argsObj()));
+            callobj.initArguments(ObjectValue(fp->argsObj()));
         js_PutArgsObject(fp);
     }
 
     JSScript *script = fp->script();
     Bindings &bindings = script->bindings;
 
     if (callobj.isForEval()) {
         JS_ASSERT(script->strictModeCode);
@@ -818,28 +820,31 @@ js_PutCallObject(StackFrame *fp)
 #ifdef JS_METHODJIT
                 || script->debugMode
 #endif
                 ) {
                 callobj.copyValues(nargs, fp->formalArgs(), nvars, fp->slots());
             } else {
                 /*
                  * For each arg & var that is closed over, copy it from the stack
-                 * into the call object.
+                 * into the call object. We use initArg/VarUnchecked because,
+                 * when you call a getter on a call object, js_NativeGetInline
+                 * caches the return value in the slot, so we can't assert that
+                 * it's undefined.
                  */
                 uint32 nclosed = script->nClosedArgs;
                 for (uint32 i = 0; i < nclosed; i++) {
                     uint32 e = script->getClosedArg(i);
-                    callobj.setArg(e, fp->formalArg(e));
+                    callobj.initArgUnchecked(e, fp->formalArg(e));
                 }
 
                 nclosed = script->nClosedVars;
                 for (uint32 i = 0; i < nclosed; i++) {
                     uint32 e = script->getClosedVar(i);
-                    callobj.setVar(e, fp->slots()[e]);
+                    callobj.initVarUnchecked(e, fp->slots()[e]);
                 }
             }
 
             /*
              * Update the args and vars for the active call if this is an outer
              * function in a script nesting.
              */
             types::TypeScriptNesting *nesting = script->nesting();
@@ -1587,29 +1592,34 @@ static void
 fun_trace(JSTracer *trc, JSObject *obj)
 {
     /* A newborn function object may have a not yet initialized private slot. */
     JSFunction *fun = (JSFunction *) obj->getPrivate();
     if (!fun)
         return;
 
     if (fun != obj) {
-        /* obj is a cloned function object, trace the clone-parent, fun. */
-        MarkObject(trc, *fun, "private");
+        /*
+         * obj is a cloned function object, trace the clone-parent, fun.
+         * This is safe to leave Unbarriered for incremental GC because any
+         * change to fun will trigger a setPrivate barrier. But we'll need to
+         * fix this for generational GC.
+         */
+        MarkObjectUnbarriered(trc, fun, "private");
 
         /* The function could be a flat closure with upvar copies in the clone. */
         if (fun->isFlatClosure() && fun->script()->bindings.hasUpvars()) {
             MarkValueRange(trc, fun->script()->bindings.countUpvars(),
-                           obj->getFlatClosureUpvars(), "upvars");
+                           obj->getFlatClosureData()->upvars, "upvars");
         }
         return;
     }
 
     if (fun->atom)
-        MarkString(trc, fun->atom, "atom");
+        MarkAtom(trc, fun->atom, "atom");
 
     if (fun->isInterpreted() && fun->script())
         MarkScript(trc, fun->script(), "script");
 }
 
 static void
 fun_finalize(JSContext *cx, JSObject *obj)
 {
@@ -1844,17 +1854,17 @@ JSObject::initBoundFunction(JSContext *c
 
         empty->slotSpan += argslen;
         setMap(empty);
 
         if (!ensureInstanceReservedSlots(cx, argslen))
             return false;
 
         JS_ASSERT(numSlots() >= argslen + FUN_CLASS_RESERVED_SLOTS);
-        copySlotRange(FUN_CLASS_RESERVED_SLOTS, args, argslen);
+        copySlotRange(FUN_CLASS_RESERVED_SLOTS, args, argslen, false);
     }
     return true;
 }
 
 inline JSObject *
 JSObject::getBoundFunctionTarget() const
 {
     JS_ASSERT(isFunction());
@@ -2273,17 +2283,17 @@ js_NewFunction(JSContext *cx, JSObject *
 
     /* Initialize all function members. */
     fun->nargs = uint16(nargs);
     fun->flags = flags & (JSFUN_FLAGS_MASK | JSFUN_KINDMASK | JSFUN_TRCINFO);
     if ((flags & JSFUN_KINDMASK) >= JSFUN_INTERPRETED) {
         JS_ASSERT(!native);
         JS_ASSERT(nargs == 0);
         fun->u.i.skipmin = 0;
-        fun->u.i.script_ = NULL;
+        fun->script().init(NULL);
     } else {
         fun->u.n.clasp = NULL;
         if (flags & JSFUN_TRCINFO) {
 #ifdef JS_TRACER
             JSNativeTraceInfo *trcinfo =
                 JS_FUNC_TO_DATA_PTR(JSNativeTraceInfo *, native);
             fun->u.n.native = (Native) trcinfo->native;
             fun->u.n.trcinfo = trcinfo;
@@ -2323,17 +2333,17 @@ js_CloneFunctionObject(JSContext *cx, JS
         /*
          * We can use the same type as the original function provided that (a)
          * its prototype is correct, and (b) its type is not a singleton. The
          * first case will hold in all compileAndGo code, and the second case
          * will have been caught by CloneFunctionObject coming from function
          * definitions or read barriers, so will not get here.
          */
         if (fun->getProto() == proto && !fun->hasSingletonType())
-            clone->setType(fun->type());
+            clone->initType(fun->type());
 
         clone->setPrivate(fun);
     } else {
         /*
          * Across compartments we have to deep copy JSFunction and clone the
          * script (for interpreted functions).
          */
         clone = NewFunction(cx, parent);
@@ -2347,21 +2357,21 @@ js_CloneFunctionObject(JSContext *cx, JS
         cfun->atom = fun->atom;
         clone->setPrivate(cfun);
         if (cfun->isInterpreted()) {
             JSScript *script = fun->script();
             JS_ASSERT(script);
             JS_ASSERT(script->compartment() == fun->compartment());
             JS_ASSERT(script->compartment() != cx->compartment);
 
-            cfun->u.i.script_ = NULL;
+            cfun->script().init(NULL);
             JSScript *cscript = js_CloneScript(cx, script);
             if (!cscript)
                 return NULL;
-            cscript->u.globalObject = cfun->getGlobal();
+            cscript->globalObject = cfun->getGlobal();
             cfun->setScript(cscript);
             if (!cscript->typeSetFunction(cx, cfun))
                 return NULL;
 
             js_CallNewScriptHook(cx, cfun->script(), cfun);
             Debugger::onNewScript(cx, cfun->script(), NULL);
         }
     }
@@ -2390,21 +2400,21 @@ js_AllocFlatClosure(JSContext *cx, JSFun
     JSObject *closure = CloneFunctionObject(cx, fun, scopeChain, true);
     if (!closure)
         return closure;
 
     uint32 nslots = fun->script()->bindings.countUpvars();
     if (nslots == 0)
         return closure;
 
-    Value *upvars = (Value *) cx->malloc_(nslots * sizeof(Value));
-    if (!upvars)
+    FlatClosureData *data = (FlatClosureData *) cx->malloc_(nslots * sizeof(HeapValue));
+    if (!data)
         return NULL;
 
-    closure->setFlatClosureUpvars(upvars);
+    closure->setFlatClosureData(data);
     return closure;
 }
 
 JS_DEFINE_CALLINFO_3(extern, OBJECT, js_AllocFlatClosure,
                      CONTEXT, FUNCTION, OBJECT, 0, nanojit::ACCSET_STORE_ANY)
 
 JSObject *
 js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen)
@@ -2420,22 +2430,22 @@ js_NewFlatClosure(JSContext *cx, JSFunct
      */
     VOUCH_DOES_NOT_REQUIRE_STACK();
     JSObject *scopeChain = &cx->fp()->scopeChain();
 
     JSObject *closure = js_AllocFlatClosure(cx, fun, scopeChain);
     if (!closure || !fun->script()->bindings.hasUpvars())
         return closure;
 
-    Value *upvars = closure->getFlatClosureUpvars();
+    FlatClosureData *data = closure->getFlatClosureData();
     uintN level = fun->script()->staticLevel;
     JSUpvarArray *uva = fun->script()->upvars();
 
     for (uint32 i = 0, n = uva->length; i < n; i++)
-        upvars[i] = GetUpvar(cx, level, uva->vector[i]);
+        data->upvars[i].init(GetUpvar(cx, level, uva->vector[i]));
 
     return closure;
 }
 
 JSFunction *
 js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native,
                   uintN nargs, uintN attrs)
 {
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -45,16 +45,18 @@
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsobj.h"
 #include "jsatom.h"
 #include "jsscript.h"
 #include "jsstr.h"
 #include "jsopcode.h"
 
+#include "gc/Barrier.h"
+
 /*
  * The high two bits of JSFunction.flags encode whether the function is native
  * or interpreted, and if interpreted, what kind of optimized closure form (if
  * any) it might be.
  *
  *   00   not interpreted
  *   01   interpreted, neither flat nor null closure
  *   10   interpreted, flat closure
@@ -190,28 +192,26 @@ struct JSFunction : public JSObject_Slot
     JSAtom *methodAtom() const {
         return (joinable() && getSlot(METHOD_ATOM_SLOT).isString())
                ? &getSlot(METHOD_ATOM_SLOT).toString()->asAtom()
                : NULL;
     }
 
     inline void setMethodAtom(JSAtom *atom);
 
-    JSScript *script() const {
+    js::HeapPtrScript &script() const {
         JS_ASSERT(isInterpreted());
-        return u.i.script_;
+        return *(js::HeapPtrScript *)&u.i.script_;
     }
 
-    void setScript(JSScript *script) {
-        JS_ASSERT(isInterpreted());
-        u.i.script_ = script;
-    }
+    inline void setScript(JSScript *script_);
+    inline void initScript(JSScript *script_);
 
     JSScript *maybeScript() const {
-        return isInterpreted() ? script() : NULL;
+        return isInterpreted() ? script().get() : NULL;
     }
 
     JSNative native() const {
         JS_ASSERT(isNative());
         return u.n.native;
     }
 
     JSNative maybeNative() const {
@@ -264,16 +264,20 @@ inline JSFunction *
 JSObject::getFunctionPrivate() const
 {
     JS_ASSERT(isFunction());
     return reinterpret_cast<JSFunction *>(getPrivate());
 }
 
 namespace js {
 
+struct FlatClosureData {
+    HeapValue upvars[1];
+};
+
 static JS_ALWAYS_INLINE bool
 IsFunctionObject(const js::Value &v)
 {
     return v.isObject() && v.toObject().isFunction();
 }
 
 static JS_ALWAYS_INLINE bool
 IsFunctionObject(const js::Value &v, JSObject **funobj)
--- a/js/src/jsfuninlines.h
+++ b/js/src/jsfuninlines.h
@@ -92,9 +92,23 @@ CloneFunctionObject(JSContext *cx, JSFun
         JS_ASSERT(fun->getProto() == proto);
         fun->setParent(parent);
         return fun;
     }
 
     return js_CloneFunctionObject(cx, fun, parent, proto);
 }
 
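+/*
+ * setScript assigns through the barriered HeapPtrScript (pre-barrier on the
+ * old script); initScript writes without a barrier and must only be used for
+ * a slot that holds no previous value.
+ */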
+inline void
+JSFunction::setScript(JSScript *script_)
+{
+    JS_ASSERT(isInterpreted());
+    script() = script_;
+}
+
+inline void
+JSFunction::initScript(JSScript *script_)
+{
+    JS_ASSERT(isInterpreted());
+    script().init(script_);
+}
+
 #endif /* jsfuninlines_h___ */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -783,29 +783,36 @@ PickChunk(JSCompartment *comp)
     chunk->info.prevp = NULL;
     chunk->info.next = NULL;
     chunk->addToAvailableList(comp);
 
     return chunk;
 }
 
 JS_FRIEND_API(bool)
-IsAboutToBeFinalized(JSContext *cx, const void *thing)
+IsAboutToBeFinalized(JSContext *cx, const Cell *thing)
 {
     JS_ASSERT(cx);
 
     JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(rt == thingCompartment->rt);
     if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
         return false;
 
     return !reinterpret_cast<const Cell *>(thing)->isMarked();
 }
 
+bool
+IsAboutToBeFinalized(JSContext *cx, const Value &v)
+{
+    JS_ASSERT(v.isMarkable());
+    return IsAboutToBeFinalized(cx, (Cell *)v.toGCThing());
+}
+
 JS_FRIEND_API(bool)
 js_GCThingIsMarked(void *thing, uintN color = BLACK)
 {
     JS_ASSERT(thing);
     AssertValidColor(thing, color);
     return reinterpret_cast<Cell *>(thing)->isMarked(color);
 }
 
@@ -913,17 +920,17 @@ MarkIfGCThingWord(JSTracer *trc, jsuword
 #if JS_BITS_PER_WORD == 32
     jsuword addr = w & JSID_PAYLOAD_MASK;
 #elif JS_BITS_PER_WORD == 64
     jsuword addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
 #endif
 
     Chunk *chunk = Chunk::fromAddress(addr);
 
-    if (!trc->context->runtime->gcChunkSet.has(chunk))
+    if (!trc->runtime->gcChunkSet.has(chunk))
         return CGCT_NOTCHUNK;
 
     /*
      * We query for pointers outside the arena array after checking for an
      * allocated chunk. Such pointers are rare and we want to reject them
      * after doing more likely rejections.
      */
     if (!Chunk::withinArenasRange(addr))
@@ -934,17 +941,17 @@ MarkIfGCThingWord(JSTracer *trc, jsuword
     if (chunk->decommittedArenas.get(arenaOffset))
         return CGCT_FREEARENA;
 
     ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;
 
     if (!aheader->allocated())
         return CGCT_FREEARENA;
 
-    JSCompartment *curComp = trc->context->runtime->gcCurrentCompartment;
+    JSCompartment *curComp = trc->runtime->gcCurrentCompartment;
     if (curComp && curComp != aheader->compartment)
         return CGCT_OTHERCOMPARTMENT;
 
     AllocKind thingKind = aheader->getAllocKind();
     uintptr_t offset = addr & ArenaMask;
     uintptr_t minOffset = Arena::firstThingOffset(thingKind);
     if (offset < minOffset)
         return CGCT_NOTARENA;
@@ -1040,17 +1047,17 @@ MarkStackRangeConservatively(JSTracer *t
         JSRuntime *runtime;
         JSCompartment *savedCompartment;
 
         AutoSkipChecking(JSRuntime *rt)
           : runtime(rt), savedCompartment(rt->gcCheckCompartment) {
             rt->gcCheckCompartment = NULL;
         }
         ~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
-    } as(trc->context->runtime);
+    } as(trc->runtime);
 
     const jsuword *begin = beginv->payloadWord();
     const jsuword *end = endv->payloadWord();
 #ifdef JS_NUNBOX32
     /*
      * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
      * scanning only the payloads.
      */
@@ -1061,29 +1068,29 @@ MarkStackRangeConservatively(JSTracer *t
     MarkRangeConservatively(trc, begin, end);
 #endif
 }
 
 void
 MarkConservativeStackRoots(JSTracer *trc)
 {
 #ifdef JS_THREADSAFE
-    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
+    for (JSThread::Map::Range r = trc->runtime->threads.all(); !r.empty(); r.popFront()) {
         JSThread *thread = r.front().value;
         ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
         if (ctd->hasStackToScan()) {
             JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
             MarkThreadDataConservatively(trc, &thread->data);
         } else {
             JS_ASSERT(!thread->suspendCount);
             JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
         }
     }
 #else
-    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
+    MarkThreadDataConservatively(trc, &trc->runtime->threadData);
 #endif
 }
 
 JS_NEVER_INLINE void
 ConservativeGCThreadData::recordStackTop()
 {
     /* Update the native stack pointer if it points to a bigger stack. */
     jsuword dummy;
@@ -1817,17 +1824,17 @@ gc_root_traversal(JSTracer *trc, const R
     void *ptr;
     if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
         ptr = *reinterpret_cast<void **>(entry.key);
     } else {
         Value *vp = reinterpret_cast<Value *>(entry.key);
         ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
     }
 
-    if (ptr && !trc->context->runtime->gcCurrentCompartment) {
+    if (ptr && !trc->runtime->gcCurrentCompartment) {
         /*
          * Use conservative machinery to find if ptr is a valid GC thing.
          * We only do this during global GCs, to preserve the invariant
          * that mark callbacks are not in place during compartment GCs.
          */
         JSTracer checker;
         JS_TRACER_INIT(&checker, trc->context, EmptyMarkCallback);
         ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<jsuword>(ptr));
@@ -1836,190 +1843,191 @@ gc_root_traversal(JSTracer *trc, const R
 "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
 "invalid gcthing.  This is usually caused by a missing call to JS_RemoveRoot.\n"
 "The root's name is \"%s\".\n",
                     entry.value.name);
         }
         JS_ASSERT(test == CGCT_VALID);
     }
 #endif
-    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
+    const char *name = entry.value.name ? entry.value.name : "root";
     if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
-        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
+        MarkRootGCThing(trc, *reinterpret_cast<void **>(entry.key), name);
     else
-        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
+        MarkRoot(trc, *reinterpret_cast<Value *>(entry.key), name);
 }
 
 static void
 gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
 {
     JS_ASSERT(entry.value >= 1);
-    MarkGCThing(trc, entry.key, "locked object");
+    MarkRootGCThing(trc, entry.key, "locked object");
 }
 
 void
 js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
 {
-    MarkObject(trc, fp->scopeChain(), "scope chain");
+    MarkRoot(trc, &fp->scopeChain(), "scope chain");
     if (fp->isDummyFrame())
         return;
     if (fp->hasArgsObj())
-        MarkObject(trc, fp->argsObj(), "arguments");
-    MarkScript(trc, fp->script(), "script");
+        MarkRoot(trc, &fp->argsObj(), "arguments");
+    MarkRoot(trc, fp->script(), "script");
     fp->script()->compartment()->active = true;
-    MarkValue(trc, fp->returnValue(), "rval");
+    MarkRoot(trc, fp->returnValue(), "rval");
 }
 
 void
 AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
-    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
+    gc::MarkIdRange(trc, idArray->vector, idArray->vector + idArray->length,
+                    "JSAutoIdArray.idArray");
 }
 
 void
 AutoEnumStateRooter::trace(JSTracer *trc)
 {
-    gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
+    gc::MarkRoot(trc, obj, "js::AutoEnumStateRooter.obj");
 }
 
 inline void
 AutoGCRooter::trace(JSTracer *trc)
 {
     switch (tag) {
       case JSVAL:
-        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
+        MarkRoot(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
         return;
 
       case PARSER:
         static_cast<Parser *>(this)->trace(trc);
         return;
 
       case ENUMERATOR:
         static_cast<AutoEnumStateRooter *>(this)->trace(trc);
         return;
 
       case IDARRAY: {
         JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
-        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
+        MarkIdRange(trc, ida->vector, ida->vector + ida->length, "js::AutoIdArray.idArray");
         return;
       }
 
       case DESCRIPTORS: {
         PropDescArray &descriptors =
             static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
         for (size_t i = 0, len = descriptors.length(); i < len; i++) {
             PropDesc &desc = descriptors[i];
-            MarkValue(trc, desc.pd, "PropDesc::pd");
-            MarkValue(trc, desc.value, "PropDesc::value");
-            MarkValue(trc, desc.get, "PropDesc::get");
-            MarkValue(trc, desc.set, "PropDesc::set");
+            MarkRoot(trc, desc.pd, "PropDesc::pd");
+            MarkRoot(trc, desc.value, "PropDesc::value");
+            MarkRoot(trc, desc.get, "PropDesc::get");
+            MarkRoot(trc, desc.set, "PropDesc::set");
         }
         return;
       }
 
       case DESCRIPTOR : {
         PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
         if (desc.obj)
-            MarkObject(trc, *desc.obj, "Descriptor::obj");
-        MarkValue(trc, desc.value, "Descriptor::value");
+            MarkRoot(trc, desc.obj, "Descriptor::obj");
+        MarkRoot(trc, desc.value, "Descriptor::value");
         if ((desc.attrs & JSPROP_GETTER) && desc.getter)
-            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
+            MarkRoot(trc, CastAsObject(desc.getter), "Descriptor::get");
         if (desc.attrs & JSPROP_SETTER && desc.setter)
-            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
+            MarkRoot(trc, CastAsObject(desc.setter), "Descriptor::set");
         return;
       }
 
       case NAMESPACES: {
-        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
-        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
-                        "JSXMLArray.vector");
+        JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
+        MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
         array.cursors->trace(trc);
         return;
       }
 
       case XML:
         js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
         return;
 
       case OBJECT:
         if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
-            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
+            MarkRoot(trc, obj, "js::AutoObjectRooter.obj");
         return;
 
       case ID:
-        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
+        MarkRoot(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
         return;
 
       case VALVECTOR: {
         AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
-        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
+        MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
         return;
       }
 
       case STRING:
         if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
-            MarkString(trc, str, "js::AutoStringRooter.str");
+            MarkRoot(trc, str, "js::AutoStringRooter.str");
         return;
 
       case IDVECTOR: {
         AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
-        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
+        MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
         return;
       }
 
       case SHAPEVECTOR: {
         AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
-        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
+        MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
         return;
       }
 
       case OBJVECTOR: {
         AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
-        MarkObjectRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
+        MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
         return;
       }
 
       case VALARRAY: {
         AutoValueArray *array = static_cast<AutoValueArray *>(this);
-        MarkValueRange(trc, array->length(), array->start(), "js::AutoValueArray");
+        MarkRootRange(trc, array->length(), array->start(), "js::AutoValueArray");
         return;
       }
     }
 
     JS_ASSERT(tag >= 0);
-    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
+    MarkRootRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array,
+                  "js::AutoArrayRooter.array");
 }
 
 namespace js {
 
 JS_FRIEND_API(void)
 MarkContext(JSTracer *trc, JSContext *acx)
 {
     /* Stack frames and slots are traced by StackSpace::mark. */
 
     /* Mark other roots-by-definition in acx. */
     if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
-        MarkObject(trc, *acx->globalObject, "global object");
+        MarkRoot(trc, acx->globalObject, "global object");
     if (acx->isExceptionPending())
-        MarkValue(trc, acx->getPendingException(), "exception");
+        MarkRoot(trc, acx->getPendingException(), "exception");
 
     for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
         gcr->trace(trc);
 
     if (acx->sharpObjectMap.depth > 0)
         js_TraceSharpMap(trc, &acx->sharpObjectMap);
 
-    MarkValue(trc, acx->iterValue, "iterValue");
+    MarkRoot(trc, acx->iterValue, "iterValue");
 }
 
 JS_REQUIRES_STACK void
 MarkRuntime(JSTracer *trc)
 {
-    JSRuntime *rt = trc->context->runtime;
+    JSRuntime *rt = trc->runtime;
 
     if (rt->state != JSRTS_LANDING)
         MarkConservativeStackRoots(trc);
 
     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
         gc_root_traversal(trc, r.front());
 
     for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
@@ -2511,17 +2519,17 @@ EndMarkPhase(JSContext *cx, GCMarker *gc
      */
     while (WatchpointMap::markAllIteratively(gcmarker) ||
            WeakMapBase::markAllIteratively(gcmarker) ||
            Debugger::markAllIteratively(gcmarker))
     {
         gcmarker->drainMarkStack();
     }
 
-    rt->gcMarkingTracer = NULL;
+    rt->gcIncrementalTracer = NULL;
 
     rt->gcStats.endPhase(gcstats::PHASE_MARK);
 
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_MARK_END);
 
 #ifdef DEBUG
     /* Make sure that we didn't mark an object in another compartment */
@@ -2662,17 +2670,17 @@ MarkAndSweep(JSContext *cx, JSGCInvocati
     /* Reset malloc counter. */
     rt->resetGCMallocBytes();
 
     AutoUnlockGC unlock(rt);
 
     GCMarker gcmarker(cx);
     JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
     JS_ASSERT(gcmarker.getMarkColor() == BLACK);
-    rt->gcMarkingTracer = &gcmarker;
+    rt->gcIncrementalTracer = &gcmarker;
 
     BeginMarkPhase(cx, &gcmarker, gckind);
     gcmarker.drainMarkStack();
     EndMarkPhase(cx, &gcmarker, gckind);
     SweepPhase(cx, &gcmarker, gckind);
 }
 
 #ifdef JS_THREADSAFE
@@ -3036,17 +3044,17 @@ TraceRuntime(JSTracer *trc)
 
             AutoCopyFreeListToArenas copy(rt);
             RecordNativeStackTopForGC(trc->context);
             MarkRuntime(trc);
             return;
         }
     }
 #else
-    AutoCopyFreeListToArenas copy(trc->context->runtime);
+    AutoCopyFreeListToArenas copy(trc->runtime);
     RecordNativeStackTopForGC(trc->context);
 #endif
 
     /*
      * Calls from inside a normal GC or recursive calls are OK and do not
      * require session setup.
      */
     MarkRuntime(trc);
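
The changes in this file follow two themes: JSTracer now carries the runtime directly (trc->runtime rather than trc->context->runtime), and explicit roots are marked through the new MarkRoot*/MarkRootGCThing entry points, which take raw pointers because roots live outside the barriered heap. A hedged usage sketch (MyHolder and TraceMyRoots are hypothetical; the Mark* signatures are the ones declared in jsgcmark.h as modified below):

    // Heap-resident fields use the barriered overloads; stack or other
    // non-heap roots use the MarkRoot* variants.
    struct MyHolder {
        js::HeapValue cached;                       // barriered heap edge

        void trace(JSTracer *trc) {
            js::gc::MarkValue(trc, cached, "MyHolder::cached");
        }
    };

    static void
    TraceMyRoots(JSTracer *trc, const js::Value *stackVals, size_t n)
    {
        // Roots are not heap cells, so the unbarriered root-range
        // variant applies.
        js::gc::MarkRootRange(trc, n, stackVals, "my stack values");
    }
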
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -83,17 +83,17 @@ struct Shape;
 namespace gc {
 
 struct Arena;
 
 /*
  * This must be an upper bound, but we do not need the least upper bound, so
  * we just exclude non-background objects.
  */
-const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - (FINALIZE_OBJECT_LAST + 1) / 2;
+const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
 const size_t ArenaShift = 12;
 const size_t ArenaSize = size_t(1) << ArenaShift;
 const size_t ArenaMask = ArenaSize - 1;
 
 /*
  * This is the maximum number of arenas we allow in the FreeCommitted state
  * before we trigger a GC_SHRINK to release free arenas to the OS.
@@ -1322,17 +1322,20 @@ js_ReserveObjects(JSContext *cx, size_t 
 
 extern JSBool
 js_LockGCThingRT(JSRuntime *rt, void *thing);
 
 extern void
 js_UnlockGCThingRT(JSRuntime *rt, void *thing);
 
 extern JS_FRIEND_API(bool)
-IsAboutToBeFinalized(JSContext *cx, const void *thing);
+IsAboutToBeFinalized(JSContext *cx, const js::gc::Cell *thing);
+
+extern bool
+IsAboutToBeFinalized(JSContext *cx, const js::Value &value);
 
 extern JS_FRIEND_API(bool)
 js_GCThingIsMarked(void *thing, uintN color);
 
 extern void
 js_TraceStackFrame(JSTracer *trc, js::StackFrame *fp);
 
 namespace js {
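
On the bound rewrite visible here and in jsgcinlines.h below: the inclusive FINALIZE_OBJECT_LAST is replaced by an exclusive FINALIZE_OBJECT_LIMIT. Every rewritten comparison relies on the identity sketched below (a spot-check, assuming both enumerators remain in scope and FINALIZE_OBJECT0 == 0, which the ScanTypeObject change later in this patch also assumes):

    JS_STATIC_ASSERT(FINALIZE_OBJECT_LIMIT == FINALIZE_OBJECT_LAST + 1);
    // Hence:
    //   kind <= FINALIZE_OBJECT_LAST   <=>  kind <  FINALIZE_OBJECT_LIMIT
    //   next >  FINALIZE_OBJECT_LAST   <=>  next >= FINALIZE_OBJECT_LIMIT
    //   FINALIZE_OBJECT_LAST - FINALIZE_OBJECT0 + 1  ==  FINALIZE_OBJECT_LIMIT
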
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -91,17 +91,17 @@ GetGCObjectFixedSlotsKind(size_t numFixe
 
     JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
     return slotsToThingKind[numFixedSlots];
 }
 
 static inline bool
 IsBackgroundAllocKind(AllocKind kind)
 {
-    JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
+    JS_ASSERT(kind < FINALIZE_OBJECT_LIMIT);
     return kind % 2 == 1;
 }
 
 static inline AllocKind
 GetBackgroundAllocKind(AllocKind kind)
 {
     JS_ASSERT(!IsBackgroundAllocKind(kind));
     return (AllocKind) (kind + 1);
@@ -110,17 +110,17 @@ GetBackgroundAllocKind(AllocKind kind)
 /*
  * Try to get the next larger size for an object, keeping BACKGROUND
  * consistent.
  */
 static inline bool
 TryIncrementAllocKind(AllocKind *kindp)
 {
     size_t next = size_t(*kindp) + 2;
-    if (next > size_t(FINALIZE_OBJECT_LAST))
+    if (next >= size_t(FINALIZE_OBJECT_LIMIT))
         return false;
     *kindp = AllocKind(next);
     return true;
 }
 
 /* Get the number of fixed slots and initial capacity associated with a kind. */
 static inline size_t
 GetGCKindSlots(AllocKind thingKind)
@@ -346,24 +346,27 @@ NewGCThing(JSContext *cx, js::gc::AllocK
     JS_ASSERT(!cx->runtime->gcRunning);
     JS_ASSERT(!JS_THREAD_DATA(cx)->noGCOrAllocationCheck);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
         js::gc::RunDebugGC(cx);
 #endif
 
-    void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
-    return static_cast<T *>(t ? t : js::gc::ArenaLists::refillFreeList(cx, kind));
+    JSCompartment *comp = cx->compartment;
+    void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
+    if (!t)
+        t = js::gc::ArenaLists::refillFreeList(cx, kind);
+    return static_cast<T *>(t);
 }
 
 inline JSObject *
 js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
 {
-    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
+    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind < js::gc::FINALIZE_OBJECT_LIMIT);
     JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
     if (obj)
         obj->earlyInit(js::gc::GetGCKindSlots(kind));
     return obj;
 }
 
 inline JSString *
 js_NewGCString(JSContext *cx)
@@ -383,20 +386,18 @@ js_NewGCExternalString(JSContext *cx)
     return NewGCThing<JSExternalString>(cx, js::gc::FINALIZE_EXTERNAL_STRING,
                                         sizeof(JSExternalString));
 }
 
 inline JSFunction*
 js_NewGCFunction(JSContext *cx)
 {
     JSFunction *fun = NewGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION, sizeof(JSFunction));
-    if (fun) {
-        fun->capacity = JSObject::FUN_CLASS_RESERVED_SLOTS;
-        fun->lastProp = NULL; /* Stops fun from being scanned until initializated. */
-    }
+    if (fun)
+        fun->earlyInit(JSObject::FUN_CLASS_RESERVED_SLOTS);
     return fun;
 }
 
 inline JSScript *
 js_NewGCScript(JSContext *cx)
 {
     return NewGCThing<JSScript>(cx, js::gc::FINALIZE_SCRIPT, sizeof(JSScript));
 }
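
On the js_NewGCFunction change above: the hand-rolled field setup (capacity, lastProp = NULL) is replaced by the same earlyInit() used for plain objects, so every allocation path leaves the cell either fully initialized or recognizably newborn; MarkChildren, as modified later in this patch, returns early for isNewborn() cells. A hedged caller sketch (NewInitializedFunction is hypothetical):

    static JSFunction *
    NewInitializedFunction(JSContext *cx)
    {
        // earlyInit() leaves the cell in a newborn state that the marker
        // skips, so a GC between allocation and initialization is safe.
        JSFunction *fun = js_NewGCFunction(cx);
        if (!fun)
            return NULL;
        /* ... fill in the function's fields with barrier-free init()
           writes, then let it escape to GC-visible locations ... */
        return fun;
    }
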
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -105,31 +105,31 @@ static inline void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing);
 
 template<typename T>
 static inline void
 CheckMarkedThing(JSTracer *trc, T *thing)
 {
     JS_ASSERT(thing);
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
-    JS_ASSERT_IF(trc->context->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
+    JS_ASSERT_IF(trc->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
 
     JS_ASSERT(thing->compartment());
-    JS_ASSERT(thing->compartment()->rt == trc->context->runtime);
+    JS_ASSERT(thing->compartment()->rt == trc->runtime);
 }
 
 template<typename T>
 void
 Mark(JSTracer *trc, T *thing)
 {
     CheckMarkedThing(trc, thing);
 
-    JSRuntime *rt = trc->context->runtime;
+    JSRuntime *rt = trc->runtime;
 
     JS_OPT_ASSERT_IF(rt->gcCheckCompartment,
                      thing->compartment() == rt->gcCheckCompartment ||
                      thing->compartment() == rt->atomsCompartment);
 
     /*
      * Don't mark things outside a compartment if we are in a per-compartment
      * GC.
@@ -143,175 +143,208 @@ Mark(JSTracer *trc, T *thing)
 
 #ifdef DEBUG
     trc->debugPrinter = NULL;
     trc->debugPrintArg = NULL;
 #endif
 }
 
 void
-MarkString(JSTracer *trc, JSString *str)
+MarkStringUnbarriered(JSTracer *trc, JSString *str, const char *name)
 {
     JS_ASSERT(str);
+    JS_SET_TRACING_NAME(trc, name);
     Mark(trc, str);
 }
 
 void
-MarkString(JSTracer *trc, JSString *str, const char *name)
+MarkString(JSTracer *trc, const MarkablePtr<JSString> &str, const char *name)
 {
-    JS_ASSERT(str);
-    JS_SET_TRACING_NAME(trc, name);
-    MarkString(trc, str);
+    MarkStringUnbarriered(trc, str.value, name);
 }
 
 void
-MarkObject(JSTracer *trc, JSObject &obj, const char *name)
+MarkAtom(JSTracer *trc, JSAtom *atom)
 {
     JS_ASSERT(trc);
-    JS_ASSERT(&obj);
-    JS_SET_TRACING_NAME(trc, name);
-    Mark(trc, &obj);
+    JS_ASSERT(atom);
+    Mark(trc, atom);
+}
+
+void
+MarkAtom(JSTracer *trc, JSAtom *atom, const char *name)
+{
+    MarkStringUnbarriered(trc, atom, name);
 }
 
 void
-MarkCrossCompartmentObject(JSTracer *trc, JSObject &obj, const char *name)
+MarkObjectUnbarriered(JSTracer *trc, JSObject *obj, const char *name)
 {
-    JSRuntime *rt = trc->context->runtime;
-    if (rt->gcCurrentCompartment && rt->gcCurrentCompartment != obj.compartment())
-        return;
-
-    MarkObject(trc, obj, name);
+    JS_ASSERT(trc);
+    JS_ASSERT(obj);
+    JS_SET_TRACING_NAME(trc, name);
+    Mark(trc, obj);
 }
 
 void
-MarkObjectWithPrinter(JSTracer *trc, JSObject &obj, JSTraceNamePrinter printer,
-                      const void *arg, size_t index)
+MarkObjectWithPrinterUnbarriered(JSTracer *trc, JSObject *obj, JSTraceNamePrinter printer,
+                                 const void *arg, size_t index)
 {
     JS_ASSERT(trc);
-    JS_ASSERT(&obj);
+    JS_ASSERT(obj);
     JS_SET_TRACING_DETAILS(trc, printer, arg, index);
-    Mark(trc, &obj);
+    Mark(trc, obj);
 }
 
 void
-MarkScript(JSTracer *trc, JSScript *script, const char *name)
+MarkObject(JSTracer *trc, const MarkablePtr<JSObject> &obj, const char *name)
+{
+    MarkObjectUnbarriered(trc, obj.value, name);
+}
+
+void
+MarkScriptUnbarriered(JSTracer *trc, JSScript *script, const char *name)
 {
     JS_ASSERT(trc);
     JS_ASSERT(script);
     JS_SET_TRACING_NAME(trc, name);
     Mark(trc, script);
 }
 
 void
-MarkShape(JSTracer *trc, const Shape *shape, const char *name)
+MarkScript(JSTracer *trc, const MarkablePtr<JSScript> &script, const char *name)
+{
+    MarkScriptUnbarriered(trc, script.value, name);
+}
+
+void
+MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name)
 {
     JS_ASSERT(trc);
     JS_ASSERT(shape);
     JS_SET_TRACING_NAME(trc, name);
     Mark(trc, shape);
 }
 
 void
-MarkTypeObject(JSTracer *trc, types::TypeObject *type, const char *name)
+MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name)
+{
+    MarkShapeUnbarriered(trc, shape.value, name);
+}
+
+void
+MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name)
 {
     JS_ASSERT(trc);
     JS_ASSERT(type);
     JS_SET_TRACING_NAME(trc, name);
     if (type == &types::emptyTypeObject)
         return;
     Mark(trc, type);
 
     /*
      * Mark parts of a type object skipped by ScanTypeObject. ScanTypeObject is
      * only used for marking tracers; for tracers with a callback, if we
      * reenter through JS_TraceChildren then MarkChildren will *not* skip these
      * members, and we don't need to handle them here.
      */
     if (IS_GC_MARKING_TRACER(trc)) {
         if (type->singleton)
-            MarkObject(trc, *type->singleton, "type_singleton");
+            MarkObject(trc, type->singleton, "type_singleton");
         if (type->interpretedFunction)
-            MarkObject(trc, *type->interpretedFunction, "type_function");
+            MarkObject(trc, type->interpretedFunction, "type_function");
     }
 }
 
+void
+MarkTypeObject(JSTracer *trc, const MarkablePtr<types::TypeObject> &type, const char *name)
+{
+    MarkTypeObjectUnbarriered(trc, type.value, name);
+}
+
 #if JS_HAS_XML_SUPPORT
 void
-MarkXML(JSTracer *trc, JSXML *xml, const char *name)
+MarkXMLUnbarriered(JSTracer *trc, JSXML *xml, const char *name)
 {
     JS_ASSERT(trc);
     JS_ASSERT(xml);
     JS_SET_TRACING_NAME(trc, name);
     Mark(trc, xml);
 }
+
+void
+MarkXML(JSTracer *trc, const MarkablePtr<JSXML> &xml, const char *name)
+{
+    MarkXMLUnbarriered(trc, xml.value, name);
+}
 #endif
 
 void
 PushMarkStack(GCMarker *gcmarker, JSXML *thing)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushXML(thing);
 }
 
 void
 PushMarkStack(GCMarker *gcmarker, JSObject *thing)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
-    JS_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                 thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                 thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushType(thing);
 }
 
 void
 PushMarkStack(GCMarker *gcmarker, JSScript *thing)
 {
-    JS_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                 thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                 thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     /*
      * We mark scripts directly rather than pushing them on the mark stack:
      * they can refer to other scripts only indirectly (e.g. via nested
      * functions), so deep recursion cannot occur.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, const Shape *shape);
 
 void
 PushMarkStack(GCMarker *gcmarker, const Shape *thing)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
 
     /* We mark shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanShape(gcmarker, thing);
 }
 
 static void
 MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
@@ -320,88 +353,104 @@ MarkAtomRange(JSTracer *trc, size_t len,
         if (JSAtom *atom = vec[i]) {
             JS_SET_TRACING_INDEX(trc, name, i);
             Mark(trc, atom);
         }
     }
 }
 
 void
-MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
+MarkObjectRange(JSTracer *trc, size_t len, HeapPtr<JSObject> *vec, const char *name)
 {
     for (uint32 i = 0; i < len; i++) {
         if (JSObject *obj = vec[i]) {
             JS_SET_TRACING_INDEX(trc, name, i);
             Mark(trc, obj);
         }
     }
 }
 
 void
-MarkXMLRange(JSTracer *trc, size_t len, JSXML **vec, const char *name)
+MarkXMLRange(JSTracer *trc, size_t len, HeapPtr<JSXML> *vec, const char *name)
 {
     for (size_t i = 0; i < len; i++) {
         if (JSXML *xml = vec[i]) {
             JS_SET_TRACING_INDEX(trc, "xml_vector", i);
             Mark(trc, xml);
         }
     }
 }
 
 void
-MarkId(JSTracer *trc, jsid id)
+MarkIdUnbarriered(JSTracer *trc, jsid id)
 {
     if (JSID_IS_STRING(id))
         Mark(trc, JSID_TO_STRING(id));
     else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
         Mark(trc, JSID_TO_OBJECT(id));
 }
 
 void
-MarkId(JSTracer *trc, jsid id, const char *name)
+MarkIdUnbarriered(JSTracer *trc, jsid id, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkId(trc, id);
+    MarkIdUnbarriered(trc, id);
 }
 
 void
-MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name)
+MarkId(JSTracer *trc, const HeapId &id, const char *name)
+{
+    MarkIdUnbarriered(trc, id.get(), name);
+}
+
+void
+MarkIdRangeUnbarriered(JSTracer *trc, jsid *beg, jsid *end, const char *name)
 {
     for (jsid *idp = beg; idp != end; ++idp) {
         JS_SET_TRACING_INDEX(trc, name, (idp - beg));
-        MarkId(trc, *idp);
+        MarkIdUnbarriered(trc, *idp);
     }
 }
 
 void
-MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
+MarkIdRangeUnbarriered(JSTracer *trc, size_t len, jsid *vec, const char *name)
 {
-    MarkIdRange(trc, vec, vec + len, name);
+    MarkIdRangeUnbarriered(trc, vec, vec + len, name);
+}
+
+void
+MarkIdRange(JSTracer *trc, HeapId *beg, HeapId *end, const char *name)
+{
+    for (HeapId *idp = beg; idp != end; ++idp) {
+        JS_SET_TRACING_INDEX(trc, name, (idp - beg));
+        MarkIdUnbarriered(trc, *idp);
+    }
 }
 
 void
 MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
     JS_ASSERT(thing);
     JS_ASSERT(kind == GetGCThingTraceKind(thing));
     switch (kind) {
       case JSTRACE_OBJECT:
         Mark(trc, reinterpret_cast<JSObject *>(thing));
         break;
       case JSTRACE_STRING:
-        MarkString(trc, reinterpret_cast<JSString *>(thing));
+        Mark(trc, reinterpret_cast<JSString *>(thing));
         break;
       case JSTRACE_SCRIPT:
         Mark(trc, static_cast<JSScript *>(thing));
         break;
       case JSTRACE_SHAPE:
         Mark(trc, reinterpret_cast<Shape *>(thing));
         break;
       case JSTRACE_TYPE_OBJECT:
-        MarkTypeObject(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
+        MarkTypeObjectUnbarriered(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
         break;
 #if JS_HAS_XML_SUPPORT
       case JSTRACE_XML:
         Mark(trc, static_cast<JSXML *>(thing));
         break;
 #endif
     }
 }
@@ -412,65 +461,56 @@ MarkValueRaw(JSTracer *trc, const js::Va
 {
     if (v.isMarkable()) {
         JS_ASSERT(v.toGCThing());
         return MarkKind(trc, v.toGCThing(), v.gcKind());
     }
 }
 
 void
-MarkValue(JSTracer *trc, const js::Value &v, const char *name)
+MarkValueUnbarriered(JSTracer *trc, const js::Value &v, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkValueRaw(trc, v);
 }
 
 void
-MarkCrossCompartmentValue(JSTracer *trc, const js::Value &v, const char *name)
+MarkValue(JSTracer *trc, const js::HeapValue &v, const char *name)
+{
+    MarkValueUnbarriered(trc, v, name);
+}
+
+void
+MarkCrossCompartmentValue(JSTracer *trc, const js::HeapValue &v, const char *name)
 {
     if (v.isMarkable()) {
         js::gc::Cell *cell = (js::gc::Cell *)v.toGCThing();
-        JSRuntime *rt = trc->context->runtime;
+        JSRuntime *rt = trc->runtime;
         if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
             return;
 
         MarkValue(trc, v, name);
     }
 }
 
 void
-MarkValueRange(JSTracer *trc, const Value *beg, const Value *end, const char *name)
+MarkValueRange(JSTracer *trc, const HeapValue *beg, const HeapValue *end, const char *name)
 {
-    for (const Value *vp = beg; vp < end; ++vp) {
+    for (const HeapValue *vp = beg; vp < end; ++vp) {
         JS_SET_TRACING_INDEX(trc, name, vp - beg);
-        MarkValueRaw(trc, *vp);
+        MarkValueRaw(trc, vp->get());
     }
 }
 
 void
-MarkValueRange(JSTracer *trc, size_t len, const Value *vec, const char *name)
+MarkValueRange(JSTracer *trc, size_t len, const HeapValue *vec, const char *name)
 {
     MarkValueRange(trc, vec, vec + len, name);
 }
 
-void
-MarkShapeRange(JSTracer *trc, const Shape **beg, const Shape **end, const char *name)
-{
-    for (const Shape **sp = beg; sp < end; ++sp) {
-        JS_SET_TRACING_INDEX(trc, name, sp - beg);
-        MarkShape(trc, *sp, name);
-    }
-}
-
-void
-MarkShapeRange(JSTracer *trc, size_t len, const Shape **vec, const char *name)
-{
-    MarkShapeRange(trc, vec, vec + len, name);
-}
-
 /* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
 void
 MarkGCThing(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
     if (!thing)
         return;
 
     MarkKind(trc, thing, kind);
@@ -480,23 +520,16 @@ void
 MarkGCThing(JSTracer *trc, void *thing)
 {
     if (!thing)
         return;
     MarkKind(trc, thing, GetGCThingTraceKind(thing));
 }
 
 void
-MarkGCThing(JSTracer *trc, void *thing, const char *name)
-{
-    JS_SET_TRACING_NAME(trc, name);
-    MarkGCThing(trc, thing);
-}
-
-void
 MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index)
 {
     JS_SET_TRACING_INDEX(trc, name, index);
     MarkGCThing(trc, thing);
 }
 
 void
 Mark(JSTracer *trc, void *thing, JSGCTraceKind kind, const char *name)
@@ -504,47 +537,114 @@ Mark(JSTracer *trc, void *thing, JSGCTra
     JS_ASSERT(thing);
     JS_SET_TRACING_NAME(trc, name);
     MarkKind(trc, thing, kind);
 }
 
 void
 MarkRoot(JSTracer *trc, JSObject *thing, const char *name)
 {
-    MarkObject(trc, *thing, name);
+    MarkObjectUnbarriered(trc, thing, name);
 }
 
 void
 MarkRoot(JSTracer *trc, JSString *thing, const char *name)
 {
-    MarkString(trc, thing, name);
+    MarkStringUnbarriered(trc, thing, name);
 }
 
 void
 MarkRoot(JSTracer *trc, JSScript *thing, const char *name)
 {
-    MarkScript(trc, thing, name);
+    MarkScriptUnbarriered(trc, thing, name);
 }
 
 void
 MarkRoot(JSTracer *trc, const Shape *thing, const char *name)
 {
-    MarkShape(trc, thing, name);
+    MarkShapeUnbarriered(trc, thing, name);
 }
 
 void
 MarkRoot(JSTracer *trc, types::TypeObject *thing, const char *name)
 {
-    MarkTypeObject(trc, thing, name);
+    MarkTypeObjectUnbarriered(trc, thing, name);
 }
 
 void
 MarkRoot(JSTracer *trc, JSXML *thing, const char *name)
 {
-    MarkXML(trc, thing, name);
+    MarkXMLUnbarriered(trc, thing, name);
+}
+
+void
+MarkRoot(JSTracer *trc, const Value &v, const char *name)
+{
+    MarkValueUnbarriered(trc, v, name);
+}
+
+void
+MarkRoot(JSTracer *trc, jsid id, const char *name)
+{
+    JS_SET_TRACING_NAME(trc, name);
+    MarkIdUnbarriered(trc, id);
+}
+
+void
+MarkRootGCThing(JSTracer *trc, void *thing, const char *name)
+{
+    JS_SET_TRACING_NAME(trc, name);
+    MarkGCThing(trc, thing);
+}
+
+void
+MarkRootRange(JSTracer *trc, size_t len, const Shape **vec, const char *name)
+{
+    const Shape **end = vec + len;
+    for (const Shape **sp = vec; sp < end; ++sp) {
+        JS_SET_TRACING_INDEX(trc, name, sp - vec);
+        MarkShapeUnbarriered(trc, *sp, name);
+    }
+}
+
+void
+MarkRootRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
+{
+    JSObject **end = vec + len;
+    for (JSObject **sp = vec; sp < end; ++sp) {
+        JS_SET_TRACING_INDEX(trc, name, sp - vec);
+        MarkObjectUnbarriered(trc, *sp, name);
+    }
+}
+
+void
+MarkRootRange(JSTracer *trc, const Value *beg, const Value *end, const char *name)
+{
+    for (const Value *vp = beg; vp < end; ++vp) {
+        JS_SET_TRACING_INDEX(trc, name, vp - beg);
+        MarkValueRaw(trc, *vp);
+    }
+}
+
+void
+MarkRootRange(JSTracer *trc, size_t len, const Value *vec, const char *name)
+{
+    MarkRootRange(trc, vec, vec + len, name);
+}
+
+void
+MarkRootRange(JSTracer *trc, jsid *beg, jsid *end, const char *name)
+{
+    MarkIdRangeUnbarriered(trc, beg, end, name);
+}
+
+void
+MarkRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
+{
+    MarkIdRangeUnbarriered(trc, len, vec, name);
 }
 
 static void
 PrintPropertyId(char *buf, size_t bufsize, jsid propid, const char *label)
 {
     JS_ASSERT(!JSID_IS_VOID(propid));
     if (JSID_IS_ATOM(propid)) {
         size_t n = PutEscapedString(buf, bufsize, JSID_TO_ATOM(propid), 0);
@@ -587,17 +687,17 @@ ScanValue(GCMarker *gcmarker, const Valu
         }
     }
 }
 
 static void
 ScanShape(GCMarker *gcmarker, const Shape *shape)
 {
 restart:
-    JSRuntime *rt = gcmarker->context->runtime;
+    JSRuntime *rt = gcmarker->runtime;
     if (rt->gcRegenShapes)
         shape->shapeid = js_RegenerateShapeForGC(rt);
 
     if (JSID_IS_STRING(shape->propid))
         PushMarkStack(gcmarker, JSID_TO_STRING(shape->propid));
     else if (JS_UNLIKELY(JSID_IS_OBJECT(shape->propid)))
         PushMarkStack(gcmarker, JSID_TO_OBJECT(shape->propid));
 
@@ -612,19 +712,19 @@ restart:
     shape = shape->previous();
     if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
         goto restart;
 }
 
 static inline void
 ScanRope(GCMarker *gcmarker, JSRope *rope)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     rope->compartment() == gcmarker->context->runtime->gcCurrentCompartment
-                     || rope->compartment() == gcmarker->context->runtime->atomsCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     rope->compartment() == gcmarker->runtime->gcCurrentCompartment
+                     || rope->compartment() == gcmarker->runtime->atomsCompartment);
     JS_ASSERT(rope->isMarked());
 
     JSString *leftChild = NULL;
     do {
         JSString *rightChild = rope->rightChild();
 
         if (rightChild->isRope()) {
             if (rightChild->markIfUnmarked())
@@ -640,19 +740,19 @@ ScanRope(GCMarker *gcmarker, JSRope *rop
         }
         rope = &leftChild->asRope();
     } while (leftChild->markIfUnmarked());
 }
 
 static inline void
 PushMarkStack(GCMarker *gcmarker, JSString *str)
 {
-    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
-                     str->compartment() == gcmarker->context->runtime->gcCurrentCompartment
-                     || str->compartment() == gcmarker->context->runtime->atomsCompartment);
+    JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
+                     str->compartment() == gcmarker->runtime->gcCurrentCompartment
+                     || str->compartment() == gcmarker->runtime->atomsCompartment);
 
     if (str->isLinear()) {
         str->asLinear().mark(gcmarker);
     } else {
         JS_ASSERT(str->isRope());
         if (str->markIfUnmarked())
             ScanRope(gcmarker, &str->asRope());
     }
@@ -695,21 +795,21 @@ ScanObject(GCMarker *gcmarker, JSObject 
         if (obj->newType)
             PushMarkStack(gcmarker, obj->newType);
     }
 
     if (obj->isNative()) {
         js::Shape *shape = obj->lastProp;
         PushMarkStack(gcmarker, shape);
 
-        if (gcmarker->context->runtime->gcRegenShapes) {
+        if (gcmarker->runtime->gcRegenShapes) {
             /* We need to regenerate our shape if hasOwnShape(). */
             uint32 newShape = shape->shapeid;
             if (obj->hasOwnShape()) {
-                newShape = js_RegenerateShapeForGC(gcmarker->context->runtime);
+                newShape = js_RegenerateShapeForGC(gcmarker->runtime);
                 JS_ASSERT(newShape != shape->shapeid);
             }
             obj->objShape = newShape;
         }
 
         uint32 nslots = obj->slotSpan();
         JS_ASSERT(obj->slotSpan() <= obj->numSlots());
         if (nslots > LARGE_OBJECT_CHUNK_SIZE) {
@@ -756,18 +856,18 @@ MarkChildren(JSTracer *trc, JSObject *ob
     if (obj->isNewborn())
         return;
 
     MarkTypeObject(trc, obj->typeFromGC(), "type");
 
     /* Trace universal (ops-independent) members. */
     if (!obj->isDenseArray() && obj->newType)
         MarkTypeObject(trc, obj->newType, "new_type");
-    if (JSObject *parent = obj->getParent())
-        MarkObject(trc, *parent, "parent");
+    if (obj->parent)
+        MarkObject(trc, obj->parent, "parent");
 
     Class *clasp = obj->getClass();
     if (clasp->trace)
         clasp->trace(trc, obj);
 
     if (obj->isNative()) {
         MarkShape(trc, obj->lastProp, "shape");
 
@@ -778,36 +878,40 @@ MarkChildren(JSTracer *trc, JSObject *ob
             MarkValueRaw(trc, obj->nativeGetSlot(i));
         }
     }
 }
 
 void
 MarkChildren(JSTracer *trc, JSString *str)
 {
+    /*
+     * We use custom barriers in JSString, so it's safe to use unbarriered
+     * marking here.
+     */
     if (str->isDependent()) {
-        MarkString(trc, str->asDependent().base(), "base");
+        MarkStringUnbarriered(trc, str->asDependent().base(), "base");
     } else if (str->isRope()) {
         JSRope &rope = str->asRope();
-        MarkString(trc, rope.leftChild(), "left child");
-        MarkString(trc, rope.rightChild(), "right child");
+        MarkStringUnbarriered(trc, rope.leftChild(), "left child");
+        MarkStringUnbarriered(trc, rope.rightChild(), "right child");
     }
 }
 
 
 void
 MarkChildren(JSTracer *trc, JSScript *script)
 {
     CheckScript(script, NULL);
 
 #ifdef JS_CRASH_DIAGNOSTICS
-    JSRuntime *rt = trc->context->runtime;
+    JSRuntime *rt = trc->runtime;
     JS_OPT_ASSERT_IF(rt->gcCheckCompartment, script->compartment() == rt->gcCheckCompartment);
 #endif
-    
+
     MarkAtomRange(trc, script->natoms, script->atoms, "atoms");
 
     if (JSScript::isValidOffset(script->objectsOffset)) {
         JSObjectArray *objarray = script->objects();
         MarkObjectRange(trc, objarray->length, objarray->vector, "objects");
     }
 
     if (JSScript::isValidOffset(script->regexpsOffset)) {
@@ -815,18 +919,18 @@ MarkChildren(JSTracer *trc, JSScript *sc
         MarkObjectRange(trc, objarray->length, objarray->vector, "objects");
     }
 
     if (JSScript::isValidOffset(script->constOffset)) {
         JSConstArray *constarray = script->consts();
         MarkValueRange(trc, constarray->length, constarray->vector, "consts");
     }
 
-    if (!script->isCachedEval && script->u.globalObject)
-        MarkObject(trc, *script->u.globalObject, "object");
+    if (!script->isCachedEval && script->globalObject)
+        MarkObject(trc, script->globalObject, "object");
 
     if (IS_GC_MARKING_TRACER(trc) && script->filename)
         js_MarkScriptFilename(script->filename);
 
     script->bindings.trace(trc);
 
     if (script->types)
         script->types->trace(trc);
@@ -834,22 +938,25 @@ MarkChildren(JSTracer *trc, JSScript *sc
 
 void
 MarkChildren(JSTracer *trc, const Shape *shape)
 {
 restart:
     MarkId(trc, shape->propid, "propid");
 
     if (shape->hasGetterValue() && shape->getter())
-        MarkObjectWithPrinter(trc, *shape->getterObject(), PrintPropertyGetterOrSetter, shape, 0);
+        MarkObjectWithPrinterUnbarriered(trc, shape->getterObject(),
+                                         PrintPropertyGetterOrSetter, shape, 0);
     if (shape->hasSetterValue() && shape->setter())
-        MarkObjectWithPrinter(trc, *shape->setterObject(), PrintPropertyGetterOrSetter, shape, 1);
+        MarkObjectWithPrinterUnbarriered(trc, shape->setterObject(),
+                                         PrintPropertyGetterOrSetter, shape, 1);
 
     if (shape->isMethod())
-        MarkObjectWithPrinter(trc, shape->methodObject(), PrintPropertyMethod, shape, 0);
+        MarkObjectWithPrinterUnbarriered(trc, &shape->methodObject(),
+                                         PrintPropertyMethod, shape, 0);
 
     shape = shape->previous();
     if (shape)
         goto restart;
 }
 
 static void
 ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
@@ -859,18 +966,17 @@ ScanTypeObject(GCMarker *gcmarker, types
         for (unsigned i = 0; i < count; i++) {
             types::Property *prop = type->getProperty(i);
             if (prop && JSID_IS_STRING(prop->id))
                 PushMarkStack(gcmarker, JSID_TO_STRING(prop->id));
         }
     }
 
     if (type->emptyShapes) {
-        int count = FINALIZE_OBJECT_LAST - FINALIZE_OBJECT0 + 1;
-        for (int i = 0; i < count; i++) {
+        for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
             if (type->emptyShapes[i])
                 PushMarkStack(gcmarker, type->emptyShapes[i]);
         }
     }
 
     if (type->proto)
         PushMarkStack(gcmarker, type->proto);
 
@@ -896,52 +1002,51 @@ MarkChildren(JSTracer *trc, types::TypeO
         for (unsigned i = 0; i < count; i++) {
             types::Property *prop = type->getProperty(i);
             if (prop)
                 MarkId(trc, prop->id, "type_prop");
         }
     }
 
     if (type->emptyShapes) {
-        int count = FINALIZE_OBJECT_LAST - FINALIZE_OBJECT0 + 1;
-        for (int i = 0; i < count; i++) {
+        for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
             if (type->emptyShapes[i])
                 MarkShape(trc, type->emptyShapes[i], "empty_shape");
         }
     }
 
     if (type->proto)
-        MarkObject(trc, *type->proto, "type_proto");
+        MarkObject(trc, type->proto, "type_proto");
 
     if (type->singleton)
-        MarkObject(trc, *type->singleton, "type_singleton");
+        MarkObject(trc, type->singleton, "type_singleton");
 
     if (type->newScript) {
-        MarkObject(trc, *type->newScript->fun, "type_new_function");
+        MarkObject(trc, type->newScript->fun, "type_new_function");
         MarkShape(trc, type->newScript->shape, "type_new_shape");
     }
 
     if (type->interpretedFunction)
-        MarkObject(trc, *type->interpretedFunction, "type_function");
+        MarkObject(trc, type->interpretedFunction, "type_function");
 }
 
 #ifdef JS_HAS_XML_SUPPORT
 void
 MarkChildren(JSTracer *trc, JSXML *xml)
 {
     js_TraceXML(trc, xml);
 }
 #endif
 
 } /* namespace gc */
 
 void
 GCMarker::drainMarkStack()
 {
-    JSRuntime *rt = context->runtime;
+    JSRuntime *rt = runtime;
     rt->gcCheckCompartment = rt->gcCurrentCompartment;
 
     while (!isMarkStackEmpty()) {
         while (!ropeStack.isEmpty())
             ScanRope(this, ropeStack.pop());
 
         while (!objStack.isEmpty())
             ScanObject(this, objStack.pop());
@@ -965,20 +1070,18 @@ GCMarker::drainMarkStack()
              */
             markDelayedChildren();
         }
     }
 
     rt->gcCheckCompartment = NULL;
 }
 
-} /* namespace js */
-
-JS_PUBLIC_API(void)
-JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
+void
+TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkChildren(trc, static_cast<JSObject *>(thing));
         break;
 
       case JSTRACE_STRING:
         MarkChildren(trc, static_cast<JSString *>(thing));
@@ -999,34 +1102,43 @@ JS_TraceChildren(JSTracer *trc, void *th
 #if JS_HAS_XML_SUPPORT
       case JSTRACE_XML:
         MarkChildren(trc, static_cast<JSXML *>(thing));
         break;
 #endif
     }
 }
 
+void
+CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind)
+{
+    JS_ASSERT(thing);
+    MarkKind(trc, thing, kind);
+}
+
+} /* namespace js */
+
 inline void
 JSObject::scanSlots(GCMarker *gcmarker)
 {
     /*
      * Scan the fixed slots and the dynamic slots separately, to avoid
      * branching inside nativeGetSlot().
      */
     JS_ASSERT(slotSpan() <= numSlots());
     unsigned i, nslots = slotSpan();
     if (slots) {
         unsigned nfixed = numFixedSlots();
         if (nslots > nfixed) {
-            Value *vp = fixedSlots();
+            HeapValue *vp = fixedSlots();
             for (i = 0; i < nfixed; i++, vp++)
                 ScanValue(gcmarker, *vp);
             vp = slots;
             for (; i < nslots; i++, vp++)
                 ScanValue(gcmarker, *vp);
             return;
         }
     }
     JS_ASSERT(nslots <= numFixedSlots());
-    Value *vp = fixedSlots();
+    HeapValue *vp = fixedSlots();
     for (i = 0; i < nslots; i++, vp++)
         ScanValue(gcmarker, *vp);
 }
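
The renames in this file follow one schema: each barriered entry point unwraps its MarkablePtr (or HeapValue/HeapId) and forwards to an *Unbarriered twin that does the actual tracing, so callers holding raw pointers (roots, or holders with custom barriers such as JSString) have an explicit escape hatch. Schematically (Thing is a stand-in; the real overloads above cover JSObject, JSString, JSScript, Shape, TypeObject and JSXML):

    void
    MarkThing(JSTracer *trc, const MarkablePtr<Thing> &thing, const char *name)
    {
        MarkThingUnbarriered(trc, thing.value, name);
    }
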
--- a/js/src/jsgcmark.h
+++ b/js/src/jsgcmark.h
@@ -1,9 +1,9 @@
-/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
  * http://www.mozilla.org/MPL/
@@ -40,115 +40,103 @@
 #ifndef jsgcmark_h___
 #define jsgcmark_h___
 
 #include "jsgc.h"
 #include "jscntxt.h"
 #include "jscompartment.h"
 #include "jslock.h"
 
-
+#include "gc/Barrier.h"
 #include "js/TemplateLib.h"
 
 namespace js {
 namespace gc {
 
 void
-MarkString(JSTracer *trc, JSString *str);
+MarkAtom(JSTracer *trc, JSAtom *atom);
 
 void
-MarkString(JSTracer *trc, JSString *str, const char *name);
+MarkAtom(JSTracer *trc, JSAtom *atom, const char *name);
+
+void
+MarkObjectUnbarriered(JSTracer *trc, JSObject *obj, const char *name);
+
+void
+MarkObject(JSTracer *trc, const MarkablePtr<JSObject> &obj, const char *name);
 
 void
-MarkObject(JSTracer *trc, JSObject &obj, const char *name);
+MarkStringUnbarriered(JSTracer *trc, JSString *str, const char *name);
 
-/*
- * Mark an object that may be in a different compartment from the compartment
- * being GC'd. (Although it won't be marked if it's in the wrong compartment.)
- */
 void
-MarkCrossCompartmentObject(JSTracer *trc, JSObject &obj, const char *name);
+MarkString(JSTracer *trc, const MarkablePtr<JSString> &str, const char *name);
 
 void
-MarkObjectWithPrinter(JSTracer *trc, JSObject &obj, JSTraceNamePrinter printer,
-		      const void *arg, size_t index);
+MarkScriptUnbarriered(JSTracer *trc, JSScript *script, const char *name);
 
 void
-MarkScript(JSTracer *trc, JSScript *script, const char *name);
+MarkScript(JSTracer *trc, const MarkablePtr<JSScript> &script, const char *name);
+
+void
+MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name);
 
 void
-MarkShape(JSTracer *trc, const Shape *shape, const char *name);
+MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name);
+
+void
+MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name);
 
 void
-MarkTypeObject(JSTracer *trc, types::TypeObject *type, const char *name);
+MarkTypeObject(JSTracer *trc, const MarkablePtr<types::TypeObject> &type, const char *name);
 
 void
-MarkXML(JSTracer *trc, JSXML *xml, const char *name);
+MarkXMLUnbarriered(JSTracer *trc, JSXML *xml, const char *name);
 
 void
-MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name);
+MarkXML(JSTracer *trc, const MarkablePtr<JSXML> &xml, const char *name);
 
 void
-MarkXMLRange(JSTracer *trc, size_t len, JSXML **vec, const char *name);
+MarkObjectRange(JSTracer *trc, size_t len, HeapPtr<JSObject> *vec, const char *name);
 
 void
-MarkId(JSTracer *trc, jsid id);
+MarkXMLRange(JSTracer *trc, size_t len, HeapPtr<JSXML> *vec, const char *name);
 
 void
-MarkId(JSTracer *trc, jsid id, const char *name);
+MarkId(JSTracer *trc, const HeapId &id, const char *name);
 
 void
-MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name);
+MarkIdRange(JSTracer *trc, js::HeapId *beg, js::HeapId *end, const char *name);
 
 void
-MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
+MarkIdRangeUnbarriered(JSTracer *trc, size_t len, jsid *vec, const char *name);
+
+void
+MarkIdRangeUnbarriered(JSTracer *trc, jsid *beg, jsid *end, const char *name);
 
 void
 MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind);
 
 void
-MarkValueRaw(JSTracer *trc, const js::Value &v);
+MarkValueUnbarriered(JSTracer *trc, const js::Value &v, const char *name);
 
 void
-MarkValue(JSTracer *trc, const js::Value &v, const char *name);
+MarkValue(JSTracer *trc, const js::HeapValue &v, const char *name);
 
 /*
  * Mark a value that may be in a different compartment from the compartment
  * being GC'd. (Although it won't be marked if it's in the wrong compartment.)
  */
 void
-MarkCrossCompartmentValue(JSTracer *trc, const js::Value &v, const char *name);
-
-void
-MarkValueRange(JSTracer *trc, const Value *beg, const Value *end, const char *name);
-
-void
-MarkValueRange(JSTracer *trc, size_t len, const Value *vec, const char *name);
-
-void
-MarkShapeRange(JSTracer *trc, const Shape **beg, const Shape **end, const char *name);
+MarkCrossCompartmentValue(JSTracer *trc, const js::HeapValue &v, const char *name);
 
 void
-MarkShapeRange(JSTracer *trc, size_t len, const Shape **vec, const char *name);
-
-/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
-void
-MarkGCThing(JSTracer *trc, void *thing, uint32 kind);
+MarkValueRange(JSTracer *trc, const HeapValue *beg, const HeapValue *end, const char *name);
 
 void
-MarkGCThing(JSTracer *trc, void *thing);
-
-void
-MarkGCThing(JSTracer *trc, void *thing, const char *name);
-
-void
-MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index);
-
-void
-Mark(JSTracer *trc, void *thing, uint32 kind, const char *name);
+MarkValueRange(JSTracer *trc, size_t len, const HeapValue *vec, const char *name);
 
 void
 MarkRoot(JSTracer *trc, JSObject *thing, const char *name);
 
 void
 MarkRoot(JSTracer *trc, JSString *thing, const char *name);
 
 void
@@ -159,16 +147,43 @@ MarkRoot(JSTracer *trc, const Shape *thi
 
 void
 MarkRoot(JSTracer *trc, types::TypeObject *thing, const char *name);
 
 void
 MarkRoot(JSTracer *trc, JSXML *thing, const char *name);
 
 void
+MarkRoot(JSTracer *trc, const Value &v, const char *name);
+
+void
+MarkRoot(JSTracer *trc, jsid id, const char *name);
+
+void
+MarkRootGCThing(JSTracer *trc, void *thing, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, size_t len, const Shape **vec, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, size_t len, JSObject **vec, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, const Value *beg, const Value *end, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, size_t len, const Value *vec, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, jsid *beg, jsid *end, const char *name);
+
+void
+MarkRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
+
+void
 MarkChildren(JSTracer *trc, JSObject *obj);
 
 void
 MarkChildren(JSTracer *trc, JSString *str);
 
 void
 MarkChildren(JSTracer *trc, const Shape *shape);
 
@@ -179,43 +194,50 @@ void
 MarkChildren(JSTracer *trc, JSXML *xml);
 
 /*
  * Use function overloading to decide which function should be called based on
  * the type of the object. The static type is used at compile time to link to
  * the corresponding Mark/IsMarked function.
  */
 inline void
-Mark(JSTracer *trc, const js::Value &v, const char *name)
+Mark(JSTracer *trc, const js::HeapValue &v, const char *name)
 {
     MarkValue(trc, v, name);
 }
 
 inline void
-Mark(JSTracer *trc, JSObject *o, const char *name)
+Mark(JSTracer *trc, const MarkablePtr<JSObject> &o, const char *name)
 {
-    MarkObject(trc, *o, name);
+    MarkObject(trc, o, name);
 }
 
 inline bool
 IsMarked(JSContext *cx, const js::Value &v)
 {
     if (v.isMarkable())
-        return !IsAboutToBeFinalized(cx, v.toGCThing());
+        return !IsAboutToBeFinalized(cx, v);
     return true;
 }
 
 inline bool
 IsMarked(JSContext *cx, JSObject *o)
 {
     return !IsAboutToBeFinalized(cx, o);
 }
 
 inline bool
 IsMarked(JSContext *cx, Cell *cell)
 {
     return !IsAboutToBeFinalized(cx, cell);
 }
 
-}
-}
+} /* namespace gc */
+
+void
+TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind);
+
+void
+CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind);
+
+} /* namespace js */
 
 #endif
--- a/js/src/jsgcstats.cpp
+++ b/js/src/jsgcstats.cpp
@@ -42,16 +42,17 @@
 #include "jscntxt.h"
 #include "jsgcstats.h"
 #include "jsgc.h"
 #include "jsxml.h"
 #include "jsbuiltins.h"
 #include "jscompartment.h"
 
 #include "jsgcinlines.h"
+#include "jsobjinlines.h"
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 #define UL(x)       ((unsigned long)(x))
 #define PERCENT(x,y)  (100.0 * (double) (x) / (double) (y))
 
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -469,16 +469,43 @@ TypeSet::print(JSContext *cx)
         for (unsigned i = 0; i < count; i++) {
             TypeObjectKey *object = getObject(i);
             if (object)
                 printf(" %s", TypeString(Type::ObjectType(object)));
         }
     }
 }
 
+bool
+TypeSet::propertyNeedsBarrier(JSContext *cx, jsid id)
+{
+    id = MakeTypeId(cx, id);
+
+    if (unknownObject())
+        return true;
+
+    for (unsigned i = 0; i < getObjectCount(); i++) {
+        if (getSingleObject(i))
+            return true;
+
+        if (types::TypeObject *otype = getTypeObject(i)) {
+            if (otype->unknownProperties())
+                return true;
+
+            if (types::TypeSet *propTypes = otype->maybeGetProperty(cx, id)) {
+                if (propTypes->needsBarrier(cx))
+                    return true;
+            }
+        }
+    }
+
+    addFreeze(cx);
+    return false;
+}
+
 /////////////////////////////////////////////////////////////////////
 // TypeSet constraints
 /////////////////////////////////////////////////////////////////////
 
 /* Standard subset constraint, propagate all types from one set to another. */
 class TypeConstraintSubset : public TypeConstraint
 {
 public:
@@ -1914,16 +1941,27 @@ TypeSet::hasGlobalObject(JSContext *cx, 
     }
 
     add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeGlobal>(
               cx->compartment->types.compiledScript, global), false);
 
     return true;
 }
 
+bool
+TypeSet::needsBarrier(JSContext *cx)
+{
+    bool result = unknownObject()
+               || getObjectCount() > 0
+               || hasAnyFlag(TYPE_FLAG_STRING);
+    if (!result)
+        addFreeze(cx);
+    return result;
+}
+
 /////////////////////////////////////////////////////////////////////
 // TypeCompartment
 /////////////////////////////////////////////////////////////////////
 
 TypeObject types::emptyTypeObject(NULL, false, true);
 
 void
 TypeCompartment::init(JSContext *cx)
@@ -2541,17 +2579,17 @@ struct types::ObjectTableKey
     jsid *ids;
     uint32 nslots;
     uint32 nfixed;
     JSObject *proto;
 
     typedef JSObject * Lookup;
 
     static inline uint32 hash(JSObject *obj) {
-        return (uint32) (JSID_BITS(obj->lastProperty()->propid) ^
+        return (uint32) (JSID_BITS(obj->lastProperty()->propid.get()) ^
                          obj->slotSpan() ^ obj->numFixedSlots() ^
                          ((uint32)(size_t)obj->getProto() >> 2));
     }
 
     static inline bool match(const ObjectTableKey &v, JSObject *obj) {
         if (obj->slotSpan() != v.nslots ||
             obj->numFixedSlots() != v.nfixed ||
             obj->getProto() != v.proto) {
@@ -3079,18 +3117,20 @@ TypeObject::clearNewScript(JSContext *cx
                 }
             }
 
             if (!finished)
                 obj->rollbackProperties(cx, numProperties);
         }
     }
 
-    cx->free_(newScript);
+    /*
+     * NULL out newScript *before* freeing it: the pre-barrier triggered by
+     * the store reads through the old pointer, so it must still be alive.
+     */
+    TypeNewScript *savedNewScript = newScript;
     newScript = NULL;
+    cx->free_(savedNewScript);
 
     markStateChange(cx);
 }
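The ordering in clearNewScript matters because the store newScript = NULL runs
a pre-barrier that reads through the old pointer; freeing first would make the
barrier touch dead memory. A reduced sketch of the hazard (BarrieredPtr and
Payload are illustrative, not real engine types):

    #include <cstdio>

    struct Payload { int tag; };

    // Sketch: a barriered pointer whose assignment inspects the *old* value,
    // so the old value must still be alive when the store happens.
    struct BarrieredPtr {
        Payload *p = nullptr;
        BarrieredPtr &operator=(Payload *next) {
            if (p)
                std::printf("pre-barrier saw tag %d\n", p->tag); // reads old value
            p = next;
            return *this;
        }
    };

    int main() {
        BarrieredPtr ptr;
        ptr = new Payload{7};

        // Correct order: clear the barriered slot first, then free.
        Payload *saved = ptr.p;
        ptr = nullptr;   // pre-barrier dereferences |saved|, still alive here
        delete saved;    // freeing before the store would be a use-after-free
        return 0;
    }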
 
 void
 TypeObject::print(JSContext *cx)
 {
     printf("%s : %s",
@@ -4735,17 +4775,17 @@ CheckNewScriptProperties(JSContext *cx, 
         return;
     }
 
     type->newScript->fun = fun;
     type->newScript->allocKind = kind;
     type->newScript->shape = baseobj->lastProperty();
 
     type->newScript->initializerList = (TypeNewScript::Initializer *)
-        ((char *) type->newScript + sizeof(TypeNewScript));
+        ((char *) type->newScript.get() + sizeof(TypeNewScript));
     PodCopy(type->newScript->initializerList, initializerList.begin(), initializerList.length());
 }
 
 /////////////////////////////////////////////////////////////////////
 // Printing
 /////////////////////////////////////////////////////////////////////
 
 void
@@ -5196,18 +5236,18 @@ TypeScript::SetScope(JSContext *cx, JSSc
      * marked as reentrant.
      */
     if (!parent->ensureHasTypes(cx, parentFun))
         return false;
     if (!parent->types->hasScope()) {
         if (!SetScope(cx, parent, scope->getParent()))
             return false;
         parent->nesting()->activeCall = scope;
-        parent->nesting()->argArray = call.argArray();
-        parent->nesting()->varArray = call.varArray();
+        parent->nesting()->argArray = Valueify(call.argArray());
+        parent->nesting()->varArray = Valueify(call.varArray());
     }
 
     JS_ASSERT(!script->types->nesting);
 
     /* Construct and link nesting information for the two functions. */
 
     script->types->nesting = cx->new_<TypeScriptNesting>();
     if (!script->types->nesting)
@@ -5714,17 +5754,17 @@ JSObject::makeNewType(JSContext *cx, JSF
 {
     JS_ASSERT(!newType);
 
     TypeObject *type = cx->compartment->types.newTypeObject(cx, NULL,
                                                             JSProto_Object, this, unknown);
     if (!type)
         return;
 
-    newType = type;
+    newType.init(type);
     setDelegate();
 
     if (!cx->typeInferenceEnabled())
         return;
 
     AutoEnterTypeInference enter(cx);
 
     /*
@@ -6259,17 +6299,17 @@ JS_GetTypeInferenceObjectStats(void *obj
                     break;
             }
         }
     }
 
     if (object->emptyShapes) {
         size_t usable = usf(object->emptyShapes);
         stats->emptyShapes +=
-            usable ? usable : sizeof(EmptyShape*) * gc::FINALIZE_FUNCTION_AND_OBJECT_LAST;
+            usable ? usable : sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT;
     }
 
     /*
      * This counts memory that is in the temp pool but gets attributed
      * elsewhere.  See JS_GetTypeInferenceMemoryStats for more details.
      */
     size_t bytes = object->dynamicSize();
     stats->objects += bytes;
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -43,16 +43,17 @@
 #define jsinfer_h___
 
 #include "jsalloc.h"
 #include "jscell.h"
 #include "jsfriendapi.h"
 #include "jsprvtd.h"
 
 #include "ds/LifoAlloc.h"
+#include "gc/Barrier.h"
 #include "js/HashTable.h"
 
 namespace js {
 namespace types {
 
 /* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */
 struct TypeObjectKey {
     static intptr_t keyBits(TypeObjectKey *obj) { return (intptr_t) obj; }
@@ -503,16 +504,25 @@ class TypeSet
     /* Get the single value which can appear in this type set, otherwise NULL. */
     JSObject *getSingleton(JSContext *cx, bool freeze = true);
 
     /* Whether all objects in this set are parented to a particular global. */
     bool hasGlobalObject(JSContext *cx, JSObject *global);
 
     inline void clearObjects();
 
+    /*
+     * Whether a location with this TypeSet needs a write barrier (i.e., whether
+     * it can hold GC things). The type set is frozen if no barrier is needed.
+     */
+    bool needsBarrier(JSContext *cx);
+
+    /*
+     * Whether a store to the given property on any object in this set needs
+     * a write barrier. The relevant type sets are frozen if no barrier is
+     * needed.
+     */
+    bool propertyNeedsBarrier(JSContext *cx, jsid id);
+
   private:
     uint32 baseObjectCount() const {
         return (flags & TYPE_FLAG_OBJECT_COUNT_MASK) >> TYPE_FLAG_OBJECT_COUNT_SHIFT;
     }
     inline void setBaseObjectCount(uint32 count);
 };
 
 /*
@@ -616,28 +626,23 @@ struct TypeBarrier
           singleton(singleton), singletonId(singletonId)
     {}
 };
 
 /* Type information about a property. */
 struct Property
 {
     /* Identifier for this property, JSID_VOID for the aggregate integer index property. */
-    jsid id;
+    HeapId id;
 
     /* Possible types for this property, including types inherited from prototypes. */
     TypeSet types;
 
-    Property(jsid id)
-        : id(id)
-    {}
-
-    Property(const Property &o)
-        : id(o.id), types(o.types)
-    {}
+    inline Property(jsid id);
+    inline Property(const Property &o);
 
     static uint32 keyBits(jsid id) { return (uint32) JSID_BITS(id); }
     static jsid getKey(Property *p) { return p->id; }
 };
 
 /*
  * Information attached to a TypeObject if it is always constructed using 'new'
  * on a particular script. This is used to manage state related to the definite
@@ -645,26 +650,26 @@ struct Property
  * information which could change as the script executes (e.g. a scripted
  * setter is added to a prototype object), and we need to ensure both that the
  * appropriate type constraints are in place when necessary, and that we can
  * remove the definite property information and repair the JS stack if the
  * constraints are violated.
  */
 struct TypeNewScript
 {
-    JSFunction *fun;
+    HeapPtrFunction fun;
 
     /* Allocation kind to use for newly constructed objects. */
     gc::AllocKind allocKind;
 
     /*
      * Shape to use for newly constructed objects. Reflects all definite
      * properties the object will have.
      */
-    const Shape *shape;
+    HeapPtr<const Shape> shape;
 
     /*
      * Order in which properties become initialized. We need this in case a
      * scripted setter is added to one of the object's prototypes while it is
     * in the middle of being initialized, so we can walk the stack and fix up
     * any in-progress objects which were prematurely set with their final
     * shape. Initialization can traverse stack frames,
      * in which case FRAME_PUSH/FRAME_POP are used.
@@ -677,16 +682,19 @@ struct TypeNewScript
             DONE
         } kind;
         uint32 offset;
         Initializer(Kind kind, uint32 offset)
           : kind(kind), offset(offset)
         {}
     };
     Initializer *initializerList;
+
+    static inline void writeBarrierPre(TypeNewScript *newScript);
+    static inline void writeBarrierPost(TypeNewScript *newScript, void *addr);
 };
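The static writeBarrierPre/writeBarrierPost pair is the hook that lets a
smart-pointer wrapper barrier arbitrary types: the wrapper's assignment calls
T::writeBarrierPre on the outgoing value. A sketch of the dispatch
(HeapPtrSketch is hypothetical; the real wrapper lives in gc/Barrier.h):

    #include <cstdio>

    // Sketch: HeapPtr<T> defers to static hooks on T, so any struct becomes
    // barrier-aware just by providing writeBarrierPre/writeBarrierPost.
    template <class T>
    struct HeapPtrSketch {
        T *value = nullptr;

        HeapPtrSketch &operator=(T *next) {
            T::writeBarrierPre(value);        // old value may need marking
            value = next;
            T::writeBarrierPost(next, &value);
            return *this;
        }
    };

    struct Thing {
        static void writeBarrierPre(Thing *old) {
            if (old)
                std::puts("marking old Thing");
        }
        static void writeBarrierPost(Thing *, void *) {} // no-op under SATB
    };

    int main() {
        Thing a, b;
        HeapPtrSketch<Thing> p;
        p = &a;   // no old value: the pre-barrier is a no-op
        p = &b;   // prints "marking old Thing"
    }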
 
 /*
  * Lazy type objects overview.
  *
  * Type objects which represent at most one JS object are constructed lazily.
  * These include types for native functions, standard classes, scripted
  * functions defined at the top level of global/eval scripts, and in some
@@ -709,37 +717,37 @@ struct TypeNewScript
  * type object itself, the type object is also destroyed, and the JS object
  * reverts to having a lazy type.
  */
 
 /* Type information about an object accessed by a script. */
 struct TypeObject : gc::Cell
 {
     /* Prototype shared by objects using this type. */
-    JSObject *proto;
+    HeapPtrObject proto;
 
     /*
      * Whether there is a singleton JS object with this type. That JS object
      * must appear in type sets instead of this; we include the back reference
      * here to allow reverting the JS object to a lazy type.
      */
-    JSObject *singleton;
+    HeapPtrObject singleton;
 
     /* Lazily filled array of empty shapes for each size of objects with this type. */
-    js::EmptyShape **emptyShapes;
+    HeapPtr<EmptyShape> *emptyShapes;
 
     /* Flags for this object. */
     TypeObjectFlags flags;
 
     /*
      * If non-NULL, objects of this type have always been constructed using
      * 'new' on the specified script, which adds some number of properties to
      * the object in a definite order before the object escapes.
      */
-    TypeNewScript *newScript;
+    HeapPtr<TypeNewScript> newScript;
 
     /*
      * Estimate of the contribution of this object to the type sets it appears in.
      * This is the sum of the sizes of those sets at the point when the object
      * was added.
      *
      * When the contribution exceeds the CONTRIBUTION_LIMIT, any type sets the
      * object is added to are instead marked as unknown. If we get to this point
@@ -778,17 +786,17 @@ struct TypeObject : gc::Cell
      *
      * We establish these by using write barriers on calls to setProperty and
      * defineProperty which are on native properties, and by using the inference
      * analysis to determine the side effects of code which is JIT-compiled.
      */
     Property **propertySet;
 
     /* If this is an interpreted function, the function object. */
-    JSFunction *interpretedFunction;
+    HeapPtrFunction interpretedFunction;
 
     inline TypeObject(JSObject *proto, bool isFunction, bool unknown);
 
     bool isFunction() { return !!(flags & OBJECT_FLAG_FUNCTION); }
 
     bool hasAnyFlags(TypeObjectFlags flags) {
         JS_ASSERT((flags & OBJECT_FLAG_DYNAMIC_MASK) == flags);
         return !!(this->flags & flags);
@@ -863,16 +871,19 @@ struct TypeObject : gc::Cell
 
     /*
      * Type objects don't have explicit finalizers. Memory owned by a type
     * object pending deletion is released when weak references are swept
      * from all the compartment's type objects.
      */
     void finalize(JSContext *cx) {}
 
+    static inline void writeBarrierPre(TypeObject *type);
+    static inline void writeBarrierPost(TypeObject *type, void *addr);
+
   private:
     inline uint32 basePropertyCount() const;
     inline void setBasePropertyCount(uint32 count);
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(TypeObject, proto) == offsetof(js::shadow::TypeObject, proto));
     }
 };
@@ -979,18 +990,18 @@ struct TypeScriptNesting
     /*
      * If this is an outer function, pointers to the most recent activation's
      * arguments and variables arrays. These could be referring either to stack
      * values in activeCall's frame (if it has not finished yet) or to the
      * internal slots of activeCall (if the frame has finished). Pointers to
      * these fields can be embedded directly in JIT code (though remember to
      * use 'addDependency == true' when calling resolveNameAccess).
      */
-    Value *argArray;
-    Value *varArray;
+    const Value *argArray;
+    const Value *varArray;
 
     /* Number of frames for this function on the stack. */
     uint32 activeFrames;
 
     TypeScriptNesting() { PodZero(this); }
     ~TypeScriptNesting();
 };
 
@@ -1005,42 +1016,40 @@ void NestingEpilogue(StackFrame *fp);
 class TypeScript
 {
     friend struct ::JSScript;
 
     /* Analysis information for the script, cleared on each GC. */
     analyze::ScriptAnalysis *analysis;
 
     /* Function for the script, if it has one. */
-    JSFunction *function;
+    HeapPtrFunction function;
 
     /*
      * Information about the scope in which a script executes. This information
     * is not set until the script has executed at least once and SetScope has
     * been called; before that, 'global' is poisoned with GLOBAL_MISSING_SCOPE.
      */
     static const size_t GLOBAL_MISSING_SCOPE = 0x1;
 
     /* Global object for the script, if compileAndGo. */
-    js::GlobalObject *global;
+    HeapPtr<GlobalObject> global;
 
   public:
 
     /* Nesting state for outer or inner function scripts. */
     TypeScriptNesting *nesting;
 
     /* Dynamic types generated at points within this script. */
     TypeResult *dynamicList;
 
-    TypeScript(JSFunction *fun) {
-        this->function = fun;
-        this->global = (js::GlobalObject *) GLOBAL_MISSING_SCOPE;
-    }
+    inline TypeScript(JSFunction *fun);
+    inline ~TypeScript();
 
-    bool hasScope() { return size_t(global) != GLOBAL_MISSING_SCOPE; }
+    bool hasScope() { return size_t(global.get()) != GLOBAL_MISSING_SCOPE; }
 
    /* Array of type sets for variables and JOF_TYPESET ops. */
     TypeSet *typeArray() { return (TypeSet *) (jsuword(this) + sizeof(TypeScript)); }
 
     static inline unsigned NumTypeSets(JSScript *script);
 
     static bool SetScope(JSContext *cx, JSScript *script, JSObject *scope);
 
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -66,17 +66,17 @@ Type::ObjectType(JSObject *obj)
         return Type((jsuword) obj | 1);
     return Type((jsuword) obj->type());
 }
 
 /* static */ inline Type
 Type::ObjectType(TypeObject *obj)
 {
     if (obj->singleton)
-        return Type((jsuword) obj->singleton | 1);
+        return Type((jsuword) obj->singleton.get() | 1);
     return Type((jsuword) obj);
 }
 
 /* static */ inline Type
 Type::ObjectType(TypeObjectKey *obj)
 {
     return Type((jsuword) obj);
 }
@@ -454,16 +454,28 @@ UseNewTypeAtEntry(JSContext *cx, StackFr
            fp->prev() && fp->prev()->isScriptFrame() &&
            UseNewType(cx, fp->prev()->script(), fp->prev()->pcQuadratic(cx->stack, fp));
 }
 
 /////////////////////////////////////////////////////////////////////
 // Script interface functions
 /////////////////////////////////////////////////////////////////////
 
+inline
+TypeScript::TypeScript(JSFunction *fun)
+  : function(fun),
+    global((js::GlobalObject *) GLOBAL_MISSING_SCOPE)
+{
+}
+
+inline
+TypeScript::~TypeScript()
+{
+}
+
 /* static */ inline unsigned
 TypeScript::NumTypeSets(JSScript *script)
 {
     return script->nTypeSets + analyze::TotalSlots(script);
 }
 
 /* static */ inline TypeSet *
 TypeScript::ReturnTypes(JSScript *script)
@@ -684,19 +696,19 @@ TypeScript::SetArgument(JSContext *cx, J
         SetArgument(cx, script, arg, type);
     }
 }
 
 void
 TypeScript::trace(JSTracer *trc)
 {
     if (function)
-        gc::MarkObject(trc, *function, "script_fun");
+        gc::MarkObject(trc, function, "script_fun");
     if (hasScope() && global)
-        gc::MarkObject(trc, *global, "script_global");
+        gc::MarkObject(trc, global, "script_global");
 
     /* Note: nesting does not keep anything alive. */
 }
 
 /////////////////////////////////////////////////////////////////////
 // TypeCompartment
 /////////////////////////////////////////////////////////////////////
 
@@ -1247,16 +1259,66 @@ TypeObject::getGlobal()
 {
     if (singleton)
         return singleton->getGlobal();
     if (interpretedFunction && interpretedFunction->script()->compileAndGo)
         return interpretedFunction->getGlobal();
     return NULL;
 }
 
+inline void
+TypeObject::writeBarrierPre(TypeObject *type)
+{
+#ifdef JSGC_INCREMENTAL
+    if (!type || type == &js::types::emptyTypeObject)
+        return;
+
+    JSCompartment *comp = type->compartment();
+    if (comp->needsBarrier())
+        MarkTypeObjectUnbarriered(comp->barrierTracer(), type, "write barrier");
+#endif
+}
+
+inline void
+TypeObject::writeBarrierPost(TypeObject *type, void *addr)
+{
+}
+
+inline void
+TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
+{
+#ifdef JSGC_INCREMENTAL
+    if (!newScript)
+        return;
+
+    JSCompartment *comp = newScript->fun->compartment();
+    if (comp->needsBarrier()) {
+        MarkObjectUnbarriered(comp->barrierTracer(), newScript->fun, "write barrier");
+        MarkShapeUnbarriered(comp->barrierTracer(), newScript->shape, "write barrier");
+    }
+#endif
+}
+
+inline void
+TypeNewScript::writeBarrierPost(TypeNewScript *newScript, void *addr)
+{
+}
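Both hooks above share one shape: a cheap flag check, then an out-of-line mark
only while an incremental mark is in progress, so the common-case cost is a
load and a branch. A sketch of that fast path (all names hypothetical):

    #include <cstdio>

    struct CompartmentSketch {
        bool barriersNeeded = false;          // true only during incremental marking
        void markForBarrier(void *thing) {    // out-of-line slow path
            std::printf("greying %p\n", thing);
        }
    };

    static void writeBarrierPreSketch(CompartmentSketch *comp, void *oldThing) {
        if (!oldThing)
            return;
        if (comp->barriersNeeded)             // fast check, usually false
            comp->markForBarrier(oldThing);
    }

    int main() {
        CompartmentSketch comp;
        int dummy;
        writeBarrierPreSketch(&comp, &dummy); // marking off: nothing happens
        comp.barriersNeeded = true;
        writeBarrierPreSketch(&comp, &dummy); // marking on: the value is greyed
    }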
+
+inline
+Property::Property(jsid id)
+  : id(id)
+{
+}
+
+inline
+Property::Property(const Property &o)
+  : id(o.id.get()), types(o.types)
+{
+}
+
 } } /* namespace js::types */
 
 inline bool
 JSScript::ensureHasTypes(JSContext *cx, JSFunction *fun)
 {
     return types || makeTypes(cx, fun);
 }
 
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -447,18 +447,18 @@ js::BoxNonStrictThis(JSContext *cx, cons
 #if JS_HAS_NO_SUCH_METHOD
 
 const uint32 JSSLOT_FOUND_FUNCTION  = 0;
 const uint32 JSSLOT_SAVED_ID        = 1;
 
 static void
 no_such_method_trace(JSTracer *trc, JSObject *obj)
 {
-    gc::MarkValue(trc, obj->getSlot(JSSLOT_FOUND_FUNCTION), "found function");
-    gc::MarkValue(trc, obj->getSlot(JSSLOT_SAVED_ID), "saved id");
+    gc::MarkValue(trc, obj->getSlotRef(JSSLOT_FOUND_FUNCTION), "found function");
+    gc::MarkValue(trc, obj->getSlotRef(JSSLOT_SAVED_ID), "saved id");
 }
 
 Class js_NoSuchMethodClass = {
     "NoSuchMethod",
     JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -119,28 +119,28 @@ Class js::IteratorClass = {
     }
 };
 
 void
 NativeIterator::mark(JSTracer *trc)
 {
     MarkIdRange(trc, begin(), end(), "props");
     if (obj)
-        MarkObject(trc, *obj, "obj");
+        MarkObject(trc, obj, "obj");
 }
 
 static void
 iterator_finalize(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(obj->isIterator());
 
     NativeIterator *ni = obj->getNativeIterator();
     if (ni) {
+        /* Clear the private slot before freeing so nothing can observe the dead iterator. */
+        obj->setPrivate(NULL);
         cx->free_(ni);
-        obj->setNativeIterator(NULL);
     }
 }
 
 static void
 iterator_trace(JSTracer *trc, JSObject *obj)
 {
     NativeIterator *ni = obj->getNativeIterator();
 
@@ -332,17 +332,19 @@ js::VectorToIdArray(JSContext *cx, AutoI
     size_t len = props.length();
     size_t idsz = len * sizeof(jsid);
     size_t sz = (sizeof(JSIdArray) - sizeof(jsid)) + idsz;
     JSIdArray *ida = static_cast<JSIdArray *>(cx->malloc_(sz));
     if (!ida)
         return false;
 
     ida->length = static_cast<jsint>(len);
-    memcpy(ida->vector, props.begin(), idsz);
+    jsid *v = props.begin();
+    for (jsint i = 0; i < ida->length; i++)
+        ida->vector[i].init(v[i]);
     *idap = ida;
     return true;
 }
 
 JS_FRIEND_API(bool)
 js::GetPropertyNames(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector *props)
 {
     return Snapshot(cx, obj, flags & (JSITER_OWNONLY | JSITER_HIDDEN), props);
@@ -436,27 +438,29 @@ NewIteratorObject(JSContext *cx, uintN f
 NativeIterator *
 NativeIterator::allocateIterator(JSContext *cx, uint32 slength, const AutoIdVector &props)
 {
     size_t plength = props.length();
     NativeIterator *ni = (NativeIterator *)
         cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(uint32));
     if (!ni)
         return NULL;
-    ni->props_array = ni->props_cursor = (jsid *) (ni + 1);
-    ni->props_end = (jsid *)ni->props_array + plength;
-    if (plength)
-        memcpy(ni->props_array, props.begin(), plength * sizeof(jsid));
+    ni->props_array = ni->props_cursor = (HeapId *) (ni + 1);
+    ni->props_end = ni->props_array + plength;
+    if (plength) {
+        for (size_t i = 0; i < plength; i++)
+            ni->props_array[i].init(props[i]);
+    }
     return ni;
 }
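init() exists because these slots live in freshly malloc'd memory: running the
assignment operator there would feed garbage "old values" to the pre-barrier.
A sketch of the init-versus-assign distinction (HeapSlotSketch is a stand-in
for the real HeapId/HeapValue wrappers):

    #include <cstdio>
    #include <cstdlib>

    // Sketch: init() stores without a barrier (fresh memory has no old value);
    // operator= barriers the outgoing value (live memory does).
    struct HeapSlotSketch {
        int value;                            // stand-in for a GC pointer

        void init(int v) { value = v; }       // fresh memory: plain store
        HeapSlotSketch &operator=(int v) {
            std::printf("pre-barrier on old value %d\n", value);
            value = v;
            return *this;
        }
    };

    int main() {
        // malloc'd storage: |value| holds uninitialized garbage here.
        void *raw = std::malloc(sizeof(HeapSlotSketch));
        HeapSlotSketch *slot = static_cast<HeapSlotSketch *>(raw);
        slot->init(1);  // must NOT run operator=: it would read the garbage
        *slot = 2;      // the slot is live now, so the barrier is required
        std::free(raw);
    }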
 
 inline void
 NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
 {
-    this->obj = obj;
+    this->obj.init(obj);
     this->flags = flags;
     this->shapes_array = (uint32 *) this->props_end;
     this->shapes_length = slength;
     this->shapes_key = key;
 }
 
 static inline void
 RegisterEnumerator(JSContext *cx, JSObject *iterobj, NativeIterator *ni)
@@ -848,19 +852,19 @@ SuppressDeletedPropertyHelper(JSContext 
 {
     JSObject *iterobj = cx->enumerators;
     while (iterobj) {
       again:
         NativeIterator *ni = iterobj->getNativeIterator();
        /* This only works for identified suppressed keys, not values. */
         if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
             /* Check whether id is still to come. */
-            jsid *props_cursor = ni->current();
-            jsid *props_end = ni->end();
-            for (jsid *idp = props_cursor; idp < props_end; ++idp) {
+            HeapId *props_cursor = ni->current();
+            HeapId *props_end = ni->end();
+            for (HeapId *idp = props_cursor; idp < props_end; ++idp) {
                 if (predicate(*idp)) {
                     /*
                      * Check whether another property along the prototype chain
                      * became visible as a result of this deletion.
                      */
                     if (obj->getProto()) {
                         AutoObjectRooter proto(cx, obj->getProto());
                         AutoObjectRooter obj2(cx);
@@ -889,17 +893,18 @@ SuppressDeletedPropertyHelper(JSContext 
                     /*
                      * No property along the prototype chain stepped in to take the
                      * property's place, so go ahead and delete id from the list.
                      * If it is the next property to be enumerated, just skip it.
                      */
                     if (idp == props_cursor) {
                         ni->incCursor();
                     } else {
-                        memmove(idp, idp + 1, (props_end - (idp + 1)) * sizeof(jsid));
+                        /* Shift by assignment, not memmove, so each HeapId store runs its pre-barrier. */
+                        for (HeapId *p = idp; p + 1 != props_end; p++)
+                            *p = *(p + 1);
                         ni->props_end = ni->end() - 1;
                     }
 
                     /* Don't reuse modified native iterators. */
                     ni->flags |= JSITER_UNREUSABLE;
 
                     if (predicate.matchesAtMostOne())
                         break;
@@ -1105,42 +1110,62 @@ generator_finalize(JSContext *cx, JSObje
      */
     JS_ASSERT(gen->state == JSGEN_NEWBORN ||
               gen->state == JSGEN_CLOSED ||
               gen->state == JSGEN_OPEN);
     cx->free_(gen);
 }
 
 static void
+MarkGenerator(JSTracer *trc, JSGenerator *gen)
+{
+    StackFrame *fp = gen->floatingFrame();
+
+    /*
+     * MarkGenerator should only be called when regs is based on the floating frame.
+     * See calls to RebaseRegsFromTo.
+     */
+    JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots());
+
+    /*
+     * Currently, generators are not mjitted. Still, (overflow) args can be
+     * pushed by the mjit and need to be conservatively marked. Technically, the
+     * formal args and generator slots are safe for exact marking, but since the
+     * plan is to eventually mjit generators, it makes sense to future-proof
+     * this code and save someone an hour later.
+     */
+    MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
+    js_TraceStackFrame(trc, fp);
+    MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
+}
+
+static void
+GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
+{
+    JSCompartment *comp = cx->compartment;
+    if (comp->needsBarrier())
+        MarkGenerator(comp->barrierTracer(), gen);
+}
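Factoring the marking into MarkGenerator lets one routine serve both the
normal GC trace hook and the barrier tracer; only the tracer passed in
differs. A sketch of the sharing (all types hypothetical):

    #include <cstdio>

    struct TracerSketch { const char *who; };

    struct GeneratorSketch {
        int state = 42;
        void mark(TracerSketch *trc) {        // the one shared mark routine
            std::printf("%s marks state %d\n", trc->who, state);
        }
    };

    static void traceHook(TracerSketch *gcTracer, GeneratorSketch *gen) {
        gen->mark(gcTracer);                  // ordinary GC path
    }

    static void writeBarrierPre(TracerSketch *barrierTracer, GeneratorSketch *gen) {
        gen->mark(barrierTracer);             // snapshot path, same code
    }

    int main() {
        GeneratorSketch gen;
        TracerSketch gc{"gc"}, barrier{"barrier"};
        traceHook(&gc, &gen);
        writeBarrierPre(&barrier, &gen);
    }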
+
+static void
 generator_trace(JSTracer *trc, JSObject *obj)
 {
     JSGenerator *gen = (JSGenerator *) obj->getPrivate();
     if (!gen)
         return;
 
     /*
      * Do not mark if the generator is running; the contents may be trash and
      * will be replaced when the generator stops.
      */
     if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
         return;
 
-    StackFrame *fp = gen->floatingFrame();
-    JS_ASSERT(gen->liveFrame() == fp);
-
-    /*
-     * Currently, generators are not mjitted. Still, (overflow) args can be
-     * pushed by the mjit and need to be conservatively marked. Technically, the
-     * formal args and generator slots are safe for exact marking, but since the
-     * plan is to eventually mjit generators, it makes sense to future-proof
-     * this code and save someone an hour later.
-     */
-    MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
-    js_TraceStackFrame(trc, fp);
-    MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
+    JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
+    MarkGenerator(trc, gen);
 }
 
 Class js::GeneratorClass = {
     "Generator",
     JSCLASS_HAS_PRIVATE,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
@@ -1204,17 +1229,17 @@ js_NewGenerator(JSContext *cx)
     if (!gen)
         return NULL;
 
     /* Cut up floatingStack space. */
     Value *genvp = gen->floatingStack;
     StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
 
     /* Initialize JSGenerator. */
-    gen->obj = obj;
+    gen->obj.init(obj);
     gen->state = JSGEN_NEWBORN;
     gen->enumerators = NULL;
     gen->floating = genfp;
 
     /* Copy from the stack to the generator's floating frame. */
     gen->regs.rebaseFromTo(stackRegs, *genfp);
     genfp->stealFrameAndSlots(genvp, stackfp, stackvp, stackRegs.sp);
     genfp->initFloatingGenerator();
@@ -1253,16 +1278,29 @@ SendToGenerator(JSContext *cx, JSGenerat
                             JS_GetFunctionId(gen->floatingFrame()->fun()));
         return JS_FALSE;
     }
 
     /* Check for OOM errors here, where we can fail easily. */
     if (!cx->ensureGeneratorStackSpace())
         return JS_FALSE;
 
+    /*
+     * A write barrier is needed here because the generator's stack is about
+     * to be updated and is not barriered in any other way. The barrier must
+     * run before gen->state changes, since the state determines how the
+     * generator is traced.
+     *
+     * We could optimize this by setting a bit on the generator to signify
+     * that it has been marked. If this bit has already been set, there is no
+     * need to mark again. The bit would have to be reset before the next GC,
+     * or else some kind of epoch scheme would have to be used.
+     */
+    GeneratorWriteBarrierPre(cx, gen);
+
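The pattern generalizes: when a structure is about to be rewritten wholesale
(resuming the generator rewrites its floating frame), snapshot-at-the-beginning
requires greying its current contents before any mutation, and before any state
change that would alter how it is traced. A sketch of the discipline:

    #include <cstdio>
    #include <vector>

    // Sketch: SATB for bulk mutation. Grey everything the structure currently
    // references *before* overwriting it, so the heap snapshot taken at the
    // start of marking stays reachable.
    struct BulkBuffer {
        std::vector<int> refs;                // stand-ins for GC pointers

        void preBarrier(bool marking) {
            if (!marking)
                return;
            for (int r : refs)
                std::printf("greying ref %d\n", r);
        }

        void overwriteAll(bool marking) {
            preBarrier(marking);              // must precede the stores
            for (int &r : refs)
                r = -r;                       // the bulk mutation itself
        }
    };

    int main() {
        BulkBuffer buf{{1, 2, 3}};
        buf.overwriteAll(true);
    }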
     JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
     switch (op) {
       case JSGENOP_NEXT:
       case JSGENOP_SEND:
         if (gen->state == JSGEN_OPEN) {
             /*
              * Store the argument to send as the result of the yield
              * expression.
--- a/js/src/jsiter.h
+++ b/js/src/jsiter.h
@@ -43,53 +43,54 @@
 /*
  * JavaScript iterators.
  */
 #include "jscntxt.h"
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsversion.h"
 
+#include "gc/Barrier.h"
 #include "vm/Stack.h"
 
 /*
  * For cacheable native iterators, whether the iterator is currently active.
  * Not serialized by XDR.
  */
 #define JSITER_ACTIVE       0x1000
 #define JSITER_UNREUSABLE   0x2000
 
 namespace js {
 
 struct NativeIterator {
-    JSObject  *obj;
-    jsid      *props_array;
-    jsid      *props_cursor;
-    jsid      *props_end;
+    HeapPtrObject  obj;
+    HeapId    *props_array;
+    HeapId    *props_cursor;
+    HeapId    *props_end;
     uint32    *shapes_array;
     uint32    shapes_length;
     uint32    shapes_key;
     uint32    flags;
     JSObject  *next;  /* Forms cx->enumerators list, garbage otherwise. */
 
     bool isKeyIter() const { return (flags & JSITER_FOREACH) == 0; }
 
-    inline jsid *begin() const {
+    inline HeapId *begin() const {
         return props_array;
     }
 
-    inline jsid *end() const {
+    inline HeapId *end() const {
         return props_end;
     }
 
     size_t numKeys() const {
         return end() - begin();
     }
 
-    jsid *current() const {
+    HeapId *current() const {
         JS_ASSERT(props_cursor < props_end);
         return props_cursor;
     }
 
     void incCursor() {
         props_cursor = props_cursor + 1;
     }
 
@@ -165,17 +166,17 @@ typedef enum JSGeneratorState {
     JSGEN_NEWBORN,  /* not yet started */
     JSGEN_OPEN,     /* started by a .next() or .send(undefined) call */
     JSGEN_RUNNING,  /* currently executing via .next(), etc., call */
     JSGEN_CLOSING,  /* close method is doing asynchronous return */
     JSGEN_CLOSED    /* closed, cannot be started or closed again */
 } JSGeneratorState;
 
 struct JSGenerator {
-    JSObject            *obj;
+    js::HeapPtrObject   obj;
     JSGeneratorState    state;
     js::FrameRegs       regs;
     JSObject            *enumerators;
     js::StackFrame      *floating;
     js::Value           floatingStack[1];
 
     js::StackFrame *floatingFrame() {
         return floating;
--- a/js/src/jslock.cpp
+++ b/js/src/jslock.cpp
@@ -56,16 +56,18 @@
 #include "jsutil.h"
 #include "jsstdint.h"
 #include "jscntxt.h"
 #include "jsgc.h"
 #include "jslock.h"
 #include "jsscope.h"
 #include "jsstr.h"
 
+#include "jsscopeinlines.h"
+
 using namespace js;
 
 #define ReadWord(W) (W)
 
 #if !defined(__GNUC__)
 # define __asm__ asm
 # define __volatile__ volatile
 #endif
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -458,17 +458,17 @@ js_LeaveSharpObject(JSContext *cx, JSIdA
             *idap = NULL;
         }
     }
 }
 
 static intN
 gc_sharp_table_entry_marker(JSHashEntry *he, intN i, void *arg)
 {
-    MarkObject((JSTracer *)arg, *(JSObject *)he->key, "sharp table entry");
+    MarkRoot((JSTracer *)arg, (JSObject *)he->key, "sharp table entry");
     return JS_DHASH_NEXT;
 }
 
 void
 js_TraceSharpMap(JSTracer *trc, JSSharpObjectMap *map)
 {
     JS_ASSERT(map->depth > 0);
     JS_ASSERT(map->table);
@@ -1056,41 +1056,41 @@ EvalCacheLookup(JSContext *cx, JSLinearS
                             i = 0;
                         } else {
                             i = -1;
                         }
                     }
                     if (i < 0 ||
                         objarray->vector[i]->getParent() == &scopeobj) {
                         JS_ASSERT(staticLevel == script->staticLevel);
-                        *scriptp = script->u.evalHashLink;
-                        script->u.evalHashLink = NULL;
+                        *scriptp = script->evalHashLink();
+                        script->evalHashLink() = NULL;
                         return script;
                     }
                 }
             }
         }
 
         if (++count == EVAL_CACHE_CHAIN_LIMIT)
             return NULL;
-        scriptp = &script->u.evalHashLink;
+        scriptp = &script->evalHashLink();
     }
     return NULL;
 }
 
 /*
  * There are two things we want to do with each script executed in EvalKernel:
  *  1. notify jsdbgapi about script creation/destruction
  *  2. add the script to the eval cache when EvalKernel is finished
  *
 * NB: Although the eval cache keeps a script alive wrt the JS engine, from
  * a jsdbgapi user's perspective, we want each eval() to create and destroy a
  * script. This hides implementation details and means we don't have to deal
  * with calls to JS_GetScriptObject for scripts in the eval cache (currently,
- * script->u.object aliases script->u.evalHashLink).
+ * script->object aliases script->evalHashLink()).
  */
 class EvalScriptGuard
 {
     JSContext *cx_;
     JSLinearString *str_;
     JSScript **bucket_;
     JSScript *script_;
 
@@ -1102,17 +1102,17 @@ class EvalScriptGuard
         bucket_ = EvalCacheHash(cx, str);
     }
 
     ~EvalScriptGuard() {
         if (script_) {
             js_CallDestroyScriptHook(cx_, script_);
             script_->isActiveEval = false;
             script_->isCachedEval = true;
-            script_->u.evalHashLink = *bucket_;
+            script_->evalHashLink() = *bucket_;
             *bucket_ = script_;
         }
     }
 
     void lookupInEvalCache(StackFrame *caller, uintN staticLevel,
                            JSPrincipals *principals, JSObject &scopeobj) {
         if (JSScript *found = EvalCacheLookup(cx_, str_, caller, staticLevel,
                                               principals, scopeobj, bucket_)) {
@@ -2999,17 +2999,17 @@ CreateThisForFunctionWithType(JSContext 
         /*
          * Make an object with the type's associated finalize kind and shape,
          * which reflects any properties that will definitely be added to the
          * object before it is read from.
          */
         gc::AllocKind kind = type->newScript->allocKind;
         JSObject *res = NewObjectWithType(cx, type, parent, kind);
         if (res)
-            res->setMap((Shape *) type->newScript->shape);
+            res->initMap((Shape *) type->newScript->shape.get());
         return res;
     }
 
     gc::AllocKind kind = NewObjectGCKind(cx, &ObjectClass);
     return NewObjectWithType(cx, type, parent, kind);
 }
 
 JSObject *
@@ -3499,17 +3499,17 @@ js_NewWithObject(JSContext *cx, JSObject
     StackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp());
 
     obj->init(cx, &WithClass, type, parent, priv, false);
 
     EmptyShape *emptyWithShape = EmptyShape::getEmptyWithShape(cx);
     if (!emptyWithShape)
         return NULL;
 
-    obj->setMap(emptyWithShape);
+    obj->initMap(emptyWithShape);
     OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
 
     AutoObjectRooter tvr(cx, obj);
     JSObject *thisp = proto->thisObject(cx);
     if (!thisp)
         return NULL;
 
     assertSameCompartment(cx, obj, thisp);
@@ -3528,17 +3528,17 @@ js_NewBlockObject(JSContext *cx)
     JSObject *blockObj = js_NewGCObject(cx, FINALIZE_OBJECT2);
     if (!blockObj)
         return NULL;
 
     EmptyShape *emptyBlockShape = EmptyShape::getEmptyBlockShape(cx);
     if (!emptyBlockShape)
         return NULL;
     blockObj->init(cx, &BlockClass, &emptyTypeObject, NULL, NULL, false);
-    blockObj->setMap(emptyBlockShape);
+    blockObj->initMap(emptyBlockShape);
 
     return blockObj;
 }
 
 JSObject *
 js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp)
 {
     JS_ASSERT(proto->isStaticBlock());
@@ -3586,17 +3586,17 @@ js_PutBlockObject(JSContext *cx, JSBool 
     JS_ASSERT(count <= size_t(cx->regs().sp - fp->base() - depth));
 
     /* See comments in CheckDestructuring in frontend/Parser.cpp. */
     JS_ASSERT(count >= 1);
 
     if (normalUnwind) {
         uintN slot = JSSLOT_BLOCK_FIRST_FREE_SLOT;
         depth += fp->numFixed();
-        obj->copySlotRange(slot, fp->slots() + depth, count);
+        obj->copySlotRange(slot, fp->slots() + depth, count, true);
     }
 
     /* We must clear the private slot even with errors. */
     obj->setPrivate(NULL);
     fp->setScopeChainNoCallObj(*obj->getParent());
     return normalUnwind;
 }
 
@@ -3789,18 +3789,18 @@ JS_CloneObject(JSContext *cx, JSObject *
 
     return clone;
 }
 
 struct JSObject::TradeGutsReserved {
     JSContext *cx;
     Vector<Value> avals;
     Vector<Value> bvals;
-    Value *newaslots;
-    Value *newbslots;
+    HeapValue *newaslots;
+    HeapValue *newbslots;
 
     TradeGutsReserved(JSContext *cx)
         : cx(cx), avals(cx), bvals(cx), newaslots(NULL), newbslots(NULL)
     {}
 
     ~TradeGutsReserved()
     {
         if (newaslots)
@@ -3838,22 +3838,22 @@ JSObject::ReserveForTradeGuts(JSContext 
     * if they do not have enough fixed slots to accommodate the slots in the
      * other object.
      */
 
     unsigned afixed = a->numFixedSlots();
     unsigned bfixed = b->numFixedSlots();
 
     if (afixed < bcap) {
-        reserved.newaslots = (Value *) cx->malloc_(sizeof(Value) * (bcap - afixed));
+        reserved.newaslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (bcap - afixed));
         if (!reserved.newaslots)
             return false;
     }
     if (bfixed < acap) {
-        reserved.newbslots = (Value *) cx->malloc_(sizeof(Value) * (acap - bfixed));
+        reserved.newbslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (acap - bfixed));
         if (!reserved.newbslots)
             return false;
     }
 
     return true;
 }
 
 void
@@ -3880,16 +3880,29 @@ JSObject::TradeGuts(JSContext *cx, JSObj
 
     /*
      * Callers should not try to swap dense arrays or ArrayBuffer objects,
      * these use a different slot representation from other objects.
      */
     JS_ASSERT(!a->isDenseArray() && !b->isDenseArray());
     JS_ASSERT(!a->isArrayBuffer() && !b->isArrayBuffer());
 
+#ifdef JSGC_INCREMENTAL
+    /*
+     * We need a write barrier here. If |a| was marked and |b| was not, then
+     * after the swap, |b|'s guts would never be marked. The write barrier
+     * solves this.
+     */
+    JSCompartment *comp = a->compartment();
+    if (comp->needsBarrier()) {
+        MarkChildren(comp->barrierTracer(), a);
+        MarkChildren(comp->barrierTracer(), b);
+    }
+#endif
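Marking both objects fixes the swap because SATB promises that everything
reachable at mark-start gets marked: swapping guts can move unmarked children
underneath an already-scanned (black) object, where no trace would ever find
them. A sketch of the hazard and the fix (hypothetical types):

    #include <cstdio>

    // Sketch: swapping the payloads of a scanned and an unscanned object.
    // Without greying both payloads first, |b|'s payload could end up owned
    // by the already-scanned |a| and never be visited.
    struct Node {
        bool scanned;   // "black": the collector already visited it
        int payload;    // stand-in for child pointers
    };

    static void markChildren(Node *n) {
        std::printf("greying payload %d\n", n->payload);
    }

    static void swapGuts(Node *a, Node *b, bool marking) {
        if (marking) {
            markChildren(a);    // conservative: barrier both sides
            markChildren(b);
        }
        int tmp = a->payload;   // raw swap; no per-field barriers needed now
        a->payload = b->payload;
        b->payload = tmp;
    }

    int main() {
        Node a{true, 1}, b{false, 2};
        swapGuts(&a, &b, true);
    }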
+
     /* New types for a JSObject need to be stable when trading guts. */
     TypeObject *newTypeA = a->newType;
     TypeObject *newTypeB = b->newType;
 
     /* Trade the guts of the objects. */
     const size_t size = a->structSize();
     if (size == b->structSize()) {
         /*
@@ -3933,31 +3946,31 @@ JSObject::TradeGuts(JSContext *cx, JSObj
         if (a->hasSlotsArray())
             cx->free_(a->slots);
         if (b->hasSlotsArray())
             cx->free_(b->slots);
 
         unsigned afixed = a->numFixedSlots();
         unsigned bfixed = b->numFixedSlots();
 
-        JSObject tmp;
+        /* Raw storage: avoid running constructors or barriers on a temporary that is only memcpy'd. */
+        char tmp[sizeof(JSObject)];
         memcpy(&tmp, a, sizeof tmp);
         memcpy(a, b, sizeof tmp);
         memcpy(b, &tmp, sizeof tmp);
 
         a->updateFixedSlots(afixed);
         a->slots = reserved.newaslots;
         a->capacity = Max(afixed, bcap);
-        a->copySlotRange(0, reserved.bvals.begin(), bcap);
+        a->copySlotRange(0, reserved.bvals.begin(), bcap, false);
         a->clearSlotRange(bcap, a->capacity - bcap);
 
         b->updateFixedSlots(bfixed);
         b->slots = reserved.newbslots;
         b->capacity = Max(bfixed, acap);
-        b->copySlotRange(0, reserved.avals.begin(), acap);
+        b->copySlotRange(0, reserved.avals.begin(), acap, false);
         b->clearSlotRange(acap, b->capacity - acap);
 
         /* Make sure the destructor for reserved doesn't free the slots. */
         reserved.newaslots = NULL;
         reserved.newbslots = NULL;
     }
 
     a->newType = newTypeA;
@@ -4443,37 +4456,41 @@ js_InitClass(JSContext *cx, JSObject *ob
                                          ps, fs, static_ps, static_fs, ctorp);
 }
 
 void
 JSObject::clearSlotRange(size_t start, size_t length)
 {
     JS_ASSERT(start + length <= capacity);
     if (isDenseArray()) {
-        ClearValueRange(slots + start, length, true);
+        ClearValueRange(compartment(), slots + start, length, true);
     } else {
         size_t fixed = numFixedSlots();
         if (start < fixed) {
             if (start + length < fixed) {
-                ClearValueRange(fixedSlots() + start, length, false);
+                ClearValueRange(compartment(), fixedSlots() + start, length, false);
             } else {
                 size_t localClear = fixed - start;
-                ClearValueRange(fixedSlots() + start, localClear, false);
-                ClearValueRange(slots, length - localClear, false);
+                ClearValueRange(compartment(), fixedSlots() + start, localClear, false);
+                ClearValueRange(compartment(), slots, length - localClear, false);
             }
         } else {
-            ClearValueRange(slots + start - fixed, length, false);
+            ClearValueRange(compartment(), slots + start - fixed, length, false);
         }
     }
 }
 
 void
-JSObject::copySlotRange(size_t start, const Value *vector, size_t length)
+JSObject::copySlotRange(size_t start, const Value *vector, size_t length, bool valid)
 {
     JS_ASSERT(start + length <= capacity);
+
+    if (valid)
+        prepareSlotRangeForOverwrite(start, start + length);
+
     if (isDenseArray()) {
         memcpy(slots + start, vector, length * sizeof(Value));
     } else {
         size_t fixed = numFixedSlots();
         if (start < fixed) {
             if (start + length < fixed) {
                 memcpy(fixedSlots() + start, vector, length * sizeof(Value));
             } else {
@@ -4517,30 +4534,30 @@ JSObject::allocSlots(JSContext *cx, size
     if (newcap > NSLOTS_LIMIT) {
         if (!JS_ON_TRACE(cx))
             js_ReportAllocationOverflow(cx);
         return false;
     }
 
     uint32 allocCount = numDynamicSlots(newcap);
 
-    Value *tmpslots = (Value*) cx->malloc_(allocCount * sizeof(Value));
+    HeapValue *tmpslots = (HeapValue*) cx->malloc_(allocCount * sizeof(HeapValue));
     if (!tmpslots)
         return false;  /* Leave slots at inline buffer. */
     slots = tmpslots;
     capacity = newcap;
 
     if (isDenseArray()) {
         /* Copy over anything from the inline buffer. */
-        memcpy(slots, fixedSlots(), getDenseArrayInitializedLength() * sizeof(Value));
+        memcpy(slots, fixedSlots(), getDenseArrayInitializedLength() * sizeof(HeapValue));
         if (!cx->typeInferenceEnabled())
             backfillDenseArrayHoles(cx);
     } else {
         /* Clear out the new slots without copying. */
-        ClearValueRange(slots, allocCount, false);
+        InitValueRange(slots, allocCount, false);
     }
 
     Probes::resizeObject(cx, this, oldSize, slotsAndStructSize());
 
     return true;
 }
 
 bool
@@ -4587,31 +4604,31 @@ JSObject::growSlots(JSContext *cx, size_
 
     /* If nothing was allocated yet, treat it as initial allocation. */
     if (!hasSlotsArray())
         return allocSlots(cx, actualCapacity);
 
     uint32 oldAllocCount = numDynamicSlots(oldcap);
     uint32 allocCount = numDynamicSlots(actualCapacity);
 
-    Value *tmpslots = (Value*) cx->realloc_(slots, oldAllocCount * sizeof(Value),
-                                            allocCount * sizeof(Value));
+    HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, oldAllocCount * sizeof(HeapValue),
+                                                    allocCount * sizeof(HeapValue));
     if (!tmpslots)
         return false;    /* Leave dslots as its old size. */
 
     bool changed = slots != tmpslots;
     slots = tmpslots;
     capacity = actualCapacity;
 
     if (isDenseArray()) {
         if (!cx->typeInferenceEnabled())
             backfillDenseArrayHoles(cx);
     } else {
         /* Clear the new slots we added. */
-        ClearValueRange(slots + oldAllocCount, allocCount - oldAllocCount, false);
+        InitValueRange(slots + oldAllocCount, allocCount - oldAllocCount, false);
     }
 
     if (changed && isGlobal())
         types::MarkObjectStateChange(cx, this);
 
     Probes::resizeObject(cx, this, oldSize, slotsAndStructSize());
 
     return true;
@@ -4624,16 +4641,18 @@ JSObject::shrinkSlots(JSContext *cx, siz
      * Refuse to shrink slots for call objects. This only happens in a very
      * obscure situation (deleting names introduced by a direct 'eval') and
      * allowing the slots pointer to change may require updating pointers in
      * the function's active args/vars information.
      */
     if (isCall())
         return;
 
+    JS_ASSERT_IF(isDenseArray(), initializedLength() <= newcap);
+
     uint32 oldcap = numSlots();
     JS_ASSERT(newcap <= oldcap);
     JS_ASSERT(newcap >= slotSpan());
 
     size_t oldSize = slotsAndStructSize();
 
     if (oldcap <= SLOT_CAPACITY_MIN || !hasSlotsArray()) {
         /*
@@ -4645,17 +4664,17 @@ JSObject::shrinkSlots(JSContext *cx, siz
             clearSlotRange(newcap, oldcap - newcap);
         return;
     }
 
     uint32 fill = newcap;
     newcap = Max(newcap, size_t(SLOT_CAPACITY_MIN));
     newcap = Max(newcap, numFixedSlots());
 
-    Value *tmpslots = (Value*) cx->realloc_(slots, newcap * sizeof(Value));
+    HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, newcap * sizeof(HeapValue));
     if (!tmpslots)
         return;  /* Leave slots at its old size. */
 
     bool changed = slots != tmpslots;
     slots = tmpslots;
     capacity = newcap;
 
     if (fill < newcap) {
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -54,16 +54,17 @@
 #include "jsfriendapi.h"
 #include "jsinfer.h"
 #include "jshash.h"
 #include "jspubtd.h"
 #include "jsprvtd.h"
 #include "jslock.h"
 #include "jscell.h"
 
+#include "gc/Barrier.h"
 #include "vm/String.h"
 
 namespace nanojit { class ValidateWriter; }
 
 namespace js {
 
 class AutoPropDescArrayRooter;
 class ProxyHandler;
@@ -422,17 +423,17 @@ struct JSObject : js::gc::Cell {
      */
     friend class js::TraceRecorder;
     friend class nanojit::ValidateWriter;
 
     /*
      * Private pointer to the last added property and methods to manipulate the
      * list it links among properties in this scope.
      */
-    js::Shape           *lastProp;
+    js::HeapPtrShape    lastProp;
 
   private:
     js::Class           *clasp;
 
   protected:
     inline void setLastProperty(const js::Shape *shape);
 
   private:
@@ -483,66 +484,64 @@ struct JSObject : js::gc::Cell {
      * Impose a sane upper bound, originally checked only for dense arrays, on
      * number of slots in an object.
      */
     enum {
         NSLOTS_BITS     = 29,
         NSLOTS_LIMIT    = JS_BIT(NSLOTS_BITS)
     };
 
-    uint32      flags;                      /* flags */
-    uint32      objShape;                   /* copy of lastProp->shape, or override if different */
+    uint32            flags;                /* flags */
+    uint32            objShape;             /* copy of lastProp->shape, or override if different */
 
-    union {
-        /* If prototype, type of values using this as their prototype. */
-        js::types::TypeObject *newType;
+    /*
+     * If prototype, type of values using this as their prototype. If a dense
+     * array, this holds the initialized length (see jsarray.cpp).
+     */
+    js::HeapPtr<js::types::TypeObject, jsuword> newType;
 
-        /* If dense array, the initialized length (see jsarray.cpp). */
-        jsuword initializedLength;
-    };
+    jsuword &initializedLength() { return *newType.unsafeGetUnioned(); }
 
     JS_FRIEND_API(size_t) sizeOfSlotsArray(JSUsableSizeFun usf);
 
-    JSObject    *parent;                    /* object's parent */
-    void        *privateData;               /* private data */
-    jsuword     capacity;                   /* total number of available slots */
+    js::HeapPtrObject parent;               /* object's parent */
+    void              *privateData;         /* private data */
+    jsuword           capacity;             /* total number of available slots */
 
   private:
-    js::Value   *slots;                     /* dynamically allocated slots,
+    js::HeapValue     *slots;               /* dynamically allocated slots,
                                                or pointer to fixedSlots() for
                                                dense arrays. */
 
     /*
      * The object's type and prototype. For objects with the LAZY_TYPE flag
      * set, this is the prototype's default 'new' type and can only be used
      * to get that prototype.
      */
-    js::types::TypeObject *type_;
+    js::HeapPtr<js::types::TypeObject> type_;
 
     /* Make the type object to use for LAZY_TYPE objects. */
     void makeLazyType(JSContext *cx);
 
   public:
-
     inline bool isNative() const;
     inline bool isNewborn() const;
 
     void setClass(js::Class *c) { clasp = c; }
     js::Class *getClass() const { return clasp; }
     JSClass *getJSClass() const { return Jsvalify(clasp); }
 
     bool hasClass(const js::Class *c) const {
         return c == clasp;
     }
 
     const js::ObjectOps *getOps() const {
         return &getClass()->ops;
     }
 
-    inline void trace(JSTracer *trc);
     inline void scanSlots(js::GCMarker *gcmarker);
 
     uint32 shape() const {
         JS_ASSERT(objShape != INVALID_SHAPE);
         return objShape;
     }
 
     bool isDelegate() const     { return !!(flags & DELEGATE); }
@@ -612,16 +611,17 @@ struct JSObject : js::gc::Cell {
     inline void setOwnShape(uint32 s);
     inline void clearOwnShape();
 
   public:
     inline bool nativeEmpty() const;
 
     bool hasOwnShape() const    { return !!(flags & OWN_SHAPE); }
 
+    inline void initMap(js::Shape *amap);
     inline void setMap(js::Shape *amap);
 
     inline void setSharedNonNativeMap();
 
     /* Functions for setting up scope chain object maps and shapes. */
     void initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent);
     void initClonedBlock(JSContext *cx, js::types::TypeObject *type, js::StackFrame *priv);
     void setBlockOwnShape(JSContext *cx);
@@ -725,40 +725,41 @@ struct JSObject : js::gc::Cell {
     inline size_t structSize() const;
     inline size_t slotsAndStructSize() const;
 
     /* Slot accessors for JITs. */
 
     static inline size_t getFixedSlotOffset(size_t slot);
     static inline size_t offsetOfCapacity() { return offsetof(JSObject, capacity); }
     static inline size_t offsetOfSlots() { return offsetof(JSObject, slots); }
+    static inline size_t offsetOfInitializedLength() { return offsetof(JSObject, newType); }
 
     /*
      * Get a raw pointer to the object's slots, or a slot of the object given
      * a previous value for its since-reallocated dynamic slots.
      */
-    inline const js::Value *getRawSlots();
-    inline const js::Value *getRawSlot(size_t slot, const js::Value *slots);
+    inline const js::HeapValue *getRawSlots();
+    inline const js::HeapValue *getRawSlot(size_t slot, const js::HeapValue *slots);
 
     /* Whether a slot is at a fixed offset from this object. */
     inline bool isFixedSlot(size_t slot);
 
     /* Index into the dynamic slots array to use for a dynamic slot. */
     inline size_t dynamicSlotIndex(size_t slot);
 
     inline size_t numFixedSlots() const;
 
     /* Whether this object has any dynamic slots at all. */
     inline bool hasSlotsArray() const;
 
     /* Get the number of dynamic slots required for a given capacity. */
     inline size_t numDynamicSlots(size_t capacity) const;
 
   private:
-    inline js::Value* fixedSlots() const;
+    inline js::HeapValue *fixedSlots() const;
 
   protected:
     inline bool hasContiguousSlots(size_t start, size_t count) const;
 
   public:
     /* Minimum size for dynamically allocated slots. */
     static const uint32 SLOT_CAPACITY_MIN = 8;
 
@@ -768,26 +769,34 @@ struct JSObject : js::gc::Cell {
 
     bool ensureSlots(JSContext *cx, size_t nslots) {
         if (numSlots() < nslots)
             return growSlots(cx, nslots);
         return true;
     }
 
     /*
+     * Trigger the write barrier on a range of slots that will no longer be
+     * reachable.
+     */
+    inline void prepareSlotRangeForOverwrite(size_t start, size_t end);
+
+    /*
      * Fill a range of slots with holes or undefined, depending on whether this
      * is a dense array.
      */
     void clearSlotRange(size_t start, size_t length);
 
     /*
      * Copy a flat array of slots to this object at a start slot. Caller must
-     * ensure there are enough slots in this object.
+     * ensure there are enough slots in this object. If |valid|, then the slots
+     * being overwritten hold valid data and must be invalidated for the write
+     * barrier.
      */
-    void copySlotRange(size_t start, const js::Value *vector, size_t length);
+    void copySlotRange(size_t start, const js::Value *vector, size_t length, bool valid);
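The |valid| flag encodes who owns the barrier duty: when the destination slots
hold live values, they must be pre-barriered before a raw copy clobbers them;
when the caller has already handled them, the pass is skipped. A sketch of the
prepare-then-memcpy pattern (hypothetical helper names):

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    // Sketch: pre-barrier a destination range before memcpy overwrites it.
    // memcpy itself runs no barriers, so old values must be greyed first.
    static void preBarrierRange(const int *slots, size_t len, bool marking) {
        if (!marking)
            return;
        for (size_t i = 0; i < len; i++)
            std::printf("greying old slot value %d\n", slots[i]);
    }

    static void copySlotRangeSketch(int *dst, const int *src, size_t len,
                                    bool dstHoldsLiveValues, bool marking) {
        if (dstHoldsLiveValues)
            preBarrierRange(dst, len, marking);   // old values leave the heap here
        std::memcpy(dst, src, len * sizeof(int)); // barrier-free bulk copy is safe now
    }

    int main() {
        int dst[3] = {1, 2, 3}, src[3] = {4, 5, 6};
        copySlotRangeSketch(dst, src, 3, /*dstHoldsLiveValues=*/true, /*marking=*/true);
    }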
 
     /*
      * Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp) +
      * nreserved slots.
      *
      * This method may be called only for native objects freshly created using
      * NewObject or one of its variant where the new object will both (a) never
      * escape to script and (b) never be extended with ad-hoc properties that
@@ -812,75 +821,74 @@ struct JSObject : js::gc::Cell {
     inline bool ensureClassReservedSlots(JSContext *cx);
 
     inline uint32 slotSpan() const;
 
     inline bool containsSlot(uint32 slot) const;
 
     void rollbackProperties(JSContext *cx, uint32 slotSpan);
 
-    js::Value *getSlotAddress(uintN slot) {
+    js::HeapValue *getSlotAddress(uintN slot) {
         /*
          * This can be used to get the address of the end of the slots for the
          * object, which may be necessary when fetching zero-length arrays of
          * slots (e.g. for callObjVarArray).
          */
+        JS_ASSERT(!isDenseArray());
         JS_ASSERT(slot <= capacity);
         size_t fixed = numFixedSlots();
         if (slot < fixed)
             return fixedSlots() + slot;
         return slots + (slot - fixed);
     }
 
-    js::Value &getSlotRef(uintN slot) {
+    js::HeapValue &getSlotRef(uintN slot) {
         JS_ASSERT(slot < capacity);
         return *getSlotAddress(slot);
     }
 
-    inline js::Value &nativeGetSlotRef(uintN slot);
+    inline js::HeapValue &nativeGetSlotRef(uintN slot);
 
     const js::Value &getSlot(uintN slot) const {
         JS_ASSERT(slot < capacity);
         size_t fixed = numFixedSlots();
         if (slot < fixed)
             return fixedSlots()[slot];
         return slots[slot - fixed];
     }
 
     inline const js::Value &nativeGetSlot(uintN slot) const;
 
-    void setSlot(uintN slot, const js::Value &value) {
-        JS_ASSERT(slot < capacity);
-        getSlotRef(slot) = value;
-    }
+    inline void setSlot(uintN slot, const js::Value &value);
+    inline void initSlot(uintN slot, const js::Value &value);
+    inline void initSlotUnchecked(uintN slot, const js::Value &value);
 
     inline void nativeSetSlot(uintN slot, const js::Value &value);
     inline void nativeSetSlotWithType(JSContext *cx, const js::Shape *shape, const js::Value &value);
 
     inline js::Value getReservedSlot(uintN index) const;
+    inline js::HeapValue &getReservedSlotRef(uintN index);
 
     /* Call this only after the appropriate ensure{Class,Instance}ReservedSlots call. */
     inline void setReservedSlot(uintN index, const js::Value &v);
 
     /* For slots which are known to always be fixed, due to the way they are allocated. */
 
-    js::Value &getFixedSlotRef(uintN slot) {
+    js::HeapValue &getFixedSlotRef(uintN slot) {
         JS_ASSERT(slot < numFixedSlots());
         return fixedSlots()[slot];
     }
 
     const js::Value &getFixedSlot(uintN slot) const {
         JS_ASSERT(slot < numFixedSlots());
         return fixedSlots()[slot];
     }
 
-    void setFixedSlot(uintN slot, const js::Value &value) {
-        JS_ASSERT(slot < numFixedSlots());
-        fixedSlots()[slot] = value;
-    }
+    inline void setFixedSlot(uintN slot, const js::Value &value);
+    inline void initFixedSlot(uintN slot, const js::Value &value);
 
     /* Defined in jsscopeinlines.h to avoid including implementation dependencies here. */
     inline void updateShape(JSContext *cx);
     inline void updateFlags(const js::Shape *shape, bool isDefinitelyAtom = false);
 
     /* Extend this object to have shape as its last-added property. */
     inline void extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom = false);
 
@@ -907,32 +915,33 @@ struct JSObject : js::gc::Cell {
 
     inline js::types::TypeObject *getType(JSContext *cx);
 
     js::types::TypeObject *type() const {
         JS_ASSERT(!hasLazyType());
         return type_;
     }
 
-    js::types::TypeObject *typeFromGC() const {
+    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
         /* Direct field access for use by GC. */
         return type_;
     }
 
     static inline size_t offsetOfType() { return offsetof(JSObject, type_); }
 
     inline void clearType();
     inline void setType(js::types::TypeObject *newType);
+    inline void initType(js::types::TypeObject *newType);
 
     inline js::types::TypeObject *getNewType(JSContext *cx, JSFunction *fun = NULL,
                                              bool markUnknown = false);
   private:
     void makeNewType(JSContext *cx, JSFunction *fun, bool markUnknown);
+
   public:
-
     /* Set a new prototype for an object with a singleton type. */
     bool splicePrototype(JSContext *cx, JSObject *proto);
 
     /*
      * For bootstrapping, whether to splice a prototype for Function.prototype
      * or the global object.
      */
     bool shouldSplicePrototype(JSContext *cx);
@@ -940,46 +949,35 @@ struct JSObject : js::gc::Cell {
     JSObject * getProto() const {
         return type_->proto;
     }
 
     JSObject *getParent() const {
         return parent;
     }
 
-    void clearParent() {
-        parent = NULL;
-    }
-
-    void setParent(JSObject *newParent) {
-#ifdef DEBUG
-        for (JSObject *obj = newParent; obj; obj = obj->getParent())
-            JS_ASSERT(obj != this);
-#endif
-        setDelegateNullSafe(newParent);
-        parent = newParent;
-    }
+    inline void clearParent();
+    inline void setParent(JSObject *newParent);
+    inline void initParent(JSObject *newParent);
 
     JS_FRIEND_API(js::GlobalObject *) getGlobal() const;
 
     bool isGlobal() const {
         return !!(getClass()->flags & JSCLASS_IS_GLOBAL);
     }
 
     inline js::GlobalObject *asGlobal();
 
     void *getPrivate() const {
         JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
         return privateData;
     }
 
-    void setPrivate(void *data) {
-        JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
-        privateData = data;
-    }
+    inline void initPrivate(void *data);
+    inline void setPrivate(void *data);
 
     /* N.B. Infallible: NULL means 'no principal', not an error. */
     inline JSPrincipals *principals(JSContext *cx);
 
     /*
      * ES5 meta-object properties and operations.
      */
 
@@ -1038,21 +1036,24 @@ struct JSObject : js::gc::Cell {
     inline void setArrayLength(JSContext *cx, uint32 length);
 
     inline uint32 getDenseArrayCapacity();
     inline uint32 getDenseArrayInitializedLength();
     inline void setDenseArrayLength(uint32 length);
     inline void setDenseArrayInitializedLength(uint32 length);
     inline void ensureDenseArrayInitializedLength(JSContext *cx, uintN index, uintN extra);
     inline void backfillDenseArrayHoles(JSContext *cx);
-    inline const js::Value* getDenseArrayElements();
+    inline js::HeapValueArray getDenseArrayElements();
     inline const js::Value &getDenseArrayElement(uintN idx);
     inline void setDenseArrayElement(uintN idx, const js::Value &val);
+    inline void initDenseArrayElement(uintN idx, const js::Value &val);
     inline void setDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val);
+    inline void initDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val);
     inline void copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count);
+    inline void initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count);
     inline void moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count);
     inline void shrinkDenseArrayElements(JSContext *cx, uintN cap);
     inline bool denseArrayHasInlineSlots() const;
 
     /* Packed information for this array. */
     inline bool isPackedDenseArray();
     inline void markDenseArrayNotPacked(JSContext *cx);
 
@@ -1151,21 +1152,21 @@ struct JSObject : js::gc::Cell {
     static const uint32 FUN_CLASS_RESERVED_SLOTS = 2;
 
     static size_t getFlatClosureUpvarsOffset() {
         return getFixedSlotOffset(JSSLOT_FLAT_CLOSURE_UPVARS);
     }
 
     inline JSFunction *getFunctionPrivate() const;
 
-    inline js::Value *getFlatClosureUpvars() const;
+    inline js::FlatClosureData *getFlatClosureData() const;
     inline js::Value getFlatClosureUpvar(uint32 i) const;
     inline const js::Value &getFlatClosureUpvar(uint32 i);
     inline void setFlatClosureUpvar(uint32 i, const js::Value &v);
-    inline void setFlatClosureUpvars(js::Value *upvars);
+    inline void setFlatClosureData(js::FlatClosureData *data);
 
     /* See comments in fun_finalize. */
     inline void finalizeUpvarsIfFlatClosure();
 
     inline bool hasMethodObj(const JSObject& obj) const;
     inline void setMethodObj(JSObject& obj);
 
     inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg,
@@ -1235,22 +1236,17 @@ struct JSObject : js::gc::Cell {
     inline void setWithThis(JSObject *thisp);
 
     /*
      * Back to generic stuff.
      */
     inline bool isCallable();
 
     /* Do initialization required immediately after allocation. */
-    void earlyInit(jsuword capacity) {
-        this->capacity = capacity;
-
-        /* Stops obj from being scanned until initializated. */
-        lastProp = NULL;
-    }
+    inline void earlyInit(jsuword capacity);
 
     /* The map field is not initialized here and should be set separately. */
     void init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type,
               JSObject *parent, void *priv, bool denseArray);
 
     inline void finish(JSContext *cx);
     JS_ALWAYS_INLINE void finalize(JSContext *cx);
 
@@ -1498,16 +1494,21 @@ struct JSObject : js::gc::Cell {
         JS_STATIC_ASSERT(offsetof(JSObject, type_) == offsetof(js::shadow::Object, type));
         JS_STATIC_ASSERT(sizeof(JSObject) == sizeof(js::shadow::Object));
         JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Object::FIXED_SLOTS_SHIFT);
     }
 
     /*** For jit compiler: ***/
 
     static size_t offsetOfClassPointer() { return offsetof(JSObject, clasp); }
+
+    static inline void writeBarrierPre(JSObject *obj);
+    static inline void writeBarrierPost(JSObject *obj, void *addr);
+    inline void privateWriteBarrierPre(void **oldval);
+    inline void privateWriteBarrierPost(void **oldval);
 };
 
 /*
  * The only sensible way to compare JSObject with == is by identity. We use
  * const& instead of * as a syntactic way to assert non-null. This leads to an
  * abundance of address-of operators to identity. Hence this overload.
  */
 static JS_ALWAYS_INLINE bool
@@ -1517,19 +1518,20 @@ operator==(const JSObject &lhs, const JS
 }
 
 static JS_ALWAYS_INLINE bool
 operator!=(const JSObject &lhs, const JSObject &rhs)
 {
     return &lhs != &rhs;
 }
 
-inline js::Value*
-JSObject::fixedSlots() const {
-    return (js::Value*) (jsuword(this) + sizeof(JSObject));
+inline js::HeapValue*
+JSObject::fixedSlots() const
+{
+    return (js::HeapValue *) (jsuword(this) + sizeof(JSObject));
 }
 
 inline size_t
 JSObject::numFixedSlots() const
 {
     return flags >> FIXED_SLOTS_SHIFT;
 }
 
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -1,9 +1,9 @@
-/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sw=4 et tw=99:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
@@ -37,19 +37,21 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef jsobjinlines_h___
 #define jsobjinlines_h___
 
 #include <new>
+
 #include "jsarray.h"
 #include "jsdate.h"
 #include "jsfun.h"
+#include "jsgcmark.h"
 #include "jsiter.h"
 #include "jslock.h"
 #include "jsobj.h"
 #include "jsprobes.h"
 #include "jspropertytree.h"
 #include "jsproxy.h"
 #include "jsscope.h"
 #include "jstypedarray.h"
@@ -60,23 +62,28 @@
 #include "jsbool.h"
 #include "jscntxt.h"
 #include "jsnum.h"
 #include "jsinferinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 #include "jsstr.h"
 
+#include "gc/Barrier.h"
+#include "js/TemplateLib.h"
 #include "vm/GlobalObject.h"
 
 #include "jsatominlines.h"
 #include "jsfuninlines.h"
 #include "jsgcinlines.h"
 #include "jsscopeinlines.h"
 
+#include "gc/Barrier-inl.h"
+#include "vm/String-inl.h"
+
 inline bool
 JSObject::preventExtensions(JSContext *cx, js::AutoIdVector *props)
 {
     JS_ASSERT(isExtensible());
 
     if (js::FixOp fix = getOps()->fix) {
         bool success;
         if (!fix(cx, this, &success, props))
@@ -272,17 +279,17 @@ JSObject::finalize(JSContext *cx)
 /* 
  * Initializer for Call objects for functions and eval frames. Set class,
  * parent, map, and shape, and allocate slots.
  */
 inline void
 JSObject::initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent)
 {
     init(cx, &js::CallClass, &js::types::emptyTypeObject, parent, NULL, false);
-    lastProp = bindings.lastShape();
+    lastProp.init(bindings.lastShape());
 
     /*
      * If |bindings| is for a function that has extensible parents, that means
      * its Call should have its own shape; see js::Bindings::extensibleParents.
      */
     if (bindings.extensibleParents())
         setOwnShape(js_GenerateShape(cx));
     else
@@ -312,17 +319,18 @@ JSObject::initClonedBlock(JSContext *cx,
         objShape = lastProp->shapeid;
 }
 
 /* 
  * Mark a compile-time block as OWN_SHAPE, indicating that its run-time clones
  * also need unique shapes. See js::Bindings::extensibleParents.
  */
 inline void
-JSObject::setBlockOwnShape(JSContext *cx) {
+JSObject::setBlockOwnShape(JSContext *cx)
+{
     JS_ASSERT(isStaticBlock());
     setOwnShape(js_GenerateShape(cx));
 }
 
 /*
  * Property read barrier for deferred cloning of compiler-created function
  * objects optimized as typically non-escaping, ad-hoc methods in obj.
  */
@@ -392,25 +400,25 @@ JSObject::methodWriteBarrier(JSContext *
         const js::Value &prev = nativeGetSlot(slot);
 
         if (ChangesMethodValue(prev, v))
             return methodShapeChange(cx, slot);
     }
     return true;
 }
 
-inline const js::Value *
+inline const js::HeapValue *
 JSObject::getRawSlots()
 {
     JS_ASSERT(isGlobal());
     return slots;
 }
 
-inline const js::Value *
-JSObject::getRawSlot(size_t slot, const js::Value *slots)
+inline const js::HeapValue *
+JSObject::getRawSlot(size_t slot, const js::HeapValue *slots)
 {
     JS_ASSERT(isGlobal());
     size_t fixed = numFixedSlots();
     if (slot < fixed)
         return fixedSlots() + slot;
     return slots + slot - fixed;
 }
 
@@ -442,16 +450,23 @@ JSObject::ensureClassReservedSlots(JSCon
 }
 
 inline js::Value
 JSObject::getReservedSlot(uintN index) const
 {
     return (index < numSlots()) ? getSlot(index) : js::UndefinedValue();
 }
 
+inline js::HeapValue &
+JSObject::getReservedSlotRef(uintN index)
+{
+    JS_ASSERT(index < numSlots());
+    return getSlotRef(index);
+}
+
 inline void
 JSObject::setReservedSlot(uintN index, const js::Value &v)
 {
     JS_ASSERT(index < JSSLOT_FREE(getClass()));
     setSlot(index, v);
 }
 
 inline bool
@@ -488,16 +503,29 @@ JSObject::hasContiguousSlots(size_t star
     /*
      * Check that the range [start, start+count) is either all inline or all
      * out of line.
      */
     JS_ASSERT(start + count <= numSlots());
     return (start + count <= numFixedSlots()) || (start >= numFixedSlots());
 }
 
+inline void
+JSObject::prepareSlotRangeForOverwrite(size_t start, size_t end)
+{
+    if (isDenseArray()) {
+        JS_ASSERT(end <= initializedLength());
+        for (size_t i = start; i < end; i++)
+            slots[i].js::HeapValue::~HeapValue();
+    } else {
+        for (size_t i = start; i < end; i++)
+            getSlotRef(i).js::HeapValue::~HeapValue();
+    }
+}
+
 inline size_t
 JSObject::structSize() const
 {
     return (isFunction() && !getPrivate())
            ? sizeof(JSFunction)
            : (sizeof(JSObject) + sizeof(js::Value) * numFixedSlots());
 }
 
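
prepareSlotRangeForOverwrite in the hunk above runs each slot's HeapValue destructor before a bulk memcpy lands on top of the range. The trick, sketched below with the same kind of toy wrapper (illustrative only), is that the destructor fires the pre-barrier, after which raw memory operations are safe because every old value has already been snapshotted:

    #include <cstddef>
    #include <cstring>
    #include <vector>

    struct Value { void *bits; };

    static bool barrierActive = true;
    static std::vector<Value> snapshot;     // toy SATB buffer

    struct BarrieredValue {
        Value v;
        ~BarrieredValue() {                 // destructor doubles as pre-barrier
            if (barrierActive)
                snapshot.push_back(v);
        }
    };

    // Barrier a range once up front, then overwrite it with raw memory ops,
    // mirroring the destroy-then-memcpy pattern in the patch.
    static void overwriteRange(BarrieredValue *dst, const Value *src,
                               std::size_t n) {
        for (std::size_t i = 0; i < n; i++)
            dst[i].~BarrieredValue();       // explicit destructor = barrier
        std::memcpy(dst, src, n * sizeof(Value));
    }
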
@@ -536,40 +564,40 @@ JSObject::setArrayLength(JSContext *cx, 
         js::types::MarkTypeObjectFlags(cx, this,
                                        js::types::OBJECT_FLAG_NON_PACKED_ARRAY |
                                        js::types::OBJECT_FLAG_NON_DENSE_ARRAY);
         jsid lengthId = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom);
         js::types::AddTypePropertyId(cx, this, lengthId,
                                      js::types::Type::DoubleType());
     }
 
-    setPrivate((void*)(uintptr_t) length);
+    privateData = (void*)(uintptr_t) length;
 }
 
 inline void
 JSObject::setDenseArrayLength(uint32 length)
 {
     /* Variant of setArrayLength for use on dense arrays where the length cannot overflow int32. */
     JS_ASSERT(isDenseArray());
     JS_ASSERT(length <= INT32_MAX);
-    setPrivate((void*)(uintptr_t) length);
+    privateData = (void*)(uintptr_t) length;
 }
 
 inline uint32
 JSObject::getDenseArrayCapacity()
 {
     JS_ASSERT(isDenseArray());
     return numSlots();
 }
 
-inline const js::Value *
+inline js::HeapValueArray
 JSObject::getDenseArrayElements()
 {
     JS_ASSERT(isDenseArray());
-    return slots;
+    return js::HeapValueArray(slots);
 }
 
 inline const js::Value &
 JSObject::getDenseArrayElement(uintN idx)
 {
     JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength());
     return slots[idx];
 }
@@ -577,36 +605,74 @@ JSObject::getDenseArrayElement(uintN idx
 inline void
 JSObject::setDenseArrayElement(uintN idx, const js::Value &val)
 {
     JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength());
     slots[idx] = val;
 }
 
 inline void
+JSObject::initDenseArrayElement(uintN idx, const js::Value &val)
+{
+    JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength());
+    slots[idx].init(val);
+}
+
+inline void
 JSObject::setDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val)
 {
     js::types::AddTypePropertyId(cx, this, JSID_VOID, val);
     setDenseArrayElement(idx, val);
 }
 
 inline void
+JSObject::initDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val)
+{
+    js::types::AddTypePropertyId(cx, this, JSID_VOID, val);
+    initDenseArrayElement(idx, val);
+}
+
+inline void
 JSObject::copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count)
 {
     JS_ASSERT(isDenseArray());
     JS_ASSERT(dstStart + count <= capacity);
+    prepareSlotRangeForOverwrite(dstStart, dstStart + count);
+    memcpy(slots + dstStart, src, count * sizeof(js::Value));
+}
+
+inline void
+JSObject::initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count)
+{
+    JS_ASSERT(isDenseArray());
+    JS_ASSERT(dstStart + count <= capacity);
     memcpy(slots + dstStart, src, count * sizeof(js::Value));
 }
 
 inline void
 JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
 {
     JS_ASSERT(isDenseArray());
     JS_ASSERT(dstStart + count <= capacity);
     JS_ASSERT(srcStart + count <= capacity);
+
+    /*
+     * Use a custom write barrier here since this path is performance
+     * sensitive. Only the destination slots whose old values are not
+     * preserved by the move itself need the pre-barrier.
+     */
+    uintN markStart, markEnd;
+    if (dstStart > srcStart) {
+        markStart = js::Max(srcStart + count, dstStart);
+        markEnd = dstStart + count;
+    } else {
+        markStart = dstStart;
+        markEnd = js::Min(dstStart + count, srcStart);
+    }
+    prepareSlotRangeForOverwrite(markStart, markEnd);
+
     memmove(slots + dstStart, slots + srcStart, count * sizeof(js::Value));
 }
 
 inline void
 JSObject::shrinkDenseArrayElements(JSContext *cx, uintN cap)
 {
     JS_ASSERT(isDenseArray());
     shrinkSlots(cx, cap);
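
moveDenseArrayElements above barriers only the destination slots whose old values are genuinely lost; slots in the overlap of source and destination have their contents preserved by the move itself. (The same hunk also stores the array length straight into privateData, presumably so the integer-encoded length never reaches the pointer-oriented private write barrier.) The interval arithmetic is easy to get wrong, so here is a self-contained check of it, under a hypothetical helper name:

    #include <algorithm>
    #include <cassert>
    #include <cstddef>

    // Destination slots [dst, dst+count) are overwritten; those also inside
    // the source range [src, src+count) survive the move, so only the
    // remainder needs the pre-barrier. Returns the half-open range to mark.
    static void barrierRangeForMove(std::size_t dst, std::size_t src,
                                    std::size_t count,
                                    std::size_t *markStart,
                                    std::size_t *markEnd) {
        if (dst > src) {
            *markStart = std::max(src + count, dst);
            *markEnd = dst + count;
        } else {
            *markStart = dst;
            *markEnd = std::min(dst + count, src);
        }
    }

    int main() {
        std::size_t s, e;
        barrierRangeForMove(4, 0, 6, &s, &e);  // overlap [4,6): barrier [6,10)
        assert(s == 6 && e == 10);
        barrierRangeForMove(0, 4, 6, &s, &e);  // overlap [4,6): barrier [0,4)
        assert(s == 0 && e == 4);
        return 0;
    }
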
@@ -642,25 +708,25 @@ JSObject::getDateUTCTime() const
 
 inline void 
 JSObject::setDateUTCTime(const js::Value &time)
 {
     JS_ASSERT(isDate());
     setFixedSlot(JSSLOT_DATE_UTC_TIME, time);
 }
 
-inline js::Value *
-JSObject::getFlatClosureUpvars() const
+inline js::FlatClosureData *
+JSObject::getFlatClosureData() const
 {
 #ifdef DEBUG
     JSFunction *fun = getFunctionPrivate();
     JS_ASSERT(fun->isFlatClosure());
     JS_ASSERT(fun->script()->bindings.countUpvars() == fun->script()->upvars()->length);
 #endif
-    return (js::Value *) getFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS).toPrivate();
+    return (js::FlatClosureData *) getFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS).toPrivate();
 }
 
 inline void
 JSObject::finalizeUpvarsIfFlatClosure()
 {
     /*
      * Cloned function objects may be flat closures with upvars to free.
      *
@@ -688,39 +754,39 @@ JSObject::finalizeUpvarsIfFlatClosure()
             js::Foreground::free_(v.toPrivate());
     }
 }
 
 inline js::Value
 JSObject::getFlatClosureUpvar(uint32 i) const
 {
     JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
-    return getFlatClosureUpvars()[i];
+    return getFlatClosureData()->upvars[i];
 }
 
 inline const js::Value &
 JSObject::getFlatClosureUpvar(uint32 i)
 {
     JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
-    return getFlatClosureUpvars()[i];
+    return getFlatClosureData()->upvars[i];
 }
 
 inline void
 JSObject::setFlatClosureUpvar(uint32 i, const js::Value &v)
 {
     JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
-    getFlatClosureUpvars()[i] = v;
+    getFlatClosureData()->upvars[i] = v;
 }
 
 inline void
-JSObject::setFlatClosureUpvars(js::Value *upvars)
+JSObject::setFlatClosureData(js::FlatClosureData *data)
 {
     JS_ASSERT(isFunction());
     JS_ASSERT(getFunctionPrivate()->isFlatClosure());
-    setFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS, js::PrivateValue(upvars));
+    setFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS, js::PrivateValue(data));
 }
 
 inline bool
 JSObject::hasMethodObj(const JSObject& obj) const
 {
     return JSSLOT_FUN_METHOD_OBJ < numSlots() &&
            getFixedSlot(JSSLOT_FUN_METHOD_OBJ).isObject() &&
            getFixedSlot(JSSLOT_FUN_METHOD_OBJ).toObject() == obj;
@@ -835,17 +901,17 @@ inline JSObject *
 JSObject::getWithThis() const
 {
     return &getFixedSlot(JSSLOT_WITH_THIS).toObject();
 }
 
 inline void
 JSObject::setWithThis(JSObject *thisp)
 {
-    getFixedSlotRef(JSSLOT_WITH_THIS).setObject(*thisp);
+    setFixedSlot(JSSLOT_WITH_THIS, js::ObjectValue(*thisp));
 }
 
 inline bool
 JSObject::setSingletonType(JSContext *cx)
 {
     if (!cx->typeInferenceEnabled())
         return true;
 
@@ -909,16 +975,38 @@ JSObject::setType(js::types::TypeObject 
         JS_ASSERT(obj != this);
 #endif
     JS_ASSERT_IF(hasSpecialEquality(), newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY));
     JS_ASSERT(!hasSingletonType());
     type_ = newType;
 }
 
 inline void
+JSObject::earlyInit(jsuword capacity)
+{
+    this->capacity = capacity;
+
+    /* Stops obj from being scanned until initialized. */
+    lastProp.init(NULL);
+}
+
+inline void
+JSObject::initType(js::types::TypeObject *newType)
+{
+#ifdef DEBUG
+    JS_ASSERT(newType);
+    for (JSObject *obj = newType->proto; obj; obj = obj->getProto())
+        JS_ASSERT(obj != this);
+#endif
+    JS_ASSERT_IF(hasSpecialEquality(), newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY));
+    JS_ASSERT(!hasSingletonType());
+    type_.init(newType);
+}
+
+inline void
 JSObject::init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type,
                JSObject *parent, void *priv, bool denseArray)
 {
     clasp = aclasp;
     flags = capacity << FIXED_SLOTS_SHIFT;
 
     JS_ASSERT(denseArray == (aclasp == &js::ArrayClass));
 
@@ -939,24 +1027,22 @@ JSObject::init(JSContext *cx, js::Class 
      * is disabled, NewArray will backfill holes up to the array's capacity
      * and unset the PACKED_ARRAY flag.
      */
     slots = NULL;
     if (denseArray) {
         slots = fixedSlots();
         flags |= PACKED_ARRAY;
     } else {
-        js::ClearValueRange(fixedSlots(), capacity, denseArray);
+        js::InitValueRange(fixedSlots(), capacity, denseArray);
     }
 
-    newType = NULL;
-    JS_ASSERT(initializedLength == 0);
-
-    setType(type);
-    setParent(parent);
+    newType.init(NULL);
+    initType(type);
+    initParent(parent);
 }
 
 inline void
 JSObject::finish(JSContext *cx)
 {
     if (hasSlotsArray())
         cx->free_(slots);
 }
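
JSObject::init above switches from setType/setParent to initType/initParent and from ClearValueRange to InitValueRange: during initialization the destination memory holds garbage, and running a pre-barrier would snapshot random bits. The init/set split can be sketched like this (toy types, not the real API):

    #include <vector>

    struct Thing;                           // some GC-managed type
    static bool barrierActive = false;
    static std::vector<void *> satbBuffer;  // toy snapshot buffer

    template <typename T>
    struct BarrieredPtr {
        T *ptr;

        // Freshly allocated storage: plain store; reading 'ptr' first
        // would be reading uninitialized memory.
        void init(T *p) { ptr = p; }

        // Live storage: keep the old referent alive in the begin-of-GC
        // snapshot before the last path to it may be severed.
        void set(T *p) {
            if (barrierActive && ptr)
                satbBuffer.push_back(ptr);
            ptr = p;
        }
    };
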
@@ -972,17 +1058,17 @@ JSObject::initSharingEmptyShape(JSContex
     init(cx, aclasp, type, parent, privateValue, false);
 
     JS_ASSERT(!isDenseArray());
 
     js::EmptyShape *empty = type->getEmptyShape(cx, aclasp, kind);
     if (!empty)
         return false;
 
-    setMap(empty);
+    initMap(empty);
     return true;
 }
 
 inline bool
 JSObject::hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags)
 {
     JSObject *pobj;
     JSProperty *prop;
@@ -1023,17 +1109,25 @@ JSObject::containsSlot(uint32 slot) cons
 inline void
 JSObject::setMap(js::Shape *amap)
 {
     JS_ASSERT(!hasOwnShape());
     lastProp = amap;
     objShape = lastProp->shapeid;
 }
 
-inline js::Value &
+inline void
+JSObject::initMap(js::Shape *amap)
+{
+    JS_ASSERT(!hasOwnShape());
+    lastProp.init(amap);
+    objShape = lastProp->shapeid;
+}
+
+inline js::HeapValue &
 JSObject::nativeGetSlotRef(uintN slot)
 {
     JS_ASSERT(isNative());
     JS_ASSERT(containsSlot(slot));
     return getSlotRef(slot);
 }
 
 inline const js::Value &
@@ -1395,17 +1489,17 @@ class AutoPropertyDescriptorRooter : pri
         getter = desc->getter;
         setter = desc->setter;
         value = desc->value;
     }
 
     friend void AutoGCRooter::trace(JSTracer *trc);
 };
 
-static inline bool
+static inline js::EmptyShape *
 InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::TypeObject *type,
                    gc::AllocKind kind)
 {
     JS_ASSERT(clasp->isNative());
 
     /* Share proto's emptyShape only if obj is similar to proto. */
     js::EmptyShape *empty = NULL;
 
@@ -1415,30 +1509,29 @@ InitScopeForObject(JSContext* cx, JSObje
 
     if (type->canProvideEmptyShape(clasp))
         empty = type->getEmptyShape(cx, clasp, kind);
     else
         empty = js::EmptyShape::create(cx, clasp);
     if (!empty)
         goto bad;
 
-    obj->setMap(empty);
-    return true;
+    return empty;
 
   bad:
     /* The GC nulls map initially. It should still be null on error. */
     JS_ASSERT(obj->isNewborn());
-    return false;
+    return NULL;
 }
 
 static inline bool
 CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
 {
 #ifdef JS_THREADSAFE
-    JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
+    JS_ASSERT(kind < gc::FINALIZE_OBJECT_LIMIT);
     /* If the class has no finalizer or a finalizer that is safe to call on
      * a different thread, we change the finalize kind. For example,
      * FINALIZE_OBJECT0 calls the finalizer on the main thread,
      * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
      * IsBackgroundAllocKind is called to prevent recursively incrementing
      * the finalize kind; kind may already be a background finalize kind.
      */
     if (!gc::IsBackgroundAllocKind(kind) &&
@@ -1456,17 +1549,17 @@ CanBeFinalizedInBackground(gc::AllocKind
  * and its parent global as parent.
  */
 static inline JSObject *
 NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
                        JSObject *parent, gc::AllocKind kind)
 {
     JS_ASSERT(proto);
     JS_ASSERT(parent);
-    JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
+    JS_ASSERT(kind < gc::FINALIZE_OBJECT_LIMIT);
 
     types::TypeObject *type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
     /*
      * Allocate an object from the GC heap and initialize all its fields before
      * doing any operation that can potentially trigger GC.
@@ -1482,19 +1575,18 @@ NewNativeClassInstance(JSContext *cx, Cl
          * Default parent to the parent of the prototype, which was set from
          * the parent of the prototype's constructor.
          */
         bool denseArray = (clasp == &ArrayClass);
         obj->init(cx, clasp, type, parent, NULL, denseArray);
 
         JS_ASSERT(type->canProvideEmptyShape(clasp));
         js::EmptyShape *empty = type->getEmptyShape(cx, clasp, kind);
-
         if (empty)
-            obj->setMap(empty);
+            obj->initMap(empty);
         else
             obj = NULL;
     }
 
     return obj;
 }
 
 static inline JSObject *
@@ -1655,20 +1747,22 @@ NewObject(JSContext *cx, js::Class *clas
      * Default parent to the parent of the prototype, which was set from
      * the parent of the prototype's constructor.
      */
     obj->init(cx, clasp, type,
               (!parent && proto) ? proto->getParent() : parent,
               NULL, clasp == &ArrayClass);
 
     if (clasp->isNative()) {
-        if (!InitScopeForObject(cx, obj, clasp, type, kind)) {
+        js::EmptyShape *empty = InitScopeForObject(cx, obj, clasp, type, kind);
+        if (!empty) {
             obj = NULL;
             goto out;
         }
+        obj->initMap(empty);
     } else {
         obj->setSharedNonNativeMap();
     }
 
 out:
     Probes::createObject(cx, obj);
     return obj;
 }
@@ -1744,20 +1838,23 @@ NewObjectWithType(JSContext *cx, types::
     /*
      * Default parent to the parent of the prototype, which was set from
      * the parent of the prototype's constructor.
      */
     obj->init(cx, &ObjectClass, type,
               (!parent && type->proto) ? type->proto->getParent() : parent,
               NULL, false);
 
-    if (!InitScopeForObject(cx, obj, &ObjectClass, type, kind)) {
+    js::EmptyShape *empty;
+    empty = InitScopeForObject(cx, obj, &ObjectClass, type, kind);
+    if (!empty) {
         obj = NULL;
         goto out;
     }
+    obj->initMap(empty);
 
 out:
     Probes::createObject(cx, obj);
     return obj;
 }
 
 extern JSObject *
 NewReshapedObject(JSContext *cx, js::types::TypeObject *type, JSObject *parent,
@@ -1992,9 +2089,131 @@ ValueIsSpecial(JSObject *obj, Value *pro
 } /* namespace js */
 
 inline JSObject *
 js_GetProtoIfDenseArray(JSObject *obj)
 {
     return obj->isDenseArray() ? obj->getProto() : obj;
 }
 
+inline void
+JSObject::setSlot(uintN slot, const js::Value &value)
+{
+    JS_ASSERT(slot < capacity);
+    getSlotRef(slot).set(compartment(), value);
+}
+
+inline void
+JSObject::initSlot(uintN slot, const js::Value &value)
+{
+    JS_ASSERT(getSlot(slot).isUndefined() || getSlot(slot).isMagic(JS_ARRAY_HOLE));
+    initSlotUnchecked(slot, value);
+}
+
+inline void
+JSObject::initSlotUnchecked(uintN slot, const js::Value &value)
+{
+    JS_ASSERT(slot < capacity);
+    getSlotRef(slot).init(value);
+}
+
+inline void
+JSObject::setFixedSlot(uintN slot, const js::Value &value)
+{
+    JS_ASSERT(slot < numFixedSlots());
+    fixedSlots()[slot] = value;
+}
+
+inline void
+JSObject::initFixedSlot(uintN slot, const js::Value &value)
+{
+    JS_ASSERT(slot < numFixedSlots());
+    fixedSlots()[slot].init(value);
+}
+
+inline void
+JSObject::clearParent()
+{
+    parent.clear();
+}
+
+inline void
+JSObject::setParent(JSObject *newParent)
+{
+#ifdef DEBUG
+    for (JSObject *obj = newParent; obj; obj = obj->getParent())
+        JS_ASSERT(obj != this);
+#endif
+    setDelegateNullSafe(newParent);
+    parent = newParent;
+}
+
+inline void
+JSObject::initParent(JSObject *newParent)
+{
+    JS_ASSERT(isNewborn());
+#ifdef DEBUG
+    for (JSObject *obj = newParent; obj; obj = obj->getParent())
+        JS_ASSERT(obj != this);
+#endif
+    setDelegateNullSafe(newParent);
+    parent.init(newParent);
+}
+
+inline void
+JSObject::setPrivate(void *data)
+{
+    JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
+
+    privateWriteBarrierPre(&privateData);
+    privateData = data;
+    privateWriteBarrierPost(&privateData);
+}
+
+inline void
+JSObject::initPrivate(void *data)
+{
+    JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
+    privateData = data;
+}
+
+inline void
+JSObject::privateWriteBarrierPre(void **old)
+{
+#ifdef JSGC_INCREMENTAL
+    JSCompartment *comp = compartment();
+    if (comp->needsBarrier()) {
+        if (clasp->trace && *old)
+            clasp->trace(comp->barrierTracer(), this);
+    }
+#endif
+}
+
+inline void
+JSObject::privateWriteBarrierPost(void **old)
+{
+}
+
+inline void
+JSObject::writeBarrierPre(JSObject *obj)
+{
+#ifdef JSGC_INCREMENTAL
+    /*
+     * This would normally be a null test, but TypeScript::global uses small
+     * non-pointer values such as 0x1 as special markers, so reject anything
+     * too low to be a real object pointer.
+     */
+    if (uintptr_t(obj) < 32)
+        return;
+
+    JSCompartment *comp = obj->compartment();
+    if (comp->needsBarrier()) {
+        JS_ASSERT(!comp->rt->gcRunning);
+        MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
+    }
+#endif
+}
+
+inline void
+JSObject::writeBarrierPost(JSObject *obj, void *addr)
+{
+}
+
 #endif /* jsobjinlines_h___ */
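
writeBarrierPre above is the core of the snapshot-at-the-beginning scheme: while incremental marking is in progress, a pointer about to be overwritten is marked first, so everything reachable when marking began stays reachable even if the mutator severs the only remaining path to it. A compact model of the invariant, with toy collector state (illustrative only):

    #include <unordered_set>

    struct Object { Object *child = nullptr; bool marked = false; };

    struct ToyGC {
        bool incrementalMarking = false;
        std::unordered_set<Object *> gray;  // simplified mark stack

        // SATB pre-barrier: mark the old referent before it is unlinked.
        void writeBarrierPre(Object *old) {
            if (incrementalMarking && old && !old->marked) {
                old->marked = true;
                gray.insert(old);
            }
        }
    };

    // Every mutator write goes through the barrier first, so a pointer the
    // collector has not visited yet cannot silently vanish mid-GC.
    static void setChild(ToyGC &gc, Object *obj, Object *newChild) {
        gc.writeBarrierPre(obj->child);
        obj->child = newChild;
    }
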
--- a/js/src/jsprobes.cpp
+++ b/js/src/jsprobes.cpp
@@ -31,34 +31,35 @@
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifdef MOZ_ETW
 #include "jswin.h"
 #include <evntprov.h>
+#include <sys/types.h>
 
 /* Generated from ETWProvider.man */
 #include "ETWProvider.h"
 #endif
 
 #include "jsapi.h"
 #include "jsutil.h"
 #include "jsatom.h"
 #include "jscntxt.h"
 #include "jsdbgapi.h"
 #include "jsfun.h"
 #include "jsinterp.h"
 #include "jsobj.h"
+#include "jsprobes.h"
 #include "jsscript.h"
 #include "jsstr.h"
 
-#include "jsprobes.h"
-#include <sys/types.h>
+#include "jsobjinlines.h"
 
 #define TYPEOF(cx,v)    (JSVAL_IS_NULL(v) ? JSTYPE_NULL : JS_TypeOfValue(cx,v))
 
 using namespace js;
 
 const char Probes::nullName[] = "(null)";
 const char Probes::anonymousName[] = "(anonymous)";
 
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -154,16 +154,30 @@ Shape::removeChild(Shape *child)
         KidsHash::Range r = hash->all(); 
         Shape *otherChild = r.front();
         JS_ASSERT((r.popFront(), r.empty()));    /* No more elements! */
         kidp->setShape(otherChild);
         js::UnwantedForeground::delete_(hash);
     }
 }
 
+/*
+ * We need a read barrier for the shape tree: its pointers are weak, so a
+ * shape handed to the mutator during incremental GC must be marked to
+ * preserve the snapshot-at-the-beginning invariant.
+ */
+static Shape *
+ReadBarrier(Shape *shape)
+{
+#ifdef JSGC_INCREMENTAL
+    JSCompartment *comp = shape->compartment();
+    if (comp->needsBarrier())
+        MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier");
+#endif
+    return shape;
+}
+
 Shape *
 PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
 {
     Shape *shape;
 
     JS_ASSERT(parent);
     JS_ASSERT(!JSID_IS_VOID(parent->propid));
 
@@ -174,30 +188,30 @@ PropertyTree::getChild(JSContext *cx, Sh
      * getters or setters on the new object that is passed in as
      * |this| can significantly increase fan-out below the property
      * tree root -- see bug 335700 for details.
      */
     KidsPointer *kidp = &parent->kids;
     if (kidp->isShape()) {
         shape = kidp->toShape();
         if (shape->matches(&child))
-            return shape;
+            return ReadBarrier(shape);
     } else if (kidp->isHash()) {
         shape = *kidp->toHash()->lookup(&child);
         if (shape)
-            return shape;
+            return ReadBarrier(shape);
     } else {
         /* If kidp->isNull(), we always insert. */
     }
 
     shape = newShape(cx);
     if (!shape)
         return NULL;
 
-    new (shape) Shape(child.propid, child.rawGetter, child.rawSetter, child.slot, child.attrs,
+    new (shape) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs,
                       child.flags, child.shortid, js_GenerateShape(cx));
 
     if (!insertChild(cx, parent, shape))
         return NULL;
 
     return shape;
 }
 
@@ -251,18 +265,18 @@ Shape::dump(JSContext *cx, FILE *fp) con
         }
         if (!str)
             fputs("<error>", fp);
         else
             FileEscapedString(fp, str, '"');
     }
 
     fprintf(fp, " g/s %p/%p slot %u attrs %x ",
-            JS_FUNC_TO_DATA_PTR(void *, rawGetter),
-            JS_FUNC_TO_DATA_PTR(void *, rawSetter),
+            JS_FUNC_TO_DATA_PTR(void *, getter()),
+            JS_FUNC_TO_DATA_PTR(void *, setter()),
             slot, attrs);
     if (attrs) {
         int first = 1;
         fputs("(", fp);
 #define DUMP_ATTR(name, display) if (attrs & JSPROP_##name) fputs(" " #display + first, fp), first = 0
         DUMP_ATTR(ENUMERATE, enumerate);
         DUMP_ATTR(READONLY, readonly);
         DUMP_ATTR(PERMANENT, permanent);
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -52,29 +52,39 @@
 
 #include "jsatominlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
-static inline const Value &
-GetCall(JSObject *proxy) {
+static inline const HeapValue &
+GetCall(JSObject *proxy)
+{
     JS_ASSERT(IsFunctionProxy(proxy));
-    return proxy->getSlot(JSSLOT_PROXY_CALL);
+    return proxy->getSlotRef(JSSLOT_PROXY_CALL);
 }
 
 static inline Value
-GetConstruct(JSObject *proxy) {
+GetConstruct(JSObject *proxy)
+{
     if (proxy->numSlots() <= JSSLOT_PROXY_CONSTRUCT)
         return UndefinedValue();
     return proxy->getSlot(JSSLOT_PROXY_CONSTRUCT);
 }
 
+static inline const HeapValue &
+GetFunctionProxyConstruct(JSObject *proxy)
+{
+    JS_ASSERT(IsFunctionProxy(proxy));
+    JS_ASSERT(proxy->numSlots() > JSSLOT_PROXY_CONSTRUCT);
+    return proxy->getSlotRef(JSSLOT_PROXY_CONSTRUCT);
+}
+
 static bool
 OperationInProgress(JSContext *cx, JSObject *proxy)
 {
     PendingProxyOperation *op = JS_THREAD_DATA(cx)->pendingProxyOperation;
     while (op) {
         if (op->object == proxy)
             return true;
         op = op->next;
@@ -1186,31 +1196,31 @@ proxy_DeleteSpecial(JSContext *cx, JSObj
 {
     return proxy_DeleteGeneric(cx, obj, SPECIALID_TO_JSID(sid), rval, strict);
 }
 
 static void
 proxy_TraceObject(JSTracer *trc, JSObject *obj)
 {
     GetProxyHandler(obj)->trace(trc, obj);
-    MarkCrossCompartmentValue(trc, GetProxyPrivate(obj), "private");
-    MarkCrossCompartmentValue(trc, GetProxyExtra(obj, 0), "extra0");
-    MarkCrossCompartmentValue(trc, GetProxyExtra(obj, 1), "extra1");
+    MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_PRIVATE), "private");
+    MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_EXTRA + 0), "extra0");
+    MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_EXTRA + 1), "extra1");
     if (IsFunctionProxy(obj)) {
         MarkCrossCompartmentValue(trc, GetCall(obj), "call");
-        MarkCrossCompartmentValue(trc, GetConstruct(obj), "construct");
+        MarkCrossCompartmentValue(trc, GetFunctionProxyConstruct(obj), "construct");
     }
 }
 
 static void
 proxy_TraceFunction(JSTracer *trc, JSObject *obj)
 {
     proxy_TraceObject(trc, obj);
     MarkCrossCompartmentValue(trc, GetCall(obj), "call");
-    MarkCrossCompartmentValue(trc, GetConstruct(obj), "construct");
+    MarkCrossCompartmentValue(trc, GetFunctionProxyConstruct(obj), "construct");
 }
 
 static JSBool
 proxy_Convert(JSContext *cx, JSObject *proxy, JSType hint, Value *vp)
 {
     JS_ASSERT(proxy->isProxy());
     return Proxy::defaultValue(cx, proxy, hint, vp);
 }
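
The jsproxy.cpp changes make the trace hooks read slots through getSlotRef/getReservedSlotRef, so MarkCrossCompartmentValue receives the stored HeapValue itself rather than a temporary copy. One way to read this is as type-level enforcement: marking APIs that accept only the wrapper type cannot be handed a stale unwrapped value. A toy rendering of that idea (not the real tracer API):

    #include <cstddef>

    struct Object { bool marked = false; };
    struct Value { Object *obj = nullptr; };

    // Barrier-aware slot wrapper (simplified): marking functions accept only
    // this type, so callers cannot accidentally pass an unwrapped copy.
    struct HeapCell { Value v; };

    struct Tracer {};

    static void markCrossCompartmentValue(Tracer *, const HeapCell &cell,
                                          const char * /*name*/) {
        if (cell.v.obj)
            cell.v.obj->marked = true;  // visit the canonical stored value
    }

    static void traceProxy(Tracer *trc, HeapCell *slots, std::size_t n) {
        for (std::size_t i = 0; i < n; i++)
            markCrossCompartmentValue(trc, slots[i], "proxy slot");
    }
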
--- a/js/src/jsprvtd.h
+++ b/js/src/jsprvtd.h
@@ -88,18 +88,16 @@ typedef struct JSTryNote            JSTr
 typedef struct JSAtomState          JSAtomState;
 typedef struct JSCodeSpec           JSCodeSpec;
 typedef struct JSPrinter            JSPrinter;
 typedef struct JSStackHeader        JSStackHeader;
 typedef struct JSSubString          JSSubString;
 typedef struct JSNativeTraceInfo    JSNativeTraceInfo;
 typedef struct JSSpecializedNative  JSSpecializedNative;
 typedef struct JSXML                JSXML;
-typedef struct JSXMLArray           JSXMLArray;
-typedef struct JSXMLArrayCursor     JSXMLArrayCursor;
 
 /*
  * Template declarations.
  *
  * jsprvtd.h can be included in both C and C++ translation units. For C++, it
  * may possibly be wrapped in an extern "C" block which does not agree with
  * templates.
  */
@@ -115,16 +113,17 @@ class JSFixedString;
 class JSStaticAtom;
 class JSRope;
 class JSAtom;
 class JSWrapper;
 
 namespace js {
 
 struct ArgumentsData;
+struct FlatClosureData;
 struct Class;
 
 class RegExpObject;
 class RegExpPrivate;
 class RegExpObjectBuilder;
 class RegExpStatics;
 class MatchPairs;
 
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -209,17 +209,17 @@ PropertyTable::search(jsid id, bool addi
 
     /* Miss: return space for a new entry. */
     stored = *spp;
     if (SHAPE_IS_FREE(stored))
         return spp;
 
     /* Hit: return entry. */
     shape = SHAPE_CLEAR_COLLISION(stored);
-    if (shape && shape->propid == id)
+    if (shape && shape->propid.get() == id)
         return spp;
 
     /* Collision: double hash. */
     sizeLog2 = JS_DHASH_BITS - hashShift;
     hash2 = HASH2(hash0, sizeLog2, hashShift);
     sizeMask = JS_BITMASK(sizeLog2);
 
 #ifdef DEBUG
@@ -243,17 +243,17 @@ PropertyTable::search(jsid id, bool addi
         hash1 &= sizeMask;
         spp = entries + hash1;
 
         stored = *spp;
         if (SHAPE_IS_FREE(stored))
             return (adding && firstRemoved) ? firstRemoved : spp;
 
         shape = SHAPE_CLEAR_COLLISION(stored);
-        if (shape && shape->propid == id) {
+        if (shape && shape->propid.get() == id) {
             JS_ASSERT(collision_flag);
             return spp;
         }
 
         if (SHAPE_IS_REMOVED(stored)) {
             if (!firstRemoved)
                 firstRemoved = spp;
         } else {
@@ -317,18 +317,36 @@ PropertyTable::grow(JSContext *cx)
 
     if (!change(delta, cx) && entryCount + removedCount == size - 1) {
         JS_ReportOutOfMemory(cx);
         return false;
     }
     return true;
 }
 
+void
+Shape::update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs)
+{
+    if (hasGetterValue())
+        JSObject::writeBarrierPre(getterObject());
+    if (hasSetterValue())
+        JSObject::writeBarrierPre(setterObject());
+
+    this->rawGetter = getter;
+    this->rawSetter = setter;
+    this->attrs = attrs;
+
+    if (hasGetterValue())
+        JSObject::writeBarrierPost(getterObject(), this);
+    if (hasSetterValue())
+        JSObject::writeBarrierPost(setterObject(), this);
+}
+
 Shape *
-Shape::getChild(JSContext *cx, const js::Shape &child, Shape **listp)
+Shape::getChild(JSContext *cx, const js::Shape &child, HeapPtr<Shape> *listp)
 {
     JS_ASSERT(!JSID_IS_VOID(child.propid));
     JS_ASSERT(!child.inDictionary());
 
     if (inDictionary()) {
         Shape *oldShape = *listp;
         PropertyTable *table = (oldShape && oldShape->hasTable()) ? oldShape->getTable() : NULL;
 
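
Shape::update in the jsscope.cpp hunk above brackets a multi-field mutation with barriers: it pre-barriers the old getter/setter objects while the old attrs still describe what the unions hold, flips the fields, then post-barriers the new ones (a no-op under SATB, kept for symmetry). Stripped down, the bracketing looks like this (toy types):

    struct Object { bool marked = false; };
    static bool incrementalMarking = true;

    static void writeBarrierPre(Object *old) {
        if (incrementalMarking && old)
            old->marked = true;         // keep the snapshot reachable
    }
    static void writeBarrierPost(Object *, void *) { /* no-op for SATB */ }

    struct ToyShape {
        bool hasGetterObject;           // interprets the field below
        Object *getterObj;

        void update(bool newHasGetter, Object *newGetter) {
            // Old flags decide whether the field currently holds an object.
            if (hasGetterObject)
                writeBarrierPre(getterObj);

            hasGetterObject = newHasGetter;
            getterObj = newGetter;

            // New flags decide what the field holds now.
            if (hasGetterObject)
                writeBarrierPost(getterObj, this);
        }
    };
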
@@ -439,44 +457,44 @@ JSObject::getChildProperty(JSContext *cx
     }
 
     updateFlags(shape);
     updateShape(cx);
     return shape;
 }
 
 Shape *
-Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
+Shape::newDictionaryShape(JSContext *cx, const Shape &child, HeapPtr<Shape> *listp)
 {
     Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx);
     if (!dprop)
         return NULL;
 
-    new (dprop) Shape(child.propid, child.rawGetter, child.rawSetter, child.slot, child.attrs,
+    new (dprop) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs,
                       (child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
                       js_GenerateShape(cx), child.slotSpan);
 
     dprop->listp = NULL;
     dprop->insertIntoDictionary(listp);
     return dprop;
 }
 
 Shape *
-Shape::newDictionaryList(JSContext *cx, Shape **listp)
+Shape::newDictionaryList(JSContext *cx, HeapPtr<Shape> *listp)
 {
     Shape *shape = *listp;
     Shape *list = shape;
 
     /*
      * We temporarily create the dictionary shapes using a root located on the
      * stack. This way, the GC doesn't see any intermediate state until we
      * switch listp at the end.
      */
-    Shape *root = NULL;
-    Shape **childp = &root;
+    HeapPtrShape root(NULL);
+    HeapPtrShape *childp = &root;
 
     while (shape) {
         JS_ASSERT_IF(!shape->frozen(), !shape->inDictionary());
 
         Shape *dprop = Shape::newDictionaryShape(cx, *shape, childp);
         if (!dprop) {
             *listp = list;
             return NULL;
@@ -814,19 +832,17 @@ JSObject::putProperty(JSContext *cx, jsi
             shape->slotSpan = slot + 1;
 
             for (Shape *temp = lastProp; temp != shape; temp = temp->parent) {
                 if (temp->slotSpan <= slot)
                     temp->slotSpan = slot + 1;
             }
         }
 
-        shape->rawGetter = getter;
-        shape->rawSetter = setter;
-        shape->attrs = uint8(attrs);
+        shape->update(getter, setter, uint8(attrs));
         shape->flags = flags | Shape::IN_DICTIONARY;
         shape->shortid = int16(shortid);
 
         /*
          * We are done updating shape and lastProp. Now we may need to update
          * flags and we will need to update objShape, which is no longer "own".
          * In the last non-dictionary property case in the else clause just
          * below, getChildProperty handles this for us. First update flags.
@@ -895,17 +911,17 @@ JSObject::changeProperty(JSContext *cx, 
 
     attrs |= shape->attrs & mask;
 
     /* Allow only shared (slotless) => unshared (slotful) transition. */
     JS_ASSERT(!((attrs ^ shape->attrs) & JSPROP_SHARED) ||
               !(attrs & JSPROP_SHARED));
 
     /* Don't allow method properties to be changed to have a getter. */
-    JS_ASSERT_IF(getter != shape->rawGetter, !shape->isMethod());
+    JS_ASSERT_IF(getter != shape->getter(), !shape->isMethod());
 
     types::MarkTypePropertyConfigured(cx, this, shape->propid);
     if (attrs & (JSPROP_GETTER | JSPROP_SETTER))
         types::AddTypePropertyId(cx, this, shape->propid, types::Type::UnknownType());
 
     if (getter == JS_PropertyStub)
         getter = NULL;
     if (setter == JS_StrictPropertyStub)
@@ -937,19 +953,17 @@ JSObject::changeProperty(JSContext *cx, 
             mutableShape->slotSpan = slot + 1;
 
             for (Shape *temp = lastProp; temp != shape; temp = temp->parent) {
                 if (temp->slotSpan <= slot)
                     temp->slotSpan = slot + 1;
             }
         }
 
-        mutableShape->rawGetter = getter;
-        mutableShape->rawSetter = setter;
-        mutableShape->attrs = uint8(attrs);
+        mutableShape->update(getter, setter, uint8(attrs));
 
         updateFlags(shape);
 
         /* See the corresponding code in putProperty. */
         lastProp->shapeid = js_GenerateShape(cx);
         clearOwnShape();
 
         newShape = mutableShape;
@@ -971,17 +985,17 @@ JSObject::changeProperty(JSContext *cx, 
         /*
          * Let JSObject::putProperty handle this |overwriting| case, including
          * the conservation of shape->slot (if it's valid). We must not call
          * removeProperty because it will free an allocated shape->slot, and
          * putProperty won't re-allocate it.
          */
         Shape child(shape->propid, getter, setter, shape->slot, attrs, shape->flags,
                     shape->shortid);
-        newShape = putProperty(cx, child.propid, child.rawGetter, child.rawSetter, child.slot,
+        newShape = putProperty(cx, child.propid, child.getter(), child.setter(), child.slot,
                                child.attrs, child.flags, child.shortid);
     }
 
     CHECK_SHAPE_CONSISTENCY(this);
     return newShape;
 }
 
 bool
@@ -1184,26 +1198,26 @@ JSObject::methodShapeChange(JSContext *c
 
     JS_ASSERT(!JSID_IS_VOID(shape.propid));
     if (shape.isMethod()) {
 #ifdef DEBUG
         const Value &prev = nativeGetSlot(shape.slot);
         JS_ASSERT(shape.methodObject() == prev.toObject());
         JS_ASSERT(canHaveMethodBarrier());
         JS_ASSERT(hasMethodBarrier());
-        JS_ASSERT(!shape.rawSetter);
+        JS_ASSERT(!shape.setter());
 #endif
 
         /*
-         * Pass null to make a stub getter, but pass along shape.rawSetter to
+         * Pass null to make a stub getter, but pass along shape.setter() to
          * preserve watchpoints. Clear Shape::METHOD from flags as we are
          * despecializing from a method memoized in the property tree to a
          * plain old function-valued property.
          */
-        result = putProperty(cx, shape.propid, NULL, shape.rawSetter, shape.slot,
+        result = putProperty(cx, shape.propid, NULL, shape.setter(), shape.slot,
                              shape.attrs,
                              shape.getFlags() & ~Shape::METHOD,
                              shape.shortid);
         if (!result)
             return NULL;
     }
 
     if (branded()) {
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -325,17 +325,17 @@ struct Shape : public js::gc::Cell
      */
     union {
         mutable size_t numLinearSearches;
         mutable js::PropertyTable *table;
     };
 
     inline void freeTable(JSContext *cx);
 
-    jsid                propid;
+    HeapId              propid;
 
   protected:
     union {
         PropertyOp      rawGetter;      /* getter and setter hooks or objects */
         JSObject        *getterObj;     /* user-defined callable "get" object or
                                            null if shape->hasGetterValue(); or
                                            joined function object if METHOD flag
                                            is set. */
@@ -353,36 +353,37 @@ struct Shape : public js::gc::Cell
     uint32              slot;           /* abstract index in object slots */
   private:
     uint8               attrs;          /* attributes, see jsapi.h JSPROP_* */
     mutable uint8       flags;          /* flags, see below for defines */
   public:
     int16               shortid;        /* tinyid, or local arg/var index */
 
   protected:
-    mutable js::Shape   *parent;        /* parent node, reverse for..in order */
+    mutable HeapPtrShape parent;        /* parent node, reverse for..in order */
     /* kids is valid when !inDictionary(), listp is valid when inDictionary(). */
     union {
         mutable js::KidsPointer kids;   /* null, single child, or a tagged ptr
                                            to many-kids data structure */
-        mutable js::Shape **listp;      /* dictionary list starting at lastProp
+        mutable HeapPtrShape *listp;    /* dictionary list starting at lastProp
                                            has a double-indirect back pointer,
                                            either to shape->parent if not last,
                                            else to obj->lastProp */
     };
 
-    static inline js::Shape **search(JSContext *cx, js::Shape **startp, jsid id,
+    static inline js::Shape **search(JSContext *cx, HeapPtr<Shape> *startp, jsid id,
                                      bool adding = false);
-    static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child, js::Shape **listp);
-    static js::Shape *newDictionaryList(JSContext *cx, js::Shape **listp);
+    static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child,
+                                         HeapPtr<Shape> *listp);
+    static js::Shape *newDictionaryList(JSContext *cx, HeapPtr<Shape> *listp);
 
     inline void removeFromDictionary(JSObject *obj) const;
-    inline void insertIntoDictionary(js::Shape **dictp);
+    inline void insertIntoDictionary(HeapPtr<Shape> *dictp);
 
-    js::Shape *getChild(JSContext *cx, const js::Shape &child, js::Shape **listp);
+    js::Shape *getChild(JSContext *cx, const js::Shape &child, HeapPtr<Shape> *listp);
 
     bool hashify(JSContext *cx);
 
     void setTable(js::PropertyTable *t) const {
         JS_ASSERT_IF(t && t->freelist != SHAPE_INVALID_SLOT, t->freelist < slotSpan);
         table = t;
     }
 
@@ -569,16 +570,18 @@ struct Shape : public js::gc::Cell
         JS_ASSERT(hasSetterValue());
         return setterObj ? js::ObjectValue(*setterObj) : js::UndefinedValue();
     }
 
     Value setterOrUndefined() const {
         return hasSetterValue() && setterObj ? js::ObjectValue(*setterObj) : js::UndefinedValue();
     }
 
+    void update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs);
+
     inline JSDHashNumber hash() const;
     inline bool matches(const js::Shape *p) const;
     inline bool matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter,
                                      uint32 aslot, uintN aattrs, uintN aflags,
                                      intN ashortid) const;
 
     bool get(JSContext* cx, JSObject *receiver, JSObject *obj, JSObject *pobj, js::Value* vp) const;
     bool set(JSContext* cx, JSObject *obj, bool strict, js::Value* vp) const;
@@ -642,43 +645,45 @@ struct Shape : public js::gc::Cell
 
 #ifdef DEBUG
     void dump(JSContext *cx, FILE *fp) const;
     void dumpSubtree(JSContext *cx, int level, FILE *fp) const;
 #endif
 
     void finalize(JSContext *cx);
     void removeChild(js::Shape *child);
+
+    inline static void writeBarrierPre(const js::Shape *shape);
+    inline static void writeBarrierPost(const js::Shape *shape, void *addr);
+
+    /*
+     * All weak references need a read barrier for incremental GC. This
+     * method implements that barrier; it is used when obtaining initial
+     * shapes from the compartment.
+     */
+    inline static void readBarrier(const js::Shape *shape);
 };
 
 struct EmptyShape : public js::Shape
 {
     EmptyShape(JSCompartment *comp, js::Class *aclasp);
 
     js::Class *getClass() const { return clasp; };
 
     static EmptyShape *create(JSContext *cx, js::Class *clasp) {
         js::Shape *eprop = JS_PROPERTY_TREE(cx).newShape(cx);
         if (!eprop)
             return NULL;
         return new (eprop) EmptyShape(cx->compartment, clasp);
     }
 
-    static EmptyShape *ensure(JSContext *cx, js::Class *clasp, EmptyShape **shapep) {
-        EmptyShape *shape = *shapep;
-        if (!shape) {
-            if (!(shape = create(cx, clasp)))
-                return NULL;
-            return *shapep = shape;
-        }
-        return shape;
-    }
+    static inline EmptyShape *ensure(JSContext *cx, js::Class *clasp,
+                                     ReadBarriered<EmptyShape> *shapep);
 
     static inline EmptyShape *getEmptyArgumentsShape(JSContext *cx);
-
     static inline EmptyShape *getEmptyBlockShape(JSContext *cx);
     static inline EmptyShape *getEmptyCallShape(JSContext *cx);
     static inline EmptyShape *getEmptyDeclEnvShape(JSContext *cx);
     static inline EmptyShape *getEmptyEnumeratorShape(JSContext *cx);
     static inline EmptyShape *getEmptyWithShape(JSContext *cx);
 };
 
 } /* namespace js */
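
The jsscope.h hunk also introduces ReadBarriered<EmptyShape> for the compartment's cached empty shapes: the caches are weak, so get() applies the read barrier while set() is a plain store. A toy version of such a wrapper (illustrative, not the gc/Barrier.h definition):

    #include <unordered_set>

    static bool incrementalMarking = false;

    template <typename T>
    struct ReadBarriered {
        T *value = nullptr;
        static std::unordered_set<T *> gray;    // toy mark queue

        // Handing out a weakly-cached pointer makes it strong: mark it.
        T *get() const {
            if (incrementalMarking && value)
                gray.insert(value);
            return value;
        }

        // The cache itself is weak; storing into it needs no barrier.
        void set(T *v) { value = v; }
    };

    template <typename T>
    std::unordered_set<T *> ReadBarriered<T>::gray;
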
@@ -725,17 +730,17 @@ namespace js {
  *   is &obj->lastProp or something similar.
  * - Otherwise, we return &shape->parent, where |shape| is the successor to the
  *   found Shape.
  *
  * There is one failure case:  we return &emptyShape->parent, where
  * |emptyShape| is the EmptyShape at the start of the shape lineage.
  */
 JS_ALWAYS_INLINE js::Shape **
-Shape::search(JSContext *cx, js::Shape **startp, jsid id, bool adding)
+Shape::search(JSContext *cx, HeapPtr<js::Shape> *startp, jsid id, bool adding)
 {
     js::Shape *start = *startp;
     if (start->hasTable())
         return start->getTable()->search(id, adding);
 
     if (start->numLinearSearches == PropertyTable::MAX_LINEAR_SEARCHES) {
         if (start->isBigEnoughForAPropertyTable() && start->hashify(cx))
             return start->getTable()->search(id, adding);
@@ -752,22 +757,22 @@ Shape::search(JSContext *cx, js::Shape *
     /*
      * Not enough searches done so far to justify hashing: search linearly
      * from *startp.
      *
      * We don't use a Range here, or stop at null parent (the empty shape at
      * the end).  This avoids an extra load per iteration at the cost (if the
      * search fails) of an extra load and id test at the end.
      */
-    js::Shape **spp;
+    HeapPtr<js::Shape> *spp;
     for (spp = startp; js::Shape *shape = *spp; spp = &shape->parent) {
-        if (shape->propid == id)
-            return spp;
+        if (shape->propid.get() == id)
+            return spp->unsafeGet();
     }
-    return spp;
+    return spp->unsafeGet();
 }
 
 } // namespace js
 
 #ifdef _MSC_VER
 #pragma warning(pop)
 #pragma warning(pop)
 #endif
--- a/js/src/jsscopeinlines.h
+++ b/js/src/jsscopeinlines.h
@@ -44,16 +44,17 @@
 #include "jsarray.h"
 #include "jsbool.h"
 #include "jscntxt.h"
 #include "jsdbgapi.h"
 #include "jsfun.h"
 #include "jsobj.h"
 #include "jsscope.h"
 #include "jsgc.h"
+#include "jsgcmark.h"
 
 #include "vm/ArgumentsObject.h"
 #include "vm/StringObject.h"
 
 #include "jscntxtinlines.h"
 #include "jsgcinlines.h"
 #include "jsobjinlines.h"
 
@@ -74,41 +75,41 @@ js::types::TypeObject::getEmptyShape(JSC
 
     /*
      * Empty shapes can only be on the default 'new' type for a prototype.
      * Objects with a common prototype use the same shape lineage, even if
      * their prototypes differ.
      */
     JS_ASSERT(this == proto->newType);
 
-    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
+    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind < js::gc::FINALIZE_OBJECT_LIMIT);
     int i = kind - js::gc::FINALIZE_OBJECT0;
 
     if (!emptyShapes) {
-        emptyShapes = (js::EmptyShape**)
-            cx->calloc_(sizeof(js::EmptyShape*) * js::gc::FINALIZE_FUNCTION_AND_OBJECT_LAST);
+        emptyShapes = (js::HeapPtr<js::EmptyShape>*)
+            cx->calloc_(sizeof(js::HeapPtr<js::EmptyShape>) * js::gc::FINALIZE_OBJECT_LIMIT);
         if (!emptyShapes)
             return NULL;
 
         /*
          * Always fill in emptyShapes[0], so canProvideEmptyShape works.
          * Other empty shapes are filled in lazily.
          */
-        emptyShapes[0] = js::EmptyShape::create(cx, aclasp);
+        emptyShapes[0].init(js::EmptyShape::create(cx, aclasp));
         if (!emptyShapes[0]) {
             cx->free_(emptyShapes);
             emptyShapes = NULL;
             return NULL;
         }
     }
 
     JS_ASSERT(aclasp == emptyShapes[0]->getClass());
 
     if (!emptyShapes[i]) {
-        emptyShapes[i] = js::EmptyShape::create(cx, aclasp);
+        emptyShapes[i].init(js::EmptyShape::create(cx, aclasp));
         if (!emptyShapes[i])
             return NULL;
     }
 
     return emptyShapes[i];
 }
 
 inline bool
@@ -149,25 +150,26 @@ JSObject::extend(JSContext *cx, const js
 
 namespace js {
 
 inline bool
 StringObject::init(JSContext *cx, JSString *str)
 {
     JS_ASSERT(nativeEmpty());
 
-    const Shape **shapep = &cx->compartment->initialStringShape;
-    if (*shapep) {
-        setLastProperty(*shapep);
+    const Shape *shape = cx->compartment->initialStringShape;
+    if (shape) {
+        setLastProperty(shape);
     } else {
-        *shapep = assignInitialShape(cx);
-        if (!*shapep)
+        shape = assignInitialShape(cx);
+        if (!shape)
             return false;
+        cx->compartment->initialStringShape = shape;
     }
-    JS_ASSERT(*shapep == lastProperty());
+    JS_ASSERT(shape == lastProperty());
     JS_ASSERT(!nativeEmpty());
     JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot == LENGTH_SLOT);
 
     setStringThis(str);
     return true;
 }
 
 inline
@@ -227,45 +229,45 @@ Shape::Shape(uint32 shapeid)
 
 inline JSDHashNumber
 Shape::hash() const
 {
     JSDHashNumber hash = 0;
 
     /* Accumulate from least to most random so the low bits are most random. */
     JS_ASSERT_IF(isMethod(), !rawSetter);
-    if (rawGetter)
-        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(rawGetter);
-    if (rawSetter)
-        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(rawSetter);
+    if (getter())
+        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(getter());
+    if (setter())
+        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(setter());
     hash = JS_ROTATE_LEFT32(hash, 4) ^ (flags & PUBLIC_FLAGS);
     hash = JS_ROTATE_LEFT32(hash, 4) ^ attrs;
     hash = JS_ROTATE_LEFT32(hash, 4) ^ shortid;
     hash = JS_ROTATE_LEFT32(hash, 4) ^ slot;
-    hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid);
+    hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid.get());
     return hash;
 }
 
 inline bool
 Shape::matches(const js::Shape *other) const
 {
     JS_ASSERT(!JSID_IS_VOID(propid));
     JS_ASSERT(!JSID_IS_VOID(other->propid));
-    return propid == other->propid &&
-           matchesParamsAfterId(other->rawGetter, other->rawSetter, other->slot, other->attrs,
+    return propid.get() == other->propid.get() &&
+           matchesParamsAfterId(other->getter(), other->setter(), other->slot, other->attrs,
                                 other->flags, other->shortid);
 }
 
 inline bool
 Shape::matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter, uint32 aslot,
                             uintN aattrs, uintN aflags, intN ashortid) const
 {
     JS_ASSERT(!JSID_IS_VOID(propid));
-    return rawGetter == agetter &&
-           rawSetter == asetter &&
+    return getter() == agetter &&
+           setter() == asetter &&
            slot == aslot &&
            attrs == aattrs &&
            ((flags ^ aflags) & PUBLIC_FLAGS) == 0 &&
            shortid == ashortid;
 }
 
 inline bool
 Shape::get(JSContext* cx, JSObject *receiver, JSObject* obj, JSObject *pobj, js::Value* vp) const
@@ -328,17 +330,17 @@ Shape::removeFromDictionary(JSObject *ob
 
     if (parent)
         parent->listp = listp;
     *listp = parent;
     listp = NULL;
 }
 
 inline void
-Shape::insertIntoDictionary(js::Shape **dictp)
+Shape::insertIntoDictionary(HeapPtr<js::Shape> *dictp)
 {
     /*
      * Don't assert inDictionaryMode() here because we may be called from
      * JSObject::toDictionaryMode via JSObject::newDictionaryShape.
      */
     JS_ASSERT(inDictionary());
     JS_ASSERT(!listp);
     JS_ASSERT(!JSID_IS_VOID(propid));
@@ -357,16 +359,28 @@ Shape::insertIntoDictionary(js::Shape **
 }
 
 inline
 EmptyShape::EmptyShape(JSCompartment *comp, js::Class *aclasp)
   : js::Shape(comp, aclasp)
 {}
 
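+/*
+ * Fetch the cached empty shape for |clasp|, creating it on first use. The
+ * cache slot is ReadBarriered: reading it while an incremental GC is in
+ * progress marks the shape (see Shape::readBarrier below), so the collector
+ * cannot miss a shape handed out mid-GC.
+ */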
 /* static */ inline EmptyShape *
+EmptyShape::ensure(JSContext *cx, js::Class *clasp, ReadBarriered<EmptyShape> *shapep)
+{
+    EmptyShape *shape = shapep->get();
+    if (!shape) {
+        if (!(shape = create(cx, clasp)))
+            return NULL;
+        shapep->set(shape);
+    }
+    return shape;
+}
+
+/* static */ inline EmptyShape *
 EmptyShape::getEmptyArgumentsShape(JSContext *cx)
 {
     return ensure(cx, &NormalArgumentsObjectClass, &cx->compartment->emptyArgumentsShape);
 }
 
 /* static */ inline EmptyShape *
 EmptyShape::getEmptyBlockShape(JSContext *cx)
 {
@@ -392,11 +406,39 @@ EmptyShape::getEmptyEnumeratorShape(JSCo
 }
 
 /* static */ inline EmptyShape *
 EmptyShape::getEmptyWithShape(JSContext *cx)
 {
     return ensure(cx, &WithClass, &cx->compartment->emptyWithShape);
 }
 
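+/*
+ * Snapshot-at-the-beginning write barrier: mark the Shape that is about to
+ * be overwritten, so that everything reachable when the incremental GC
+ * started remains reachable. The static sentinel sharedNonNative is never
+ * collected and is skipped; writeBarrierPost is currently a no-op.
+ */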
+inline void
+Shape::writeBarrierPre(const js::Shape *shape)
+{
+#ifdef JSGC_INCREMENTAL
+    if (!shape || shape == &sharedNonNative)
+        return;
+
+    JSCompartment *comp = shape->compartment();
+    if (comp->needsBarrier())
+        MarkShapeUnbarriered(comp->barrierTracer(), shape, "write barrier");
+#endif
+}
+
+inline void
+Shape::writeBarrierPost(const js::Shape *shape, void *addr)
+{
+}
+
+inline void
+Shape::readBarrier(const js::Shape *shape)
+{
+#ifdef JSGC_INCREMENTAL
+    JSCompartment *comp = shape->compartment();
+    if (comp->needsBarrier())
+        MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier");
+#endif
+}
+
 } /* namespace js */
 
 #endif /* jsscopeinlines_h___ */
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -86,17 +86,17 @@ namespace js {
 
 BindingKind
 Bindings::lookup(JSContext *cx, JSAtom *name, uintN *indexp) const
 {
     if (!lastBinding)
         return NONE;
 
     Shape *shape =
-        SHAPE_FETCH(Shape::search(cx, const_cast<Shape **>(&lastBinding),
+        SHAPE_FETCH(Shape::search(cx, const_cast<HeapPtr<Shape> *>(&lastBinding),
                     ATOM_TO_JSID(name)));
     if (!shape)
         return NONE;
 
     if (indexp)
         *indexp = shape->shortid;
 
     if (shape->getter() == GetCallArg)
@@ -190,17 +190,17 @@ Bindings::getLocalNameArray(JSContext *c
         return false;
 
 #ifdef DEBUG
     JSAtom * const POISON = reinterpret_cast<JSAtom *>(0xdeadbeef);
     for (uintN i = 0; i < n; i++)
         names[i] = POISON;
 #endif
 
-    for (Shape::Range r = lastBinding; !r.empty(); r.popFront()) {
+    for (Shape::Range r = lastBinding->all(); !r.empty(); r.popFront()) {
         const Shape &shape = r.front();
         uintN index = uint16(shape.shortid);
 
         if (shape.getter() == GetCallArg) {
             JS_ASSERT(index < nargs);
         } else if (shape.getter() == GetCallUpvar) {
             JS_ASSERT(index < nupvars);
             index += nargs + nvars;
@@ -639,42 +639,46 @@ js_XDRScript(JSXDRState *xdr, JSScript *
 
     /*
      * Here looping from 0-to-length to xdr objects is essential. It ensures
      * that block objects from the script->objects array will be written and
      * restored in the outer-to-inner order. js_XDRBlockObject relies on this
      * to restore the parent chain.
      */
     for (i = 0; i != nobjects; ++i) {
-        JSObject **objp = &script->objects()->vector[i];
+        HeapPtr<JSObject> *objp = &script->objects()->vector[i];
         uint32 isBlock;
         if (xdr->mode == JSXDR_ENCODE) {
             Class *clasp = (*objp)->getClass();
             JS_ASSERT(clasp == &FunctionClass ||
                       clasp == &BlockClass);
             isBlock = (clasp == &BlockClass) ? 1 : 0;
         }
         if (!JS_XDRUint32(xdr, &isBlock))
             goto error;
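+        /*
+         * XDR through a local: js_XDRFunctionObject and js_XDRBlockObject
+         * take a raw JSObject **, so decode into |tmp| and store the result
+         * back through the barriered slot below.
+         */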
+        JSObject *tmp = *objp;
         if (isBlock == 0) {
-            if (!js_XDRFunctionObject(xdr, objp))
+            if (!js_XDRFunctionObject(xdr, &tmp))
                 goto error;
         } else {
             JS_ASSERT(isBlock == 1);
-            if (!js_XDRBlockObject(xdr, objp))
+            if (!js_XDRBlockObject(xdr, &tmp))
                 goto error;
         }
+        *objp = tmp;
     }
     for (i = 0; i != nupvars; ++i) {
         if (!JS_XDRUint32(xdr, reinterpret_cast<uint32 *>(&script->upvars()->vector[i])))
             goto error;
     }
     for (i = 0; i != nregexps; ++i) {
-        if (!js_XDRRegExpObject(xdr, &script->regexps()->vector[i]))
+        JSObject *tmp = script->regexps()->vector[i];
+        if (!js_XDRRegExpObject(xdr, &tmp))
             goto error;
+        script->regexps()->vector[i] = tmp;
     }
     for (i = 0; i != nClosedArgs; ++i) {
         if (!JS_XDRUint32(xdr, &script->closedSlots[i]))
             goto error;
     }
     for (i = 0; i != nClosedVars; ++i) {
         if (!JS_XDRUint32(xdr, &script->closedSlots[nClosedArgs + i]))
             goto error;
@@ -707,18 +711,20 @@ js_XDRScript(JSXDRState *xdr, JSScript *
             if (xdr->mode == JSXDR_DECODE) {
                 tn->kind = (uint8)(kindAndDepth >> 16);
                 tn->stackDepth = (uint16)kindAndDepth;
             }
         } while (tn != tnfirst);
     }
 
     for (i = 0; i != nconsts; ++i) {
-        if (!JS_XDRValue(xdr, &script->consts()->vector[i]))
+        Value tmp = script->consts()->vector[i];
+        if (!JS_XDRValue(xdr, &tmp))
             goto error;
+        script->consts()->vector[i] = tmp;
     }
 
     xdr->script = oldscript;
     return JS_TRUE;
 
   error:
     if (xdr->mode == JSXDR_DECODE)
         *scriptp = NULL;
@@ -743,16 +749,17 @@ void
 JSPCCounters::destroy(JSContext *cx)
 {
     if (counts) {
         cx->free_(counts);
         counts = NULL;
     }
 }
 
+
 /*
  * Shared script filename management.
  */
 
 static const char *
 SaveScriptFilename(JSContext *cx, const char *filename)
 {
     JSCompartment *comp = cx->compartment;
@@ -983,35 +990,35 @@ JSScript::NewScript(JSContext *cx, uint3
                      sizeof(JSObjectArray) +
                      sizeof(JSTryNoteArray) +
                      sizeof(GlobalSlotArray) < 0xFF);
 
 
     if (nconsts != 0) {
         JS_ASSERT(reinterpret_cast<jsuword>(cursor) % sizeof(jsval) == 0);
         script->consts()->length = nconsts;
-        script->consts()->vector = reinterpret_cast<Value *>(cursor);
+        script->consts()->vector = (HeapValue *)cursor;
         cursor += nconsts * sizeof(script->consts()->vector[0]);
     }
 
     if (natoms != 0) {
         script->natoms = natoms;
         script->atoms = reinterpret_cast<JSAtom **>(cursor);
         cursor += natoms * sizeof(script->atoms[0]);
     }
 
     if (nobjects != 0) {
         script->objects()->length = nobjects;
-        script->objects()->vector = reinterpret_cast<JSObject **>(cursor);
+        script->objects()->vector = (HeapPtr<JSObject> *)cursor;
         cursor += nobjects * sizeof(script->objects()->vector[0]);
     }
 
     if (nregexps != 0) {
         script->regexps()->length = nregexps;
-        script->regexps()->vector = reinterpret_cast<JSObject **>(cursor);
+        script->regexps()->vector = (HeapPtr<JSObject> *)cursor;
         cursor += nregexps * sizeof(script->regexps()->vector[0]);
     }
 
     if (ntrynotes != 0) {
         script->trynotes()->length = ntrynotes;
         script->trynotes()->vector = reinterpret_cast<JSTryNote *>(cursor);
         size_t vectorSize = ntrynotes * sizeof(script->trynotes()->vector[0]);
 #ifdef DEBUG
@@ -1197,32 +1204,32 @@ JSScript::NewScriptFromEmitter(JSContext
         bool singleton =
             cx->typeInferenceEnabled() && bce->parent && bce->parent->compiling() &&
             bce->parent->asBytecodeEmitter()->checkSingletonContext();
 
         if (!script->typeSetFunction(cx, fun, singleton))
             return NULL;
 
         fun->setScript(script);
-        script->u.globalObject = fun->getParent() ? fun->getParent()->getGlobal() : NULL;
+        script->globalObject = fun->getParent() ? fun->getParent()->getGlobal() : NULL;
     } else {
         /*
          * Initialize script->object, if necessary, so that the debugger has a
          * valid holder object.
          */
         if (bce->flags & TCF_NEED_SCRIPT_GLOBAL)
-            script->u.globalObject = GetCurrentGlobal(cx);
+            script->globalObject = GetCurrentGlobal(cx);
     }
 
     /* Tell the debugger about this compiled script. */
     js_CallNewScriptHook(cx, script, fun);
     if (!bce->parent) {
         GlobalObject *compileAndGoGlobal = NULL;
         if (script->compileAndGo) {
-            compileAndGoGlobal = script->u.globalObject;
+            compileAndGoGlobal = script->globalObject;
             if (!compileAndGoGlobal)
                 compileAndGoGlobal = bce->scopeChain()->getGlobal();
         }
         Debugger::onNewScript(cx, script, compileAndGoGlobal);
     }
 
     return script;
 }
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -44,16 +44,18 @@
  * JS script descriptor.
  */
 #include "jsatom.h"
 #include "jsprvtd.h"
 #include "jsdbgapi.h"
 #include "jsclist.h"
 #include "jsinfer.h"
 
+#include "gc/Barrier.h"
+
 /*
  * Type of try note associated with each catch or finally block, and also with
  * for-in loops.
  */
 typedef enum JSTryNoteKind {
     JSTRY_CATCH,
     JSTRY_FINALLY,
     JSTRY_ITER
@@ -127,27 +129,27 @@ struct JSTryNote {
 };
 
 typedef struct JSTryNoteArray {
     JSTryNote       *vector;    /* array of indexed try notes */
     uint32          length;     /* count of indexed try notes */
 } JSTryNoteArray;
 
 typedef struct JSObjectArray {
-    JSObject        **vector;   /* array of indexed objects */
+    js::HeapPtrObject *vector;  /* array of indexed objects */
     uint32          length;     /* count of indexed objects */
 } JSObjectArray;
 
 typedef struct JSUpvarArray {
     js::UpvarCookie *vector;    /* array of indexed upvar cookies */
     uint32          length;     /* count of indexed upvar cookies */
 } JSUpvarArray;
 
 typedef struct JSConstArray {
-    js::Value       *vector;    /* array of indexed constant values */
+    js::HeapValue   *vector;    /* array of indexed constant values */
     uint32          length;
 } JSConstArray;
 
 namespace js {
 
 struct GlobalSlotArray {
     struct Entry {
         uint32      atomIndex;  /* index into atom table */
@@ -163,27 +165,25 @@ enum BindingKind { NONE, ARGUMENT, VARIA
 
 /*
  * Formal parameters, local variables, and upvars are stored in a shape tree
  * path encapsulated within this class.  This class represents bindings for
  * both function and top-level scripts (the latter is needed to track names in
  * strict mode eval code, to give such code its own lexical environment).
  */
 class Bindings {
-    js::Shape *lastBinding;
+    HeapPtr<Shape> lastBinding;
     uint16 nargs;
     uint16 nvars;
     uint16 nupvars;
     bool hasExtensibleParents;
 
   public:
-    inline Bindings(JSContext *cx)
-        : lastBinding(NULL), nargs(0), nvars(0), nupvars(0), hasExtensibleParents(false)
-    {
-    }
+    inline Bindings(JSContext *cx);
+    inline ~Bindings();
 
     /*
      * Transfers ownership of bindings data from bindings into this fresh
      * Bindings instance. Once such a transfer occurs, the old bindings must
      * not be used again.
      */
     inline void transfer(JSContext *cx, Bindings *bindings);
 
@@ -551,33 +551,31 @@ struct JSScript : public js::gc::Cell {
                                  * or has had backedges taken. Reset if the
                                  * script's JIT code is forcibly discarded. */
   public:
     js::Bindings    bindings;   /* names of top-level variables in this script
                                    (and arguments if this is a function script) */
     JSPrincipals    *principals;/* principals for this script */
     jschar          *sourceMap; /* source map file or null */
 
-    union {
-        /*
-         * A global object for the script.
-         * - All scripts returned by JSAPI functions (JS_CompileScript,
-         *   JS_CompileFile, etc.) have a non-null globalObject.
-         * - A function script has a globalObject if the function comes from a
-         *   compile-and-go script.
-         * - Temporary scripts created by obj_eval, JS_EvaluateScript, and
-         *   similar functions never have the globalObject field set; for such
-         *   scripts the global should be extracted from the JS frame that
-         *   execute scripts.
-         */
-        js::GlobalObject    *globalObject;
+    /*
+     * A global object for the script.
+     * - All scripts returned by JSAPI functions (JS_CompileScript,
+     *   JS_CompileFile, etc.) have a non-null globalObject.
+     * - A function script has a globalObject if the function comes from a
+     *   compile-and-go script.
+     * - Temporary scripts created by obj_eval, JS_EvaluateScript, and
+     *   similar functions never have the globalObject field set; for such
+     *   scripts the global should be extracted from the JS frame that
+     *   executes the script.
+     */
+    js::HeapPtr<js::GlobalObject, JSScript*> globalObject;
 
-        /* Hash table chaining for JSCompartment::evalCache. */
-        JSScript            *evalHashLink;
-    } u;
+    /* Hash table chaining for JSCompartment::evalCache. */
+    JSScript        *&evalHashLink() { return *globalObject.unsafeGetUnioned(); }
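+    /*
+     * evalHashLink() reuses the storage of |globalObject| (the two fields
+     * were previously a union); unsafeGetUnioned() exposes that storage as
+     * a raw JSScript * slot. This is safe because cached eval scripts never
+     * have a global object (see getGlobalObjectOrNull).
+     */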
 
     uint32          *closedSlots; /* vector of closed slots; args first, then vars. */
 
     /* array of execution counters for every JSOp in the script, by runmode */
     JSPCCounters    pcCounters;
 
 #ifdef JS_CRASH_DIAGNOSTICS
     /* All diagnostic fields must be multiples of Cell::CellSize. */
@@ -629,17 +627,17 @@ struct JSScript : public js::gc::Cell {
     inline JSFunction *function() const;
     inline js::GlobalObject *global() const;
     inline js::types::TypeScriptNesting *nesting() const;
 
     inline void clearNesting();
 
     /* Return creation time global or null. */
     js::GlobalObject *getGlobalObjectOrNull() const {
-        return isCachedEval ? NULL : u.globalObject;
+        return isCachedEval ? NULL : globalObject.get();
     }
 
   private:
     bool makeTypes(JSContext *cx, JSFunction *fun);
     bool makeAnalysis(JSContext *cx);
   public:
 
 #ifdef JS_METHODJIT
@@ -813,16 +811,19 @@ struct JSScript : public js::gc::Cell {
 
     bool stepModeEnabled() { return !!stepMode; }
 
 #ifdef DEBUG
     uint32 stepModeCount() { return stepMode & stepCountMask; }
 #endif
 
     void finalize(JSContext *cx);
+
+    static inline void writeBarrierPre(JSScript *script);
+    static inline void writeBarrierPost(JSScript *script, void *addr);
 };
 
 JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
 
 #define SHARP_NSLOTS            2       /* [#array, #depth] slots if the script
                                            uses sharp variables */
 static JS_INLINE uintN
 StackDepth(JSScript *script)
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -1,9 +1,9 @@
-/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sw=4 et tw=79 ft=cpp:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
@@ -50,16 +50,27 @@
 
 #include "vm/GlobalObject.h"
 #include "vm/RegExpObject.h"
 
 #include "jsscopeinlines.h"
 
 namespace js {
 
+inline
+Bindings::Bindings(JSContext *cx)
+    : nargs(0), nvars(0), nupvars(0), hasExtensibleParents(false)
+{
+}
+
+inline
+Bindings::~Bindings()
+{
+}
+
 inline void
 Bindings::transfer(JSContext *cx, Bindings *bindings)
 {
     JS_ASSERT(!lastBinding);
 
     *this = *bindings;
 #ifdef DEBUG
     bindings->lastBinding = NULL;
@@ -210,9 +221,29 @@ JSScript::clearNesting()
 {
     js::types::TypeScriptNesting *nesting = this->nesting();
     if (nesting) {
         js::Foreground::delete_(nesting);
         types->nesting = NULL;
     }
 }
 
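+/*
+ * Pre-barrier for JSScript references, mirroring Shape::writeBarrierPre:
+ * mark the script about to be overwritten so the collector's snapshot of
+ * the heap stays complete.
+ */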
+inline void
+JSScript::writeBarrierPre(JSScript *script)
+{
+#ifdef JSGC_INCREMENTAL
+    if (!script)
+        return;
+
+    JSCompartment *comp = script->compartment();
+    if (comp->needsBarrier()) {
+        JS_ASSERT(!comp->rt->gcRunning);
+        MarkScriptUnbarriered(comp->barrierTracer(), script, "write barrier");
+    }
+#endif
+}
+
+inline void
+JSScript::writeBarrierPost(JSScript *script, void *addr)
+{
+}
+
 #endif /* jsscriptinlines_h___ */
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1878,17 +1878,17 @@ public:
                                          JSValueType* typeMap, bool speculate)
       : mCx(cx),
         mTypeMap(typeMap),
         mPtr(typeMap),
         mOracle(speculate ? oracle : NULL)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
             JSValueType type = getCoercedType(*vp);
             if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isGlobalSlotUndemotable(mCx, slot)))
                 type = JSVAL_TYPE_DOUBLE;
             JS_ASSERT(type != JSVAL_TYPE_BOXED);
             debug_only_printf(LC_TMTracer,
                               "capture type global%d: %c\n",
                               n, TypeToChar(type));
             *mPtr++ = type;
@@ -2472,31 +2472,46 @@ TraceRecorder::w_immpIdGC(jsid id)
         tree->gcthings.addUnique(IdToValue(id));
     return w.immpNonGC((void*)JSID_BITS(id));
 }
 
 ptrdiff_t
 TraceRecorder::nativeGlobalSlot(const Value* p) const
 {
     JS_ASSERT(isGlobal(p));
+    return ptrdiff_t(p - Valueify(globalObj->slots) + globalObj->numFixedSlots());
+}
+
+ptrdiff_t
+TraceRecorder::nativeGlobalSlot(const HeapValue* p) const
+{
+    JS_ASSERT(isGlobal(p));
     return ptrdiff_t(p - globalObj->slots + globalObj->numFixedSlots());
 }
 
 /* Determine the offset in the native global frame for a jsval we track. */
 ptrdiff_t
 TraceRecorder::nativeGlobalOffset(const Value* p) const
 {
     return nativeGlobalSlot(p) * sizeof(double);
 }
 
 /* Determine whether a value is a global stack slot. */
 bool
 TraceRecorder::isGlobal(const Value* p) const
 {
-    return (size_t(p - globalObj->slots) < globalObj->numSlots() - globalObj->numFixedSlots());
+    return (size_t(p - Valueify(globalObj->slots)) <
+            globalObj->numSlots() - globalObj->numFixedSlots());
+}
+
+bool
+TraceRecorder::isGlobal(const HeapValue* p) const
+{
+    return (size_t(p - globalObj->slots) <
+            globalObj->numSlots() - globalObj->numFixedSlots());
 }
 
 bool
 TraceRecorder::isVoidPtrGlobal(const void* p) const
 {
     return isGlobal((const Value *)p);
 }
 
@@ -2714,17 +2729,17 @@ HasUnreachableGCThings(JSContext *cx, Tr
      * See bug 584860.
      */
     if (IsAboutToBeFinalized(cx, f->globalObj))
         return true;
     Value* vp = f->gcthings.data();
     for (unsigned len = f->gcthings.length(); len; --len) {
         Value &v = *vp++;
         JS_ASSERT(v.isMarkable());
-        if (IsAboutToBeFinalized(cx, v.toGCThing()))
+        if (IsAboutToBeFinalized(cx, v))
             return true;
     }
     const Shape** shapep = f->shapes.data();
     for (unsigned len = f->shapes.length(); len; --len) {
         const Shape* shape = *shapep++;
         if (IsAboutToBeFinalized(cx, shape))
             return true;
     }
@@ -2856,41 +2871,56 @@ TraceMonitor::sweep(JSContext *cx)
 }
 
 void
 TraceMonitor::mark(JSTracer *trc)
 {
     TracerState* state = tracerState;
     while (state) {
         if (state->nativeVp)
-            MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
+            MarkRootRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
         state = state->prev;
     }
 }
 
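+/*
+ * Store helper for NativeToValue: a plain Value is assigned directly, while
+ * a HeapValue must be written through set() so the pre-barrier sees the
+ * value being overwritten.
+ */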
+template<class VALUE>
+static void
+SetValue(JSCompartment *comp, VALUE& dst, const Value &src)
+{
+    dst = src;
+}
+
+template<>
+void
+SetValue(JSCompartment *comp, HeapValue& dst, const Value &src)
+{
+    dst.set(comp, src);
+}
+
 /*
  * Box a value from the native stack back into the Value format.
  */
+template<typename VALUE>
 static inline void
-NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
+NativeToValue(JSContext* cx, VALUE& v, JSValueType type, double* slot)
 {
     if (type == JSVAL_TYPE_DOUBLE) {
-        v = NumberValue(*slot);
+        SetValue(cx->compartment, v, NumberValue(*slot));
     } else if (JS_LIKELY(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET)) {
         v.boxNonDoubleFrom(type, (uint64 *)slot);
     } else if (type == JSVAL_TYPE_STRORNULL) {
         JSString *str = *(JSString **)slot;
-        v = str ? StringValue(str) : NullValue();
+        SetValue(cx->compartment, v, str ? StringValue(str) : NullValue());
     } else if (type == JSVAL_TYPE_OBJORNULL) {
         JSObject *obj = *(JSObject **)slot;
-        v = obj ? ObjectValue(*obj) : NullValue();
+        SetValue(cx->compartment, v, obj ? ObjectValue(*obj) : NullValue());
     } else {
         JS_ASSERT(type == JSVAL_TYPE_BOXED);
         JS_STATIC_ASSERT(sizeof(Value) == sizeof(double));
-        v = *(Value *)slot;
+        SetValue(cx->compartment, v, *(Value *)slot);
     }
 
 #ifdef DEBUG
     switch (type) {
       case JSVAL_TYPE_NONFUNOBJ:
         JS_ASSERT(!IsFunctionObject(v));
         debug_only_printf(LC_TMTracer,
                           "object<%p:%s> ",
@@ -2969,17 +2999,17 @@ public:
                             double *stack) :
         mCx(cx),
         mTypeMap(typemap),
         mGlobal(global),
         mStack(stack)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
         debug_only_printf(LC_TMTracer, "global%d: ", n);
         ValueToNative(*vp, *mTypeMap++, &mGlobal[slot]);
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(Value *vp, int count, StackFrame* fp) {
         for (int i = 0; i < count; ++i) {
             debug_only_printf(LC_TMTracer, "%s%d: ", stackSlotKind(), i);
@@ -3033,17 +3063,17 @@ public:
                                   JSValueType *typeMap,
                                   double *global) :
         mCx(cx),
         mTypeMap(typeMap),
         mGlobal(global)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
         debug_only_printf(LC_TMTracer, "global%d=", n);
         JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
         NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
     }
 };
 
 class FlushNativeStackFrameVisitor : public SlotVisitorBase
 {
@@ -3835,16 +3865,22 @@ TraceRecorder::getImpl(const void *p)
 }
 
 JS_REQUIRES_STACK LIns*
 TraceRecorder::get(const Value *p)
 {
     return getImpl(p);
 }
 
+JS_REQUIRES_STACK LIns*
+TraceRecorder::get(const HeapValue *p)
+{
+    return getImpl(p);
+}
+
 #ifdef DEBUG
 bool
 TraceRecorder::isValidFrameObjPtr(void *p)
 {
     StackFrame *fp = cx->fp();
     for (; fp; fp = fp->prev()) {
         if (fp->addressOfScopeChain() == p || fp->addressOfArgs() == p)
             return true;
@@ -3891,27 +3927,27 @@ TraceRecorder::known(JSObject** p)
  * The slots of the global object are sometimes reallocated by the interpreter.
 * This function checks for that condition and re-maps the entries of the tracker
  * accordingly.
  */
 JS_REQUIRES_STACK void
 TraceRecorder::checkForGlobalObjectReallocationHelper()
 {
     debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
-    const Value* src = global_slots;
-    const Value* dst = globalObj->getRawSlots();
+    const HeapValue* src = global_slots;
+    const HeapValue* dst = globalObj->getRawSlots();
     jsuint length = globalObj->capacity;
     LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
     for (jsuint n = 0; n < length; ++n) {
-        const Value *slot = globalObj->getRawSlot(n, src);
+        const HeapValue *slot = globalObj->getRawSlot(n, src);
         map[n] = tracker.get(slot);
         tracker.set(slot, NULL);
     }
     for (jsuint n = 0; n < length; ++n) {
-        const Value *slot = globalObj->getRawSlot(n, dst);
+        const HeapValue *slot = globalObj->getRawSlot(n, dst);
         tracker.set(slot, map[n]);
     }
     global_slots = globalObj->getRawSlots();
 }
 
 /* Determine whether the current branch is a loop edge (taken or not taken). */
 static JS_REQUIRES_STACK bool
 IsLoopEdge(jsbytecode* pc, jsbytecode* header)
@@ -3946,22 +3982,22 @@ public:
     {}
 
     JSValueType* getTypeMap()
     {
         return mTypeMap;
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
         LIns *ins = mRecorder.get(vp);
         bool isPromote = IsPromotedInt32(ins);
         if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
             mRecorder.w.st(mRecorder.get(vp),
-                           EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
+                           EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(&vp->get())));
             /*
              * Aggressively undo speculation so the inner tree will compile
              * if this fails.
              */
             mRecorder.traceMonitor->oracle->markGlobalSlotUndemotable(mCx, slot);
         }
         JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
         ++mTypeMap;
@@ -4034,17 +4070,17 @@ TraceRecorder::adjustCallerTypes(TreeFra
 
     AdjustCallerStackTypesVisitor stackVisitor(*this, f->stackTypeMap());
     VisitStackSlots(stackVisitor, cx, 0);
 
     JS_ASSERT(f == f->root);
 }
 
 JS_REQUIRES_STACK inline JSValueType
-TraceRecorder::determineSlotType(Value* vp)
+TraceRecorder::determineSlotType(const Value* vp)
 {
     if (vp->isNumber()) {
         LIns *i = getFromTracker(vp);
         JSValueType t;
         if (i) {
             t = IsPromotedInt32(i) ? JSVAL_TYPE_INT32 : JSVAL_TYPE_DOUBLE;
         } else if (isGlobal(vp)) {
             int offset = tree->globalSlots->offsetOf(uint16(nativeGlobalSlot(vp)));
@@ -4069,18 +4105,18 @@ class DetermineTypesVisitor : public Slo
 public:
     DetermineTypesVisitor(TraceRecorder &recorder,
                           JSValueType *typeMap) :
         mRecorder(recorder),
         mTypeMap(typeMap)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
-        *mTypeMap++ = mRecorder.determineSlotType(vp);
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
+        *mTypeMap++ = mRecorder.determineSlotType(&vp->get());
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(Value *vp, size_t count, StackFrame* fp) {
         for (size_t i = 0; i < count; ++i)
             *mTypeMap++ = mRecorder.determineSlotType(vp++);
         return true;
     }
@@ -4523,27 +4559,27 @@ enum TypeCheckResult
 class SlotMap : public SlotVisitorBase
 {
   public:
     struct SlotInfo
     {
         SlotInfo()
           : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad)
         {}
-        SlotInfo(Value* vp, bool isPromotedInt32)
+        SlotInfo(const Value* vp, bool isPromotedInt32)
           : vp(vp), isPromotedInt32(isPromotedInt32), lastCheck(TypeCheck_Bad),
             type(getCoercedType(*vp))
         {}
         SlotInfo(JSValueType t)
           : vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad), type(t)
         {}
         SlotInfo(Value* vp, JSValueType t)
           : vp(vp), isPromotedInt32(t == JSVAL_TYPE_INT32), lastCheck(TypeCheck_Bad), type(t)
         {}
-        void            *vp;
+        const void      *vp;
         bool            isPromotedInt32;
         TypeCheckResult lastCheck;
         JSValueType     type;
     };
 
     SlotMap(TraceRecorder& rec)
         : mRecorder(rec),
           mCx(rec.cx),
@@ -4551,19 +4587,19 @@ class SlotMap : public SlotVisitorBase
     {
     }
 
     virtual ~SlotMap()
     {
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot)
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot)
     {
-        addSlot(vp);
+        addSlot(&vp->get());
     }
 
     JS_ALWAYS_INLINE SlotMap::SlotInfo&
     operator [](unsigned i)
     {
         return slots[i];
     }
 
@@ -4605,17 +4641,17 @@ class SlotMap : public SlotVisitorBase
             slots[i].lastCheck = result;
         }
         if (has_undemotes)
             return TypeConsensus_Undemotes;
         return TypeConsensus_Okay;
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    addSlot(Value* vp)
+    addSlot(const Value* vp)
     {
         bool isPromotedInt32 = false;
         if (vp->isNumber()) {
             if (LIns* i = mRecorder.getFromTracker(vp)) {
                 isPromotedInt32 = IsPromotedInt32(i);
             } else if (mRecorder.isGlobal(vp)) {
                 int offset = mRecorder.tree->globalSlots->offsetOf(uint16(mRecorder.nativeGlobalSlot(vp)));
                 JS_ASSERT(offset != -1);
@@ -6132,17 +6168,17 @@ public:
         mCx(mRecorder.cx),
         mOracle(recorder.traceMonitor->oracle),
         mTypeMap(typeMap),
         mStackSlotNum(0),
         mOk(true)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
         debug_only_printf(LC_TMTracer, "global%d=", n);
         if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
             mOk = false;
         } else if (!IsPromotedInt32(mRecorder.get(vp)) && *mTypeMap == JSVAL_TYPE_INT32) {
             mOracle->markGlobalSlotUndemotable(mCx, slot);
             mOk = false;
         } else if (vp->isInt32() && *mTypeMap == JSVAL_TYPE_DOUBLE) {
             mOracle->markGlobalSlotUndemotable(mCx, slot);
@@ -6237,17 +6273,17 @@ public:
 
     JS_ALWAYS_INLINE void checkSlot(const Value &v, char const *name, int i) {
         debug_only_printf(LC_TMTracer, "%s%d=", name, i);
         JS_ASSERT(*(uint8_t*)mTypeMap != 0xCD);
         mOk = IsEntryTypeCompatible(v, *mTypeMap++);
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
-    visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
+    visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
         if (mOk)
             checkSlot(*vp, "global", n);
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(Value *vp, size_t count, StackFrame* fp) {
         for (size_t i = 0; i < count; ++i) {
             if (!mOk)
@@ -6473,16 +6509,18 @@ static JS_REQUIRES_STACK bool
 ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
             VMSideExit** innermostNestedGuardp, VMSideExit **lrp)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_EXECUTE);
 #endif
     JS_ASSERT(f->root == f && f->code());
 
+    JS_ASSERT(!cx->compartment->needsBarrier());
+
     if (!ScopeChainCheck(cx, f) ||
         !cx->stack.space().ensureEnoughSpaceToEnterTrace(cx)) {
         *lrp = NULL;
         return true;
     }
 
     /* Make sure the global object is sane. */
     JS_ASSERT(f->globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
@@ -6927,16 +6965,19 @@ TraceRecorder::assertInsideLoop()
 
 JS_REQUIRES_STACK MonitorResult
 RecordLoopEdge(JSContext* cx, TraceMonitor* tm)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_MONITOR);
 #endif
 
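+    /*
+     * Trace-JIT code does not execute write barriers, so do not start
+     * recording while an incremental GC is in progress in this compartment.
+     */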
+    if (cx->compartment->needsBarrier())
+        return MONITOR_NOT_RECORDING;
+
     /* Is the recorder currently active? */
     if (tm->recorder) {
         tm->recorder->assertInsideLoop();
         jsbytecode* pc = cx->regs().pc;
         if (pc == tm->recorder->tree->ip) {
             AbortableRecordingStatus status = tm->recorder->closeLoop();
             if (status != ARECORD_COMPLETED) {
                 if (tm->recorder)
@@ -12031,17 +12072,17 @@ TraceRecorder::nativeSet(JSObject* obj, 
                 leaveDeepBailCall();
             }
         }
 
         // Store the value.
         if (obj == globalObj) {
             if (!lazilyImportGlobalSlot(slot))
                 RETURN_STOP("lazy import of global slot failed");
-            set(&obj->getSlotRef(slot), v_ins);
+            set(&obj->getSlot(slot), v_ins);
         } else {
             LIns* slots_ins = NULL;
             stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
         }
     }
 
     return RECORD_CONTINUE;
 }
@@ -13010,17 +13051,17 @@ TraceRecorder::setElem(int lval_spindex,
 
     if (obj == globalObj)
         RETURN_STOP_A("can't trace setting elements on the global object");
 
     if (!idx.isInt32()) {
         if (!idx.isPrimitive())
             RETURN_STOP_A("non-primitive index");
         CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
-                                             *cx->regs().pc == JSOP_INITELEM));
+                                               *cx->regs().pc == JSOP_INITELEM));
     } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
         // Fast path: assigning to element of typed array.
         VMSideExit* branchExit = snapshot(BRANCH_EXIT);
 
         // Ensure array is a typed array and is the same type as what was written
         guardClass(obj_ins, obj->getClass(), branchExit, LOAD_CONST);
 
         JSObject* tarray = js::TypedArray::getTypedArray(obj);
@@ -13838,17 +13879,17 @@ TraceRecorder::name(const Value*& vp, LI
         if (!pcval.isSlot())
             RETURN_STOP_A("PCE is not a slot");
         slot = pcval.toSlot();
     }
 
     if (!lazilyImportGlobalSlot(slot))
         RETURN_STOP_A("lazy import of global slot failed");
 
-    vp = &obj->getSlotRef(slot);
+    vp = &obj->getSlot(slot);
     ins = get(vp);
     nr.tracked = true;
     return ARECORD_CONTINUE;
 }
 
 static JSObject* FASTCALL
 MethodReadBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj)
 {
@@ -16361,16 +16402,19 @@ class AutoRetBlacklist
     {
         *blacklist = IsBlacklisted(pc);
     }
 };
 
 JS_REQUIRES_STACK TracePointAction
 RecordTracePoint(JSContext* cx, TraceMonitor* tm, bool* blacklist, bool execAllowed)
 {
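+    /* As in RecordLoopEdge: no trace recording while barriers are active. */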
+    if (cx->compartment->needsBarrier())
+        return TPA_Nothing;
+
     StackFrame* fp = cx->fp();
     jsbytecode* pc = cx->regs().pc;
 
     JS_ASSERT(!tm->recorder);
     JS_ASSERT(!tm->profile);
 
     JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
     uint32 globalShape = -1;
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -1057,24 +1057,24 @@ class TraceRecorder
 
     /* Maps interpreter stack values to the instruction generating that value. */
     Tracker                         tracker;
 
     /* Maps interpreter stack values to the instruction writing back to the native stack. */
     Tracker                         nativeFrameTracker;
 
     /* The start of the global object's slots we assume for the trackers. */
-    const Value*                    global_slots;
+    const HeapValue*                global_slots;
 
     /* The number of interpreted calls entered (and not yet left) since recording began. */
     unsigned                        callDepth;
 
     /* The current atom table, mirroring the interpreter loop's variable of the same name. */
     JSAtom**                        atoms;
-    Value*                          consts;
+    HeapValue*                      consts;
 
     /* An instruction yielding the current script's strict mode code flag.  */
     nanojit::LIns*                  strictModeCode_ins;
 
     /* FIXME: Dead, but soon to be used for something or other. */
     Queue<jsbytecode*>              cfgMerges;
 
     /* Indicates whether the current tree should be trashed when the recording session ends. */
@@ -1180,17 +1180,19 @@ class TraceRecorder
                             Queue<unsigned>& undemotes);
 
     JS_REQUIRES_STACK void assertDownFrameIsConsistent(VMSideExit* anchor, FrameInfo* fi);
 
     JS_REQUIRES_STACK void captureStackTypes(unsigned callDepth, JSValueType* typeMap);
 
     bool isVoidPtrGlobal(const void* p) const;
     bool isGlobal(const Value* p) const;
+    bool isGlobal(const HeapValue* p) const;
     ptrdiff_t nativeGlobalSlot(const Value *p) const;
+    ptrdiff_t nativeGlobalSlot(const HeapValue *p) const;
     ptrdiff_t nativeGlobalOffset(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackOffsetImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackSlotImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackSlot(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativespOffsetImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativespOffset(const Value* p) const;
     JS_REQUIRES_STACK void importImpl(tjit::Address addr, const void* p, JSValueType t,
@@ -1223,16 +1225,17 @@ class TraceRecorder
     JS_REQUIRES_STACK void setImpl(const void* p, nanojit::LIns* l, bool shouldDemoteToInt32 = true);
     JS_REQUIRES_STACK void set(const Value* p, nanojit::LIns* l, bool shouldDemoteToInt32 = true);
     JS_REQUIRES_STACK void setFrameObjPtr(void* p, nanojit::LIns* l,
                                           bool shouldDemoteToInt32 = true);
     nanojit::LIns* getFromTrackerImpl(const void *p);
     nanojit::LIns* getFromTracker(const Value* p);
     JS_REQUIRES_STACK nanojit::LIns* getImpl(const void* p);
     JS_REQUIRES_STACK nanojit::LIns* get(const Value* p);
+    JS_REQUIRES_STACK nanojit::LIns* get(const HeapValue* p);
     JS_REQUIRES_STACK nanojit::LIns* getFrameObjPtr(void* p);
     JS_REQUIRES_STACK nanojit::LIns* attemptImport(const Value* p);
     JS_REQUIRES_STACK nanojit::LIns* addr(Value* p);
 
     JS_REQUIRES_STACK bool knownImpl(const void* p);
     JS_REQUIRES_STACK bool known(const Value* p);
     JS_REQUIRES_STACK bool known(JSObject** p);
     /*
@@ -1523,17 +1526,17 @@ class TraceRecorder
     JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
     JS_REQUIRES_STACK AbortableRecordingStatus checkTraceEnd(jsbytecode* pc);
 
     AbortableRecordingStatus hasMethod(JSObject* obj, jsid id, bool& found);
     JS_REQUIRES_STACK AbortableRecordingStatus hasIteratorMethod(JSObject* obj, bool& found);
 
     JS_REQUIRES_STACK jsatomid getFullIndex(ptrdiff_t pcoff = 0);
 
-    JS_REQUIRES_STACK JSValueType determineSlotType(Value* vp);
+    JS_REQUIRES_STACK JSValueType determineSlotType(const Value* vp);
 
     JS_REQUIRES_STACK RecordingStatus setUpwardTrackedVar(Value* stackVp, const Value& v,
                                                           nanojit::LIns* v_ins);
 
     JS_REQUIRES_STACK AbortableRecordingStatus compile();
     JS_REQUIRES_STACK AbortableRecordingStatus closeLoop();
     JS_REQUIRES_STACK AbortableRecordingStatus endLoop();
     JS_REQUIRES_STACK AbortableRecordingStatus endLoop(VMSideExit* exit);
@@ -1611,17 +1614,17 @@ class TraceRecorder
 
     bool globalSetExpected(unsigned slot) {
         unsigned *pi = Find(pendingGlobalSlotsToSet, slot);
         if (pi == pendingGlobalSlotsToSet.end()) {
             /*
              * Do slot arithmetic manually to avoid getSlotRef assertions which
              * do not need to be satisfied for this purpose.
              */
-            const Value *vp = globalObj->getRawSlot(slot, globalObj->getRawSlots());
+            const HeapValue *vp = globalObj->getRawSlot(slot, globalObj->getRawSlots());
 
             /* If this global is definitely being tracked, then the write is unexpected. */
             if (tracker.has(vp))
                 return false;
             
             /*
              * Otherwise, only abort if the global is not present in the
              * import typemap. Just deep aborting false here is not acceptable,
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -159,27 +159,27 @@ JSObject::allocateArrayBufferSlots(JSCon
 {
     /*
      * ArrayBuffer objects delegate added properties to another JSObject, so
      * their internal layout can use the object's fixed slots for storage.
      */
     JS_ASSERT(isArrayBuffer() && !hasSlotsArray());
 
     uint32 bytes = size + sizeof(Value);
-    if (size > sizeof(Value) * ARRAYBUFFER_RESERVED_SLOTS - sizeof(Value) ) {
-        Value *tmpslots = (Value *)cx->calloc_(bytes);
+    if (size > sizeof(HeapValue) * ARRAYBUFFER_RESERVED_SLOTS - sizeof(HeapValue)) {
+        HeapValue *tmpslots = (HeapValue *)cx->calloc_(bytes);
         if (!tmpslots)
             return false;
         slots = tmpslots;
         /*
          * Note that |bytes| may not be a multiple of |sizeof(Value)|, so
          * |capacity * sizeof(Value)| may underestimate the size by up to
          * |sizeof(Value) - 1| bytes.
          */
-        capacity = bytes / sizeof(Value);
+        capacity = bytes / sizeof(HeapValue);
     } else {
         slots = fixedSlots();
         memset(slots, 0, bytes);
     }
     *((uint32*)slots) = size;
     return true;
 }
 
@@ -227,19 +227,23 @@ ArrayBuffer::create(JSContext *cx, int32
 
 ArrayBuffer::~ArrayBuffer()
 {
 }
 
 void
 ArrayBuffer::obj_trace(JSTracer *trc, JSObject *obj)
 {
+    /*
+     * If this pointer is ever changed, the write goes through the private
+     * data barrier and the delegate gets marked there, so it is safe to
+     * mark it unbarriered here.
+     */
     JSObject *delegate = static_cast<JSObject*>(obj->getPrivate());
     if (delegate)
-        MarkObject(trc, *delegate, "arraybuffer.delegate");
+        MarkObjectUnbarriered(trc, delegate, "arraybuffer.delegate");
 }
 
 static JSProperty * const PROPERTY_FOUND = reinterpret_cast<JSProperty *>(1);
 
 JSBool
 ArrayBuffer::obj_lookupGeneric(JSContext *cx, JSObject *obj, jsid id,
                                JSObject **objp, JSProperty **propp)
 {
@@ -990,19 +994,17 @@ class TypedArrayTemplate
     static inline Class *fastClass()
     {
         return &TypedArray::fastClasses[ArrayTypeID()];
     }
 
     static void
     obj_trace(JSTracer *trc, JSObject *obj)
     {
-        JSObject *buffer = static_cast<JSObject*>(getBuffer(obj));
-        if (buffer)
-            MarkObject(trc, *buffer, "typedarray.buffer");
+        MarkValue(trc, obj->getFixedSlotRef(FIELD_BUFFER), "typedarray.buffer");
     }
 
     static JSBool
     obj_getGeneric(JSContext *cx, JSObject *obj, JSObject *receiver, jsid id, Value *vp)
     {
         JSObject *tarray = getTypedArray(obj);
 
         if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
--- a/js/src/jstypedarray.h
+++ b/js/src/jstypedarray.h
@@ -38,16 +38,18 @@
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef jstypedarray_h
 #define jstypedarray_h
 
 #include "jsapi.h"
 #include "jsclass.h"
 
+#include "gc/Barrier.h"
+
 typedef struct JSProperty JSProperty;
 
 namespace js {
 
 /*
  * ArrayBuffer
  *
  * This class holds the underlying raw buffer that the TypedArray
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -43,17 +43,17 @@
 #include "jsobjinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 inline HashNumber
 DefaultHasher<WatchKey>::hash(const Lookup &key)
 {
-    return DefaultHasher<JSObject *>::hash(key.object) ^ HashId(key.id);
+    return DefaultHasher<JSObject *>::hash(key.object.get()) ^ HashId(key.id.get());
 }
 
 class AutoEntryHolder {
     typedef WatchpointMap::Map Map;
     Map &map;
     Map::Ptr p;
     uint32 gen;
     WatchKey key;
@@ -172,17 +172,17 @@ WatchpointMap::triggerWatchpoint(JSConte
 
     /* Call the handler. */
     return handler(cx, obj, id, old, vp, closure);
 }
 
 bool
 WatchpointMap::markAllIteratively(JSTracer *trc)
 {
-    JSRuntime *rt = trc->context->runtime;
+    JSRuntime *rt = trc->runtime;
     if (rt->gcCurrentCompartment) {
         WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap;
         return wpmap && wpmap->markIteratively(trc);
     }
 
     bool mutated = false;
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
         if ((*c)->watchpointMap)
@@ -196,26 +196,26 @@ WatchpointMap::markIteratively(JSTracer 
 {
     JSContext *cx = trc->context;
     bool marked = false;
     for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
         Map::Entry &e = r.front();
         bool objectIsLive = !IsAboutToBeFinalized(cx, e.key.object);
         if (objectIsLive || e.value.held) {
             if (!objectIsLive) {
-                MarkObject(trc, *e.key.object, "held Watchpoint object");
+                MarkObject(trc, e.key.object, "held Watchpoint object");
                 marked = true;
             }
 
-            jsid id = e.key.id;
+            const HeapId &id = e.key.id;
             JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
             MarkId(trc, id, "WatchKey::id");
 
             if (e.value.closure && IsAboutToBeFinalized(cx, e.value.closure)) {
-                MarkObject(trc, *e.value.closure, "Watchpoint::closure");
+                MarkObject(trc, e.value.closure, "Watchpoint::closure");
                 marked = true;
             }
         }
     }
     return marked;
 }
 
 void
--- a/js/src/jswatchpoint.h
+++ b/js/src/jswatchpoint.h
@@ -39,40 +39,42 @@
 
 #ifndef jswatchpoint_h___
 #define jswatchpoint_h___
 
 #include "jsalloc.h"
 #include "jsprvtd.h"
 #include "jsapi.h"
 
+#include "gc/Barrier.h"
 #include "js/HashTable.h"
 
 namespace js {
 
 struct WatchKey {
     WatchKey() {}
     WatchKey(JSObject *obj, jsid id) : object(obj), id(id) {}
-    JSObject *object;
-    jsid id;
+    WatchKey(const WatchKey &key) : object(key.object.get()), id(key.id.get()) {}
+    HeapPtrObject object;
+    HeapId id;
 };
 
 struct Watchpoint {
     JSWatchPointHandler handler;
-    JSObject *closure;
+    HeapPtrObject closure;
     bool held;  /* true if currently running handler */
 };
 
 template <>
 struct DefaultHasher<WatchKey> {
     typedef WatchKey Lookup;
     static inline js::HashNumber hash(const Lookup &key);
 
     static bool match(const WatchKey &k, const Lookup &l) {
-        return k.object == l.object && k.id == l.id;
+        return k.object == l.object && k.id.get() == l.id.get();
     }
 };
 
 class WatchpointMap {
   public:
     typedef HashMap<WatchKey, Watchpoint, DefaultHasher<WatchKey>, SystemAllocPolicy> Map;
 
     bool init();
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -75,17 +75,17 @@ WeakMapBase::sweepAll(JSTracer *tracer)
 {
     JSRuntime *rt = tracer->context->runtime;
     for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
         m->sweep(tracer);
 }
 
 } /* namespace js */
 
-typedef WeakMap<JSObject *, Value> ObjectValueMap;
+typedef WeakMap<HeapPtr<JSObject>, HeapValue> ObjectValueMap;
 
 static ObjectValueMap *
 GetObjectMap(JSObject *obj)
 {
     JS_ASSERT(obj->isWeakMap());
     return (ObjectValueMap *)obj->getPrivate();
 }
 
@@ -270,17 +270,17 @@ WeakMap_finalize(JSContext *cx, JSObject
 
 static JSBool
 WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *obj = NewBuiltinClassInstance(cx, &WeakMapClass);
     if (!obj)
         return false;
 
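+    /* The object is newborn, so initPrivate stores without a pre-barrier. */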
-    obj->setPrivate(NULL);
+    obj->initPrivate(NULL);
 
     vp->setObject(*obj);
     return true;
 }
 
 Class js::WeakMapClass = {
     "WeakMap",
     JSCLASS_HAS_PRIVATE |
@@ -315,17 +315,17 @@ js_InitWeakMapClass(JSContext *cx, JSObj
 {
     JS_ASSERT(obj->isNative());
 
     GlobalObject *global = obj->asGlobal();
 
     JSObject *weakMapProto = global->createBlankPrototype(cx, &WeakMapClass);
     if (!weakMapProto)
         return NULL;
-    weakMapProto->setPrivate(NULL);
+    weakMapProto->initPrivate(NULL);
 
     JSFunction *ctor = global->createConstructor(cx, WeakMap_construct, &WeakMapClass,
                                                  CLASS_ATOM(cx, WeakMap), 0);
     if (!ctor)
         return NULL;
 
     if (!LinkConstructorAndPrototype(cx, ctor, weakMapProto))
         return NULL;
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -75,69 +75,36 @@ namespace js {
 //
 // - You must call the WeakMap's 'trace' member function when you discover that the map is
 //   part of a live object. (You'll typically call this from the containing type's 'trace'
 //   function.)
 //
 // - There is no AllocPolicy parameter; these are used with our garbage collector, so
 //   RuntimeAllocPolicy is hard-wired.
 //
-// - Optional fourth template parameter is a class MarkPolicy, with the following constructor:
-//   
+// - Optional fourth and fifth parameters are the MarkPolicies for the key and value type.
+//   A MarkPolicy has the constructor:
+//
 //     MarkPolicy(JSTracer *)
 //
 //   and the following member functions:
 //
-//     bool keyMarked(Key &k)
-//     bool valueMarked(Value &v)
-//        Return true if k/v has been marked as live by the garbage collector.
-//
-//     bool markEntryIfLive(Key &k, Value &v)
-//        If a table entry whose key is k should be retained, ensure its key and
-//        value are marked. Return true if any previously unmarked objects
-//        became marked.
-//
-//        To ensure that the WeakMap's behavior isn't visibly affected by
-//        garbage collection, this should leave k unmarked only when no key
-//        matching k could ever be produced after this GC cycle completes ---
-//        removing entries whose keys this function leaves unmarked should never
-//        make future lookups fail.
-//
-//        A typical definition of markEntryIfLive would be:
+//     bool isMarked(const Type &x)
+//        Return true if x has been marked as live by the garbage collector.
 //
-//          if (keyMarked(k) && !valueMarked(v)) {
-//              markObject(*v, "WeakMap entry value");
-//              return true;
-//          }
-//          return false;
-//
-//        This meets the above constraint when, for example, Key is JSObject *:
-//        if k isn't marked, it won't exist once the collection cycle completes,
-//        and thus can't be supplied as a key.
+//     bool mark(const Type &x)
+//        Return false if x is already marked. Otherwise, mark x and return true.
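+//
+//     bool overrideKeyMarking(const Type &k)
+//        Return true if an entry should be kept (and both its key and value
+//        marked) even though its key is unmarked; the object policy uses
+//        this to keep XPC wrapped-native keys alive (see markIteratively).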
 //
-//        Note that this may mark entries where keyMarked(k) is not initially
-//        true. For example, you could have a table whose keys match when the
-//        values of one of their properties are equal: k1.x === k2.x. An entry
-//        in such a table could be live even when its key is not marked. The
-//        markEntryIfLive function for such a table would generally mark both k and v.
-//
-//     void markEntry(Value &v)
-//        Mark the table entry's value v as reachable by the collector. WeakMap
-//        uses this function for non-marking tracers: other code using the GC
-//        heap tracing functions to map the heap for some purpose or other.
-//        This provides a conservative approximation of the true reachability
-//        relation of the heap graph.
-//
-//   If omitted, the MarkPolicy parameter defaults to js::DefaultMarkPolicy<Key,
-//   Value>, a policy template with the obvious definitions for some typical
+//   If omitted, the MarkPolicy parameter defaults to js::DefaultMarkPolicy<Type>,
+//   a policy template with the obvious definitions for some typical
 //   SpiderMonkey type combinations.
 
 // A policy template holding default marking algorithms for common type combinations. This
 // provides default types for WeakMap's MarkPolicy template parameter.
-template <class Key, class Value> class DefaultMarkPolicy;
+template <class Type> class DefaultMarkPolicy;
 
 // Common base class for all WeakMap specializations. The collector uses this to call
 // their markIteratively and sweep methods.
 class WeakMapBase {
   public:
     WeakMapBase() : next(NULL) { }
     virtual ~WeakMapBase() { }
 
@@ -183,17 +150,18 @@ class WeakMapBase {
   private:
     // Link in a list of WeakMaps to mark iteratively and sweep in this garbage
     // collection, headed by JSRuntime::gcWeakMapList.
     WeakMapBase *next;
 };
 
 template <class Key, class Value,
           class HashPolicy = DefaultHasher<Key>,
-          class MarkPolicy = DefaultMarkPolicy<Key, Value> >
+          class KeyMarkPolicy = DefaultMarkPolicy<Key>,
+          class ValueMarkPolicy = DefaultMarkPolicy<Value> >
 class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, public WeakMapBase {
   private:
     typedef HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy> Base;
     typedef typename Base::Enum Enum;
 
   public:
     typedef typename Base::Range Range;
 
@@ -201,135 +169,129 @@ class WeakMap : public HashMap<Key, Valu
     explicit WeakMap(JSContext *cx) : Base(cx) { }
 
     // Use with caution, as result can be affected by garbage collection.
     Range nondeterministicAll() {
         return Base::all();
     }
 
   private:
-    void nonMarkingTrace(JSTracer *tracer) {
-        MarkPolicy t(tracer);
+    void nonMarkingTrace(JSTracer *trc) {
+        ValueMarkPolicy vp(trc);
         for (Range r = Base::all(); !r.empty(); r.popFront())
-            t.markEntry(r.front().value);
+            vp.mark(r.front().value);
     }
 
-    bool markIteratively(JSTracer *tracer) {
-        MarkPolicy t(tracer);
+    bool markIteratively(JSTracer *trc) {
+        KeyMarkPolicy kp(trc);
+        ValueMarkPolicy vp(trc);
         bool markedAny = false;
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
+            const Key &k = r.front().key;
+            const Value &v = r.front().value;
             /* If the entry is live, ensure its key and value are marked. */
-            if (t.markEntryIfLive(r.front().key, r.front().value)) {
-                /* We revived a value with children, we have to iterate again. */
+            if (kp.isMarked(k)) {
+                markedAny |= vp.mark(v);
+            } else if (kp.overrideKeyMarking(k)) {
+                // We always mark wrapped natives. This will cause leaks, but
+                // WeakMap+CC integration is currently busted anyway. Once it
+                // is fixed in Bug 668855, XPC wrapped natives should only be
+                // marked during non-BLACK (i.e. grey) marking.
+                kp.mark(k);
+                vp.mark(v);
                 markedAny = true;
             }
-            JS_ASSERT_IF(t.keyMarked(r.front().key), t.valueMarked(r.front().value));
+            JS_ASSERT_IF(kp.isMarked(k), vp.isMarked(v));
         }
         return markedAny;
     }
 
-    void sweep(JSTracer *tracer) {
-        MarkPolicy t(tracer);
+    void sweep(JSTracer *trc) {
+        KeyMarkPolicy kp(trc);
 
         /* Remove all entries whose keys remain unmarked. */
         for (Enum e(*this); !e.empty(); e.popFront()) {
-            if (!t.keyMarked(e.front().key))
+            if (!kp.isMarked(e.front().key))
                 e.removeFront();
         }
 
 #if DEBUG
-        /* 
+        ValueMarkPolicy vp(trc);
+        /*
          * Once we've swept, all remaining edges should stay within the
          * known-live part of the graph.
          */
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
-            JS_ASSERT(t.keyMarked(r.front().key));
-            JS_ASSERT(t.valueMarked(r.front().value));
+            JS_ASSERT(kp.isMarked(r.front().key));
+            JS_ASSERT(vp.isMarked(r.front().value));
         }
 #endif
     }
 };
 
-// Marking policy for maps from JSObject pointers to js::Values.
-//
-// We always mark wrapped natives.  This will cause leaks, but WeakMap+CC
-// integration is currently busted anyways.  When WeakMap+CC integration is
-// fixed in Bug 668855, XPC wrapped natives should only be marked during
-// non-BLACK marking (ie grey marking).
 template <>
-class DefaultMarkPolicy<JSObject *, Value> {
+class DefaultMarkPolicy<HeapValue> {
   private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
-    bool keyMarked(JSObject *k) { return !IsAboutToBeFinalized(tracer->context, k); }
-    bool valueMarked(const Value &v) {
-        if (v.isMarkable())
-            return !IsAboutToBeFinalized(tracer->context, v.toGCThing());
+    bool isMarked(const HeapValue &x) {
+        if (x.isMarkable())
+            return !IsAboutToBeFinalized(tracer->context, x);
+        return true;
+    }
+    bool mark(const HeapValue &x) {
+        if (isMarked(x))
+            return false;
+        js::gc::MarkValue(tracer, x, "WeakMap entry");
         return true;
     }
+    bool overrideKeyMarking(const HeapValue &k) { return false; }
+};
+
+template <>
+class DefaultMarkPolicy<HeapPtrObject> {
   private:
-    bool markUnmarkedValue(const Value &v) {
-        if (valueMarked(v))
+    JSTracer *tracer;
+  public:
+    DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
+    bool isMarked(const HeapPtrObject &x) {
+        return !IsAboutToBeFinalized(tracer->context, x);
+    }
+    bool mark(const HeapPtrObject &x) {
+        if (isMarked(x))
             return false;
-        js::gc::MarkValue(tracer, v, "WeakMap entry value");
+        js::gc::MarkObject(tracer, x, "WeakMap entry");
         return true;
     }
-
-    // Return true if we should override the GC's default marking
-    // behavior for this key.
-    bool overrideKeyMarking(JSObject *k) {
+    bool overrideKeyMarking(const HeapPtrObject &k) {
         // We only need to worry about extra marking of keys when
         // we're doing a GC marking pass.
         if (!IS_GC_MARKING_TRACER(tracer))
             return false;
         return k->getClass()->ext.isWrappedNative;
     }
-  public:
-    bool markEntryIfLive(JSObject *k, const Value &v) {
-        if (keyMarked(k))
-            return markUnmarkedValue(v);
-        if (!overrideKeyMarking(k))
-            return false;
-        js::gc::MarkObject(tracer, *k, "WeakMap entry wrapper key");
-        markUnmarkedValue(v);
-        return true;
-    }
-    void markEntry(const Value &v) {
-        js::gc::MarkValue(tracer, v, "WeakMap entry value");
-    }
 };
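
With per-type specializations like the two above, a map picks up its key and value policies from the WeakMap template defaults, so a declaration names only the barriered types. A hypothetical instantiation (the typedef is illustrative, not part of this patch):

    // KeyMarkPolicy resolves to DefaultMarkPolicy<HeapPtrObject> and
    // ValueMarkPolicy to DefaultMarkPolicy<HeapValue>, both defined above.
    typedef WeakMap<HeapPtrObject, HeapValue> ObjectValueMap;

Note that only the object policy ever returns true from overrideKeyMarking, and only for XPC wrapped natives during a GC marking pass; every other key type keeps ordinary weak semantics.
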
 
 template <>
-class DefaultMarkPolicy<gc::Cell *, JSObject *> {
-  protected:
+class DefaultMarkPolicy<HeapPtrScript> {
+  private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
-    bool keyMarked(gc::Cell *k)   { return !IsAboutToBeFinalized(tracer->context, k); }
-    bool valueMarked(JSObject *v) { return !IsAboutToBeFinalized(tracer->context, v); }
-    bool markEntryIfLive(gc::Cell *k, JSObject *v) {
-        if (keyMarked(k) && !valueMarked(v)) {
-            js::gc::MarkObject(tracer, *v, "WeakMap entry value");
-            return true;
-        }
-        return false;
+    bool isMarked(const HeapPtrScript &x) {
+        return !IsAboutToBeFinalized(tracer->context, x);
     }
-    void markEntry(JSObject *v) {
-        js::gc::MarkObject(tracer, *v, "WeakMap entry value");
+    bool mark(const HeapPtrScript &x) {
+        if (isMarked(x))
+            return false;
+        js::gc::MarkScript(tracer, x, "WeakMap entry");
+        return true;
     }
+    bool overrideKeyMarking(const HeapPtrScript &k) { return false; }
 };
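
The three specializations differ only in which js::gc::Mark* routine they invoke; if a single overload set covered every barriered type, they could collapse into one template. A sketch of that alternative, assuming a hypothetical overloaded js::gc::Mark that does not exist in the current API:

    template <class T>
    class GenericMarkPolicy {
      private:
        JSTracer *tracer;
      public:
        GenericMarkPolicy(JSTracer *t) : tracer(t) { }
        bool isMarked(const T &x) {
            return !IsAboutToBeFinalized(tracer->context, x);
        }
        bool mark(const T &x) {
            if (isMarked(x))
                return false;
            js::gc::Mark(tracer, x, "WeakMap entry");  // hypothetical overload set
            return true;
        }
        bool overrideKeyMarking(const T &) { return false; }
    };

Keeping explicit specializations instead leaves room for per-type behavior such as the wrapped-native key override above.
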
 
-// A MarkPolicy for WeakMaps whose keys and values may be objects in arbitrary
-// compartments within a runtime.
-//
-// With the current GC, the implementation turns out to be identical to the
-// default mark policy. We give it a distinct name anyway, in case this ever
-// changes.
-//
-typedef DefaultMarkPolicy<gc::Cell *, JSObject *> CrossCompartmentMarkPolicy;
-
 }
 
 extern JSObject *
 js_InitWeakMapClass(JSContext *cx, JSObject *obj);
 
 #endif
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -341,17 +341,17 @@ Wrapper::defaultValue(JSContext *cx, JSO
     if (hint == JSTYPE_VOID)
         return ToPrimitive(cx, vp);
     return ToPrimitive(cx, hint, vp);
 }
 
 void
 Wrapper::trace(JSTracer *trc, JSObject *wrapper)
 {
-    MarkObject(trc, *wrappedObject(wrapper), "wrappedObject");
+    MarkValue(trc, wrapper->getReservedSlotRef(JSSLOT_PROXY_PRIVATE), "wrappedObject");
 }
 
 JSObject *
 Wrapper::wrappedObject(const JSObject *wrapper)
 {
     return GetProxyPrivate(wrapper).toObjectOrNull();
 }
 
@@ -839,17 +839,18 @@ CrossCompartmentWrapper::defaultValue(JS
 
     call.leave();
     return call.origin->wrap(cx, vp);
 }
 
 void
 CrossCompartmentWrapper::trace(JSTracer *trc, JSObject *wrapper)
 {
-    MarkCrossCompartmentObject(trc, *wrappedObject(wrapper), "wrappedObject");
+    MarkCrossCompartmentValue(trc, wrapper->getReservedSlotRef(JSSLOT_PROXY_PRIVATE),
+                              "wrappedObject");
 }
 
 CrossCompartmentWrapper CrossCompartmentWrapper::singleton(0u);
 
 /* Security wrappers. */
 
 template <class Base>
 SecurityWrapper<Base>::SecurityWrapper(uintN flags)
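
Both trace hooks above now mark the proxy's private slot through getReservedSlotRef rather than re-deriving the wrapped object. The distinction matters once the slot is a barriered HeapValue: the tracer and the write barrier must agree on the heap word that holds the edge, and every overwrite must record the old value so an in-progress incremental mark still sees the snapshot it started from. A compressed model of such a slot; writeBarrierPre is a stand-in name for the real pre-barrier entry point, not the actual API:

    void writeBarrierPre(const Value &old);  // assumed barrier hook

    struct SketchHeapValue {
        Value value;
        SketchHeapValue &operator=(const Value &v) {
            writeBarrierPre(value);  // snapshot: mark the value being overwritten
            value = v;               // only then perform the store
            return *this;
        }
    };
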
--- a/js/src/jsxdrapi.cpp
+++ b/js/src/jsxdrapi.cpp
@@ -734,17 +734,17 @@ JS_XDRScript(JSXDRState *xdr, JSScript *
     if (!JS_XDRCStringOrNull(xdr, (char **) &state.filename))
         return false;
 
     if (!js_XDRScript(xdr, &script))
         return false;
 
     if (xdr->mode == JSXDR_DECODE) {
         JS_ASSERT(!script->compileAndGo);
-        script->u.globalObject = GetCurrentGlobal(xdr->cx);
+        script->globalObject = GetCurrentGlobal(xdr->cx);
         js_CallNewScriptHook(xdr->cx, script, NULL);
         Debugger::onNewScript(xdr->cx, script, NULL);
         *scriptp = script;
     }
 
     return true;
 }
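
The rename from script->u.globalObject to script->globalObject likely reflects a general constraint this patch introduces: once a field becomes a barriered pointer, it acquires non-trivial constructors and assignment (they run the write barrier) and can no longer live inside a C++98/03 union, so such fields move out to be direct members. A compressed illustration of the constraint, with types simplified:

    struct Barriered { Barriered &operator=(JSObject *); };  // runs the write barrier

    union U {
        JSObject *raw;   // fine: trivially copyable
        // Barriered g;  // ill-formed in C++98/03: union member with a
        //               // non-trivial assignment operator
    };
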
 
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -73,26 +73,40 @@
 #include "vm/GlobalObject.h"
 
 #include "jsatominlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "jsstrinlines.h"
 
 #include "vm/Stack-inl.h"
+#include "vm/String-inl.h"
 
 #ifdef DEBUG
 #include <string.h>     /* for #ifdef DEBUG memset calls */
 #endif
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 using namespace js::types;
 
+template<class T, class U>
+struct IdentityOp
+{
+    typedef JSBool (* compare)(const T *a, const U *b);
+};
+
+template<class T>
+static JSBool
+pointer_match(const T *a, const T *b)