Backed out 20 changesets (bug 903519) for detected memory leaks on a CLOSED TREE
author Noemi Erli <nerli@mozilla.com>
Thu, 11 Jan 2018 22:18:23 +0200
changeset 450700 65e92478e09d3f1628fccbf5edc4cb080046249e
parent 450699 2ed6a8d139a3bf4d764243be060248811ca420fa
child 450701 b580ce0f1d0623ddf76b272c13aa1a1a444107c5
push id 8531
push user ryanvm@gmail.com
push date Fri, 12 Jan 2018 16:47:01 +0000
treeherder mozilla-beta@0bc627ade5a0
bugs 903519
milestone 59.0a1
backs out 3f72f8747e2938a5b0e6632818e85d79e5f670a2
bb2cc298a15583b524102372d42f722aae5d505a
cc56f32ddae8443542abcb00ff93bd535e7e7069
ec5b307a28aa6e939446eb4c19a09f8b6c7f2287
38f4e0426bdd81c0acdce3929e3e3d81370d1273
71831e232df2957c9ea178986218e3d6eeef6c0b
6f3666e9540e849056347f7b9d8a40e41396115e
c62e5867d763dea5a05117e8ab7c02a9b5994dd8
7854bfe5d68346a38b25d93ea8870f47bce4f901
f5f72c93adf94f4ae670c38941f68c75d19f8072
7d56db66836900bc7758c6829b9235a3dd26947e
7c96258a64595d287eb72a54ee52a656dbb40365
11b3f0fda4adb74211c197ab157796fcafc27b95
2bc9d427f427b7dec19d7c27a44361338ad3f88d
fdb6431ea4ffbbc254e051f3f08eeef23d502062
6d7d15b254896c00ef8e00a6b1d921980d661ac2
457008b194a87ec4dd57858d25fa94f2606492db
80b9d97bf1fe4e23066f4ac7283ab276c3e942f0
1fc5ee0d0116300963a2490fb485207eaa9ada02
9316d8f7b92ad2746505218b30fb8acd376e6edd
Backed out 20 changesets (bug 903519) for detected memory leaks on a CLOSED TREE

Backed out changeset 3f72f8747e29 (bug 903519)
Backed out changeset bb2cc298a155 (bug 903519)
Backed out changeset cc56f32ddae8 (bug 903519)
Backed out changeset ec5b307a28aa (bug 903519)
Backed out changeset 38f4e0426bdd (bug 903519)
Backed out changeset 71831e232df2 (bug 903519)
Backed out changeset 6f3666e9540e (bug 903519)
Backed out changeset c62e5867d763 (bug 903519)
Backed out changeset 7854bfe5d683 (bug 903519)
Backed out changeset f5f72c93adf9 (bug 903519)
Backed out changeset 7d56db668369 (bug 903519)
Backed out changeset 7c96258a6459 (bug 903519)
Backed out changeset 11b3f0fda4ad (bug 903519)
Backed out changeset 2bc9d427f427 (bug 903519)
Backed out changeset fdb6431ea4ff (bug 903519)
Backed out changeset 6d7d15b25489 (bug 903519)
Backed out changeset 457008b194a8 (bug 903519)
Backed out changeset 80b9d97bf1fe (bug 903519)
Backed out changeset 1fc5ee0d0116 (bug 903519)
Backed out changeset 9316d8f7b92a (bug 903519)
js/public/HeapAPI.h
js/public/RootingAPI.h
js/src/gc/AllocKind.h
js/src/gc/Allocator.cpp
js/src/gc/Allocator.h
js/src/gc/AtomMarking-inl.h
js/src/gc/AtomMarking.cpp
js/src/gc/Barrier.cpp
js/src/gc/Barrier.h
js/src/gc/Cell.h
js/src/gc/GCInternals.h
js/src/gc/GCRuntime.h
js/src/gc/GCTrace.cpp
js/src/gc/GCTrace.h
js/src/gc/Heap.h
js/src/gc/Marking-inl.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Nursery-inl.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/NurseryAwareHashMap.h
js/src/gc/RelocationOverlay.h
js/src/gc/StoreBuffer.h
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/gdb/mozilla/JSString.py
js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/CacheIRCompiler.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CodeGenerator.h
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/Ion.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonCacheIRCompiler.cpp
js/src/jit/JitCompartment.h
js/src/jit/Lowering.cpp
js/src/jit/MIR.h
js/src/jit/MIRGenerator.h
js/src/jit/MIRGraph.cpp
js/src/jit/MacroAssembler-inl.h
js/src/jit/MacroAssembler.cpp
js/src/jit/MacroAssembler.h
js/src/jit/TypePolicy.cpp
js/src/jit/TypePolicy.h
js/src/jit/VMFunctions.cpp
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/arm64/MacroAssembler-arm64.cpp
js/src/jit/mips32/MacroAssembler-mips32.cpp
js/src/jit/mips64/MacroAssembler-mips64.cpp
js/src/jit/mips64/MacroAssembler-mips64.h
js/src/jit/shared/CodeGenerator-shared.h
js/src/jit/shared/LIR-shared.h
js/src/jit/shared/LOpcodes-shared.h
js/src/jit/x64/MacroAssembler-x64.cpp
js/src/jit/x86/MacroAssembler-x86.cpp
js/src/jsapi.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsfriendapi.h
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/tests/lib/tests.py
js/src/vm/Caches.h
js/src/vm/HelperThreads.cpp
js/src/vm/HelperThreads.h
js/src/vm/MemoryMetrics.cpp
js/src/vm/NativeObject-inl.h
js/src/vm/NativeObject.h
js/src/vm/Scope.h
js/src/vm/String-inl.h
js/src/vm/String.cpp
js/src/vm/String.h
js/src/vm/UnboxedObject-inl.h
js/src/vm/UnboxedObject.cpp
js/src/vm/UnboxedObject.h
toolkit/components/aboutmemory/tests/test_memoryReporters.xul
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -19,22 +19,16 @@ namespace js {
 
 JS_FRIEND_API(bool)
 CurrentThreadCanAccessZone(JS::Zone* zone);
 
 namespace gc {
 
 struct Cell;
 
-/*
- * The low bit is set so this should never equal a normal pointer, and the high
- * bit is set so this should never equal the upper 32 bits of a 64-bit pointer.
- */
-const uint32_t Relocated = uintptr_t(0xbad0bad1);
-
 const size_t ArenaShift = 12;
 const size_t ArenaSize = size_t(1) << ArenaShift;
 const size_t ArenaMask = ArenaSize - 1;
 
 #ifdef JS_GC_SMALL_CHUNK_SIZE
 const size_t ChunkShift = 18;
 #else
 const size_t ChunkShift = 20;
@@ -62,17 +56,17 @@ const size_t ChunkMarkBitmapOffset = 258
 const size_t ChunkMarkBitmapBits = 31744;
 #else
 const size_t ChunkMarkBitmapOffset = 1032352;
 const size_t ChunkMarkBitmapBits = 129024;
 #endif
 const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
 const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
 const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
-const size_t ChunkStoreBufferOffset = ChunkSize - ChunkTrailerSize + sizeof(uint64_t);
+const size_t ChunkStoreBufferOffset = ChunkLocationOffset + sizeof(uint64_t);
 const size_t ArenaZoneOffset = sizeof(size_t);
 const size_t ArenaHeaderSize = sizeof(size_t) + 2 * sizeof(uintptr_t) +
                                sizeof(size_t) + sizeof(uintptr_t);
 
 /*
  * Live objects are marked black or gray. Everything reachable from a JS root is
  * marked black. Objects marked gray are eligible for cycle collection.
  *
@@ -461,25 +455,20 @@ namespace JS {
 
 static MOZ_ALWAYS_INLINE Zone*
 GetTenuredGCThingZone(GCCellPtr thing)
 {
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
     return js::gc::detail::GetGCThingZone(thing.unsafeAsUIntPtr());
 }
 
-extern JS_PUBLIC_API(Zone*)
-GetNurseryStringZone(JSString* str);
-
 static MOZ_ALWAYS_INLINE Zone*
 GetStringZone(JSString* str)
 {
-    if (!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(str)))
-        return js::gc::detail::GetGCThingZone(reinterpret_cast<uintptr_t>(str));
-    return GetNurseryStringZone(str);
+    return js::gc::detail::GetGCThingZone(uintptr_t(str));
 }
 
 extern JS_PUBLIC_API(Zone*)
 GetObjectZone(JSObject* obj);
 
 extern JS_PUBLIC_API(Zone*)
 GetValueZone(const Value& value);
 
@@ -489,22 +478,16 @@ GCThingIsMarkedGray(GCCellPtr thing)
     if (thing.mayBeOwnedByOtherRuntime())
         return false;
     return js::gc::detail::CellIsMarkedGrayIfKnown(thing.asCell());
 }
 
 extern JS_PUBLIC_API(JS::TraceKind)
 GCThingTraceKind(void* thing);
 
-extern JS_PUBLIC_API(void)
-EnableNurseryStrings(JSContext* cx);
-
-extern JS_PUBLIC_API(void)
-DisableNurseryStrings(JSContext* cx);
-
 /*
  * Returns true when writes to GC thing pointers (and reads from weak pointers)
  * must call an incremental barrier. This is generally only true when running
  * mutator code in-between GC slices. At other times, the barrier may be elided
  * for performance.
  */
 extern JS_PUBLIC_API(bool)
 IsIncrementalBarrierNeeded(JSContext* cx);
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -194,32 +194,31 @@ namespace JS {
 
 template <typename T> class Rooted;
 template <typename T> class PersistentRooted;
 
 /* This is exposing internal state of the GC for inlining purposes. */
 JS_FRIEND_API(bool) isGCEnabled();
 
 JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
-JS_FRIEND_API(void) HeapStringPostBarrier(JSString** objp, JSString* prev, JSString* next);
 
 #ifdef JS_DEBUG
 /**
  * For generational GC, assert that an object is in the tenured generation as
  * opposed to being in the nursery.
  */
 extern JS_FRIEND_API(void)
 AssertGCThingMustBeTenured(JSObject* obj);
 extern JS_FRIEND_API(void)
-AssertGCThingIsNotNurseryAllocable(js::gc::Cell* cell);
+AssertGCThingIsNotAnObjectSubclass(js::gc::Cell* cell);
 #else
 inline void
 AssertGCThingMustBeTenured(JSObject* obj) {}
 inline void
-AssertGCThingIsNotNurseryAllocable(js::gc::Cell* cell) {}
+AssertGCThingIsNotAnObjectSubclass(js::gc::Cell* cell) {}
 #endif
 
 /**
  * The Heap<T> class is a heap-stored reference to a JS GC thing. All members of
  * heap classes that refer to GC things should use Heap<T> (or possibly
  * TenuredHeap<T>, described below).
  *
  * Heap<T> is an abstraction that hides some of the complexity required to
@@ -620,17 +619,17 @@ struct BarrierMethods<T*>
     static gc::Cell* asGCThingOrNull(T* v) {
         if (!v)
             return nullptr;
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
     static void postBarrier(T** vp, T* prev, T* next) {
         if (next)
-            JS::AssertGCThingIsNotNurseryAllocable(reinterpret_cast<js::gc::Cell*>(next));
+            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
     }
     static void exposeToJS(T* t) {
         if (t)
             js::gc::ExposeGCThingToActiveJS(JS::GCCellPtr(t));
     }
 };
 
 template <>
@@ -668,31 +667,16 @@ struct BarrierMethods<JSFunction*>
                                   reinterpret_cast<JSObject*>(next));
     }
     static void exposeToJS(JSFunction* fun) {
         if (fun)
             JS::ExposeObjectToActiveJS(reinterpret_cast<JSObject*>(fun));
     }
 };
 
-template <>
-struct BarrierMethods<JSString*>
-{
-    static JSString* initial() { return nullptr; }
-    static gc::Cell* asGCThingOrNull(JSString* v) {
-        if (!v)
-            return nullptr;
-        MOZ_ASSERT(uintptr_t(v) > 32);
-        return reinterpret_cast<gc::Cell*>(v);
-    }
-    static void postBarrier(JSString** vp, JSString* prev, JSString* next) {
-        JS::HeapStringPostBarrier(vp, prev, next);
-    }
-};
-
 // Provide hash codes for Cell kinds that may be relocated and, thus, not have
 // a stable address to use as the base for a hash code. Instead of the address,
 // this hasher uses Cell::getUniqueId to provide exact matches and as a base
 // for generating hash codes.
 //
 // Note: this hasher, like PointerHasher can "hash" a nullptr. While a nullptr
 // would not likely be a useful key, there are some cases where being able to
 // hash a nullptr is useful, either on purpose or because of bugs:
--- a/js/src/gc/AllocKind.h
+++ b/js/src/gc/AllocKind.h
@@ -46,41 +46,35 @@ namespace gc {
     D(OBJECT4_BACKGROUND,  Object,       JSObject,          JSObject_Slots4,   true,   true)  \
     D(OBJECT8,             Object,       JSObject,          JSObject_Slots8,   false,  false) \
     D(OBJECT8_BACKGROUND,  Object,       JSObject,          JSObject_Slots8,   true,   true)  \
     D(OBJECT12,            Object,       JSObject,          JSObject_Slots12,  false,  false) \
     D(OBJECT12_BACKGROUND, Object,       JSObject,          JSObject_Slots12,  true,   true)  \
     D(OBJECT16,            Object,       JSObject,          JSObject_Slots16,  false,  false) \
     D(OBJECT16_BACKGROUND, Object,       JSObject,          JSObject_Slots16,  true,   true)
 
-#define FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(D) \
+#define FOR_EACH_NONOBJECT_ALLOCKIND(D) \
  /* AllocKind              TraceKind     TypeName           SizedType          BGFinal Nursery */ \
     D(SCRIPT,              Script,       JSScript,          JSScript,          false,  false) \
     D(LAZY_SCRIPT,         LazyScript,   js::LazyScript,    js::LazyScript,    true,   false) \
     D(SHAPE,               Shape,        js::Shape,         js::Shape,         true,   false) \
     D(ACCESSOR_SHAPE,      Shape,        js::AccessorShape, js::AccessorShape, true,   false) \
     D(BASE_SHAPE,          BaseShape,    js::BaseShape,     js::BaseShape,     true,   false) \
     D(OBJECT_GROUP,        ObjectGroup,  js::ObjectGroup,   js::ObjectGroup,   true,   false) \
+    D(FAT_INLINE_STRING,   String,       JSFatInlineString, JSFatInlineString, true,   false) \
+    D(STRING,              String,       JSString,          JSString,          true,   false) \
     D(EXTERNAL_STRING,     String,       JSExternalString,  JSExternalString,  true,   false) \
     D(FAT_INLINE_ATOM,     String,       js::FatInlineAtom, js::FatInlineAtom, true,   false) \
     D(ATOM,                String,       js::NormalAtom,    js::NormalAtom,    true,   false) \
     D(SYMBOL,              Symbol,       JS::Symbol,        JS::Symbol,        true,   false) \
     D(JITCODE,             JitCode,      js::jit::JitCode,  js::jit::JitCode,  false,  false) \
     D(SCOPE,               Scope,        js::Scope,         js::Scope,         true,   false) \
     D(REGEXP_SHARED,       RegExpShared, js::RegExpShared,  js::RegExpShared,  true,   false)
 
-#define FOR_EACH_NURSERY_STRING_ALLOCKIND(D) \
-    D(FAT_INLINE_STRING,   String,        JSFatInlineString, JSFatInlineString, true,   true) \
-    D(STRING,              String,        JSString,          JSString,          true,   true)
-
-#define FOR_EACH_NONOBJECT_ALLOCKIND(D) \
-    FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(D) \
-    FOR_EACH_NURSERY_STRING_ALLOCKIND(D)
-
-#define FOR_EACH_ALLOCKIND(D)    \
+#define FOR_EACH_ALLOCKIND(D) \
     FOR_EACH_OBJECT_ALLOCKIND(D) \
     FOR_EACH_NONOBJECT_ALLOCKIND(D)
 
 enum class AllocKind : uint8_t {
 #define DEFINE_ALLOC_KIND(allocKind, _1, _2, _3, _4, _5) allocKind,
 
     FOR_EACH_OBJECT_ALLOCKIND(DEFINE_ALLOC_KIND)
 
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -70,19 +70,18 @@ js::Allocate(JSContext* cx, AllocKind ki
 }
 template JSObject* js::Allocate<JSObject, NoGC>(JSContext* cx, gc::AllocKind kind,
                                                 size_t nDynamicSlots, gc::InitialHeap heap,
                                                 const Class* clasp);
 template JSObject* js::Allocate<JSObject, CanGC>(JSContext* cx, gc::AllocKind kind,
                                                  size_t nDynamicSlots, gc::InitialHeap heap,
                                                  const Class* clasp);
 
-// Attempt to allocate a new JSObject out of the nursery. If there is not
-// enough room in the nursery or there is an OOM, this method will return
-// nullptr.
+// Attempt to allocate a new GC thing out of the nursery. If there is not enough
+// room in the nursery or there is an OOM, this method will return nullptr.
 template <AllowGC allowGC>
 JSObject*
 GCRuntime::tryNewNurseryObject(JSContext* cx, size_t thingSize, size_t nDynamicSlots, const Class* clasp)
 {
     MOZ_ASSERT(cx->isNurseryAllocAllowed());
     MOZ_ASSERT(!cx->helperThread());
     MOZ_ASSERT(!IsAtomsCompartment(cx->compartment()));
     JSObject* obj = cx->nursery().allocateObject(cx, thingSize, nDynamicSlots, clasp);
@@ -123,94 +122,16 @@ GCRuntime::tryNewTenuredObject(JSContext
     if (obj)
         obj->setInitialSlotsMaybeNonNative(slots);
     else
         js_free(slots);
 
     return obj;
 }
 
-// Attempt to allocate a new string out of the nursery. If there is not enough
-// room in the nursery or there is an OOM, this method will return nullptr.
-template <AllowGC allowGC>
-JSString*
-GCRuntime::tryNewNurseryString(JSContext* cx, size_t thingSize, AllocKind kind)
-{
-    MOZ_ASSERT(IsNurseryAllocable(kind));
-    MOZ_ASSERT(cx->isNurseryAllocAllowed());
-    MOZ_ASSERT(!cx->helperThread());
-    MOZ_ASSERT(!IsAtomsCompartment(cx->compartment()));
-
-    Cell* cell = cx->nursery().allocateString(cx, cx->zone(), thingSize, kind);
-    if (cell)
-        return static_cast<JSString*>(cell);
-
-    if (allowGC && !cx->suppressGC) {
-        cx->runtime()->gc.minorGC(JS::gcreason::OUT_OF_NURSERY);
-
-        // Exceeding gcMaxBytes while tenuring can disable the Nursery.
-        if (cx->nursery().isEnabled()) {
-            cell = cx->nursery().allocateString(cx, cx->zone(), thingSize, kind);
-            MOZ_ASSERT(cell);
-            return static_cast<JSString*>(cell);
-        }
-    }
-    return nullptr;
-}
-
-template <typename StringAllocT, AllowGC allowGC /* = CanGC */>
-StringAllocT*
-js::AllocateString(JSContext* cx, InitialHeap heap)
-{
-    static_assert(mozilla::IsConvertible<StringAllocT*, JSString*>::value, "must be JSString derived");
-
-    AllocKind kind = MapTypeToFinalizeKind<StringAllocT>::kind;
-    size_t size = sizeof(StringAllocT);
-    MOZ_ASSERT(size == Arena::thingSize(kind));
-    MOZ_ASSERT(size == sizeof(JSString) || size == sizeof(JSFatInlineString));
-
-    // Off-thread alloc cannot trigger GC or make runtime assertions.
-    if (cx->helperThread()) {
-        StringAllocT* str = GCRuntime::tryNewTenuredThing<StringAllocT, NoGC>(cx, kind, size);
-        if (MOZ_UNLIKELY(allowGC && !str))
-            ReportOutOfMemory(cx);
-        return str;
-    }
-
-    JSRuntime* rt = cx->runtime();
-    if (!rt->gc.checkAllocatorState<allowGC>(cx, kind))
-        return nullptr;
-
-    if (cx->nursery().isEnabled() &&
-        heap != TenuredHeap &&
-        cx->nursery().canAllocateStrings() &&
-        cx->zone()->allocNurseryStrings)
-    {
-        auto str = static_cast<StringAllocT*>(rt->gc.tryNewNurseryString<allowGC>(cx, size, kind));
-        if (str)
-            return str;
-
-        // Our most common non-jit allocation path is NoGC; thus, if we fail the
-        // alloc and cannot GC, we *must* return nullptr here so that the caller
-        // will do a CanGC allocation to clear the nursery. Failing to do so will
-        // cause all allocations on this path to land in Tenured, and we will not
-        // get the benefit of the nursery.
-        if (!allowGC)
-            return nullptr;
-    }
-
-    return GCRuntime::tryNewTenuredThing<StringAllocT, allowGC>(cx, kind, size);
-}
-
-#define DECL_ALLOCATOR_INSTANCES(allocKind, traceKind, type, sizedType, bgfinal, nursery) \
-    template type* js::AllocateString<type, NoGC>(JSContext* cx, InitialHeap heap);\
-    template type* js::AllocateString<type, CanGC>(JSContext* cx, InitialHeap heap);
-FOR_EACH_NURSERY_STRING_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
-#undef DECL_ALLOCATOR_INSTANCES
-
 template <typename T, AllowGC allowGC /* = CanGC */>
 T*
 js::Allocate(JSContext* cx)
 {
     static_assert(!mozilla::IsConvertible<T*, JSObject*>::value, "must not be JSObject derived");
     static_assert(sizeof(T) >= MinCellSize,
                   "All allocations must be at least the allocator-imposed minimum size.");
 
@@ -224,17 +145,17 @@ js::Allocate(JSContext* cx)
     }
 
     return GCRuntime::tryNewTenuredThing<T, allowGC>(cx, kind, thingSize);
 }
 
 #define DECL_ALLOCATOR_INSTANCES(allocKind, traceKind, type, sizedType, bgFinal, nursery) \
     template type* js::Allocate<type, NoGC>(JSContext* cx);\
     template type* js::Allocate<type, CanGC>(JSContext* cx);
-FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
+FOR_EACH_NONOBJECT_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
 #undef DECL_ALLOCATOR_INSTANCES
 
 template <typename T, AllowGC allowGC>
 /* static */ T*
 GCRuntime::tryNewTenuredThing(JSContext* cx, AllocKind kind, size_t thingSize)
 {
     // Bump allocate in the arena's current free-list span.
     T* t = reinterpret_cast<T*>(cx->arenas()->allocateFromFreeList(kind, thingSize));
--- a/js/src/gc/Allocator.h
+++ b/js/src/gc/Allocator.h
@@ -5,64 +5,32 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Allocator_h
 #define gc_Allocator_h
 
 #include "gc/Heap.h"
 #include "js/RootingAPI.h"
 
-class JSFatInlineString;
-
 namespace js {
 
 struct Class;
 
 // Allocate a new GC thing. After a successful allocation the caller must
 // fully initialize the thing before calling any function that can potentially
 // trigger GC. This will ensure that GC tracing never sees junk values stored
 // in the partially initialized thing.
-
+//
+// Note that JSObject allocation must use the longer signature below that
+// includes slot, heap, and finalizer information in support of various
+// object-specific optimizations.
 template <typename T, AllowGC allowGC = CanGC>
 T*
 Allocate(JSContext* cx);
 
-// Use for JSObject. A longer signature that includes additional information in
-// support of various optimizations.
 template <typename, AllowGC allowGC = CanGC>
 JSObject*
 Allocate(JSContext* cx, gc::AllocKind kind, size_t nDynamicSlots, gc::InitialHeap heap,
          const Class* clasp);
 
-// Internal function used for nursery-allocatable strings.
-template <typename StringAllocT, AllowGC allowGC = CanGC>
-StringAllocT*
-AllocateString(JSContext* cx, gc::InitialHeap heap);
-
-// Use for nursery-allocatable strings. Returns a value cast to the correct
-// type.
-template <typename StringT, AllowGC allowGC = CanGC>
-StringT*
-Allocate(JSContext* cx, gc::InitialHeap heap)
-{
-    return static_cast<StringT*>(js::AllocateString<JSString, allowGC>(cx, heap));
-}
-
-// Specialization for JSFatInlineString that must use a different allocation
-// type. Note that we have to explicitly specialize for both values of AllowGC
-// because partial function specialization is not allowed.
-template <>
-inline JSFatInlineString*
-Allocate<JSFatInlineString, CanGC>(JSContext* cx, gc::InitialHeap heap)
-{
-    return static_cast<JSFatInlineString*>(js::AllocateString<JSFatInlineString, CanGC>(cx, heap));
-}
-
-template <>
-inline JSFatInlineString*
-Allocate<JSFatInlineString, NoGC>(JSContext* cx, gc::InitialHeap heap)
-{
-    return static_cast<JSFatInlineString*>(js::AllocateString<JSFatInlineString, NoGC>(cx, heap));
-}
-
 } // namespace js
 
 #endif // gc_Allocator_h
--- a/js/src/gc/AtomMarking-inl.h
+++ b/js/src/gc/AtomMarking-inl.h
@@ -38,28 +38,27 @@ template <typename T>
 MOZ_ALWAYS_INLINE void
 AtomMarkingRuntime::inlinedMarkAtom(JSContext* cx, T* thing)
 {
     static_assert(mozilla::IsSame<T, JSAtom>::value ||
                   mozilla::IsSame<T, JS::Symbol>::value,
                   "Should only be called with JSAtom* or JS::Symbol* argument");
 
     MOZ_ASSERT(thing);
-    js::gc::TenuredCell* cell = &thing->asTenured();
-    MOZ_ASSERT(cell->zoneFromAnyThread()->isAtomsZone());
+    MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());
 
     // The context's zone will be null during initialization of the runtime.
     if (!cx->zone())
         return;
     MOZ_ASSERT(!cx->zone()->isAtomsZone());
 
     if (ThingIsPermanent(thing))
         return;
 
-    size_t bit = GetAtomBit(cell);
+    size_t bit = GetAtomBit(thing);
     MOZ_ASSERT(bit / JS_BITS_PER_WORD < allocatedWords);
 
     cx->zone()->markedAtoms().setBit(bit);
 
     if (!cx->helperThread()) {
         // Trigger a read barrier on the atom, in case there is an incremental
         // GC in progress. This is necessary if the atom is being marked
         // because a reference to it was obtained from another zone which is
--- a/js/src/gc/AtomMarking.cpp
+++ b/js/src/gc/AtomMarking.cpp
@@ -218,35 +218,33 @@ AtomMarkingRuntime::atomIsMarked(Zone* z
         return true;
 
     if (mozilla::IsSame<T, JSAtom>::value) {
         JSAtom* atom = reinterpret_cast<JSAtom*>(thing);
         if (AtomIsPinnedInRuntime(zone->runtimeFromAnyThread(), atom))
             return true;
     }
 
-    size_t bit = GetAtomBit(&thing->asTenured());
+    size_t bit = GetAtomBit(thing);
     return zone->markedAtoms().getBit(bit);
 }
 
 template bool AtomMarkingRuntime::atomIsMarked(Zone* zone, JSAtom* thing);
 template bool AtomMarkingRuntime::atomIsMarked(Zone* zone, JS::Symbol* thing);
 
 template<>
 bool
 AtomMarkingRuntime::atomIsMarked(Zone* zone, TenuredCell* thing)
 {
     if (!thing)
         return true;
 
     if (thing->is<JSString>()) {
         JSString* str = thing->as<JSString>();
-        if (!str->isAtom())
-            return true;
-        return atomIsMarked(zone, &str->asAtom());
+        return str->isAtom() ? atomIsMarked(zone, &str->asAtom()) : true;
     }
 
     if (thing->is<JS::Symbol>())
         return atomIsMarked(zone, thing->as<JS::Symbol>());
 
     return true;
 }
 
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -216,20 +216,13 @@ template struct JS_PUBLIC_API(MovableCel
 JS_PUBLIC_API(void)
 JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
 {
     MOZ_ASSERT(objp);
     js::InternalBarrierMethods<JSObject*>::postBarrier(objp, prev, next);
 }
 
 JS_PUBLIC_API(void)
-JS::HeapStringPostBarrier(JSString** strp, JSString* prev, JSString* next)
-{
-    MOZ_ASSERT(strp);
-    js::InternalBarrierMethods<JSString*>::postBarrier(strp, prev, next);
-}
-
-JS_PUBLIC_API(void)
 JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
 {
     MOZ_ASSERT(valuep);
     js::InternalBarrierMethods<JS::Value>::postBarrier(valuep, prev, next);
 }
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -284,28 +284,28 @@ struct InternalBarrierMethods<Value>
     }
 
     static MOZ_ALWAYS_INLINE void postBarrier(Value* vp, const Value& prev, const Value& next) {
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
         MOZ_ASSERT(vp);
 
         // If the target needs an entry, add it.
         js::gc::StoreBuffer* sb;
-        if ((next.isObject() || next.isString()) && (sb = next.toGCThing()->storeBuffer())) {
+        if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
             // If we know that the prev has already inserted an entry, we can
             // skip doing the lookup to add the new entry. Note that we cannot
             // safely assert the presence of the entry because it may have been
             // added via a different store buffer.
-            if ((prev.isObject() || prev.isString()) && prev.toGCThing()->storeBuffer())
+            if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer())
                 return;
             sb->putValue(vp);
             return;
         }
         // Remove the prev entry if the new value does not need it.
-        if ((prev.isObject() || prev.isString()) && (sb = prev.toGCThing()->storeBuffer()))
+        if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
             sb->unputValue(vp);
     }
 
     static void readBarrier(const Value& v) {
         DispatchTyped(ReadBarrierFunctor<Value>(), v);
     }
 };
 
@@ -682,18 +682,18 @@ class HeapSlot : public WriteBarrieredBa
         post(owner, kind, slot, v);
     }
 
   private:
     void post(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
 #ifdef DEBUG
         assertPreconditionForWriteBarrierPost(owner, kind, slot, target);
 #endif
-        if (this->value.isObject() || this->value.isString()) {
-            gc::Cell* cell = this->value.toGCThing();
+        if (this->value.isObject()) {
+            gc::Cell* cell = reinterpret_cast<gc::Cell*>(&this->value.toObject());
             if (cell->storeBuffer())
                 cell->storeBuffer()->putSlot(owner, kind, slot, 1);
         }
     }
 };
 
 class HeapSlotArray
 {
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -237,21 +237,17 @@ inline StoreBuffer*
 Cell::storeBuffer() const
 {
     return chunk()->trailer.storeBuffer;
 }
 
 inline JS::TraceKind
 Cell::getTraceKind() const
 {
-    if (isTenured())
-        return asTenured().getTraceKind();
-    if (js::shadow::String::nurseryCellIsString(this))
-        return JS::TraceKind::String;
-    return JS::TraceKind::Object;
+    return isTenured() ? asTenured().getTraceKind() : JS::TraceKind::Object;
 }
 
 /* static */ MOZ_ALWAYS_INLINE bool
 Cell::needWriteBarrierPre(JS::Zone* zone) {
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 /* static */ MOZ_ALWAYS_INLINE TenuredCell*
@@ -415,18 +411,17 @@ TenuredCell::writeBarrierPre(TenuredCell
         MOZ_ASSERT(tmp == thing);
     }
 }
 
 static MOZ_ALWAYS_INLINE void
 AssertValidToSkipBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!IsInsideNursery(thing));
-    MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object &&
-                         MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::String);
+    MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object);
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 TenuredCell::writeBarrierPost(void* cellp, TenuredCell* prior, TenuredCell* next)
 {
     AssertValidToSkipBarrier(next);
 }
 
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -242,12 +242,26 @@ DelayCrossCompartmentGrayMarking(JSObjec
 
 inline bool
 IsOOMReason(JS::gcreason::Reason reason)
 {
     return reason == JS::gcreason::LAST_DITCH ||
            reason == JS::gcreason::MEM_PRESSURE;
 }
 
+inline void
+RelocationOverlay::forwardTo(Cell* cell)
+{
+    MOZ_ASSERT(!isForwarded());
+    // The location of magic_ is important because it must never be valid to see
+    // the value Relocated there in a GC thing that has not been moved.
+    static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSObject, group_) &&
+                  offsetof(RelocationOverlay, magic_) == offsetof(js::Shape, base_) &&
+                  offsetof(RelocationOverlay, magic_) == offsetof(JSString, d.u1.flags),
+                  "RelocationOverlay::magic_ is in the wrong location");
+    magic_ = Relocated;
+    newLocation_ = cell;
+}
+
 } /* namespace gc */
 } /* namespace js */
 
 #endif /* gc_GCInternals_h */
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -791,18 +791,16 @@ class GCRuntime
 
     enum TraceOrMarkRuntime {
         TraceRuntime,
         MarkRuntime
     };
     void traceRuntime(JSTracer* trc, AutoLockForExclusiveAccess& lock);
     void traceRuntimeForMinorGC(JSTracer* trc, AutoLockForExclusiveAccess& lock);
 
-    void purgeRuntimeForMinorGC();
-
     void shrinkBuffers();
     void onOutOfMallocMemory();
     void onOutOfMallocMemory(const AutoLockGC& lock);
 
 #ifdef JS_GC_ZEAL
     const void* addressOfZealModeBits() { return &zealModeBits; }
     void getZealBits(uint32_t* zealBits, uint32_t* frequency, uint32_t* nextScheduled);
     void setZeal(uint8_t zeal, uint32_t frequency);
@@ -995,18 +993,16 @@ class GCRuntime
     template <AllowGC allowGC>
     JSObject* tryNewNurseryObject(JSContext* cx, size_t thingSize, size_t nDynamicSlots,
                                   const Class* clasp);
     template <AllowGC allowGC>
     static JSObject* tryNewTenuredObject(JSContext* cx, AllocKind kind, size_t thingSize,
                                          size_t nDynamicSlots);
     template <typename T, AllowGC allowGC>
     static T* tryNewTenuredThing(JSContext* cx, AllocKind kind, size_t thingSize);
-    template <AllowGC allowGC>
-    JSString* tryNewNurseryString(JSContext* cx, size_t thingSize, AllocKind kind);
     static TenuredCell* refillFreeListInGC(Zone* zone, AllocKind thingKind);
 
     void bufferGrayRoots();
 
     /*
      * Concurrent sweep infrastructure.
      */
     void startTask(GCParallelTask& task, gcstats::PhaseKind phase, AutoLockHelperThreadState& locked);
@@ -1498,19 +1494,16 @@ class GCRuntime
     ActiveThreadData<LifoAlloc> blocksToFreeAfterMinorGC;
 
     const void* addressOfNurseryPosition() {
         return nursery_.refNoCheck().addressOfPosition();
     }
     const void* addressOfNurseryCurrentEnd() {
         return nursery_.refNoCheck().addressOfCurrentEnd();
     }
-    const void* addressOfStringNurseryCurrentEnd() {
-        return nursery_.refNoCheck().addressOfCurrentStringEnd();
-    }
 
     void minorGC(JS::gcreason::Reason reason,
                  gcstats::PhaseKind phase = gcstats::PhaseKind::MINOR_GC) JS_HAZ_GC_CALL;
     void evictNursery(JS::gcreason::Reason reason = JS::gcreason::EVICT_NURSERY) {
         minorGC(reason, gcstats::PhaseKind::EVICT_NURSERY);
     }
     void freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo);
 
--- a/js/src/gc/GCTrace.cpp
+++ b/js/src/gc/GCTrace.cpp
@@ -129,23 +129,16 @@ js::gc::TraceNurseryAlloc(Cell* thing, s
         /* We don't have AllocKind here, but we can work it out from size. */
         unsigned slots = (size - sizeof(JSObject)) / sizeof(JS::Value);
         AllocKind kind = GetBackgroundAllocKind(GetGCObjectKind(slots));
         TraceEvent(TraceEventNurseryAlloc, uint64_t(thing), kind);
     }
 }
 
 void
-js::gc::TraceNurseryAlloc(Cell* thing, AllocKind kind)
-{
-    if (thing)
-        TraceEvent(TraceEventNurseryAlloc, uint64_t(thing), kind);
-}
-
-void
 js::gc::TraceTenuredAlloc(Cell* thing, AllocKind kind)
 {
     if (thing)
         TraceEvent(TraceEventTenuredAlloc, uint64_t(thing), kind);
 }
 
 static void
 MaybeTraceClass(const Class* clasp)
--- a/js/src/gc/GCTrace.h
+++ b/js/src/gc/GCTrace.h
@@ -16,34 +16,32 @@ class ObjectGroup;
 namespace gc {
 
 #ifdef JS_GC_TRACE
 
 extern MOZ_MUST_USE bool InitTrace(GCRuntime& gc);
 extern void FinishTrace();
 extern bool TraceEnabled();
 extern void TraceNurseryAlloc(Cell* thing, size_t size);
-extern void TraceNurseryAlloc(Cell* thing, AllocKind kind);
 extern void TraceTenuredAlloc(Cell* thing, AllocKind kind);
 extern void TraceCreateObject(JSObject* object);
 extern void TraceMinorGCStart();
 extern void TracePromoteToTenured(Cell* src, Cell* dst);
 extern void TraceMinorGCEnd();
 extern void TraceMajorGCStart();
 extern void TraceTenuredFinalize(Cell* thing);
 extern void TraceMajorGCEnd();
 extern void TraceTypeNewScript(js::ObjectGroup* group);
 
 #else
 
 inline MOZ_MUST_USE bool InitTrace(GCRuntime& gc) { return true; }
 inline void FinishTrace() {}
 inline bool TraceEnabled() { return false; }
 inline void TraceNurseryAlloc(Cell* thing, size_t size) {}
-inline void TraceNurseryAlloc(Cell* thing, AllocKind kind) {}
 inline void TraceTenuredAlloc(Cell* thing, AllocKind kind) {}
 inline void TraceCreateObject(JSObject* object) {}
 inline void TraceMinorGCStart() {}
 inline void TracePromoteToTenured(Cell* src, Cell* dst) {}
 inline void TraceMinorGCEnd() {}
 inline void TraceMajorGCStart() {}
 inline void TraceTenuredFinalize(Cell* thing) {}
 inline void TraceMajorGCEnd() {}
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -883,18 +883,16 @@ InFreeList(Arena* arena, void* thing)
 {
     uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
     MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
     return arena->inFreeList(addr);
 }
 
 static const int32_t ChunkLocationOffsetFromLastByte =
     int32_t(gc::ChunkLocationOffset) - int32_t(gc::ChunkMask);
-static const int32_t ChunkStoreBufferOffsetFromLastByte =
-    int32_t(gc::ChunkStoreBufferOffset) - int32_t(gc::ChunkMask);
 
 } /* namespace gc */
 
 namespace debug {
 
 // Utility functions meant to be called from an interactive debugger.
 enum class MarkInfo : int {
     BLACK = 0,
--- a/js/src/gc/Marking-inl.h
+++ b/js/src/gc/Marking-inl.h
@@ -29,48 +29,48 @@ struct MightBeForwarded
                               mozilla::IsBaseOf<JSScript, T>::value ||
                               mozilla::IsBaseOf<js::LazyScript, T>::value ||
                               mozilla::IsBaseOf<js::Scope, T>::value ||
                               mozilla::IsBaseOf<js::RegExpShared, T>::value;
 };
 
 template <typename T>
 inline bool
-IsForwarded(const T* t)
+IsForwarded(T* t)
 {
-    const RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
+    RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
     if (!MightBeForwarded<T>::value) {
         MOZ_ASSERT(!overlay->isForwarded());
         return false;
     }
 
     return overlay->isForwarded();
 }
 
 struct IsForwardedFunctor : public BoolDefaultAdaptor<Value, false> {
-    template <typename T> bool operator()(const T* t) { return IsForwarded(t); }
+    template <typename T> bool operator()(T* t) { return IsForwarded(t); }
 };
 
 inline bool
 IsForwarded(const JS::Value& value)
 {
     return DispatchTyped(IsForwardedFunctor(), value);
 }
 
 template <typename T>
 inline T*
-Forwarded(const T* t)
+Forwarded(T* t)
 {
-    const RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
+    RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
     MOZ_ASSERT(overlay->isForwarded());
     return reinterpret_cast<T*>(overlay->forwardingAddress());
 }
 
 struct ForwardedFunctor : public IdentityDefaultAdaptor<Value> {
-    template <typename T> inline Value operator()(const T* t) {
+    template <typename T> inline Value operator()(T* t) {
         return js::gc::RewrapTaggedPointer<Value, T>::wrap(Forwarded(t));
     }
 };
 
 inline Value
 Forwarded(const JS::Value& value)
 {
     return DispatchTyped(ForwardedFunctor(), value);
@@ -81,32 +81,16 @@ inline T
 MaybeForwarded(T t)
 {
     if (IsForwarded(t))
         t = Forwarded(t);
     MakeAccessibleAfterMovingGC(t);
     return t;
 }
 
-inline void
-RelocationOverlay::forwardTo(Cell* cell)
-{
-    MOZ_ASSERT(!isForwarded());
-    // The location of magic_ is important because it must never be valid to see
-    // the value Relocated there in a GC thing that has not been moved.
-    static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSObject, group_) + sizeof(uint32_t),
-                  "RelocationOverlay::magic_ is in the wrong location");
-    static_assert(offsetof(RelocationOverlay, magic_) == offsetof(js::Shape, base_) + sizeof(uint32_t),
-                  "RelocationOverlay::magic_ is in the wrong location");
-    static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSString, d.u1.length),
-                  "RelocationOverlay::magic_ is in the wrong location");
-    magic_ = Relocated;
-    newLocation_ = cell;
-}
-
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 template <typename T>
 inline bool
 IsGCThingValidAfterMovingGC(T* t)
 {
     return !IsInsideNursery(t) && !RelocationOverlay::isCellForwarded(t);
 }
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -620,17 +620,17 @@ js::TraceProcessGlobalRoot(JSTracer* trc
     // We have to mark permanent atoms and well-known symbols through a special
     // method because the default DoMarking implementation automatically skips
     // them. Fortunately, atoms (permanent and non) cannot refer to other GC
     // things so they do not need to go through the mark stack and may simply
     // be marked directly.  Moreover, well-known symbols can refer only to
     // permanent atoms, so likewise require no subsequent marking.
     CheckTracedThing(trc, *ConvertToBase(&thing));
     if (trc->isMarkingTracer())
-        thing->asTenured().markIfUnmarked(gc::MarkColor::Black);
+        thing->markIfUnmarked(gc::MarkColor::Black);
     else
         DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
 }
 template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*, const char*);
 template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*, const char*);
 
 // A typed functor adaptor for TraceRoot.
 struct TraceRootFunctor {
@@ -792,28 +792,16 @@ ShouldMark<JSObject*>(GCMarker* gcmarker
         return false;
 
     // Don't mark things outside a zone if we are in a per-zone GC. It is
     // faster to check our own arena, which we can do since we know that
     // the object is tenured.
     return obj->asTenured().zone()->shouldMarkInZone();
 }
 
-// JSStrings can also be in the nursery. See ShouldMark<JSObject*> for comments.
-template <>
-bool
-ShouldMark<JSString*>(GCMarker* gcmarker, JSString* str)
-{
-    if (IsOwnedByOtherRuntime(gcmarker->runtime(), str))
-        return false;
-    if (IsInsideNursery(str))
-        return false;
-    return str->asTenured().zone()->shouldMarkInZone();
-}
-
 template <typename T>
 void
 DoMarking(GCMarker* gcmarker, T* thing)
 {
     // Do per-type marking precondition checks.
     if (!ShouldMark(gcmarker, thing))
         return;
 
@@ -2701,43 +2689,30 @@ TenuringTracer::traverse(T** tp)
 
 template <>
 void
 TenuringTracer::traverse(JSObject** objp)
 {
     // We only ever visit the internals of objects after moving them to tenured.
     MOZ_ASSERT(!nursery().isInside(objp));
 
-    Cell** cellp = reinterpret_cast<Cell**>(objp);
-    if (!IsInsideNursery(*cellp) || nursery().getForwardedPointer(cellp))
+    JSObject* obj = *objp;
+    if (!IsInsideNursery(obj) || nursery().getForwardedPointer(objp))
         return;
 
     // Take a fast path for tenuring a plain object which is by far the most
     // common case.
-    JSObject* obj = *objp;
     if (obj->is<PlainObject>()) {
         *objp = movePlainObjectToTenured(&obj->as<PlainObject>());
         return;
     }
 
     *objp = moveToTenuredSlow(obj);
 }
 
-template <>
-void
-TenuringTracer::traverse(JSString** strp)
-{
-    // We only ever visit the internals of strings after moving them to tenured.
-    MOZ_ASSERT(!nursery().isInside(strp));
-
-    Cell** cellp = reinterpret_cast<Cell**>(strp);
-    if (IsInsideNursery(*cellp) && !nursery().getForwardedPointer(cellp))
-        *strp = moveToTenured(*strp);
-}
-
 template <typename S>
 struct TenuringTraversalFunctor : public IdentityDefaultAdaptor<S> {
     template <typename T> S operator()(T* t, TenuringTracer* trc) {
         trc->traverse(&t);
         return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
     }
 };
 
@@ -2817,22 +2792,16 @@ TraceWholeCell(TenuringTracer& mover, JS
 
     if (object->is<UnboxedPlainObject>()) {
         if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
             expando->traceChildren(&mover);
     }
 }
 
 static inline void
-TraceWholeCell(TenuringTracer& mover, JSString* str)
-{
-    str->traceChildren(&mover);
-}
-
-static inline void
 TraceWholeCell(TenuringTracer& mover, JSScript* script)
 {
     script->traceChildren(&mover);
 }
 
 static inline void
 TraceWholeCell(TenuringTracer& mover, jit::JitCode* jitcode)
 {
@@ -2861,19 +2830,16 @@ js::gc::StoreBuffer::traceWholeCells(Ten
         MOZ_ASSERT(arena->bufferedCells() == cells);
         arena->bufferedCells() = &ArenaCellSet::Empty;
 
         JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
         switch (kind) {
           case JS::TraceKind::Object:
             TraceBufferedCells<JSObject>(mover, arena, cells);
             break;
-          case JS::TraceKind::String:
-            TraceBufferedCells<JSString>(mover, arena, cells);
-            break;
           case JS::TraceKind::Script:
             TraceBufferedCells<JSScript>(mover, arena, cells);
             break;
           case JS::TraceKind::JitCode:
             TraceBufferedCells<jit::JitCode>(mover, arena, cells);
             break;
           default:
             MOZ_CRASH("Unexpected trace kind");
@@ -2885,42 +2851,27 @@ js::gc::StoreBuffer::traceWholeCells(Ten
 
 void
 js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
 {
     if (!*edge)
         return;
 
     MOZ_ASSERT(IsCellPointerValid(*edge));
-
-#ifdef DEBUG
-    auto traceKind = (*edge)->getTraceKind();
-    MOZ_ASSERT(traceKind == JS::TraceKind::Object || traceKind == JS::TraceKind::String);
-#endif
-
-    // Bug 1376646: Make separate store buffers for strings and objects, and
-    // only check IsInsideNursery once.
-
-    if (!IsInsideNursery(*edge))
-        return;
-
-    if (JSString::nurseryCellIsString(*edge))
-        mover.traverse(reinterpret_cast<JSString**>(edge));
-    else
-        mover.traverse(reinterpret_cast<JSObject**>(edge));
+    MOZ_ASSERT((*edge)->getTraceKind() == JS::TraceKind::Object);
+    mover.traverse(reinterpret_cast<JSObject**>(edge));
 }
 
 void
 js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const
 {
     if (deref())
         mover.traverse(edge);
 }
 
-
 struct TenuringFunctor
 {
     template <typename T>
     void operator()(T* thing, TenuringTracer& mover) {
         mover.traverse(thing);
     }
 };
 
@@ -2968,36 +2919,30 @@ js::TenuringTracer::traceSlots(Value* vp
 }
 
 inline void
 js::TenuringTracer::traceSlots(JS::Value* vp, uint32_t nslots)
 {
     traceSlots(vp, vp + nslots);
 }
 
-void
-js::TenuringTracer::traceString(JSString* str)
-{
-    str->traceChildren(this);
-}
-
 #ifdef DEBUG
 static inline ptrdiff_t
 OffsetToChunkEnd(void* p)
 {
     return ChunkLocationOffset - (uintptr_t(p) & gc::ChunkMask);
 }
 #endif
 
 /* Insert the given relocation entry into the list of things to visit. */
 inline void
-js::TenuringTracer::insertIntoObjectFixupList(RelocationOverlay* entry) {
-    *objTail = entry;
-    objTail = &entry->nextRef();
-    *objTail = nullptr;
+js::TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
+    *tail = entry;
+    tail = &entry->nextRef();
+    *tail = nullptr;
 }
 
 template <typename T>
 inline T*
 js::TenuringTracer::allocTenured(Zone* zone, AllocKind kind) {
     TenuredCell* t = zone->arenas.allocateFromFreeList(kind, Arena::thingSize(kind));
     if (!t) {
         AutoEnterOOMUnsafeRegion oomUnsafe;
@@ -3074,17 +3019,17 @@ js::TenuringTracer::moveToTenuredSlow(JS
         tenuredSize += op(dst, src);
     } else {
         MOZ_ASSERT_IF(src->getClass()->hasFinalize(),
                       CanNurseryAllocateFinalizedClass(src->getClass()));
     }
 
     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
     overlay->forwardTo(dst);
-    insertIntoObjectFixupList(overlay);
+    insertIntoFixupList(overlay);
 
     TracePromoteToTenured(src, dst);
     return dst;
 }
 
 inline JSObject*
 js::TenuringTracer::movePlainObjectToTenured(PlainObject* src)
 {
@@ -3106,17 +3051,17 @@ js::TenuringTracer::movePlainObjectToTen
     // Move the slots and elements.
     tenuredSize += moveSlotsToTenured(dst, src, dstKind);
     tenuredSize += moveElementsToTenured(dst, src, dstKind);
 
     MOZ_ASSERT(!dst->getClass()->extObjectMovedOp());
 
     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
     overlay->forwardTo(dst);
-    insertIntoObjectFixupList(overlay);
+    insertIntoFixupList(overlay);
 
     TracePromoteToTenured(src, dst);
     return dst;
 }
 
 size_t
 js::TenuringTracer::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
 {
@@ -3190,89 +3135,31 @@ js::TenuringTracer::moveElementsToTenure
 
     js_memcpy(dstHeader, srcAllocatedHeader, nslots * sizeof(HeapSlot));
     dst->elements_ = dstHeader->elements() + numShifted;
     nursery().setElementsForwardingPointer(srcHeader, dst->getElementsHeader(),
                                            srcHeader->capacity);
     return nslots * sizeof(HeapSlot);
 }
 
-inline void
-js::TenuringTracer::insertIntoStringFixupList(RelocationOverlay* entry) {
-    *stringTail = entry;
-    stringTail = &entry->nextRef();
-    *stringTail = nullptr;
-}
-
-JSString*
-js::TenuringTracer::moveToTenured(JSString* src)
-{
-    MOZ_ASSERT(IsInsideNursery(src));
-    MOZ_ASSERT(!src->zone()->usedByHelperThread());
-
-    AllocKind dstKind = src->getAllocKind();
-    Zone* zone = src->zone();
-
-    TenuredCell* t = zone->arenas.allocateFromFreeList(dstKind, Arena::thingSize(dstKind));
-    if (!t) {
-        AutoEnterOOMUnsafeRegion oomUnsafe;
-        t = runtime()->gc.refillFreeListInGC(zone, dstKind);
-        if (!t)
-            oomUnsafe.crash(ChunkSize, "Failed to allocate string while tenuring.");
-    }
-    JSString* dst = reinterpret_cast<JSString*>(t);
-    tenuredSize += moveStringToTenured(dst, src, dstKind);
-
-    RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
-    overlay->forwardTo(dst);
-    insertIntoStringFixupList(overlay);
-
-    TracePromoteToTenured(src, dst);
-    return dst;
-}
-
 void
 js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
 {
-    for (RelocationOverlay* p = mover.objHead; p; p = p->next()) {
+    for (RelocationOverlay* p = mover.head; p; p = p->next()) {
         JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
         mover.traceObject(obj);
 
         TenureCount& entry = tenureCounts.findEntry(obj->groupRaw());
         if (entry.group == obj->groupRaw()) {
             entry.count++;
         } else if (!entry.group) {
             entry.group = obj->groupRaw();
             entry.count = 1;
         }
     }
-
-    for (RelocationOverlay* p = mover.stringHead; p; p = p->next())
-        mover.traceString(static_cast<JSString*>(p->forwardingAddress()));
-}
-
-size_t
-js::TenuringTracer::moveStringToTenured(JSString* dst, JSString* src, AllocKind dstKind)
-{
-    size_t size = Arena::thingSize(dstKind);
-
-    // At the moment, strings always have the same AllocKind between src and
-    // dst. This may change in the future.
-    MOZ_ASSERT(dst->asTenured().getAllocKind() == src->getAllocKind());
-
-    // Copy the Cell contents.
-    MOZ_ASSERT(OffsetToChunkEnd(src) >= ptrdiff_t(size));
-    js_memcpy(dst, src, size);
-
-    if (src->isLinear() && !src->isInline() && !src->hasBase()) {
-        void* chars = src->asLinear().nonInlineCharsRaw();
-        nursery().removeMallocedBuffer(chars);
-    }
-
-    return size;
 }
 
 
 /*** IsMarked / IsAboutToBeFinalized **************************************************************/
 
 template <typename T>
 static inline void
 CheckIsMarkedThing(T* thingp)
@@ -3328,18 +3215,17 @@ template <>
 /* static */ bool
 IsMarkedInternal(JSRuntime* rt, JSObject** thingp)
 {
     if (IsOwnedByOtherRuntime(rt, *thingp))
         return true;
 
     if (IsInsideNursery(*thingp)) {
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-        Cell** cellp = reinterpret_cast<Cell**>(thingp);
-        return Nursery::getForwardedPointer(cellp);
+        return Nursery::getForwardedPointer(thingp);
     }
     return IsMarkedInternalCommon(thingp);
 }
 
 template <typename S>
 struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
     template <typename T> S operator()(T* t, JSRuntime* rt, bool* rv) {
         *rv = IsMarkedInternal(rt, &t);
@@ -3375,17 +3261,17 @@ IsAboutToBeFinalizedInternal(T** thingp)
     JSRuntime* rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
     if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && TlsContext.get()->runtime() != rt)
         return false;
 
     if (IsInsideNursery(thing)) {
         return JS::CurrentThreadIsHeapMinorCollecting() &&
-               !Nursery::getForwardedPointer(reinterpret_cast<Cell**>(thingp));
+               !Nursery::getForwardedPointer(reinterpret_cast<JSObject**>(thingp));
     }
 
     Zone* zone = thing->asTenured().zoneFromAnyThread();
     if (zone->isGCSweeping()) {
         return IsAboutToBeFinalizedDuringSweep(thing->asTenured());
     } else if (zone->isGCCompacting() && IsForwarded(thing)) {
         *thingp = Forwarded(thing);
         return false;
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -159,21 +159,21 @@ namespace gc {
 //                  pointer to old location.
 //
 // MaybeForwarded - used before dereferencing a pointer that may refer to a
 //                  moved GC thing without updating it. For JSObjects this will
 //                  also update the object's shape pointer if it has been moved
 //                  to allow slots to be accessed.
 
 template <typename T>
-inline bool IsForwarded(const T* t);
+inline bool IsForwarded(T* t);
 inline bool IsForwarded(const JS::Value& value);
 
 template <typename T>
-inline T* Forwarded(const T* t);
+inline T* Forwarded(T* t);
 
 inline Value Forwarded(const JS::Value& value);
 
 template <typename T>
 inline T MaybeForwarded(T t);
 
 inline void
 MakeAccessibleAfterMovingGC(void* anyp) {}
--- a/js/src/gc/Nursery-inl.h
+++ b/js/src/gc/Nursery-inl.h
@@ -22,24 +22,24 @@
 template<typename T>
 bool
 js::Nursery::isInside(const SharedMem<T>& p) const
 {
     return isInside(p.unwrap(/*safe - used for value in comparison above*/));
 }
 
 MOZ_ALWAYS_INLINE /* static */ bool
-js::Nursery::getForwardedPointer(js::gc::Cell** ref)
+js::Nursery::getForwardedPointer(JSObject** ref)
 {
     MOZ_ASSERT(ref);
     MOZ_ASSERT(IsInsideNursery(*ref));
     const gc::RelocationOverlay* overlay = reinterpret_cast<const gc::RelocationOverlay*>(*ref);
     if (!overlay->isForwarded())
         return false;
-    *ref = overlay->forwardingAddress();
+    *ref = static_cast<JSObject*>(overlay->forwardingAddress());
     return true;
 }
 
 inline void
 js::Nursery::maybeSetForwardingPointer(JSTracer* trc, void* oldData, void* newData, bool direct)
 {
     if (trc->isTenuringTracer())
         setForwardingPointerWhileTenuring(oldData, newData, direct);
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -109,37 +109,31 @@ js::NurseryChunk::toChunk(JSRuntime* rt)
 }
 
 js::Nursery::Nursery(JSRuntime* rt)
   : runtime_(rt)
   , position_(0)
   , currentStartChunk_(0)
   , currentStartPosition_(0)
   , currentEnd_(0)
-  , currentStringEnd_(0)
   , currentChunk_(0)
   , maxChunkCount_(0)
   , chunkCountLimit_(0)
   , timeInChunkAlloc_(0)
   , previousPromotionRate_(0)
   , profileThreshold_(0)
   , enableProfiling_(false)
-  , canAllocateStrings_(true)
   , reportTenurings_(0)
   , minorGCTriggerReason_(JS::gcreason::NO_REASON)
   , minorGcCount_(0)
   , freeMallocedBuffersTask(nullptr)
 #ifdef JS_GC_ZEAL
   , lastCanary_(nullptr)
 #endif
-{
-    const char* env = getenv("MOZ_DISABLE_NURSERY_STRINGS");
-    if (env && *env)
-        canAllocateStrings_ = false;
-}
+{}
 
 bool
 js::Nursery::init(uint32_t maxNurseryBytes, AutoLockGCBgAlloc& lock)
 {
     if (!mallocedBuffers.init())
         return false;
 
     freeMallocedBuffersTask = js_new<FreeMallocedBuffersTask>(runtime()->defaultFreeOp());
@@ -230,36 +224,20 @@ js::Nursery::disable()
     MOZ_ASSERT(isEmpty());
     if (!isEnabled())
         return;
 
     freeChunksFrom(0);
     maxChunkCount_ = 0;
 
     currentEnd_ = 0;
-    currentStringEnd_ = 0;
+
     runtime()->gc.storeBuffer().disable();
 }
 
-void
-js::Nursery::enableStrings()
-{
-    MOZ_ASSERT(isEmpty());
-    canAllocateStrings_ = true;
-    currentStringEnd_ = currentEnd_;
-}
-
-void
-js::Nursery::disableStrings()
-{
-    MOZ_ASSERT(isEmpty());
-    canAllocateStrings_ = false;
-    currentStringEnd_ = 0;
-}
-
 bool
 js::Nursery::isEmpty() const
 {
     if (!isEnabled())
         return true;
 
     if (!runtime()->hasZealMode(ZealMode::GenerationalGC)) {
         MOZ_ASSERT(currentStartChunk_ == 0);
@@ -316,33 +294,16 @@ js::Nursery::allocateObject(JSContext* c
 
     /* Always initialize the slots field to match the JIT behavior. */
     obj->setInitialSlotsMaybeNonNative(slots);
 
     TraceNurseryAlloc(obj, size);
     return obj;
 }
 
-Cell*
-js::Nursery::allocateString(JSContext* cx, Zone* zone, size_t size, AllocKind kind)
-{
-    /* Ensure there's enough space to replace the contents with a RelocationOverlay. */
-    MOZ_ASSERT(size >= sizeof(RelocationOverlay));
-
-    size_t allocSize = JS_ROUNDUP(sizeof(StringLayout) - 1 + size, CellAlignBytes);
-    auto header = static_cast<StringLayout*>(allocate(allocSize));
-    if (!header)
-        return nullptr;
-    header->zone = zone;
-
-    auto cell = reinterpret_cast<Cell*>(&header->cell);
-    TraceNurseryAlloc(cell, kind);
-    return cell;
-}
-
 void*
 js::Nursery::allocate(size_t size)
 {
     MOZ_ASSERT(isEnabled());
     MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
     MOZ_ASSERT_IF(currentChunk_ == currentStartChunk_, position() >= currentStartPosition_);
     MOZ_ASSERT(position() % CellAlignBytes == 0);
@@ -402,17 +363,17 @@ js::Nursery::allocateBuffer(Zone* zone, 
 
     if (nbytes <= MaxNurseryBufferSize) {
         void* buffer = allocate(nbytes);
         if (buffer)
             return buffer;
     }
 
     void* buffer = zone->pod_malloc<uint8_t>(nbytes);
-    if (buffer && !registerMallocedBuffer(buffer)) {
+    if (buffer && !mallocedBuffers.putNew(buffer)) {
         js_free(buffer);
         return nullptr;
     }
     return buffer;
 }
 
 void*
 js::Nursery::allocateBuffer(JSObject* obj, size_t nbytes)
@@ -525,20 +486,18 @@ js::Nursery::forwardBufferPointer(HeapSl
     MOZ_ASSERT(!isInside(*pSlotsElems));
     MOZ_ASSERT(IsWriteableAddress(*pSlotsElems));
 }
 
 js::TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery)
   : JSTracer(rt, JSTracer::TracerKindTag::Tenuring, TraceWeakMapKeysValues)
   , nursery_(*nursery)
   , tenuredSize(0)
-  , objHead(nullptr)
-  , objTail(&objHead)
-  , stringHead(nullptr)
-  , stringTail(&stringHead)
+  , head(nullptr)
+  , tail(&head)
 {
 }
 
 inline float
 js::Nursery::calcPromotionRate(bool *validForTenuring) const {
     float used = float(previousGC.nurseryUsedBytes);
     float capacity = float(previousGC.nurseryCapacity);
     float tenured = float(previousGC.tenuredBytes);
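Only the first few lines of calcPromotionRate are visible in this hunk, but the three values it loads are enough to see the shape of the computation: the promotion rate relates bytes tenured out of the nursery to bytes that were actually used in it, and it is only treated as valid when the nursery was reasonably full. A sketch under those assumptions (the 50% utilisation cut-off is illustrative, not taken from the patch):

// Illustrative only: relates the three quantities loaded in the hunk above.
static float CalcPromotionRateSketch(float usedBytes, float capacityBytes,
                                     float tenuredBytes, bool* validForTenuring)
{
    // A rate computed from a nearly empty nursery is noise; require some
    // minimum utilisation before letting it drive pretenuring decisions.
    *validForTenuring = capacityBytes > 0.0f && (usedBytes / capacityBytes) > 0.5f;
    return usedBytes > 0.0f ? tenuredBytes / usedBytes : 0.0f;
}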
@@ -729,50 +688,31 @@ js::Nursery::collect(JS::gcreason::Reaso
     // If we are promoting the nursery, or exhausted the store buffer with
     // pointers to nursery things, which will force a collection well before
     // the nursery is full, look for object groups that are getting promoted
     // excessively and try to pretenure them.
     startProfile(ProfileKey::Pretenure);
     bool validPromotionRate;
     const float promotionRate = calcPromotionRate(&validPromotionRate);
     uint32_t pretenureCount = 0;
-    bool shouldPretenure = (validPromotionRate && promotionRate > 0.6) ||
-        IsFullStoreBufferReason(reason);
-
-    if (shouldPretenure) {
-        JSContext* cx = TlsContext.get();
-        for (auto& entry : tenureCounts.entries) {
-            if (entry.count >= 3000) {
-                ObjectGroup* group = entry.group;
-                if (group->canPreTenure() && group->zone()->group()->canEnterWithoutYielding(cx)) {
-                    AutoCompartment ac(cx, group);
-                    group->setShouldPreTenure(cx);
-                    pretenureCount++;
+    if (validPromotionRate) {
+        if (promotionRate > 0.8 || IsFullStoreBufferReason(reason)) {
+            JSContext* cx = TlsContext.get();
+            for (auto& entry : tenureCounts.entries) {
+                if (entry.count >= 3000) {
+                    ObjectGroup* group = entry.group;
+                    if (group->canPreTenure() && group->zone()->group()->canEnterWithoutYielding(cx)) {
+                        AutoCompartment ac(cx, group);
+                        group->setShouldPreTenure(cx);
+                        pretenureCount++;
+                    }
                 }
             }
         }
     }
-    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
-        if (shouldPretenure && zone->allocNurseryStrings && zone->tenuredStrings >= 30 * 1000) {
-            JSRuntime::AutoProhibitActiveContextChange apacc(rt);
-            CancelOffThreadIonCompile(zone);
-            bool preserving = zone->isPreservingCode();
-            zone->setPreservingCode(false);
-            zone->discardJitCode(rt->defaultFreeOp());
-            zone->setPreservingCode(preserving);
-            for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
-                if (jit::JitCompartment* jitComp = c->jitCompartment()) {
-                    jitComp->discardStubs();
-                    jitComp->stringsCanBeInNursery = false;
-                }
-            }
-            zone->allocNurseryStrings = false;
-        }
-        zone->tenuredStrings = 0;
-    }
     endProfile(ProfileKey::Pretenure);
 
     // We ignore gcMaxBytes when allocating for minor collection. However, if we
     // overflowed, we disable the nursery. The next time we allocate, we'll fail
     // because gcBytes >= gcMaxBytes.
     if (rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes())
         disable();
     // Disable the nursery if the user changed the configuration setting.  The
@@ -870,19 +810,19 @@ js::Nursery::doCollection(JS::gcreason::
 
     startProfile(ProfileKey::MarkDebugger);
     {
         gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::MARK_ROOTS);
         Debugger::traceAllForMovingGC(&mover);
     }
     endProfile(ProfileKey::MarkDebugger);
 
-    startProfile(ProfileKey::SweepCaches);
-    rt->gc.purgeRuntimeForMinorGC();
-    endProfile(ProfileKey::SweepCaches);
+    startProfile(ProfileKey::ClearNewObjectCache);
+    rt->caches().newObjectCache.clearNurseryObjects(rt);
+    endProfile(ProfileKey::ClearNewObjectCache);
 
     // Most of the work is done here. This loop iterates over objects that have
     // been moved to the major heap. If these objects have any outgoing pointers
     // to the nursery, then those nursery objects get moved as well, until no
     // objects are left to move. That is, we iterate to a fixed point.
     startProfile(ProfileKey::CollectToFP);
     collectToFixedPoint(mover, tenureCounts);
     endProfile(ProfileKey::CollectToFP);
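The comment above describes the heart of a minor collection: tenuring is a transitive closure, so moved things are kept on a worklist and traced until nothing new gets moved. A self-contained sketch of that fixed-point loop over a toy object graph (not the SpiderMonkey types; the real tracer threads its worklist through the moved cells themselves):

#include <initializer_list>
#include <vector>

// Toy object graph: each node has up to two children and a nursery flag.
struct Node {
    Node* a = nullptr;
    Node* b = nullptr;
    bool inNursery = true;
};

// "Move" a node: the toy model just flips the flag; a real collector copies
// the cell and leaves a forwarding pointer behind.
static Node* moveToTenured(Node* n) { n->inNursery = false; return n; }

// Iterate to a fixed point: every nursery node reachable from the initial
// worklist is moved, and moving a node can add more nodes to the worklist.
static void collectToFixedPoint(std::vector<Node*> worklist) {
    while (!worklist.empty()) {
        Node* promoted = worklist.back();
        worklist.pop_back();
        for (Node* child : {promoted->a, promoted->b}) {
            if (child && child->inNursery)
                worklist.push_back(moveToTenured(child));
        }
    }
}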
@@ -945,23 +885,16 @@ js::Nursery::FreeMallocedBuffersTask::tr
 void
 js::Nursery::FreeMallocedBuffersTask::run()
 {
     for (MallocedBuffersSet::Range r = buffers_.all(); !r.empty(); r.popFront())
         fop_->free_(r.front());
     buffers_.clear();
 }
 
-bool
-js::Nursery::registerMallocedBuffer(void* buffer)
-{
-    MOZ_ASSERT(buffer);
-    return mallocedBuffers.putNew(buffer);
-}
-
 void
 js::Nursery::freeMallocedBuffers()
 {
     if (mallocedBuffers.empty())
         return;
 
     bool started;
     {
@@ -1055,18 +988,16 @@ js::Nursery::spaceToEnd(unsigned chunkCo
 MOZ_ALWAYS_INLINE void
 js::Nursery::setCurrentChunk(unsigned chunkno)
 {
     MOZ_ASSERT(chunkno < chunkCountLimit());
     MOZ_ASSERT(chunkno < allocatedChunkCount());
     currentChunk_ = chunkno;
     position_ = chunk(chunkno).start();
     currentEnd_ = chunk(chunkno).end();
-    if (canAllocateStrings_)
-        currentStringEnd_ = currentEnd_;
     chunk(chunkno).poisonAndInit(runtime(), JS_FRESH_NURSERY_PATTERN);
 }
 
 bool
 js::Nursery::allocateNextChunk(const unsigned chunkno,
     AutoLockGCBgAlloc& lock)
 {
     const unsigned priorCount = allocatedChunkCount();
@@ -1096,17 +1027,17 @@ js::Nursery::setStartPosition()
 {
     currentStartChunk_ = currentChunk_;
     currentStartPosition_ = position();
 }
 
 void
 js::Nursery::maybeResizeNursery(JS::gcreason::Reason reason)
 {
-    static const double GrowThreshold   = 0.03;
+    static const double GrowThreshold   = 0.05;
     static const double ShrinkThreshold = 0.01;
     unsigned newMaxNurseryChunks;
 
     // Shrink the nursery to its minimum size if we ran out of memory or
     // received a memory pressure event.
     if (gc::IsOOMReason(reason)) {
         minimizeAllocableSpace();
         return;
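Only the constants and the OOM fast path of maybeResizeNursery are visible in this hunk, but together they describe the resizing policy: grow when the last collection promoted more than GrowThreshold of the nursery, shrink when it promoted less than ShrinkThreshold. A sketch of such a policy under those assumptions (the doubling/halving step and the clamp are illustrative, not taken from the patch):

// Illustrative policy only: the real function also respects chunk count
// limits and zeal modes that are outside this hunk.
static unsigned MaybeResizeSketch(unsigned maxChunks, double promotionRate) {
    static const double GrowThreshold   = 0.05;  // value restored by this backout
    static const double ShrinkThreshold = 0.01;
    if (promotionRate > GrowThreshold)
        return maxChunks * 2;                    // too much survived: grow
    if (promotionRate < ShrinkThreshold && maxChunks > 1)
        return maxChunks / 2;                    // mostly garbage: give memory back
    return maxChunks;
}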
@@ -1215,25 +1146,8 @@ js::Nursery::sweepDictionaryModeObjects(
     for (auto obj : dictionaryModeObjects_) {
         if (!IsForwarded(obj))
             obj->sweepDictionaryListPointer();
         else
             Forwarded(obj)->updateDictionaryListPointerAfterMinorGC(obj);
     }
     dictionaryModeObjects_.clear();
 }
-
-
-JS_PUBLIC_API(void)
-JS::EnableNurseryStrings(JSContext* cx)
-{
-    AutoEmptyNursery empty(cx);
-    ReleaseAllJITCode(cx->runtime()->defaultFreeOp());
-    cx->runtime()->gc.nursery().enableStrings();
-}
-
-JS_PUBLIC_API(void)
-JS::DisableNurseryStrings(JSContext* cx)
-{
-    AutoEmptyNursery empty(cx);
-    ReleaseAllJITCode(cx->runtime()->defaultFreeOp());
-    cx->runtime()->gc.nursery().disableStrings();
-}
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -23,17 +23,17 @@
     _(TraceValues,              "mkVals")                                     \
     _(TraceCells,               "mkClls")                                     \
     _(TraceSlots,               "mkSlts")                                     \
     _(TraceWholeCells,          "mcWCll")                                     \
     _(TraceGenericEntries,      "mkGnrc")                                     \
     _(CheckHashTables,          "ckTbls")                                     \
     _(MarkRuntime,              "mkRntm")                                     \
     _(MarkDebugger,             "mkDbgr")                                     \
-    _(SweepCaches,              "swpCch")                                     \
+    _(ClearNewObjectCache,      "clrNOC")                                     \
     _(CollectToFP,              "collct")                                     \
     _(ObjectsTenuredCallback,   "tenCB")                                      \
     _(Sweep,                    "sweep")                                      \
     _(UpdateJitActivations,     "updtIn")                                     \
     _(FreeMallocedBuffers,      "frSlts")                                     \
     _(ClearStoreBuffer,         "clrSB")                                      \
     _(ClearNursery,             "clear")                                      \
     _(Pretenure,                "pretnr")
@@ -75,51 +75,47 @@ class MacroAssembler;
 class TenuringTracer : public JSTracer
 {
     friend class Nursery;
     Nursery& nursery_;
 
     // Amount of data moved to the tenured generation during collection.
     size_t tenuredSize;
 
-    // These lists are threaded through the Nursery using the space from
-    // already moved things. The lists are used to fix up the moved things and
-    // to find things held live by intra-Nursery pointers.
-    gc::RelocationOverlay* objHead;
-    gc::RelocationOverlay** objTail;
-    gc::RelocationOverlay* stringHead;
-    gc::RelocationOverlay** stringTail;
+    // This list is threaded through the Nursery using the space from already
+    // moved things. The list is used to fix up the moved things and to find
+    // things held live by intra-Nursery pointers.
+    gc::RelocationOverlay* head;
+    gc::RelocationOverlay** tail;
 
     TenuringTracer(JSRuntime* rt, Nursery* nursery);
 
   public:
-    Nursery& nursery() { return nursery_; }
+    const Nursery& nursery() const { return nursery_; }
 
+    // Traverse the given edge, tenuring any nursery thing it points at.
     template <typename T> void traverse(T** thingp);
     template <typename T> void traverse(T* thingp);
 
     // The store buffers need to be able to call these directly.
     void traceObject(JSObject* src);
     void traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length);
     void traceSlots(JS::Value* vp, uint32_t nslots);
-    void traceString(JSString* src);
 
   private:
-    inline void insertIntoObjectFixupList(gc::RelocationOverlay* entry);
-    inline void insertIntoStringFixupList(gc::RelocationOverlay* entry);
+    Nursery& nursery() { return nursery_; }
+
+    inline void insertIntoFixupList(gc::RelocationOverlay* entry);
     template <typename T>
     inline T* allocTenured(JS::Zone* zone, gc::AllocKind kind);
 
     inline JSObject* movePlainObjectToTenured(PlainObject* src);
     JSObject* moveToTenuredSlow(JSObject* src);
-    JSString* moveToTenured(JSString* src);
-
     size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
     size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
-    size_t moveStringToTenured(JSString* dst, JSString* src, gc::AllocKind dstKind);
 
     void traceSlots(JS::Value* vp, JS::Value* end);
 };
 
 /*
  * Classes with JSCLASS_SKIP_NURSERY_FINALIZE or Wrapper classes with
  * CROSS_COMPARTMENT flags will not have their finalizer called if they are
  * nursery allocated and not promoted to the tenured heap. The finalizers for
@@ -134,25 +130,16 @@ CanNurseryAllocateFinalizedClass(const j
 }
 
 class Nursery
 {
   public:
     static const size_t Alignment = gc::ChunkSize;
     static const size_t ChunkShift = gc::ChunkShift;
 
-    struct alignas(gc::CellAlignBytes) CellAlignedByte {
-        char byte;
-    };
-
-    struct StringLayout {
-        JS::Zone* zone;
-        CellAlignedByte cell;
-    };
-
     explicit Nursery(JSRuntime* rt);
     ~Nursery();
 
     MOZ_MUST_USE bool init(uint32_t maxNurseryBytes, AutoLockGCBgAlloc& lock);
 
     unsigned chunkCountLimit() const { return chunkCountLimit_; }
 
     // Number of allocated (ready to use) chunks.
@@ -165,20 +152,16 @@ class Nursery
     unsigned maxChunkCount() const { return maxChunkCount_; }
 
     bool exists() const { return chunkCountLimit() != 0; }
 
     void enable();
     void disable();
     bool isEnabled() const { return maxChunkCount() != 0; }
 
-    void enableStrings();
-    void disableStrings();
-    bool canAllocateStrings() const { return canAllocateStrings_; }
-
     /* Return true if no allocations have been made since the last collection. */
     bool isEmpty() const;
 
     /*
      * Check whether an arbitrary pointer is within the nursery. This is
      * slower than IsInsideNursery(Cell*), but works on all types of pointers.
      */
     MOZ_ALWAYS_INLINE bool isInside(gc::Cell* cellp) const = delete;
@@ -194,39 +177,16 @@ class Nursery
     inline bool isInside(const SharedMem<T>& p) const;
 
     /*
      * Allocate and return a pointer to a new GC object with its |slots|
      * pointer pre-filled. Returns nullptr if the Nursery is full.
      */
     JSObject* allocateObject(JSContext* cx, size_t size, size_t numDynamic, const js::Class* clasp);
 
-    /*
-     * Allocate and return a pointer to a new string. Returns nullptr if the
-     * Nursery is full.
-     */
-    gc::Cell* allocateString(JSContext* cx, JS::Zone* zone, size_t size, gc::AllocKind kind);
-
-    /*
-     * String zones are stored just before the string in nursery memory.
-     */
-    static JS::Zone* getStringZone(const JSString* str) {
-#ifdef DEBUG
-        auto cell = reinterpret_cast<const js::gc::Cell*>(str); // JSString type is incomplete here
-        MOZ_ASSERT(js::gc::IsInsideNursery(cell), "getStringZone must be passed a nursery string");
-#endif
-
-        auto layout = reinterpret_cast<const uint8_t*>(str) - offsetof(StringLayout, cell);
-        return reinterpret_cast<const StringLayout*>(layout)->zone;
-    }
-
-    static size_t stringHeaderSize() {
-        return offsetof(StringLayout, cell);
-    }
-
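The removed getStringZone/stringHeaderSize helpers above depend on an allocation trick from the backed-out patch: the nursery wrote a one-word header (the owning Zone) immediately before each string cell and recovered it by stepping back offsetof(StringLayout, cell) bytes. A standalone sketch of the same trick with generic types (not the real JSString/Zone layout; the real code also rounds the total allocation up to the cell alignment):

#include <cstddef>
#include <cstdint>
#include <cstdlib>

struct Zone;  // opaque owner recorded in the header

// Header placed immediately before the payload, mirroring the removed
// StringLayout { JS::Zone* zone; CellAlignedByte cell; }.
struct HeaderLayout {
    Zone* zone;
    alignas(alignof(std::max_align_t)) char cell;  // first byte of the payload
};

// Allocate |payloadSize| bytes with a zone header in front and return the
// payload pointer, as the backed-out Nursery::allocateString did.
void* allocateWithZoneHeader(Zone* zone, size_t payloadSize) {
    void* raw = std::malloc(offsetof(HeaderLayout, cell) + payloadSize);
    if (!raw)
        return nullptr;
    auto* header = static_cast<HeaderLayout*>(raw);
    header->zone = zone;
    return &header->cell;
}

// Step back from the payload to the header to recover the zone, mirroring
// the removed getStringZone().
Zone* zoneOf(const void* payload) {
    auto* bytes = reinterpret_cast<const uint8_t*>(payload) - offsetof(HeaderLayout, cell);
    return reinterpret_cast<const HeaderLayout*>(bytes)->zone;
}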
     /* Allocate a buffer for a given zone, using the nursery if possible. */
     void* allocateBuffer(JS::Zone* zone, size_t nbytes);
 
     /*
      * Allocate a buffer for a given object, using the nursery if possible and
      * obj is in the nursery.
      */
     void* allocateBuffer(JSObject* obj, size_t nbytes);
@@ -247,35 +207,28 @@ class Nursery
 
     /* The maximum number of bytes allowed to reside in nursery buffers. */
     static const size_t MaxNurseryBufferSize = 1024;
 
     /* Do a minor collection. */
     void collect(JS::gcreason::Reason reason);
 
     /*
-     * If the thing at |*ref| in the Nursery has been forwarded, set |*ref| to
-     * the new location and return true. Otherwise return false and leave
-     * |*ref| unset.
+     * Check if the thing at |*ref| in the Nursery has been forwarded. If so,
+     * sets |*ref| to the new location of the object and returns true. Otherwise
+     * returns false and leaves |*ref| unset.
      */
-    MOZ_ALWAYS_INLINE MOZ_MUST_USE static bool getForwardedPointer(js::gc::Cell** ref);
+    MOZ_ALWAYS_INLINE MOZ_MUST_USE static bool getForwardedPointer(JSObject** ref);
 
     /* Forward a slots/elements pointer stored in an Ion frame. */
     void forwardBufferPointer(HeapSlot** pSlotsElems);
 
     inline void maybeSetForwardingPointer(JSTracer* trc, void* oldData, void* newData, bool direct);
     inline void setForwardingPointerWhileTenuring(void* oldData, void* newData, bool direct);
 
-    /*
-     * Register a malloced buffer that is held by a nursery object, which
-     * should be freed at the end of a minor GC. Buffers are unregistered when
-     * their owning objects are tenured.
-     */
-    bool registerMallocedBuffer(void* buffer);
-
     /* Mark a malloced buffer as no longer needing to be freed. */
     void removeMallocedBuffer(void* buffer) {
         mallocedBuffers.remove(buffer);
     }
 
     void waitBackgroundFreeEnd();
 
     MOZ_MUST_USE bool addedUniqueIdToCell(gc::Cell* cell) {
@@ -323,17 +276,16 @@ class Nursery
     /* Print header line for profile times. */
     static void printProfileHeader();
 
     /* Print total profile times on shutdown. */
     void printTotalProfileTimes();
 
     void* addressOfCurrentEnd() const { return (void*)&currentEnd_; }
     void* addressOfPosition() const { return (void*)&position_; }
-    void* addressOfCurrentStringEnd() const { return (void*)&currentStringEnd_; }
 
     void requestMinorGC(JS::gcreason::Reason reason) const;
 
     bool minorGCRequested() const { return minorGCTriggerReason_ != JS::gcreason::NO_REASON; }
     JS::gcreason::Reason minorGCTriggerReason() const { return minorGCTriggerReason_; }
     void clearMinorGCRequest() { minorGCTriggerReason_ = JS::gcreason::NO_REASON; }
 
     bool needIdleTimeCollection() const {
@@ -360,22 +312,16 @@ class Nursery
 
     /* Pointer to the logical start of the Nursery. */
     unsigned currentStartChunk_;
     uintptr_t currentStartPosition_;
 
     /* Pointer to the last byte of space in the current chunk. */
     uintptr_t currentEnd_;
 
-    /*
-     * Pointer to the last byte of space in the current chunk, or nullptr if we
-     * are not allocating strings in the nursery.
-     */
-    uintptr_t currentStringEnd_;
-
     /* The index of the chunk that is currently being allocated from. */
     unsigned currentChunk_;
 
     /*
      * The nursery may grow the chunks_ vector up to this size without a
      * collection.  This allows the nursery to grow lazily.  This limit may
      * change during maybeResizeNursery() each collection.
      */
@@ -391,20 +337,17 @@ class Nursery
 
     /* Promotion rate for the previous minor collection. */
     float previousPromotionRate_;
 
     /* Report minor collections taking at least this long, if enabled. */
     mozilla::TimeDuration profileThreshold_;
     bool enableProfiling_;
 
-    /* Whether we will nursery-allocate strings. */
-    bool canAllocateStrings_;
-
-    /* Report ObjectGroups with at least this many instances tenured. */
+    /* Report ObjectGroups with at lest this many instances tenured. */
     int64_t reportTenurings_;
 
     /*
      * Whether and why a collection of this nursery has been requested. This is
      * mutable as it is set by the store buffer, which otherwise cannot modify
      * anything in the nursery.
      */
     mutable JS::gcreason::Reason minorGCTriggerReason_;
@@ -513,16 +456,19 @@ class Nursery
         AutoLockGCBgAlloc& lock);
 
     MOZ_ALWAYS_INLINE uintptr_t currentEnd() const;
 
     uintptr_t position() const { return position_; }
 
     JSRuntime* runtime() const { return runtime_; }
 
+    /* Allocates a new GC thing from the tenured generation during minor GC. */
+    gc::TenuredCell* allocateFromTenured(JS::Zone* zone, gc::AllocKind thingKind);
+
     /* Common internal allocator function. */
     void* allocate(size_t size);
 
     void doCollection(JS::gcreason::Reason reason,
                         gc::TenureCountCache& tenureCounts);
 
     /*
      * Move the object at |src| in the Nursery to an already-allocated cell
--- a/js/src/gc/NurseryAwareHashMap.h
+++ b/js/src/gc/NurseryAwareHashMap.h
@@ -143,28 +143,24 @@ class NurseryAwareHashMap
             // Drop the entry if the value is not marked.
             if (JS::GCPolicy<BarrieredValue>::needsSweep(&p->value())) {
                 map.remove(key);
                 continue;
             }
 
             // Update and relocate the key, if the value is still needed.
             //
-            // Non-string Values will contain a strong reference to Key, as per
-            // its use in the CrossCompartmentWrapperMap, so the key will never
-            // be dying here. Strings do *not* have any sort of pointer from
-            // wrapper to wrappee, as they are just copies. The wrapper map
-            // entry is merely used as a cache to avoid re-copying the string,
-            // and currently that entire cache is flushed on major GC.
+            // Note that this currently assumes that all Value will contain a
+            // strong reference to Key, as per its use as the
+            // CrossCompartmentWrapperMap. We may need to make the following
+            // behavior more dynamic if we use this map in other nursery-aware
+            // contexts.
             Key copy(key);
-            bool sweepKey = JS::GCPolicy<Key>::needsSweep(&copy);
-            if (sweepKey) {
-                map.remove(key);
-                continue;
-            }
+            mozilla::DebugOnly<bool> sweepKey = JS::GCPolicy<Key>::needsSweep(&copy);
+            MOZ_ASSERT(!sweepKey);
             map.rekeyIfMoved(key, copy);
         }
         nurseryEntries.clear();
     }
 
     void sweep() {
         MOZ_ASSERT(nurseryEntries.empty());
         map.sweep();
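sweepAfterMinorGC above only walks entries whose key or value was nursery-allocated: an entry whose value died is dropped, and a surviving entry is rekeyed if its key moved. A generic sketch of that pattern over a plain std::unordered_map (toy moved/dead flags; the real code goes through JS::GCPolicy and the barriered map types):

#include <unordered_map>
#include <vector>

struct Thing { bool dead = false; Thing* movedTo = nullptr; };  // toy post-GC state

// Sweep a map whose nursery-allocated keys/values may have died or moved
// during the last minor GC.
void sweepAfterMinorGCSketch(std::unordered_map<Thing*, Thing*>& map,
                             std::vector<Thing*>& nurseryKeys)
{
    for (Thing* key : nurseryKeys) {
        auto it = map.find(key);
        if (it == map.end())
            continue;
        Thing* value = it->second;
        if (value->dead) {               // drop the entry if the value is gone
            map.erase(it);
            continue;
        }
        if (key->movedTo) {              // rekey if the key was relocated
            map.erase(it);
            map[key->movedTo] = value;
        }
    }
    nurseryKeys.clear();
}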
--- a/js/src/gc/RelocationOverlay.h
+++ b/js/src/gc/RelocationOverlay.h
@@ -10,66 +10,45 @@
 
 #ifndef gc_RelocationOverlay_h
 #define gc_RelocationOverlay_h
 
 #include "mozilla/Assertions.h"
 
 #include <stdint.h>
 
-#include "jsobj.h"
-
-#include "js/HeapAPI.h"
-#include "vm/Shape.h"
-
 namespace js {
 namespace gc {
 
 struct Cell;
 
 /*
  * This structure overlays a Cell that has been moved and provides a way to find
  * its new location. It's used during generational and compacting GC.
  */
 class RelocationOverlay
 {
-    /* See comment in js/public/HeapAPI.h. */
-    static const uint32_t Relocated = js::gc::Relocated;
-
-#if MOZ_LITTLE_ENDIAN
-    /*
-     * Keep the low 32 bits untouched. Use them to distinguish strings from
-     * objects in the nursery.
-     */
-    uint32_t preserve_;
+    /* The low bit is set so this should never equal a normal pointer. */
+    static const uintptr_t Relocated = uintptr_t(0xbad0bad1);
 
     /* Set to Relocated when moved. */
-    uint32_t magic_;
-#else
-    uint32_t magic_;
-    uint32_t preserve_;
-#endif
+    uintptr_t magic_;
 
     /* The location |this| was moved to. */
     Cell* newLocation_;
 
     /* A list entry to track all relocated things. */
     RelocationOverlay* next_;
 
   public:
-    static const RelocationOverlay* fromCell(const Cell* cell) {
-        return reinterpret_cast<const RelocationOverlay*>(cell);
-    }
-
     static RelocationOverlay* fromCell(Cell* cell) {
         return reinterpret_cast<RelocationOverlay*>(cell);
     }
 
     bool isForwarded() const {
-        (void) preserve_; // Suppress warning
         return magic_ == Relocated;
     }
 
     Cell* forwardingAddress() const {
         MOZ_ASSERT(isForwarded());
         return newLocation_;
     }
 
@@ -80,17 +59,17 @@ class RelocationOverlay
         return next_;
     }
 
     RelocationOverlay* next() const {
         MOZ_ASSERT(isForwarded());
         return next_;
     }
 
-    static bool isCellForwarded(const Cell* cell) {
+    static bool isCellForwarded(Cell* cell) {
         return fromCell(cell)->isForwarded();
     }
 };
 
 } // namespace gc
 } // namespace js
 
 #endif /* gc_RelocationOverlay_h */
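RelocationOverlay is the standard forwarding trick of a moving collector: once a cell has been copied, its old storage is reused to hold a magic word plus the new address, so anything still holding the stale pointer can be redirected. A minimal standalone sketch of that usage (toy Cell; the real overlay above also threads a fixup list through next_):

#include <cstdint>

struct Cell { uintptr_t firstWord = 0; void* secondWord = nullptr; };  // toy two-word cell

// Overlay written into a Cell's old storage after it has been copied.
struct ForwardingOverlay {
    static constexpr uintptr_t Relocated = uintptr_t(0xbad0bad1);  // magic used above
    uintptr_t magic;    // == Relocated once the cell has moved
    Cell* newLocation;  // where the cell now lives

    static ForwardingOverlay* fromCell(Cell* cell) {
        return reinterpret_cast<ForwardingOverlay*>(cell);
    }
};

// Called by the collector right after copying |oldCell| to |copy|.
void forward(Cell* oldCell, Cell* copy) {
    ForwardingOverlay* overlay = ForwardingOverlay::fromCell(oldCell);
    overlay->magic = ForwardingOverlay::Relocated;
    overlay->newLocation = copy;
}

// Called when visiting an edge that may still point at the old storage.
Cell* maybeFollow(Cell* maybeStale) {
    ForwardingOverlay* overlay = ForwardingOverlay::fromCell(maybeStale);
    return overlay->magic == ForwardingOverlay::Relocated ? overlay->newLocation
                                                          : maybeStale;
}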
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -400,18 +400,16 @@ class StoreBuffer
     }
 
     MOZ_MUST_USE bool enable();
     void disable();
     bool isEnabled() const { return enabled_; }
 
     void clear();
 
-    const Nursery& nursery() const { return nursery_; }
-
     /* Get the overflowed status. */
     bool isAboutToOverflow() const { return aboutToOverflow_; }
 
     bool cancelIonCompilations() const { return cancelIonCompilations_; }
 
     /* Insert a single edge into the buffer/remembered set. */
     void putValue(JS::Value* vp) { put(bufferVal, ValueEdge(vp)); }
     void unputValue(JS::Value* vp) { unput(bufferVal, ValueEdge(vp)); }
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -524,24 +524,17 @@ HeapCheckTracerBase::onChild(const JS::G
         return;
     }
 
     // Don't trace into GC things owned by another runtime.
     if (cell->runtimeFromAnyThread() != rt)
         return;
 
     // Don't trace into GC in zones being used by helper threads.
-    Zone* zone;
-    if (thing.is<JSObject>())
-        zone = thing.as<JSObject>().zone();
-    else if (thing.is<JSString>())
-        zone = thing.as<JSString>().zone();
-    else
-        zone = cell->asTenured().zone();
-
+    Zone* zone = thing.is<JSObject>() ? thing.as<JSObject>().zone() : cell->asTenured().zone();
     if (zone->group() && zone->group()->usedByHelperThread())
         return;
 
     WorkItem item(thing, contextName(), parentIndex);
     if (!stack.append(item))
         oom = true;
 }
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -41,18 +41,16 @@ JS::Zone::Zone(JSRuntime* rt, ZoneGroup*
     regExps(this),
     markedAtoms_(group),
     atomCache_(group),
     externalStringCache_(group),
     functionToStringCache_(group),
     usage(&rt->gc.usage),
     threshold(),
     gcDelayBytes(0),
-    tenuredStrings(group, 0),
-    allocNurseryStrings(group, true),
     propertyTree_(group, this),
     baseShapes_(group, this),
     initialShapes_(group, this),
     nurseryShapes_(group),
     data(group, nullptr),
     isSystem(group, false),
 #ifdef DEBUG
     gcLastSweepGroupIndex(group, 0),
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -516,19 +516,16 @@ struct Zone : public JS::shadow::Zone,
 
     // Thresholds used to trigger GC.
     js::gc::ZoneHeapThreshold threshold;
 
     // Amount of data to allocate before triggering a new incremental slice for
     // the current GC.
     js::UnprotectedData<size_t> gcDelayBytes;
 
-    js::ZoneGroupData<uint32_t> tenuredStrings;
-    js::ZoneGroupData<bool> allocNurseryStrings;
-
   private:
     // Shared Shape property tree.
     js::ZoneGroupData<js::PropertyTree> propertyTree_;
   public:
     js::PropertyTree& propertyTree() { return propertyTree_.ref(); }
 
   private:
     // Set of all unowned base shapes in the Zone.
--- a/js/src/gdb/mozilla/JSString.py
+++ b/js/src/gdb/mozilla/JSString.py
@@ -12,17 +12,17 @@ except ValueError as exc: # yuck, we are
 # Forget any printers from previous loads of this module.
 mozilla.prettyprinters.clear_module_printers(__name__)
 
 # Cache information about the JSString type for this objfile.
 class JSStringTypeCache(object):
     def __init__(self, cache):
         dummy = gdb.Value(0).cast(cache.JSString_ptr_t)
         self.ROPE_FLAGS = dummy['ROPE_FLAGS']
-        self.NON_ATOM_BIT = dummy['NON_ATOM_BIT']
+        self.ATOM_BIT = dummy['ATOM_BIT']
         self.INLINE_CHARS_BIT = dummy['INLINE_CHARS_BIT']
         self.TYPE_FLAGS_MASK = dummy['TYPE_FLAGS_MASK']
         self.LATIN1_CHARS_BIT = dummy['LATIN1_CHARS_BIT']
 
 class Common(mozilla.prettyprinters.Pointer):
     def __init__(self, value, cache):
         super(Common, self).__init__(value, cache)
         if not cache.mod_JSString:
--- a/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
+++ b/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
@@ -8,193 +8,126 @@
 
 // Run this test only if we're using jemalloc. Other malloc implementations
 // exhibit surprising behaviors. For example, 32-bit Fedora builds have
 // non-deterministic allocation sizes.
 var config = getBuildConfiguration();
 if (!config['moz-memory'])
   quit(0);
 
-gczeal(0); // Need to control when tenuring happens
-
-// Ion eager runs much of this code in Ion, and Ion nursery-allocates more
-// aggressively than other modes.
-if (getJitCompilerOptions()["ion.warmup.trigger"] <= 100)
-    setJitCompilerOption("ion.warmup.trigger", 100);
-
 if (config['pointer-byte-size'] == 4)
   var s = (s32, s64) => s32
 else
   var s = (s32, s64) => s64
 
-// Convert an input string, which is probably an atom because it's a literal in
-// the source text, to a nursery-allocated string with the same contents.
-function copyString(str) {
-  if (str.length == 0)
-    return str; // Nothing we can do here
-  return ensureFlatString(str.substr(0, 1) + str.substr(1));
-}
+// Return the byte size of |obj|, ensuring that the size is not affected by
+// being tenured. (We use 'survives a GC' as an approximation for 'tenuring'.)
+function tByteSize(obj) {
+  var nurserySize = byteSize(obj);
+  minorgc();
+  var tenuredSize = byteSize(obj);
+  if (nurserySize != tenuredSize) {
+    print("nursery size: " + nurserySize + "  tenured size: " + tenuredSize);
+    return -1; // make the stack trace point at the real test
+  }
 
-// Return the nursery byte size of |str|.
-function nByteSize(str) {
-  // Strings that appear in the source will always be atomized and therefore
-  // will never be in the nursery.
-  return byteSize(copyString(str));
-}
-
-// Return the tenured byte size of |str|.
-function tByteSize(str) {
-  // Strings that appear in the source will always be atomized and therefore
-  // will never be in the nursery. But we'll make them get tenured instead of
-  // using the atom.
-  str = copyString(str);
-  minorgc();
-  return byteSize(str);
+  return tenuredSize;
 }
 
 // There are four representations of flat strings, with the following capacities
 // (excluding a terminating null character):
 //
 //                      32-bit                  64-bit                test
 // representation       Latin-1   char16_t      Latin-1   char16_t    label
 // ========================================================================
 // JSExternalString            (cannot be tested in shell)            -
 // JSThinInlineString   7         3             15        7           T
 // JSFatInlineString    23        11            23        11          F
 // JSExtensibleString          - limited by available memory -        X
 // JSUndependedString          - same as JSExtensibleString -
 
-// Notes:
-//  - labels are suffixed with A for atoms and N for non-atoms
-//  - atoms are 8 bytes larger than non-atoms, to store the atom's hash code.
-//  - Nursery-allocated strings require a header that stores the zone.
-
-// Expected sizes based on type of string
-const m32 = (config['pointer-byte-size'] == 4);
-const TA = m32 ? 24 : 32; // ThinInlineString atom, includes a hash value
-const TN = m32 ? 16 : 24; // ThinInlineString
-const FN = m32 ? 32 : 32; // FatInlineString
-const XN = m32 ? 16 : 24; // ExtensibleString, has additional storage buffer
-const RN = m32 ? 16 : 24; // Rope
-const DN = m32 ? 16 : 24; // DependentString
-
-// A function that pads out a tenured size to the nursery size. We store a zone
-// pointer in the nursery just before the string (4 bytes on 32-bit, 8 bytes on
-// 64-bit), and the string struct itself must be 8-byte aligned (resulting in
-// +4 bytes on 32-bit, +0 bytes on 64-bit). The end result? Nursery strings are
-// 8 bytes larger.
-const Nursery = m32 ? s => s + 4 + 4 : s => s + 8 + 0;
+// Note that atoms are 8 bytes larger than non-atoms, to store the atom's hash code.
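A quick cross-check of the table and the note above against the assertions that follow (an illustrative calculation, not part of the patch): "1234567" is 7 Latin-1 characters, so it fits a JSThinInlineString on both 32-bit (capacity 7) and 64-bit (capacity 15). Because a literal in the source is atomized, the expected sizes are s(24, 32), which is 8 bytes more than the 16/24-byte non-atom thin-inline sizes used by the nursery-aware constants this patch removes, exactly the atom hash-code overhead the note describes.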
 
 // Latin-1
-assertEq(tByteSize(""),                                               s(TA, TA));
-assertEq(tByteSize("1"),                                              s(TA, TA));
-assertEq(tByteSize("1234567"),                                        s(TN, TN));
-assertEq(tByteSize("12345678"),                                       s(FN, TN));
-assertEq(tByteSize("123456789.12345"),                                s(FN, TN));
-assertEq(tByteSize("123456789.123456"),                               s(FN, FN));
-assertEq(tByteSize("123456789.123456789.123"),                        s(FN, FN));
-assertEq(tByteSize("123456789.123456789.1234"),                       s(XN+32, XN+32));
-assertEq(tByteSize("123456789.123456789.123456789.1"),                s(XN+32, XN+32));
-assertEq(tByteSize("123456789.123456789.123456789.12"),               s(XN+64, XN+64));
-
-assertEq(nByteSize(""),                                               s(TA, TA));
-assertEq(nByteSize("1"),                                              s(TA, TA));
-assertEq(nByteSize("1234567"),                                        s(Nursery(TN), Nursery(TN)));
-assertEq(nByteSize("12345678"),                                       s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("123456789.12345"),                                s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("123456789.123456"),                               s(Nursery(FN), Nursery(FN)));
-assertEq(nByteSize("123456789.123456789.123"),                        s(Nursery(FN), Nursery(FN)));
-assertEq(nByteSize("123456789.123456789.1234"),                       s(Nursery(XN)+32,Nursery(XN)+32));
-assertEq(nByteSize("123456789.123456789.123456789.1"),                s(Nursery(XN)+32,Nursery(XN)+32));
-assertEq(nByteSize("123456789.123456789.123456789.12"),               s(Nursery(XN)+64,Nursery(XN)+64));
+assertEq(tByteSize(""),                                                 s(24, 32)); // T, T
+assertEq(tByteSize("1"),                                                s(24, 32)); // T, T
+assertEq(tByteSize("1234567"),                                          s(24, 32)); // T, T
+assertEq(tByteSize("12345678"),                                         s(40, 32)); // F, T
+assertEq(tByteSize("123456789.12345"),                                  s(40, 32)); // F, T
+assertEq(tByteSize("123456789.123456"),                                 s(40, 40)); // F, F
+assertEq(tByteSize("123456789.123456789.123"),                          s(40, 40)); // F, F
+assertEq(tByteSize("123456789.123456789.1234"),                         s(56, 64)); // X, X
+assertEq(tByteSize("123456789.123456789.123456789.1"),                  s(56, 64)); // X, X
+assertEq(tByteSize("123456789.123456789.123456789.12"),                 s(72, 80)); // X, X
 
 // Inline char16_t atoms.
 // "Impassionate gods have never seen the red that is the Tatsuta River."
 //   - Ariwara no Narihira
-assertEq(tByteSize("千"),						s(TA, TA));
-assertEq(tByteSize("千早"),						s(TN, TN));
-assertEq(tByteSize("千早ぶ"),						s(TN, TN));
-assertEq(tByteSize("千早ぶる"),						s(FN, TN));
-assertEq(tByteSize("千早ぶる神"),						s(FN, TN));
-assertEq(tByteSize("千早ぶる神代"),					s(FN, TN));
-assertEq(tByteSize("千早ぶる神代も"),					s(FN, TN));
-assertEq(tByteSize("千早ぶる神代もき"),					s(FN, FN));
-assertEq(tByteSize("千早ぶる神代もきかず龍"),				s(FN, FN));
-assertEq(tByteSize("千早ぶる神代もきかず龍田"),				s(XN+32, XN+32));
-assertEq(tByteSize("千早ぶる神代もきかず龍田川 か"),				s(XN+32, XN+32));
-assertEq(tByteSize("千早ぶる神代もきかず龍田川 から"),			s(XN+64, XN+64));
-assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"),		s(XN+64, XN+64));
-assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"),		s(XN+64, XN+64));
-assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"),	s(XN+64, XN+64));
-
-assertEq(nByteSize("千"),						s(TA, TA));
-assertEq(nByteSize("千早"),						s(Nursery(TN), Nursery(TN)));
-assertEq(nByteSize("千早ぶ"),						s(Nursery(TN), Nursery(TN)));
-assertEq(nByteSize("千早ぶる"),						s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("千早ぶる神"),						s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("千早ぶる神代"),					s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("千早ぶる神代も"),					s(Nursery(FN), Nursery(TN)));
-assertEq(nByteSize("千早ぶる神代もき"),					s(Nursery(FN), Nursery(FN)));
-assertEq(nByteSize("千早ぶる神代もきかず龍"),				s(Nursery(FN), Nursery(FN)));
-assertEq(nByteSize("千早ぶる神代もきかず龍田"),				s(Nursery(XN)+32, Nursery(XN)+32));
-assertEq(nByteSize("千早ぶる神代もきかず龍田川 か"),				s(Nursery(XN)+32, Nursery(XN)+32));
-assertEq(nByteSize("千早ぶる神代もきかず龍田川 から"),			s(Nursery(XN)+64, Nursery(XN)+64));
-assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"),		s(Nursery(XN)+64, Nursery(XN)+64));
-assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"),		s(Nursery(XN)+64, Nursery(XN)+64));
-assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"),	s(Nursery(XN)+64, Nursery(XN)+64));
+assertEq(tByteSize("千"),						s(24, 32)); // T, T
+assertEq(tByteSize("千早"),    						s(24, 32)); // T, T
+assertEq(tByteSize("千早ぶ"),    					s(24, 32)); // T, T
+assertEq(tByteSize("千早ぶる"),    					s(40, 32)); // F, T
+assertEq(tByteSize("千早ぶる神"),    					s(40, 32)); // F, T
+assertEq(tByteSize("千早ぶる神代"),					s(40, 32)); // F, T
+assertEq(tByteSize("千早ぶる神代も"),					s(40, 32)); // F, T
+assertEq(tByteSize("千早ぶる神代もき"),					s(40, 40)); // F, F
+assertEq(tByteSize("千早ぶる神代もきかず龍"),				s(40, 40)); // F, F
+assertEq(tByteSize("千早ぶる神代もきかず龍田"),    			s(56, 64)); // X, X
+assertEq(tByteSize("千早ぶる神代もきかず龍田川 か"),    			s(56, 64)); // X, X
+assertEq(tByteSize("千早ぶる神代もきかず龍田川 から"),    			s(72, 80)); // X, X
+assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"),    	s(72, 80)); // X, X
+assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"),    	s(88, 96)); // X, X
+assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"),	s(88, 96)); // X, X
 
 // A Latin-1 rope. This changes size when flattened.
 // "In a village of La Mancha, the name of which I have no desire to call to mind"
 //   - Miguel de Cervantes, Don Quixote
 var fragment8 = "En un lugar de la Mancha, de cuyo nombre no quiero acordarme"; // 60 characters
 var rope8 = fragment8;
 for (var i = 0; i < 10; i++) // 1024 repetitions
   rope8 = rope8 + rope8;
-
-assertEq(byteSize(rope8),                                               s(Nursery(RN), Nursery(RN)));
-minorgc();
-assertEq(byteSize(rope8),                                               s(RN, RN));
+assertEq(tByteSize(rope8),                                              s(16, 24));
 var matches8 = rope8.match(/(de cuyo nombre no quiero acordarme)/);
-assertEq(byteSize(rope8),                                               s(XN + 65536, XN + 65536));
+assertEq(tByteSize(rope8),                                              s(16 + 65536,  24 + 65536));
 
 // Test extensible strings.
 //
 // Appending another copy of the fragment should yield another rope.
 //
 // Flattening that should turn the original rope into a dependent string, and
-// yield a new linear string, of the same size as the original.
+// yield a new linear string, of the some size as the original.
 rope8a = rope8 + fragment8;
-assertEq(byteSize(rope8a),                                              s(Nursery(RN), Nursery(RN)));
+assertEq(tByteSize(rope8a),                                             s(16, 24));
 rope8a.match(/x/, function() { assertEq(true, false); });
-assertEq(byteSize(rope8a),                                              s(Nursery(XN) + 65536, Nursery(XN) + 65536));
-assertEq(byteSize(rope8),                                               s(RN, RN));
+assertEq(tByteSize(rope8a),                                             s(16 + 65536,  24 + 65536));
+assertEq(tByteSize(rope8),                                              s(16, 24));
 
 
 // A char16_t rope. This changes size when flattened.
 // "From the Heliconian Muses let us begin to sing"
 //   --- Hesiod, Theogony
 var fragment16 = "μουσάων Ἑλικωνιάδων ἀρχώμεθ᾽ ἀείδειν";
 var rope16 = fragment16;
 for (var i = 0; i < 10; i++) // 1024 repetitions
   rope16 = rope16 + rope16;
-assertEq(byteSize(rope16),                                              s(Nursery(RN), Nursery(RN)));
+assertEq(tByteSize(rope16),                                     s(16,  24));
 let matches16 = rope16.match(/(Ἑλικωνιάδων ἀρχώμεθ᾽)/);
-assertEq(byteSize(rope16),                                              s(Nursery(RN) + 131072, Nursery(RN) + 131072));
+assertEq(tByteSize(rope16),                                     s(16 + 131072,  24 + 131072));
 
 // Latin-1 and char16_t dependent strings.
-assertEq(byteSize(rope8.substr(1000, 2000)),                            s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(rope16.substr(1000, 2000)),                           s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches8[0]),                                         s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches8[1]),                                         s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches16[0]),                                        s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches16[1]),                                        s(Nursery(DN), Nursery(DN)));
+assertEq(tByteSize(rope8.substr(1000, 2000)),                   s(16,  24));
+assertEq(tByteSize(rope16.substr(1000, 2000)),                  s(16,  24));
+assertEq(tByteSize(matches8[0]),                                s(16,  24));
+assertEq(tByteSize(matches8[1]),                                s(16,  24));
+assertEq(tByteSize(matches16[0]),                               s(16,  24));
+assertEq(tByteSize(matches16[1]),                               s(16,  24));
 
 // Test extensible strings.
 //
 // Appending another copy of the fragment should yield another rope.
 //
 // Flattening that should turn the original rope into a dependent string, and
 // yield a new linear string, of the same size as the original.
 rope16a = rope16 + fragment16;
-assertEq(byteSize(rope16a),                                             s(Nursery(RN), Nursery(RN)));
+assertEq(tByteSize(rope16a),                                    s(16, 24));
 rope16a.match(/x/, function() { assertEq(true, false); });
-assertEq(byteSize(rope16a),                                             s(Nursery(XN) + 131072, Nursery(XN) + 131072));
-assertEq(byteSize(rope16),                                              s(Nursery(XN), Nursery(XN)));
+assertEq(tByteSize(rope16a),                                    s(16 + 131072,  24 + 131072));
+assertEq(tByteSize(rope16),                                     s(16, 24));
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -351,18 +351,18 @@ BaselineCacheIRCompiler::emitGuardSpecif
 
     Address atomAddr(stubAddress(reader.stubOffset()));
 
     Label done;
     masm.branchPtr(Assembler::Equal, atomAddr, str, &done);
 
     // The pointers are not equal, so if the input string is also an atom it
     // must be a different string.
-    masm.branchTest32(Assembler::Zero, Address(str, JSString::offsetOfFlags()),
-                      Imm32(JSString::NON_ATOM_BIT), failure->label());
+    masm.branchTest32(Assembler::NonZero, Address(str, JSString::offsetOfFlags()),
+                      Imm32(JSString::ATOM_BIT), failure->label());
 
     // Check the length.
     masm.loadPtr(atomAddr, scratch);
     masm.loadStringLength(scratch, scratch);
     masm.branch32(Assembler::NotEqual, Address(str, JSString::offsetOfLength()),
                   scratch, failure->label());
 
     // We have a non-atomized string with the same length. Call a helper
@@ -1261,18 +1261,19 @@ BaselineCacheIRCompiler::emitStoreTypedO
     }
 
     // Compute the address being written to.
     LoadTypedThingData(masm, layout, obj, scratch1);
     masm.addPtr(offsetAddr, scratch1);
     Address dest(scratch1, 0);
 
     emitStoreTypedObjectReferenceProp(val, type, dest, scratch2);
-    emitPostBarrierSlot(obj, val, scratch1);
-
+
+    if (type != ReferenceTypeDescr::TYPE_STRING)
+        emitPostBarrierSlot(obj, val, scratch1);
     return true;
 }
 
 bool
 BaselineCacheIRCompiler::emitStoreTypedObjectScalarProperty()
 {
     Register obj = allocator.useRegister(masm, reader.objOperandId());
     Address offsetAddr = stubAddress(reader.stubOffset());
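The emitGuardSpecificAtom hunk above relies on atom canonicalisation: equal atoms are always the same pointer, so if the input is itself an atom and the pointers differ, the strings must differ. A plain C++ sketch of that decision (toy string type; the real code then falls through to a character-comparison helper, and the flag it tests is the NON_ATOM_BIT/ATOM_BIT difference shown in the diff):

#include <cstring>

struct Str {
    bool isAtom;
    size_t length;
    const char* chars;
};

// Guard that |str| equals the known atom |expected|.
bool guardSpecificAtom(const Str& str, const Str& expected) {
    if (&str == &expected)
        return true;                 // same pointer: trivially equal
    if (str.isAtom)
        return false;                // two distinct atoms are never equal
    if (str.length != expected.length)
        return false;
    // A non-atom of the same length: compare the characters.
    return std::memcmp(str.chars, expected.chars, str.length) == 0;
}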
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -2787,17 +2787,17 @@ BaselineCompiler::emit_JSOP_SETALIASEDVA
     frame.push(R0);
 
     // Only R0 is live at this point.
     // Scope coordinate object is already in R2.scratchReg().
     Register temp = R1.scratchReg();
 
     Label skipBarrier;
     masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
-    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
+    masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
 
     masm.call(&postBarrierSlot_); // Won't clobber R0
 
     masm.bind(&skipBarrier);
     return true;
 }
 
 bool
@@ -3211,17 +3211,17 @@ BaselineCompiler::emitFormalArgAccess(ui
 
         // Reload the arguments object
         Register reg = R2.scratchReg();
         masm.loadPtr(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfArgsObj()), reg);
 
         Label skipBarrier;
 
         masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
-        masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
+        masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
 
         masm.call(&postBarrierSlot_);
 
         masm.bind(&skipBarrier);
     }
 
     masm.bind(&done);
     return true;
--- a/js/src/jit/CacheIRCompiler.cpp
+++ b/js/src/jit/CacheIRCompiler.cpp
@@ -1830,17 +1830,17 @@ CacheIRCompiler::emitLoadStringCharResul
 
     FailurePath* failure;
     if (!addFailurePath(&failure))
         return false;
 
     // Bounds check, load string char.
     masm.branch32(Assembler::BelowOrEqual, Address(str, JSString::offsetOfLength()),
                   index, failure->label());
-    masm.loadStringChar(str, index, scratch2, scratch1, failure->label());
+    masm.loadStringChar(str, index, scratch1, failure->label());
 
     // Load StaticString for this char.
     masm.branch32(Assembler::AboveOrEqual, scratch1, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                   failure->label());
     masm.movePtr(ImmPtr(&cx_->staticStrings().unitStaticTable), scratch2);
     masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch2);
 
     EmitStoreResult(masm, scratch2, JSVAL_TYPE_STRING, output);
@@ -2334,18 +2334,16 @@ CacheIRCompiler::emitBreakpoint()
     masm.breakpoint();
     return true;
 }
 
 void
 CacheIRCompiler::emitStoreTypedObjectReferenceProp(ValueOperand val, ReferenceTypeDescr::Type type,
                                                    const Address& dest, Register scratch)
 {
-    // Callers will post-barrier this store.
-
     switch (type) {
       case ReferenceTypeDescr::TYPE_ANY:
         EmitPreBarrier(masm, dest, MIRType::Value);
         masm.storeValue(val, dest);
         break;
 
       case ReferenceTypeDescr::TYPE_OBJECT: {
         EmitPreBarrier(masm, dest, MIRType::Object);
@@ -2388,29 +2386,28 @@ CacheIRCompiler::emitRegisterEnumerator(
 void
 CacheIRCompiler::emitPostBarrierShared(Register obj, const ConstantOrRegister& val,
                                        Register scratch, Register maybeIndex)
 {
     if (!cx_->nursery().exists())
         return;
 
     if (val.constant()) {
-        MOZ_ASSERT_IF(val.value().isGCThing(), !IsInsideNursery(val.value().toGCThing()));
+        MOZ_ASSERT_IF(val.value().isObject(), !IsInsideNursery(&val.value().toObject()));
         return;
     }
 
     TypedOrValueRegister reg = val.reg();
-    if (reg.hasTyped()) {
-        if (reg.type() != MIRType::Object && reg.type() != MIRType::String)
-            return;
-    }
+    if (reg.hasTyped() && reg.type() != MIRType::Object)
+        return;
 
     Label skipBarrier;
     if (reg.hasValue()) {
-        masm.branchValueIsNurseryCell(Assembler::NotEqual, reg.valueReg(), scratch, &skipBarrier);
+        masm.branchValueIsNurseryObject(Assembler::NotEqual, reg.valueReg(), scratch,
+                                        &skipBarrier);
     } else {
         masm.branchPtrInNurseryChunk(Assembler::NotEqual, reg.typedReg().gpr(), scratch,
                                      &skipBarrier);
     }
     masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);
 
     // Call one of these, depending on maybeIndex:
     //
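emitPostBarrierShared above emits the generational post-write-barrier check in JIT code: an edge only has to be remembered when a tenured object ends up pointing at a nursery thing. A standalone sketch of that predicate in plain C++ (toy types; the real code also special-cases constants and typed registers, as shown above, and records edges in the store buffer rather than bare cells):

#include <vector>

struct Cell { bool inNursery = false; };

// Remembered set of tenured cells that may now hold nursery pointers; it is
// scanned as extra roots at the start of the next minor GC.
static std::vector<Cell*> storeBuffer;

// Post-write barrier for "obj->field = val": only tenured -> nursery edges
// need recording; a nursery obj is traced anyway and a tenured val needs no
// extra root.
void postWriteBarrier(Cell* obj, Cell* val) {
    if (!val || !val->inNursery)
        return;
    if (obj->inNursery)
        return;
    storeBuffer.push_back(obj);
}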
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -1188,93 +1188,16 @@ CodeGenerator::visitValueToObjectOrNull(
     masm.branchTestNull(Assembler::NotEqual, input, ool->entry());
 
     masm.bind(&done);
     masm.unboxNonDouble(input, output);
 
     masm.bind(ool->rejoin());
 }
 
-static void
-EmitStoreBufferMutation(MacroAssembler& masm, Register strbase, int32_t strofs,
-                        Register buffer,
-                        LiveGeneralRegisterSet& liveVolatiles,
-                        void (*fun)(js::gc::StoreBuffer*, js::gc::Cell**))
-{
-    Label callVM;
-    Label exit;
-
-    // Call into the VM to barrier the write. The only registers that need to
-    // be preserved are those in liveVolatiles, so once they are saved on the
-    // stack all volatile registers are available for use.
-    masm.bind(&callVM);
-    masm.PushRegsInMask(liveVolatiles);
-
-    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
-    regs.takeUnchecked(buffer);
-    regs.takeUnchecked(strbase);
-    Register addrReg = regs.takeAny();
-    masm.computeEffectiveAddress(Address(strbase, strofs), addrReg);
-
-    bool needExtraReg = !regs.hasAny<GeneralRegisterSet::DefaultType>();
-    if (needExtraReg) {
-        masm.push(strbase);
-        masm.setupUnalignedABICall(strbase);
-    } else {
-        masm.setupUnalignedABICall(regs.takeAny());
-    }
-    masm.passABIArg(buffer);
-    masm.passABIArg(addrReg);
-    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, fun), MoveOp::GENERAL,
-                     CheckUnsafeCallWithABI::DontCheckOther);
-
-    if (needExtraReg)
-        masm.pop(strbase);
-    masm.PopRegsInMask(liveVolatiles);
-    masm.bind(&exit);
-}
-
-// Warning: this function modifies prev and next.
-static void
-EmitPostWriteBarrierS(MacroAssembler& masm,
-                      Register strbase, int32_t strofs,
-                      Register prev, Register next,
-                      LiveGeneralRegisterSet& liveVolatiles)
-{
-    Label exit;
-    Label checkRemove, putCell;
-
-    // if (next && (buffer = next->storeBuffer()))
-    // but we never pass in nullptr for next.
-    Register storebuffer = next;
-    masm.loadStoreBuffer(next, storebuffer);
-    masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &checkRemove);
-
-    // if (prev && prev->storeBuffer())
-    masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &putCell);
-    masm.loadStoreBuffer(prev, prev);
-    masm.branchPtr(Assembler::NotEqual, prev, ImmWord(0), &exit);
-
-    // buffer->putCell(cellp)
-    masm.bind(&putCell);
-    EmitStoreBufferMutation(masm, strbase, strofs, storebuffer, liveVolatiles,
-                            JSString::addCellAddressToStoreBuffer);
-    masm.jump(&exit);
-
-    // if (prev && (buffer = prev->storeBuffer()))
-    masm.bind(&checkRemove);
-    masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &exit);
-    masm.loadStoreBuffer(prev, storebuffer);
-    masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &exit);
-    EmitStoreBufferMutation(masm, strbase, strofs, storebuffer, liveVolatiles,
-                            JSString::removeCellAddressFromStoreBuffer);
-
-    masm.bind(&exit);
-}
-
 typedef JSObject* (*CloneRegExpObjectFn)(JSContext*, Handle<RegExpObject*>);
 static const VMFunction CloneRegExpObjectInfo =
     FunctionInfo<CloneRegExpObjectFn>(CloneRegExpObject, "CloneRegExpObject");
 
 void
 CodeGenerator::visitRegExp(LRegExp* lir)
 {
     Register output = ToRegister(lir->output());
@@ -1507,32 +1430,18 @@ PrepareAndExecuteRegExp(JSContext* cx, M
     Address matchesInputAddress(temp1, RegExpStatics::offsetOfMatchesInput());
     Address lazySourceAddress(temp1, RegExpStatics::offsetOfLazySource());
     Address lazyIndexAddress(temp1, RegExpStatics::offsetOfLazyIndex());
 
     masm.guardedCallPreBarrier(pendingInputAddress, MIRType::String);
     masm.guardedCallPreBarrier(matchesInputAddress, MIRType::String);
     masm.guardedCallPreBarrier(lazySourceAddress, MIRType::String);
 
-    if (temp1.volatile_())
-        volatileRegs.add(temp1);
-
-    // Writing into RegExpStatics tenured memory; must post-barrier.
-    masm.loadPtr(pendingInputAddress, temp2);
     masm.storePtr(input, pendingInputAddress);
-    masm.movePtr(input, temp3);
-    EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfPendingInput(),
-                          temp2 /* prev */, temp3 /* next */, volatileRegs);
-
-    masm.loadPtr(matchesInputAddress, temp2);
     masm.storePtr(input, matchesInputAddress);
-    masm.movePtr(input, temp3);
-    EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfMatchesInput(),
-                          temp2 /* prev */, temp3 /* next */, volatileRegs);
-
     masm.storePtr(lastIndex, Address(temp1, RegExpStatics::offsetOfLazyIndex()));
     masm.store32(Imm32(1), Address(temp1, RegExpStatics::offsetOfPendingLazyEvaluation()));
 
     masm.loadPtr(Address(regexp, NativeObject::getFixedSlotOffset(RegExpObject::PRIVATE_SLOT)), temp2);
     masm.loadPtr(Address(temp2, RegExpShared::offsetOfSource()), temp3);
     masm.storePtr(temp3, lazySourceAddress);
     masm.load32(Address(temp2, RegExpShared::offsetOfFlags()), temp3);
     masm.store32(temp3, Address(temp1, RegExpStatics::offsetOfLazyFlags()));
@@ -1565,29 +1474,27 @@ class CreateDependentString
 public:
     // Generate code that creates DependentString.
     // Caller should call generateFallback after masm.ret(), to generate
     // fallback path.
     void generate(MacroAssembler& masm, const JSAtomState& names,
                   bool latin1, Register string,
                   Register base, Register temp1, Register temp2,
                   BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
-                  bool stringsCanBeInNursery,
                   Label* failure);
 
     // Generate fallback path for creating DependentString.
     void generateFallback(MacroAssembler& masm, LiveRegisterSet regsToSave);
 };
 
 void
 CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
                                 bool latin1, Register string,
                                 Register base, Register temp1, Register temp2,
                                 BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
-                                bool stringsCanBeInNursery,
                                 Label* failure)
 {
     string_ = string;
     temp_ = temp2;
     failure_ = failure;
 
     // Compute the string length.
     masm.load32(startIndexAddress, temp2);
@@ -1615,25 +1522,25 @@ CreateDependentString::generate(MacroAss
         Label stringAllocated, fatInline;
 
         int32_t maxThinInlineLength = latin1
                                       ? (int32_t) JSThinInlineString::MAX_LENGTH_LATIN1
                                       : (int32_t) JSThinInlineString::MAX_LENGTH_TWO_BYTE;
         masm.branch32(Assembler::Above, temp1, Imm32(maxThinInlineLength), &fatInline);
 
         int32_t thinFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_THIN_INLINE_FLAGS;
-        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::InlineString], stringsCanBeInNursery);
+        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::InlineString]);
         masm.bind(&joins_[FallbackKind::InlineString]);
         masm.store32(Imm32(thinFlags), Address(string, JSString::offsetOfFlags()));
         masm.jump(&stringAllocated);
 
         masm.bind(&fatInline);
 
         int32_t fatFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_FAT_INLINE_FLAGS;
-        masm.newGCFatInlineString(string, temp2, &fallbacks_[FallbackKind::FatInlineString], stringsCanBeInNursery);
+        masm.newGCFatInlineString(string, temp2, &fallbacks_[FallbackKind::FatInlineString]);
         masm.bind(&joins_[FallbackKind::FatInlineString]);
         masm.store32(Imm32(fatFlags), Address(string, JSString::offsetOfFlags()));
 
         masm.bind(&stringAllocated);
         masm.store32(temp1, Address(string, JSString::offsetOfLength()));
 
         masm.push(string);
         masm.push(base);
@@ -1668,72 +1575,58 @@ CreateDependentString::generate(MacroAss
 
     masm.jump(&done);
     masm.bind(&notInline);
 
     {
         // Make a dependent string.
         int32_t flags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::DEPENDENT_FLAGS;
 
-        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::NotInlineString], stringsCanBeInNursery);
-        // Warning: string may be tenured (if the fallback case is hit), so
-        // stores into it must be post barriered.
+        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::NotInlineString]);
         masm.bind(&joins_[FallbackKind::NotInlineString]);
         masm.store32(Imm32(flags), Address(string, JSString::offsetOfFlags()));
         masm.store32(temp1, Address(string, JSString::offsetOfLength()));
 
         masm.loadPtr(Address(base, JSString::offsetOfNonInlineChars()), temp1);
         masm.load32(startIndexAddress, temp2);
         if (latin1)
             masm.addPtr(temp2, temp1);
         else
             masm.computeEffectiveAddress(BaseIndex(temp1, temp2, TimesTwo), temp1);
         masm.storePtr(temp1, Address(string, JSString::offsetOfNonInlineChars()));
         masm.storePtr(base, Address(string, JSDependentString::offsetOfBase()));
-        masm.movePtr(base, temp1);
 
         // Follow any base pointer if the input is itself a dependent string.
         // Watch for undepended strings, which have a base pointer but don't
         // actually share their characters with it.
         Label noBase;
         masm.branchTest32(Assembler::Zero, Address(base, JSString::offsetOfFlags()),
                           Imm32(JSString::HAS_BASE_BIT), &noBase);
         masm.branchTest32(Assembler::NonZero, Address(base, JSString::offsetOfFlags()),
                           Imm32(JSString::FLAT_BIT), &noBase);
         masm.loadPtr(Address(base, JSDependentString::offsetOfBase()), temp1);
         masm.storePtr(temp1, Address(string, JSDependentString::offsetOfBase()));
         masm.bind(&noBase);
-
-        // Post-barrier the base store, whether it was the direct or indirect
-        // base (both will end up in temp1 here).
-        masm.movePtr(ImmWord(0), temp2);
-        LiveGeneralRegisterSet saveRegs(GeneralRegisterSet::Volatile());
-        if (temp1.volatile_())
-            saveRegs.takeUnchecked(temp1);
-        if (temp2.volatile_())
-            saveRegs.takeUnchecked(temp1);
-        EmitPostWriteBarrierS(masm, string, JSDependentString::offsetOfBase(),
-                              temp2 /* prev */, temp1 /* next */, saveRegs);
     }
 
     masm.bind(&done);
 }
 
 static void*
 AllocateString(JSContext* cx)
 {
     AutoUnsafeCallWithABI unsafe;
-    return js::Allocate<JSString, NoGC>(cx, js::gc::TenuredHeap);
+    return js::Allocate<JSString, NoGC>(cx);
 }
 
 static void*
 AllocateFatInlineString(JSContext* cx)
 {
     AutoUnsafeCallWithABI unsafe;
-    return js::Allocate<JSFatInlineString, NoGC>(cx, js::gc::TenuredHeap);
+    return js::Allocate<JSFatInlineString, NoGC>(cx);
 }
 
 void
 CreateDependentString::generateFallback(MacroAssembler& masm, LiveRegisterSet regsToSave)
 {
     regsToSave.take(string_);
     regsToSave.take(temp_);
     for (FallbackKind kind : mozilla::MakeEnumeratedRange(FallbackKind::Count)) {
@@ -1929,22 +1822,20 @@ JitCompartment::generateRegExpMatcherStu
 
             Label matchLoop;
             masm.bind(&matchLoop);
 
             Label isUndefined, storeDone;
             masm.branch32(Assembler::LessThan, stringIndexAddress, Imm32(0), &isUndefined);
 
             depStr[isLatin].generate(masm, cx->names(), isLatin, temp3, input, temp4, temp5,
-                                     stringIndexAddress, stringLimitAddress,
-                                     stringsCanBeInNursery,
-                                     failure);
+                                     stringIndexAddress, stringLimitAddress, failure);
 
             masm.storeValue(JSVAL_TYPE_STRING, temp3, stringAddress);
-            // Storing into nursery-allocated results object's elements; no post barrier.
+
             masm.jump(&storeDone);
             masm.bind(&isUndefined);
 
             masm.storeValue(UndefinedValue(), stringAddress);
             masm.bind(&storeDone);
 
             masm.add32(Imm32(1), matchIndex);
             masm.branch32(Assembler::LessThanOrEqual, pairCountAddress, matchIndex, &done);
@@ -1979,19 +1870,17 @@ JitCompartment::generateRegExpMatcherStu
     masm.loadPtr(Address(object, NativeObject::offsetOfSlots()), temp2);
 
     MOZ_ASSERT(templateObject->numFixedSlots() == 0);
     MOZ_ASSERT(templateObject->lookupPure(cx->names().index)->slot() == 0);
     MOZ_ASSERT(templateObject->lookupPure(cx->names().input)->slot() == 1);
 
     masm.load32(pairsVectorAddress, temp3);
     masm.storeValue(JSVAL_TYPE_INT32, temp3, Address(temp2, 0));
-    Address inputSlotAddress(temp2, sizeof(Value));
-    masm.storeValue(JSVAL_TYPE_STRING, input, inputSlotAddress);
-    // No post barrier needed (inputSlotAddress is within nursery object.)
+    masm.storeValue(JSVAL_TYPE_STRING, input, Address(temp2, sizeof(Value)));
 
     // All done!
     masm.tagValue(JSVAL_TYPE_OBJECT, object, result);
     masm.ret();
 
     masm.bind(&notFound);
     masm.moveValue(NullValue(), result);
     masm.ret();
@@ -2566,17 +2455,17 @@ CodeGenerator::visitGetFirstDollarIndex(
     Register output = ToRegister(ins->output());
     Register temp0 = ToRegister(ins->temp0());
     Register temp1 = ToRegister(ins->temp1());
     Register len = ToRegister(ins->temp2());
 
     OutOfLineCode* ool = oolCallVM(GetFirstDollarIndexRawInfo, ins, ArgList(str),
                                    StoreRegisterTo(output));
 
-    masm.branchIfRope(str, temp0, ool->entry());
+    masm.branchIfRope(str, ool->entry());
     masm.loadStringLength(str, len);
 
     Label isLatin1, done;
     masm.branchLatin1String(str, &isLatin1);
     {
         FindFirstDollarIndex(masm, str, len, temp0, temp1, output, /* isLatin1 = */ false);
     }
     masm.jump(&done);
@@ -2903,18 +2792,16 @@ CodeGenerator::emitLambdaInit(Register o
     u.s.flags = info.flags;
 
     static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
                   "the code below needs to be adapted");
     masm.store32(Imm32(u.word), Address(output, JSFunction::offsetOfNargs()));
     masm.storePtr(ImmGCPtr(info.scriptOrLazyScript),
                   Address(output, JSFunction::offsetOfScriptOrLazyScript()));
     masm.storePtr(envChain, Address(output, JSFunction::offsetOfEnvironment()));
-    // No post barrier needed because output is guaranteed to be allocated in
-    // the nursery.
     masm.storePtr(ImmGCPtr(info.fun->displayAtom()), Address(output, JSFunction::offsetOfAtom()));
 }
 
 typedef bool (*SetFunNameFn)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
 static const VMFunction SetFunNameInfo =
     FunctionInfo<SetFunNameFn>(js::SetFunctionNameIfNoOwnName, "SetFunName");
 
 void
@@ -3842,21 +3729,22 @@ class OutOfLineCallPostWriteBarrier : pu
         return lir_;
     }
     const LAllocation* object() const {
         return object_;
     }
 };
 
 static void
-EmitStoreBufferCheckForConstant(MacroAssembler& masm, const gc::TenuredCell* cell,
+EmitStoreBufferCheckForConstant(MacroAssembler& masm, JSObject* object,
                                 AllocatableGeneralRegisterSet& regs, Label* exit, Label* callVM)
 {
     Register temp = regs.takeAny();
 
+    const gc::TenuredCell* cell = &object->asTenured();
     gc::Arena* arena = cell->arena();
 
     Register cells = temp;
     masm.loadPtr(AbsoluteAddress(&arena->bufferedCells()), cells);
 
     size_t index = gc::ArenaCellSet::getCellIndex(cell);
     size_t word;
     uint32_t mask;
@@ -3884,17 +3772,17 @@ EmitPostWriteBarrier(MacroAssembler& mas
     MOZ_ASSERT_IF(isGlobal, maybeConstant);
 
     Label callVM;
     Label exit;
 
     // We already have a fast path to check whether a global is in the store
     // buffer.
     if (!isGlobal && maybeConstant)
-        EmitStoreBufferCheckForConstant(masm, &maybeConstant->asTenured(), regs, &exit, &callVM);
+        EmitStoreBufferCheckForConstant(masm, maybeConstant, regs, &exit, &callVM);
 
     // Call into the VM to barrier the write.
     masm.bind(&callVM);
 
     Register runtimereg = regs.takeAny();
     masm.mov(ImmPtr(runtime), runtimereg);
 
     void (*fun)(JSRuntime*, JSObject*) = isGlobal ? PostGlobalWriteBarrier : PostWriteBarrier;
@@ -3959,46 +3847,40 @@ CodeGenerator::maybeEmitGlobalBarrierChe
     if (!isGlobalObject(obj))
         return;
 
     JSCompartment* comp = obj->compartment();
     auto addr = AbsoluteAddress(&comp->globalWriteBarriered);
     masm.branch32(Assembler::NotEqual, addr, Imm32(0), ool->rejoin());
 }
 
-template <class LPostBarrierType, MIRType nurseryType>
-void
-CodeGenerator::visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode* ool)
+template <class LPostBarrierType>
+void
+CodeGenerator::visitPostWriteBarrierCommonO(LPostBarrierType* lir, OutOfLineCode* ool)
 {
     addOutOfLineCode(ool, lir->mir());
 
     Register temp = ToTempRegisterOrInvalid(lir->temp());
 
     if (lir->object()->isConstant()) {
-        // Constant nursery objects cannot appear here, see
-        // LIRGenerator::visitPostWriteElementBarrier.
+        // Constant nursery objects cannot appear here, see
+        // LIRGenerator::visitPostWriteElementBarrier.
         MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
     } else {
         masm.branchPtrInNurseryChunk(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     maybeEmitGlobalBarrierCheck(lir->object(), ool);
 
-    Register value = ToRegister(lir->value());
-    if (nurseryType == MIRType::Object) {
-        if (lir->mir()->value()->type() == MIRType::ObjectOrNull)
-            masm.branchTestPtr(Assembler::Zero, value, value, ool->rejoin());
-        else
-            MOZ_ASSERT(lir->mir()->value()->type() == MIRType::Object);
-    } else {
-        MOZ_ASSERT(nurseryType == MIRType::String);
-        MOZ_ASSERT(lir->mir()->value()->type() == MIRType::String);
-    }
-    masm.branchPtrInNurseryChunk(Assembler::Equal, value, temp, ool->entry());
+    Register valueObj = ToRegister(lir->value());
+    if (lir->mir()->value()->type() == MIRType::ObjectOrNull)
+        masm.branchTestPtr(Assembler::Zero, valueObj, valueObj, ool->rejoin());
+    else
+        MOZ_ASSERT(lir->mir()->value()->type() == MIRType::Object);
+    masm.branchPtrInNurseryChunk(Assembler::Equal, valueObj, temp, ool->entry());
 
     masm.bind(ool->rejoin());
 }
 
 template <class LPostBarrierType>
 void
 CodeGenerator::visitPostWriteBarrierCommonV(LPostBarrierType* lir, OutOfLineCode* ool)
 {
@@ -4012,35 +3894,26 @@ CodeGenerator::visitPostWriteBarrierComm
     } else {
         masm.branchPtrInNurseryChunk(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     maybeEmitGlobalBarrierCheck(lir->object(), ool);
 
     ValueOperand value = ToValue(lir, LPostBarrierType::Input);
-    // Bug 1386094 - most callers only need to check for object or string, not
-    // both.
-    masm.branchValueIsNurseryCell(Assembler::Equal, value, temp, ool->entry());
+    masm.branchValueIsNurseryObject(Assembler::Equal, value, temp, ool->entry());
 
     masm.bind(ool->rejoin());
 }
 
 void
 CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO* lir)
 {
     auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
-    visitPostWriteBarrierCommon<LPostWriteBarrierO, MIRType::Object>(lir, ool);
-}
-
-void
-CodeGenerator::visitPostWriteBarrierS(LPostWriteBarrierS* lir)
-{
-    auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
-    visitPostWriteBarrierCommon<LPostWriteBarrierS, MIRType::String>(lir, ool);
+    visitPostWriteBarrierCommonO(lir, ool);
 }
 
 void
 CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV* lir)
 {
     auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
     visitPostWriteBarrierCommonV(lir, ool);
 }
@@ -4110,24 +3983,17 @@ CodeGenerator::visitOutOfLineCallPostWri
 
     masm.jump(ool->rejoin());
 }
 
 void
 CodeGenerator::visitPostWriteElementBarrierO(LPostWriteElementBarrierO* lir)
 {
     auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
-    visitPostWriteBarrierCommon<LPostWriteElementBarrierO, MIRType::Object>(lir, ool);
-}
-
-void
-CodeGenerator::visitPostWriteElementBarrierS(LPostWriteElementBarrierS* lir)
-{
-    auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
-    visitPostWriteBarrierCommon<LPostWriteElementBarrierS, MIRType::String>(lir, ool);
+    visitPostWriteBarrierCommonO(lir, ool);
 }
 
 void
 CodeGenerator::visitPostWriteElementBarrierV(LPostWriteElementBarrierV* lir)
 {
     auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
     visitPostWriteBarrierCommonV(lir, ool);
 }
@@ -6818,47 +6684,52 @@ CodeGenerator::emitLoadIteratorValues<Va
     Address valueAddress(front, ValueMap::Entry::offsetOfValue());
     Address keyElemAddress(result, elementsOffset);
     Address valueElemAddress(result, elementsOffset + sizeof(Value));
     masm.guardedCallPreBarrier(keyElemAddress, MIRType::Value);
     masm.guardedCallPreBarrier(valueElemAddress, MIRType::Value);
     masm.storeValue(keyAddress, keyElemAddress, temp);
     masm.storeValue(valueAddress, valueElemAddress, temp);
 
-    Label emitBarrier, skipBarrier;
-    masm.branchValueIsNurseryCell(Assembler::Equal, keyAddress, temp, &emitBarrier);
-    masm.branchValueIsNurseryCell(Assembler::NotEqual, valueAddress, temp, &skipBarrier);
+    Label keyIsNotObject, valueIsNotNurseryObject, emitBarrier;
+    masm.branchTestObject(Assembler::NotEqual, keyAddress, &keyIsNotObject);
+    masm.branchValueIsNurseryObject(Assembler::Equal, keyAddress, temp, &emitBarrier);
+    masm.bind(&keyIsNotObject);
+    masm.branchTestObject(Assembler::NotEqual, valueAddress, &valueIsNotNurseryObject);
+    masm.branchValueIsNurseryObject(Assembler::NotEqual, valueAddress, temp,
+                                    &valueIsNotNurseryObject);
     {
         masm.bind(&emitBarrier);
         saveVolatile(temp);
         emitPostWriteBarrier(result);
         restoreVolatile(temp);
     }
-    masm.bind(&skipBarrier);
+    masm.bind(&valueIsNotNurseryObject);
 }
 
 template <>
 void
 CodeGenerator::emitLoadIteratorValues<ValueSet>(Register result, Register temp, Register front)
 {
     size_t elementsOffset = NativeObject::offsetOfFixedElements();
 
     Address keyAddress(front, ValueSet::offsetOfEntryKey());
     Address keyElemAddress(result, elementsOffset);
     masm.guardedCallPreBarrier(keyElemAddress, MIRType::Value);
     masm.storeValue(keyAddress, keyElemAddress, temp);
 
-    Label skipBarrier;
-    masm.branchValueIsNurseryCell(Assembler::NotEqual, keyAddress, temp, &skipBarrier);
+    Label keyIsNotObject;
+    masm.branchTestObject(Assembler::NotEqual, keyAddress, &keyIsNotObject);
+    masm.branchValueIsNurseryObject(Assembler::NotEqual, keyAddress, temp, &keyIsNotObject);
     {
         saveVolatile(temp);
         emitPostWriteBarrier(result);
         restoreVolatile(temp);
     }
-    masm.bind(&skipBarrier);
+    masm.bind(&keyIsNotObject);
 }
 
 template <class IteratorObject, class OrderedHashTable>
 void
 CodeGenerator::emitGetNextEntryForIterator(LGetNextEntryForIterator* lir)
 {
     Register iter = ToRegister(lir->iter());
     Register result = ToRegister(lir->result());
@@ -7770,48 +7641,47 @@ CopyStringCharsMaybeInflate(MacroAssembl
         CopyStringChars(masm, destChars, input, temp1, temp2, sizeof(char), sizeof(char16_t));
     }
     masm.bind(&done);
 }
 
 static void
 ConcatInlineString(MacroAssembler& masm, Register lhs, Register rhs, Register output,
                    Register temp1, Register temp2, Register temp3,
-                   bool stringsCanBeInNursery,
                    Label* failure, Label* failurePopTemps, bool isTwoByte)
 {
     // State: result length in temp2.
 
     // Ensure both strings are linear.
-    masm.branchIfRope(lhs, temp1, failure);
-    masm.branchIfRope(rhs, temp1, failure);
+    masm.branchIfRope(lhs, failure);
+    masm.branchIfRope(rhs, failure);
 
     // Allocate a JSThinInlineString or JSFatInlineString.
     size_t maxThinInlineLength;
     if (isTwoByte)
         maxThinInlineLength = JSThinInlineString::MAX_LENGTH_TWO_BYTE;
     else
         maxThinInlineLength = JSThinInlineString::MAX_LENGTH_LATIN1;
 
     Label isFat, allocDone;
     masm.branch32(Assembler::Above, temp2, Imm32(maxThinInlineLength), &isFat);
     {
         uint32_t flags = JSString::INIT_THIN_INLINE_FLAGS;
         if (!isTwoByte)
             flags |= JSString::LATIN1_CHARS_BIT;
-        masm.newGCString(output, temp1, failure, stringsCanBeInNursery);
+        masm.newGCString(output, temp1, failure);
         masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
         masm.jump(&allocDone);
     }
     masm.bind(&isFat);
     {
         uint32_t flags = JSString::INIT_FAT_INLINE_FLAGS;
         if (!isTwoByte)
             flags |= JSString::LATIN1_CHARS_BIT;
-        masm.newGCFatInlineString(output, temp1, failure, stringsCanBeInNursery);
+        masm.newGCFatInlineString(output, temp1, failure);
         masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
     }
     masm.bind(&allocDone);
 
     // Store length.
     masm.store32(temp2, Address(output, JSString::offsetOfLength()));
 
     // Load chars pointer in temp2.
@@ -7887,17 +7757,17 @@ CodeGenerator::visitSubstr(LSubstr* lir)
     masm.jump(done);
 
     // Use slow path for ropes.
     masm.bind(&nonZero);
     masm.branchIfRopeOrExternal(string, temp, slowPath);
 
     // Handle inlined strings by creating a FatInlineString.
     masm.branchTest32(Assembler::Zero, stringFlags, Imm32(JSString::INLINE_CHARS_BIT), &notInline);
-    masm.newGCFatInlineString(output, temp, slowPath, stringsCanBeInNursery());
+    masm.newGCFatInlineString(output, temp, slowPath);
     masm.store32(length, Address(output, JSString::offsetOfLength()));
     Address stringStorage(string, JSInlineString::offsetOfInlineStorage());
     Address outputStorage(output, JSInlineString::offsetOfInlineStorage());
 
     masm.branchLatin1String(string, &isInlinedLatin1);
     {
         masm.store32(Imm32(JSString::INIT_FAT_INLINE_FLAGS),
                      Address(output, JSString::offsetOfFlags()));
@@ -7929,17 +7799,17 @@ CodeGenerator::visitSubstr(LSubstr* lir)
         masm.store8(Imm32(0), Address(temp, 0));
         if (temp2 == string)
             masm.pop(string);
         masm.jump(done);
     }
 
     // Handle other cases with a DependentString.
     masm.bind(&notInline);
-    masm.newGCString(output, temp, slowPath, gen->stringsCanBeInNursery());
+    masm.newGCString(output, temp, slowPath);
     masm.store32(length, Address(output, JSString::offsetOfLength()));
     masm.storePtr(string, Address(output, JSDependentString::offsetOfBase()));
 
     masm.branchLatin1String(string, &isLatin1);
     {
         masm.store32(Imm32(JSString::DEPENDENT_FLAGS), Address(output, JSString::offsetOfFlags()));
         masm.loadPtr(Address(string, JSString::offsetOfNonInlineChars()), temp);
         BaseIndex chars(temp, begin, ScaleFromElemWidth(sizeof(char16_t)));
@@ -8009,26 +7879,24 @@ JitCompartment::generateStringConcatStub
     }
     masm.bind(&notInline);
 
     // Keep AND'ed flags in temp1.
 
     // Ensure result length <= JSString::MAX_LENGTH.
     masm.branch32(Assembler::Above, temp2, Imm32(JSString::MAX_LENGTH), &failure);
 
-    // Allocate a new rope, guaranteed to be in the nursery.
-    masm.newGCString(output, temp3, &failure, stringsCanBeInNursery);
+    // Allocate a new rope.
+    masm.newGCString(output, temp3, &failure);
 
     // Store rope length and flags. temp1 still holds the result of AND'ing the
-    // lhs and rhs flags, so we just have to clear the other flags and set
-    // NON_ATOM_BIT to get our rope flags (Latin1 if both lhs and rhs are
-    // Latin1).
-    static_assert(JSString::ROPE_FLAGS == JSString::NON_ATOM_BIT, "Rope flags must be NON_ATOM_BIT only");
+    // lhs and rhs flags, so we just have to clear the other flags to get our
+    // rope flags (Latin1 if both lhs and rhs are Latin1).
+    static_assert(JSString::ROPE_FLAGS == 0, "Rope flags must be 0");
     masm.and32(Imm32(JSString::LATIN1_CHARS_BIT), temp1);
-    masm.or32(Imm32(JSString::NON_ATOM_BIT), temp1);
     masm.store32(temp1, Address(output, JSString::offsetOfFlags()));
     masm.store32(temp2, Address(output, JSString::offsetOfLength()));
 
     // Store left and right nodes.
     masm.storePtr(lhs, Address(output, JSRope::offsetOfLeft()));
     masm.storePtr(rhs, Address(output, JSRope::offsetOfRight()));
     masm.ret();
 
@@ -8037,21 +7905,21 @@ JitCompartment::generateStringConcatStub
     masm.ret();
 
     masm.bind(&rightEmpty);
     masm.mov(lhs, output);
     masm.ret();
 
     masm.bind(&isFatInlineTwoByte);
     ConcatInlineString(masm, lhs, rhs, output, temp1, temp2, temp3,
-                       stringsCanBeInNursery, &failure, &failurePopTemps, true);
+                       &failure, &failurePopTemps, true);
 
     masm.bind(&isFatInlineLatin1);
     ConcatInlineString(masm, lhs, rhs, output, temp1, temp2, temp3,
-                       stringsCanBeInNursery, &failure, &failurePopTemps, false);
+                       &failure, &failurePopTemps, false);
 
     masm.bind(&failurePopTemps);
     masm.pop(temp2);
     masm.pop(temp1);
 
     masm.bind(&failure);
     masm.movePtr(ImmPtr(nullptr), output);
     masm.ret();
@@ -8242,17 +8110,17 @@ static const VMFunction CharCodeAtInfo =
 void
 CodeGenerator::visitCharCodeAt(LCharCodeAt* lir)
 {
     Register str = ToRegister(lir->str());
     Register index = ToRegister(lir->index());
     Register output = ToRegister(lir->output());
 
     OutOfLineCode* ool = oolCallVM(CharCodeAtInfo, lir, ArgList(str, index), StoreRegisterTo(output));
-    masm.loadStringChar(str, index, ToRegister(lir->temp()), output, ool->entry());
+    masm.loadStringChar(str, index, output, ool->entry());
     masm.bind(ool->rejoin());
 }
 
 typedef JSFlatString* (*StringFromCharCodeFn)(JSContext*, int32_t);
 static const VMFunction StringFromCharCodeInfo =
     FunctionInfo<StringFromCharCodeFn>(jit::StringFromCharCode, "StringFromCharCode");
 
 void
@@ -8310,17 +8178,17 @@ CodeGenerator::visitFromCodePoint(LFromC
         bailoutCmp32(Assembler::Above, codePoint, Imm32(unicode::NonBMPMax), snapshot);
 
         // Allocate a JSThinInlineString.
         {
             static_assert(JSThinInlineString::MAX_LENGTH_TWO_BYTE >= 2,
                           "JSThinInlineString can hold a supplementary code point");
 
             uint32_t flags = JSString::INIT_THIN_INLINE_FLAGS;
-            masm.newGCString(output, temp1, ool->entry(), gen->stringsCanBeInNursery());
+            masm.newGCString(output, temp1, ool->entry());
             masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
         }
 
         Label isSupplementary;
         masm.branch32(Assembler::AboveOrEqual, codePoint, Imm32(unicode::NonBMPMin),
                       &isSupplementary);
         {
             // Store length.
@@ -10073,17 +9941,17 @@ CodeGenerator::link(JSContext* cx, Compi
     MOZ_ASSERT_IF(snapshots_.listSize(), recovers_.size());
     if (recovers_.size())
         ionScript->copyRecovers(&recovers_);
     if (graph.numConstants()) {
         const Value* vp = graph.constantPool();
         ionScript->copyConstants(vp);
         for (size_t i = 0; i < graph.numConstants(); i++) {
             const Value& v = vp[i];
-            if ((v.isObject() || v.isString()) && IsInsideNursery(v.toGCThing())) {
+            if (v.isObject() && IsInsideNursery(&v.toObject())) {
                 cx->zone()->group()->storeBuffer().putWholeCell(script);
                 break;
             }
         }
     }
     if (patchableBackedges_.length() > 0)
         ionScript->copyPatchableBackedges(cx, code, patchableBackedges_.begin(), masm);
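The CodeGenerator.cpp hunks above all revolve around one generational-GC invariant: when a tenured cell (here the JSScript holding the constant pool) may end up referencing a nursery thing, that cell has to be recorded in the store buffer so the next minor GC can trace the edge. What follows is a minimal, self-contained sketch of that remembered-set idea under simplified assumptions; Cell and StoreBuffer here are illustrative stand-ins, not the real SpiderMonkey classes.

// Sketch only: a whole-cell remembered set for a two-space (nursery/tenured)
// heap. Not SpiderMonkey code.
#include <unordered_set>

struct Cell { bool inNursery = false; };

struct StoreBuffer {
    // Tenured cells that may contain edges into the nursery.
    std::unordered_set<Cell*> wholeCells;

    // Record |owner| after a write that may have stored a nursery pointer
    // into it; a later minor GC re-traces every remembered cell.
    void putWholeCell(Cell* owner) {
        if (!owner->inNursery)
            wholeCells.insert(owner);
    }
};

// Mirrors the check in the hunk above: remember the owner only when the
// written value actually lives in the nursery.
inline void recordEdge(StoreBuffer& sb, Cell* owner, const Cell* value) {
    if (value && value->inNursery)
        sb.putWholeCell(owner);
}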
 
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -159,25 +159,22 @@ class CodeGenerator final : public CodeG
     void visitGuardReceiverPolymorphic(LGuardReceiverPolymorphic* lir);
     void visitGuardUnboxedExpando(LGuardUnboxedExpando* lir);
     void visitLoadUnboxedExpando(LLoadUnboxedExpando* lir);
     void visitTypeBarrierV(LTypeBarrierV* lir);
     void visitTypeBarrierO(LTypeBarrierO* lir);
     void visitMonitorTypes(LMonitorTypes* lir);
     void emitPostWriteBarrier(const LAllocation* obj);
     void emitPostWriteBarrier(Register objreg);
-    void emitPostWriteBarrierS(Address address, Register prev, Register next);
-    template <class LPostBarrierType, MIRType nurseryType>
-    void visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode* ool);
+    template <class LPostBarrierType>
+    void visitPostWriteBarrierCommonO(LPostBarrierType* lir, OutOfLineCode* ool);
     template <class LPostBarrierType>
     void visitPostWriteBarrierCommonV(LPostBarrierType* lir, OutOfLineCode* ool);
     void visitPostWriteBarrierO(LPostWriteBarrierO* lir);
     void visitPostWriteElementBarrierO(LPostWriteElementBarrierO* lir);
-    void visitPostWriteBarrierS(LPostWriteBarrierS* lir);
-    void visitPostWriteElementBarrierS(LPostWriteElementBarrierS* lir);
     void visitPostWriteBarrierV(LPostWriteBarrierV* lir);
     void visitPostWriteElementBarrierV(LPostWriteElementBarrierV* lir);
     void visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier* ool);
     void visitOutOfLineCallPostWriteElementBarrier(OutOfLineCallPostWriteElementBarrier* ool);
     void visitCallNative(LCallNative* call);
     void emitCallInvokeFunction(LInstruction* call, Register callereg,
                                 bool isConstructing, bool ignoresReturnValue,
                                 uint32_t argc, uint32_t unusedStack);
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -178,45 +178,25 @@ CompileZone::addressOfFreeList(gc::Alloc
 
 const void*
 CompileZone::addressOfNurseryPosition()
 {
     return zone()->runtimeFromAnyThread()->gc.addressOfNurseryPosition();
 }
 
 const void*
-CompileZone::addressOfStringNurseryPosition()
-{
-    // Objects and strings share a nursery, for now at least.
-    return zone()->runtimeFromAnyThread()->gc.addressOfNurseryPosition();
-}
-
-const void*
 CompileZone::addressOfNurseryCurrentEnd()
 {
     return zone()->runtimeFromAnyThread()->gc.addressOfNurseryCurrentEnd();
 }
 
-const void*
-CompileZone::addressOfStringNurseryCurrentEnd()
-{
-    return zone()->runtimeFromAnyThread()->gc.addressOfStringNurseryCurrentEnd();
-}
-
-bool
-CompileZone::canNurseryAllocateStrings()
-{
-    return nurseryExists() &&
-        zone()->group()->nursery().canAllocateStrings() &&
-        zone()->allocNurseryStrings;
-}
-
 bool
 CompileZone::nurseryExists()
 {
+    MOZ_ASSERT(CurrentThreadCanAccessZone(zone()));
     return zone()->group()->nursery().exists();
 }
 
 void
 CompileZone::setMinorGCShouldCancelIonCompilations()
 {
     MOZ_ASSERT(CurrentThreadCanAccessZone(zone()));
     zone()->group()->storeBuffer().setShouldCancelIonCompilations();
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -71,22 +71,19 @@ class CompileZone
 #ifdef DEBUG
     const void* addressOfIonBailAfter();
 #endif
 
     const void* addressOfJSContext();
     const void* addressOfNeedsIncrementalBarrier();
     const void* addressOfFreeList(gc::AllocKind allocKind);
     const void* addressOfNurseryPosition();
-    const void* addressOfStringNurseryPosition();
     const void* addressOfNurseryCurrentEnd();
-    const void* addressOfStringNurseryCurrentEnd();
 
     bool nurseryExists();
-    bool canNurseryAllocateStrings();
     void setMinorGCShouldCancelIonCompilations();
 };
 
 class JitCompartment;
 
 class CompileCompartment
 {
     JSCompartment* compartment();
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -450,18 +450,16 @@ JitCompartment::initialize(JSContext* cx
     if (!stubCodes_)
         return false;
 
     if (!stubCodes_->init()) {
         ReportOutOfMemory(cx);
         return false;
     }
 
-    stringsCanBeInNursery = cx->nursery().canAllocateStrings();
-
     return true;
 }
 
 bool
 JitCompartment::ensureIonStubsExist(JSContext* cx)
 {
     if (!stringConcatStub_) {
         stringConcatStub_ = generateStringConcatStub(cx);
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -7388,24 +7388,19 @@ IonBuilder::loadStaticSlot(JSObject* sta
 
     return loadSlot(obj, slot, NumFixedSlots(staticObject), rvalType, barrier, types);
 }
 
 // Whether a write of the given value may need a post-write barrier for GC purposes.
 bool
 IonBuilder::needsPostBarrier(MDefinition* value)
 {
-    CompileZone* zone = compartment->zone();
-    if (!zone->nurseryExists())
+    if (!compartment->zone()->nurseryExists())
         return false;
-    if (value->mightBeType(MIRType::Object))
-        return true;
-    if (value->mightBeType(MIRType::String) && zone->canNurseryAllocateStrings())
-        return true;
-    return false;
+    return value->mightBeType(MIRType::Object);
 }
 
 AbortReasonOr<Ok>
 IonBuilder::setStaticName(JSObject* staticObject, PropertyName* name)
 {
     jsid id = NameToId(name);
 
     bool isGlobalLexical = staticObject->is<LexicalEnvironmentObject>() &&
@@ -11918,17 +11913,17 @@ IonBuilder::storeUnboxedValue(MDefinitio
 
       case JSVAL_TYPE_DOUBLE:
         store = MStoreUnboxedScalar::New(alloc(), elements, scaledOffset, value, Scalar::Float64,
                                          MStoreUnboxedScalar::DontTruncateInput,
                                          DoesNotRequireMemoryBarrier, elementsOffset);
         break;
 
       case JSVAL_TYPE_STRING:
-        store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, obj,
+        store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value,
                                          elementsOffset, preBarrier);
         break;
 
       case JSVAL_TYPE_OBJECT:
         MOZ_ASSERT(value->type() == MIRType::Object ||
                    value->type() == MIRType::Null ||
                    value->type() == MIRType::Value);
         MOZ_ASSERT(!value->mightBeType(MIRType::Undefined),
@@ -13631,19 +13626,19 @@ IonBuilder::setPropTryReferenceTypedObje
       case ReferenceTypeDescr::TYPE_OBJECT:
         // Note: We cannot necessarily tell at this point whether a post
         // barrier is needed, because the type policy may insert ToObjectOrNull
         // instructions later, and those may require a post barrier. Therefore,
         // defer the insertion of post barriers to the type policy.
         store = MStoreUnboxedObjectOrNull::New(alloc(), elements, scaledOffset, value, typedObj, adjustment);
         break;
       case ReferenceTypeDescr::TYPE_STRING:
-        // See previous comment. The StoreUnboxedString type policy may insert
-        // ToString instructions that require a post barrier.
-        store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, typedObj, adjustment);
+        // Strings are not nursery allocated, so these writes do not need post
+        // barriers.
+        store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, adjustment);
         break;
     }
 
     current->add(store);
     current->push(value);
 
     trackOptimizationSuccess();
     *emitted = true;
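For context, the restored IonBuilder::needsPostBarrier above reduces to a type test: after this backout only objects can be nursery allocated, so a write needs a post barrier only when the nursery exists and the stored value might be an object. A hedged sketch of that decision as a free function over simplified inputs (|valueMightBeObject| stands in for MDefinition::mightBeType(MIRType::Object)):

// Illustrative only; not the MIR/CompileZone API.
inline bool needsPostBarrierSketch(bool nurseryExists, bool valueMightBeObject)
{
    if (!nurseryExists)
        return false;              // no nursery, nothing to remember
    return valueMightBeObject;     // only objects can be nursery-allocated here
}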
--- a/js/src/jit/IonCacheIRCompiler.cpp
+++ b/js/src/jit/IonCacheIRCompiler.cpp
@@ -748,18 +748,18 @@ IonCacheIRCompiler::emitGuardSpecificAto
     if (!addFailurePath(&failure))
         return false;
 
     Label done;
     masm.branchPtr(Assembler::Equal, str, ImmGCPtr(atom), &done);
 
     // The pointers are not equal, so if the input string is also an atom it
     // must be a different string.
-    masm.branchTest32(Assembler::Zero, Address(str, JSString::offsetOfFlags()),
-                      Imm32(JSString::NON_ATOM_BIT), failure->label());
+    masm.branchTest32(Assembler::NonZero, Address(str, JSString::offsetOfFlags()),
+                      Imm32(JSString::ATOM_BIT), failure->label());
 
     // Check the length.
     masm.branch32(Assembler::NotEqual, Address(str, JSString::offsetOfLength()),
                   Imm32(atom->length()), failure->label());
 
     // We have a non-atomized string with the same length. Call a helper
     // function to do the comparison.
     LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
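The guard restored above leans on string atomization: atoms are interned, so two equal atoms are always pointer-identical, and a differing pointer plus "the input is also an atom" proves inequality without touching the characters. A minimal sketch of that fast path using an intern pool; the names (InternPool, equalsKnownAtom) are illustrative, not the SpiderMonkey API.

#include <string>
#include <unordered_set>

struct InternPool {
    std::unordered_set<std::string> pool;
    const std::string* intern(const std::string& s) {
        return &*pool.insert(s).first;   // one canonical copy per contents
    }
};

inline bool equalsKnownAtom(const std::string* input, bool inputIsAtom,
                            const std::string* atom)
{
    if (input == atom)
        return true;            // pointer match: same atom
    if (inputIsAtom)
        return false;           // two distinct atoms can never have equal chars
    if (input->size() != atom->size())
        return false;           // length check mirrors the jitted guard
    return *input == *atom;     // slow path: compare characters
}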
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -605,23 +605,16 @@ class JitCompartment
     MOZ_MUST_USE bool ensureIonStubsExist(JSContext* cx);
 
     void sweep(FreeOp* fop, JSCompartment* compartment);
 
     JitCode* stringConcatStubNoBarrier() const {
         return stringConcatStub_;
     }
 
-    void discardStubs() {
-        stringConcatStub_ = nullptr;
-        regExpMatcherStub_ = nullptr;
-        regExpSearcherStub_ = nullptr;
-        regExpTesterStub_ = nullptr;
-    }
-
     JitCode* regExpMatcherStubNoBarrier() const {
         return regExpMatcherStub_;
     }
 
     MOZ_MUST_USE bool ensureRegExpMatcherStubExists(JSContext* cx) {
         if (regExpMatcherStub_)
             return true;
         regExpMatcherStub_ = generateRegExpMatcherStub(cx);
@@ -646,18 +639,16 @@ class JitCompartment
     MOZ_MUST_USE bool ensureRegExpTesterStubExists(JSContext* cx) {
         if (regExpTesterStub_)
             return true;
         regExpTesterStub_ = generateRegExpTesterStub(cx);
         return regExpTesterStub_ != nullptr;
     }
 
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
-
-    bool stringsCanBeInNursery;
 };
 
 // Called from JSCompartment::discardJitCode().
 void InvalidateAll(FreeOp* fop, JS::Zone* zone);
 void FinishInvalidation(FreeOp* fop, JSScript* script);
 
 // On windows systems, really large frames need to be incrementally touched.
 // The following constant defines the minimum increment of the touch.
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -111,17 +111,17 @@ TryToUseImplicitInterruptCheck(MIRGraph&
             if (iter->isInterruptCheck()) {
                 if (!check) {
                     MOZ_ASSERT(*block == backedge->loopHeaderOfBackedge());
                     check = iter->toInterruptCheck();
                 }
                 continue;
             }
 
-            MOZ_ASSERT_IF(iter->isPostWriteBarrierO() || iter->isPostWriteBarrierV() || iter->isPostWriteBarrierS(),
+            MOZ_ASSERT_IF(iter->isPostWriteBarrierO() || iter->isPostWriteBarrierV(),
                           iter->safepoint());
 
             if (iter->safepoint())
                 return;
         }
         if (*block == backedge)
             break;
         block++;
@@ -1955,19 +1955,17 @@ void
 LIRGenerator::visitCharCodeAt(MCharCodeAt* ins)
 {
     MDefinition* str = ins->getOperand(0);
     MDefinition* idx = ins->getOperand(1);
 
     MOZ_ASSERT(str->type() == MIRType::String);
     MOZ_ASSERT(idx->type() == MIRType::Int32);
 
-    LCharCodeAt* lir = new(alloc()) LCharCodeAt(useRegister(str),
-                                                useRegister(idx),
-                                                temp(LDefinition::INT32));
+    LCharCodeAt* lir = new(alloc()) LCharCodeAt(useRegister(str), useRegister(idx));
     define(lir, ins);
     assignSafepoint(lir, ins);
 }
 
 void
 LIRGenerator::visitFromCharCode(MFromCharCode* ins)
 {
     MDefinition* code = ins->getOperand(0);
@@ -2849,42 +2847,31 @@ LIRGenerator::visitPostWriteBarrier(MPos
             new(alloc()) LPostWriteBarrierO(useConstantObject
                                             ? useOrConstant(ins->object())
                                             : useRegister(ins->object()),
                                             useRegister(ins->value()), tmp);
         add(lir, ins);
         assignSafepoint(lir, ins);
         break;
       }
-      case MIRType::String: {
-          LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
-          LPostWriteBarrierS* lir =
-            new(alloc()) LPostWriteBarrierS(useConstantObject
-                                            ? useOrConstant(ins->object())
-                                            : useRegister(ins->object()),
-                                            useRegister(ins->value()), tmp);
-        add(lir, ins);
-        assignSafepoint(lir, ins);
-        break;
-      }
       case MIRType::Value: {
         LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
         LPostWriteBarrierV* lir =
             new(alloc()) LPostWriteBarrierV(useConstantObject
                                             ? useOrConstant(ins->object())
                                             : useRegister(ins->object()),
                                             useBox(ins->value()),
                                             tmp);
         add(lir, ins);
         assignSafepoint(lir, ins);
         break;
       }
       default:
-        // Currently, only objects and strings can be in the nursery. Other
-        // instruction types cannot hold nursery pointers.
+        // Currently, only objects can be in the nursery. Other instruction
+        // types cannot hold nursery pointers.
         break;
     }
 }
 
 void
 LIRGenerator::visitPostWriteElementBarrier(MPostWriteElementBarrier* ins)
 {
     MOZ_ASSERT(ins->object()->type() == MIRType::Object);
@@ -2908,45 +2895,32 @@ LIRGenerator::visitPostWriteElementBarri
                                                    : useRegister(ins->object()),
                                                    useRegister(ins->value()),
                                                    useRegister(ins->index()),
                                                    tmp);
         add(lir, ins);
         assignSafepoint(lir, ins);
         break;
       }
-      case MIRType::String: {
-        LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
-        LPostWriteElementBarrierS* lir =
-            new(alloc()) LPostWriteElementBarrierS(useConstantObject
-                                                   ? useOrConstant(ins->object())
-                                                   : useRegister(ins->object()),
-                                                   useRegister(ins->value()),
-                                                   useRegister(ins->index()),
-                                                   tmp);
-        add(lir, ins);
-        assignSafepoint(lir, ins);
-        break;
-      }
       case MIRType::Value: {
         LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
         LPostWriteElementBarrierV* lir =
             new(alloc()) LPostWriteElementBarrierV(useConstantObject
                                                    ? useOrConstant(ins->object())
                                                    : useRegister(ins->object()),
                                                    useRegister(ins->index()),
                                                    useBox(ins->value()),
                                                    tmp);
         add(lir, ins);
         assignSafepoint(lir, ins);
         break;
       }
       default:
-        // Currently, only objects and strings can be in the nursery. Other
-        // instruction types cannot hold nursery pointers.
+        // Currently, only objects can be in the nursery. Other instruction
+        // types cannot hold nursery pointers.
         break;
     }
 }
 
 void
 LIRGenerator::visitArrayLength(MArrayLength* ins)
 {
     MOZ_ASSERT(ins->elements()->type() == MIRType::Elements);
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -9900,17 +9900,17 @@ class MFallibleStoreElement
     bool strict() const {
         return strict_;
     }
 
     ALLOW_CLONE(MFallibleStoreElement)
 };
 
 
-// Store an unboxed object or null pointer to an elements vector.
+// Store an unboxed object or null pointer to a vector.
 class MStoreUnboxedObjectOrNull
   : public MQuaternaryInstruction,
     public StoreUnboxedObjectOrNullPolicy::Data
 {
     int32_t offsetAdjustment_;
     bool preBarrier_;
 
     MStoreUnboxedObjectOrNull(MDefinition* elements, MDefinition* index,
@@ -9943,57 +9943,49 @@ class MStoreUnboxedObjectOrNull
     // For StoreUnboxedObjectOrNullPolicy.
     void setValue(MDefinition* def) {
         replaceOperand(2, def);
     }
 
     ALLOW_CLONE(MStoreUnboxedObjectOrNull)
 };
 
-// Store an unboxed string to an elements vector.
+// Store an unboxed string to a vector.
 class MStoreUnboxedString
-  : public MQuaternaryInstruction,
-    public StoreUnboxedStringPolicy::Data
+  : public MTernaryInstruction,
+    public MixPolicy<SingleObjectPolicy, ConvertToStringPolicy<2> >::Data
 {
     int32_t offsetAdjustment_;
     bool preBarrier_;
 
-    MStoreUnboxedString(MDefinition* elements, MDefinition* index,
-                        MDefinition* value, MDefinition* typedObj,
+    MStoreUnboxedString(MDefinition* elements, MDefinition* index, MDefinition* value,
                         int32_t offsetAdjustment = 0, bool preBarrier = true)
-      : MQuaternaryInstruction(classOpcode, elements, index, value, typedObj),
+      : MTernaryInstruction(classOpcode, elements, index, value),
         offsetAdjustment_(offsetAdjustment),
         preBarrier_(preBarrier)
     {
         MOZ_ASSERT(IsValidElementsType(elements, offsetAdjustment));
         MOZ_ASSERT(index->type() == MIRType::Int32);
-        MOZ_ASSERT(typedObj->type() == MIRType::Object);
     }
 
   public:
     INSTRUCTION_HEADER(StoreUnboxedString)
     TRIVIAL_NEW_WRAPPERS
-    NAMED_OPERANDS((0, elements), (1, index), (2, value), (3, typedObj));
+    NAMED_OPERANDS((0, elements), (1, index), (2, value))
 
     int32_t offsetAdjustment() const {
         return offsetAdjustment_;
     }
     bool preBarrier() const {
         return preBarrier_;
     }
     AliasSet getAliasSet() const override {
         return AliasSet::Store(AliasSet::UnboxedElement);
     }
 
-    // For StoreUnboxedStringPolicy, to replace the original output with the
-    // output of a post barrier (if one is needed.)
-    void setValue(MDefinition* def) {
-        replaceOperand(2, def);
-    }
-
     ALLOW_CLONE(MStoreUnboxedString)
 };
 
 // Passes through an object, after ensuring it is converted from an unboxed
 // object to a native representation.
 class MConvertUnboxedObjectToNative
   : public MUnaryInstruction,
     public SingleObjectPolicy::Data
--- a/js/src/jit/MIRGenerator.h
+++ b/js/src/jit/MIRGenerator.h
@@ -104,20 +104,16 @@ class MIRGenerator
         return !compilingWasm() && instrumentedProfiling();
     }
 
     bool isOptimizationTrackingEnabled() {
         return isProfilerInstrumentationEnabled() && !info().isAnalysis() &&
                !JitOptions.disableOptimizationTracking;
     }
 
-    bool stringsCanBeInNursery() const {
-        return stringsCanBeInNursery_;
-    }
-
     bool safeForMinorGC() const {
         return safeForMinorGC_;
     }
     void setNotSafeForMinorGC() {
         safeForMinorGC_ = false;
     }
 
     // Whether the active thread is trying to cancel this build.
@@ -197,17 +193,16 @@ class MIRGenerator
     // Keep track of whether frame arguments are modified during execution.
     // RegAlloc needs to know this as spilling values back to their register
     // slots is not compatible with that.
     bool modifiesFrameArguments_;
 
     bool instrumentedProfiling_;
     bool instrumentedProfilingIsCached_;
     bool safeForMinorGC_;
-    bool stringsCanBeInNursery_;
 
     void addAbortedPreliminaryGroup(ObjectGroup* group);
 
     uint32_t minWasmHeapLength_;
 
 #if defined(JS_ION_PERF)
     WasmPerfSpewer wasmPerfSpewer_;
 
--- a/js/src/jit/MIRGraph.cpp
+++ b/js/src/jit/MIRGraph.cpp
@@ -33,17 +33,16 @@ MIRGenerator::MIRGenerator(CompileCompar
     needsOverrecursedCheck_(false),
     needsStaticStackAlignment_(false),
     usesSimd_(false),
     cachedUsesSimd_(false),
     modifiesFrameArguments_(false),
     instrumentedProfiling_(false),
     instrumentedProfilingIsCached_(false),
     safeForMinorGC_(true),
-    stringsCanBeInNursery_(compartment ? compartment->zone()->canNurseryAllocateStrings() : false),
     minWasmHeapLength_(0),
     options(options),
     gs_(alloc)
 { }
 
 bool
 MIRGenerator::usesSimd()
 {
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -419,44 +419,42 @@ MacroAssembler::branchIfFalseBool(Regist
 void
 MacroAssembler::branchIfTrueBool(Register reg, Label* label)
 {
     // Note that C++ bool is only 1 byte, so ignore the higher-order bits.
     branchTest32(Assembler::NonZero, reg, Imm32(0xFF), label);
 }
 
 void
-MacroAssembler::branchIfRope(Register str, Register temp, Label* label)
+MacroAssembler::branchIfRope(Register str, Label* label)
 {
     Address flags(str, JSString::offsetOfFlags());
-    move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
-    and32(flags, temp);
-
-    branch32(Assembler::Equal, temp, Imm32(JSString::ROPE_FLAGS), label);
+    static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
+    branchTest32(Assembler::Zero, flags, Imm32(JSString::TYPE_FLAGS_MASK), label);
 }
 
 void
 MacroAssembler::branchIfRopeOrExternal(Register str, Register temp, Label* label)
 {
     Address flags(str, JSString::offsetOfFlags());
     move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
     and32(flags, temp);
 
-    branch32(Assembler::Equal, temp, Imm32(JSString::ROPE_FLAGS), label);
+    static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
+    branchTest32(Assembler::Zero, temp, temp, label);
+
     branch32(Assembler::Equal, temp, Imm32(JSString::EXTERNAL_FLAGS), label);
 }
 
 void
-MacroAssembler::branchIfNotRope(Register str, Register temp, Label* label)
+MacroAssembler::branchIfNotRope(Register str, Label* label)
 {
     Address flags(str, JSString::offsetOfFlags());
-    move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
-    and32(flags, temp);
-
-    branch32(Assembler::NotEqual, temp, Imm32(JSString::ROPE_FLAGS), label);
+    static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
+    branchTest32(Assembler::NonZero, flags, Imm32(JSString::TYPE_FLAGS_MASK), label);
 }
 
 void
 MacroAssembler::branchLatin1String(Register string, Label* label)
 {
     branchTest32(Assembler::NonZero, Address(string, JSString::offsetOfFlags()),
                  Imm32(JSString::LATIN1_CHARS_BIT), label);
 }
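The restored branchIfRope/branchIfNotRope above depend on the invariant asserted in the static_assert: a rope's type flags are all zero, so "is this a rope" collapses to a single test of the masked flag bits, with no scratch register or separate compare. A small sketch of that test in plain C++; the constants are placeholders, not the real JSString layout.

#include <cstdint>

constexpr uint32_t TYPE_FLAGS_MASK = 0xFF;   // assumption: low bits hold the type
constexpr uint32_t ROPE_FLAGS = 0;           // restored invariant: rope == 0

inline bool isRope(uint32_t flags)
{
    // Because ROPE_FLAGS is zero, "equals ROPE_FLAGS" is just a zero test of
    // the masked bits: one branchTest32 in the macro assembler.
    return (flags & TYPE_FLAGS_MASK) == ROPE_FLAGS;
}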
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -721,34 +721,36 @@ MacroAssembler::checkAllocatorState(Labe
 #endif
 
     // Don't execute the inline path if the compartment has an object metadata callback,
     // as the metadata to use for the object may vary between executions of the op.
     if (GetJitContext()->compartment->hasAllocationMetadataBuilder())
         jump(fail);
 }
 
+// Inline version of ShouldNurseryAllocate.
 bool
 MacroAssembler::shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap)
 {
     // Note that Ion elides barriers on writes to objects known to be in the
     // nursery, so any allocation that can be made into the nursery must be made
     // into the nursery, even if the nursery is disabled. At runtime these will
     // take the out-of-line path, which is required to insert a barrier for the
     // initializing writes.
     return IsNurseryAllocable(allocKind) && initialHeap != gc::TenuredHeap;
 }
 
 // Inline version of Nursery::allocateObject. If the object has dynamic slots,
 // this fills in the slots_ pointer.
 void
-MacroAssembler::nurseryAllocateObject(Register result, Register temp, gc::AllocKind allocKind,
-                                      size_t nDynamicSlots, Label* fail)
+MacroAssembler::nurseryAllocate(Register result, Register temp, gc::AllocKind allocKind,
+                                size_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail)
 {
     MOZ_ASSERT(IsNurseryAllocable(allocKind));
+    MOZ_ASSERT(initialHeap != gc::TenuredHeap);
 
     // We still need to allocate in the nursery, per the comment in
     // shouldNurseryAllocate; however, we need to insert into the
     // mallocedBuffers set, so bail to do the nursery allocation in the
     // interpreter.
     if (nDynamicSlots >= Nursery::MaxNurseryBufferSize / sizeof(Value)) {
         jump(fail);
         return;
@@ -858,20 +860,18 @@ MacroAssembler::callFreeStub(Register sl
 void
 MacroAssembler::allocateObject(Register result, Register temp, gc::AllocKind allocKind,
                                uint32_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail)
 {
     MOZ_ASSERT(gc::IsObjectAllocKind(allocKind));
 
     checkAllocatorState(fail);
 
-    if (shouldNurseryAllocate(allocKind, initialHeap)) {
-        MOZ_ASSERT(initialHeap == gc::DefaultHeap);
-        return nurseryAllocateObject(result, temp, allocKind, nDynamicSlots, fail);
-    }
+    if (shouldNurseryAllocate(allocKind, initialHeap))
+        return nurseryAllocate(result, temp, allocKind, nDynamicSlots, initialHeap, fail);
 
     if (!nDynamicSlots)
         return freeListAllocate(result, temp, allocKind, fail);
 
     callMallocStub(nDynamicSlots * sizeof(GCPtrValue), temp, fail);
 
     Label failAlloc;
     Label success;
@@ -921,92 +921,26 @@ MacroAssembler::createGCObject(Register 
 // simpler path.
 void
 MacroAssembler::allocateNonObject(Register result, Register temp, gc::AllocKind allocKind, Label* fail)
 {
     checkAllocatorState(fail);
     freeListAllocate(result, temp, allocKind, fail);
 }
 
-// Inline version of Nursery::allocateString.
 void
-MacroAssembler::nurseryAllocateString(Register result, Register temp, gc::AllocKind allocKind,
-                                      Label* fail)
+MacroAssembler::newGCString(Register result, Register temp, Label* fail)
 {
-    MOZ_ASSERT(IsNurseryAllocable(allocKind));
-
-    // No explicit check for nursery.isEnabled() is needed, as the comparison
-    // with the nursery's end will always fail in such cases.
-
-    CompileZone* zone = GetJitContext()->compartment->zone();
-    int thingSize = int(gc::Arena::thingSize(allocKind));
-    int totalSize = js::Nursery::stringHeaderSize() + thingSize;
-    MOZ_ASSERT(totalSize % gc::CellAlignBytes == 0);
-
-    // The nursery position (allocation pointer) and the nursery end are stored
-    // very close to each other. In practice, the zone will probably be close
-    // (within 32 bits) as well. If so, use relative offsets between them, to
-    // avoid multiple 64-bit immediate loads.
-    auto nurseryPosAddr = intptr_t(zone->addressOfStringNurseryPosition());
-    auto nurseryEndAddr = intptr_t(zone->addressOfStringNurseryCurrentEnd());
-    auto zoneAddr = intptr_t(zone);
-
-    intptr_t maxOffset = std::max(std::abs(nurseryPosAddr - zoneAddr),
-                                  std::abs(nurseryEndAddr - zoneAddr));
-    if (maxOffset < (1 << 31)) {
-        movePtr(ImmPtr(zone), temp); // temp holds the Zone pointer from here on.
-        loadPtr(Address(temp, nurseryPosAddr - zoneAddr), result);
-        addPtr(Imm32(totalSize), result); // result points past this allocation.
-        branchPtr(Assembler::Below, Address(temp, nurseryEndAddr - zoneAddr), result, fail);
-        storePtr(result, Address(temp, nurseryPosAddr - zoneAddr)); // Update position.
-        subPtr(Imm32(thingSize), result); // Point result at Cell data.
-        storePtr(temp, Address(result, -js::Nursery::stringHeaderSize())); // Store Zone*
-    } else {
-        // Otherwise, the zone is far from the nursery pointers. But the
-        // nursery pos/end pointers are still near each other.
-        movePtr(ImmPtr(zone->addressOfNurseryPosition()), temp);
-        loadPtr(Address(temp, 0), result);
-        addPtr(Imm32(totalSize), result);
-        branchPtr(Assembler::Below, Address(temp, nurseryEndAddr - nurseryPosAddr), result, fail);
-        storePtr(result, Address(temp, 0));
-        subPtr(Imm32(thingSize), result);
-        storePtr(ImmPtr(zone), Address(result, -js::Nursery::stringHeaderSize()));
-    }
-}
-
-// Inlined equivalent of gc::AllocateString, jumping to fail if nursery
-// allocation requested but unsuccessful.
-void
-MacroAssembler::allocateString(Register result, Register temp, gc::AllocKind allocKind,
-                               gc::InitialHeap initialHeap, Label* fail)
-{
-    MOZ_ASSERT(allocKind == gc::AllocKind::STRING || allocKind == gc::AllocKind::FAT_INLINE_STRING);
-
-    checkAllocatorState(fail);
-
-    if (shouldNurseryAllocate(allocKind, initialHeap)) {
-        MOZ_ASSERT(initialHeap == gc::DefaultHeap);
-        return nurseryAllocateString(result, temp, allocKind, fail);
-    }
-
-    freeListAllocate(result, temp, allocKind, fail);
+    allocateNonObject(result, temp, js::gc::AllocKind::STRING, fail);
 }
 
 void
-MacroAssembler::newGCString(Register result, Register temp, Label* fail, bool attemptNursery)
+MacroAssembler::newGCFatInlineString(Register result, Register temp, Label* fail)
 {
-    allocateString(result, temp, js::gc::AllocKind::STRING,
-                   attemptNursery ? gc::DefaultHeap : gc::TenuredHeap, fail);
-}
-
-void
-MacroAssembler::newGCFatInlineString(Register result, Register temp, Label* fail, bool attemptNursery)
-{
-    allocateString(result, temp, js::gc::AllocKind::FAT_INLINE_STRING,
-                   attemptNursery ? gc::DefaultHeap : gc::TenuredHeap, fail);
+    allocateNonObject(result, temp, js::gc::AllocKind::FAT_INLINE_STRING, fail);
 }
 
 void
 MacroAssembler::copySlotsFromTemplate(Register obj, const NativeObject* templateObj,
                                       uint32_t start, uint32_t end)
 {
     uint32_t nfixed = Min(templateObj->numFixedSlotsForCompilation(), end);
     for (unsigned i = start; i < nfixed; i++) {
@@ -1404,19 +1338,19 @@ MacroAssembler::compareStrings(JSOp op, 
     branchPtr(Assembler::NotEqual, left, right, &notPointerEqual);
     move32(Imm32(op == JSOP_EQ || op == JSOP_STRICTEQ), result);
     jump(&done);
 
     bind(&notPointerEqual);
 
     Label notAtom;
     // Optimize the equality operation to a pointer compare for two atoms.
-    Imm32 nonAtomBit(JSString::NON_ATOM_BIT);
-    branchTest32(Assembler::NonZero, Address(left, JSString::offsetOfFlags()), nonAtomBit, &notAtom);
-    branchTest32(Assembler::NonZero, Address(right, JSString::offsetOfFlags()), nonAtomBit, &notAtom);
+    Imm32 atomBit(JSString::ATOM_BIT);
+    branchTest32(Assembler::Zero, Address(left, JSString::offsetOfFlags()), atomBit, &notAtom);
+    branchTest32(Assembler::Zero, Address(right, JSString::offsetOfFlags()), atomBit, &notAtom);
 
     cmpPtrSet(JSOpToCondition(MCompare::Compare_String, op), left, right, result);
     jump(&done);
 
     bind(&notAtom);
     // Strings of different length can never be equal.
     loadStringLength(left, result);
     branch32(Assembler::Equal, Address(right, JSString::offsetOfLength()), result, fail);
@@ -1437,36 +1371,36 @@ MacroAssembler::loadStringChars(Register
 
     bind(&isInline);
     computeEffectiveAddress(Address(str, JSInlineString::offsetOfInlineStorage()), dest);
 
     bind(&done);
 }
 
 void
-MacroAssembler::loadStringChar(Register str, Register index, Register temp, Register output, Label* fail)
+MacroAssembler::loadStringChar(Register str, Register index, Register output, Label* fail)
 {
     MOZ_ASSERT(str != output);
     MOZ_ASSERT(index != output);
 
     movePtr(str, output);
 
     // This follows JSString::getChar.
     Label notRope;
-    branchIfNotRope(str, temp, &notRope);
+    branchIfNotRope(str, &notRope);
 
     // Load leftChild.
     loadPtr(Address(str, JSRope::offsetOfLeft()), output);
 
     // Check if the index is contained in the leftChild.
     // Todo: Handle index in the rightChild.
     branch32(Assembler::BelowOrEqual, Address(output, JSString::offsetOfLength()), index, fail);
 
     // If the left side is another rope, give up.
-    branchIfRope(output, temp, fail);
+    branchIfRope(output, fail);
 
     bind(&notRope);
 
     Label isLatin1, done;
     // We have to check the left/right side for ropes,
     // because a TwoByte rope might have a Latin1 child.
     branchLatin1String(output, &isLatin1);
 
@@ -3220,17 +3154,17 @@ MacroAssembler::emitPreBarrierFastPath(J
     bind(&nonZero);
 #endif
 
     // Load the chunk address in temp2.
     movePtr(ImmWord(~gc::ChunkMask), temp2);
     andPtr(temp1, temp2);
 
     // If the GC thing is in the nursery, we don't need to barrier it.
-    if (type == MIRType::Value || type == MIRType::Object || type == MIRType::String) {
+    if (type == MIRType::Value || type == MIRType::Object) {
         branch32(Assembler::Equal, Address(temp2, gc::ChunkLocationOffset),
                  Imm32(int32_t(gc::ChunkLocation::Nursery)), noBarrier);
     } else {
 #ifdef DEBUG
         Label isTenured;
         branch32(Assembler::NotEqual, Address(temp2, gc::ChunkLocationOffset),
                  Imm32(int32_t(gc::ChunkLocation::Nursery)), &isTenured);
         assumeUnreachable("JIT pre-barrier: unexpected nursery pointer");
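The nurseryAllocateString code removed above inlined a bump allocation: advance the nursery position pointer by the thing size and bail to a slow path when it passes the chunk end; after this backout, string allocation goes back through freeListAllocate instead. A minimal sketch of the bump-allocation shape under simplified assumptions (field names are illustrative, not the Nursery layout):

#include <cstddef>
#include <cstdint>

struct NurserySketch {
    uintptr_t position;     // next free byte
    uintptr_t currentEnd;   // end of the current chunk

    // Returns the cell address, or nullptr to signal "take the slow path"
    // (the jitted code jumps to |fail| instead).
    void* allocate(size_t thingSize) {
        uintptr_t newPos = position + thingSize;
        if (newPos > currentEnd)
            return nullptr;          // chunk exhausted
        void* cell = reinterpret_cast<void*>(position);
        position = newPos;           // bump the pointer
        return cell;
    }
};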
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -1060,32 +1060,27 @@ class MacroAssembler : public MacroAssem
     inline void branchPtr(Condition cond, const AbsoluteAddress& lhs, Register rhs, Label* label)
         DEFINED_ON(arm, arm64, mips_shared, x86, x64);
     inline void branchPtr(Condition cond, const AbsoluteAddress& lhs, ImmWord rhs, Label* label)
         DEFINED_ON(arm, arm64, mips_shared, x86, x64);
 
     inline void branchPtr(Condition cond, wasm::SymbolicAddress lhs, Register rhs, Label* label)
         DEFINED_ON(arm, arm64, mips_shared, x86, x64);
 
-    // Given a pointer to a GC Cell, retrieve the StoreBuffer pointer from its
-    // chunk trailer, or nullptr if it is in the tenured heap.
-    void loadStoreBuffer(Register ptr, Register buffer) PER_ARCH;
-
     template <typename T>
     inline CodeOffsetJump branchPtrWithPatch(Condition cond, Register lhs, T rhs, RepatchLabel* label) PER_SHARED_ARCH;
     template <typename T>
     inline CodeOffsetJump branchPtrWithPatch(Condition cond, Address lhs, T rhs, RepatchLabel* label) PER_SHARED_ARCH;
 
     void branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp, Label* label)
         DEFINED_ON(arm, arm64, mips_shared, x86, x64);
     void branchPtrInNurseryChunk(Condition cond, const Address& address, Register temp, Label* label)
         DEFINED_ON(x86);
+    void branchValueIsNurseryObject(Condition cond, const Address& address, Register temp, Label* label) PER_ARCH;
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label* label) PER_ARCH;
-    void branchValueIsNurseryCell(Condition cond, const Address& address, Register temp, Label* label) PER_ARCH;
-    void branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp, Label* label) PER_ARCH;
 
     // This function compares a Value (lhs) which is having a private pointer
     // boxed inside a js::Value, with a raw pointer (rhs).
     inline void branchPrivatePtr(Condition cond, const Address& lhs, Register rhs, Label* label) PER_ARCH;
 
     inline void branchFloat(DoubleCondition cond, FloatRegister lhs, FloatRegister rhs,
                             Label* label) PER_SHARED_ARCH;
 
@@ -1144,20 +1139,20 @@ class MacroAssembler : public MacroAssem
 
     // Branches to |label| if |reg| is false. |reg| should be a C++ bool.
     template <class L>
     inline void branchIfFalseBool(Register reg, L label);
 
     // Branches to |label| if |reg| is true. |reg| should be a C++ bool.
     inline void branchIfTrueBool(Register reg, Label* label);
 
-    inline void branchIfRope(Register str, Register temp, Label* label);
+    inline void branchIfRope(Register str, Label* label);
     inline void branchIfRopeOrExternal(Register str, Register temp, Label* label);
 
-    inline void branchIfNotRope(Register str, Register temp, Label* label);
+    inline void branchIfNotRope(Register str, Label* label);
 
     inline void branchLatin1String(Register string, Label* label);
     inline void branchTwoByteString(Register string, Label* label);
 
     inline void branchIfFunctionHasNoScript(Register fun, Label* label);
     inline void branchIfInterpreted(Register fun, Label* label);
 
     inline void branchFunctionKind(Condition cond, JSFunction::FunctionKind kind, Register fun,
@@ -1294,17 +1289,17 @@ class MacroAssembler : public MacroAssem
 
     template <typename T, typename S, typename L>
     inline void branchPtrImpl(Condition cond, const T& lhs, const S& rhs, L label)
         DEFINED_ON(x86_shared);
 
     void branchPtrInNurseryChunkImpl(Condition cond, Register ptr, Label* label)
         DEFINED_ON(x86);
     template <typename T>
-    void branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp, Label* label)
+    void branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp, Label* label)
         DEFINED_ON(arm64, mips64, x64);
 
     template <typename T>
     inline void branchTestUndefinedImpl(Condition cond, const T& t, Label* label)
         DEFINED_ON(arm, arm64, x86_shared);
     template <typename T>
     inline void branchTestInt32Impl(Condition cond, const T& t, Label* label)
         DEFINED_ON(arm, arm64, x86_shared);
@@ -1774,17 +1769,17 @@ class MacroAssembler : public MacroAssem
         loadPtr(Address(dest, ObjectGroup::offsetOfProto()), dest);
     }
 
     void loadStringLength(Register str, Register dest) {
         load32(Address(str, JSString::offsetOfLength()), dest);
     }
 
     void loadStringChars(Register str, Register dest);
-    void loadStringChar(Register str, Register index, Register temp, Register output, Label* fail);
+    void loadStringChar(Register str, Register index, Register output, Label* fail);
 
     void loadStringIndexValue(Register str, Register dest, Label* fail);
 
     void loadJSContext(Register dest);
     void loadJitActivation(Register dest) {
         loadJSContext(dest);
         loadPtr(Address(dest, offsetof(JSContext, activation_)), dest);
     }
@@ -2015,25 +2010,21 @@ class MacroAssembler : public MacroAssem
 
         bind(&done);
     }
 
     // Inline allocation.
   private:
     void checkAllocatorState(Label* fail);
     bool shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap);
-    void nurseryAllocateObject(Register result, Register temp, gc::AllocKind allocKind,
-                               size_t nDynamicSlots, Label* fail);
+    void nurseryAllocate(Register result, Register temp, gc::AllocKind allocKind,
+                         size_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail);
     void freeListAllocate(Register result, Register temp, gc::AllocKind allocKind, Label* fail);
     void allocateObject(Register result, Register temp, gc::AllocKind allocKind,
                         uint32_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail);
-    void nurseryAllocateString(Register result, Register temp, gc::AllocKind allocKind,
-                               Label* fail);
-    void allocateString(Register result, Register temp, gc::AllocKind allocKind,
-                        gc::InitialHeap initialHeap, Label* fail);
     void allocateNonObject(Register result, Register temp, gc::AllocKind allocKind, Label* fail);
     void copySlotsFromTemplate(Register obj, const NativeObject* templateObj,
                                uint32_t start, uint32_t end);
     void fillSlotsWithConstantValue(Address addr, Register temp, uint32_t start, uint32_t end,
                                     const Value& v);
     void fillSlotsWithUndefined(Address addr, Register temp, uint32_t start, uint32_t end);
     void fillSlotsWithUninitialized(Address addr, Register temp, uint32_t start, uint32_t end);
 
@@ -2049,18 +2040,18 @@ class MacroAssembler : public MacroAssem
     void initGCThing(Register obj, Register temp, JSObject* templateObj,
                      bool initContents = true, bool convertDoubleElements = false);
     void initTypedArraySlots(Register obj, Register temp, Register lengthReg,
                              LiveRegisterSet liveRegs, Label* fail,
                              TypedArrayObject* templateObj, TypedArrayLength lengthKind);
 
     void initUnboxedObjectContents(Register object, UnboxedPlainObject* templateObject);
 
-    void newGCString(Register result, Register temp, Label* fail, bool attemptNursery);
-    void newGCFatInlineString(Register result, Register temp, Label* fail, bool attemptNursery);
+    void newGCString(Register result, Register temp, Label* fail);
+    void newGCFatInlineString(Register result, Register temp, Label* fail);
 
     // Compares two strings for equality based on the JSOP.
     // This checks for identical pointers, atoms and length and fails for everything else.
     void compareStrings(JSOp op, Register left, Register right, Register result,
                         Label* fail);
 
     // Result of the typeof operation. Falls back to slow-path for proxies.
     void typeOfObject(Register objReg, Register scratch, Label* slow,
--- a/js/src/jit/TypePolicy.cpp
+++ b/js/src/jit/TypePolicy.cpp
@@ -1092,43 +1092,16 @@ StoreUnboxedObjectOrNullPolicy::adjustIn
 
     MInstruction* barrier = MPostWriteBarrier::New(alloc, store->typedObj(), replace);
     store->block()->insertBefore(store, barrier);
 
     return true;
 }
 
 bool
-StoreUnboxedStringPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
-{
-    if (!ObjectPolicy<0>::staticAdjustInputs(alloc, ins))
-        return false;
-
-    // Change the value input to a ToString instruction if it might be
-    // a non-null primitive.
-    if (!ConvertToStringPolicy<2>::staticAdjustInputs(alloc, ins))
-        return false;
-
-    if (!ObjectPolicy<3>::staticAdjustInputs(alloc, ins))
-        return false;
-
-    // Insert a post barrier for the instruction's object and whatever its new
-    // value is.
-    MStoreUnboxedString* store = ins->toStoreUnboxedString();
-
-    MOZ_ASSERT(store->typedObj()->type() == MIRType::Object);
-
-    MDefinition* value = store->value();
-    MOZ_ASSERT(value->type() == MIRType::String);
-    MInstruction* barrier = MPostWriteBarrier::New(alloc, store->typedObj(), value);
-    store->block()->insertBefore(store, barrier);
-    return true;
-}
-
-bool
 ClampPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
 {
     MDefinition* in = ins->toClampToUint8()->input();
 
     switch (in->type()) {
       case MIRType::Int32:
       case MIRType::Double:
       case MIRType::Value:
@@ -1233,17 +1206,16 @@ FilterTypeSetPolicy::adjustInputs(TempAl
     _(PowPolicy)                                \
     _(SimdAllPolicy)                            \
     _(SimdSelectPolicy)                         \
     _(SimdShufflePolicy)                        \
     _(StoreTypedArrayElementStaticPolicy)       \
     _(StoreTypedArrayHolePolicy)                \
     _(StoreUnboxedScalarPolicy)                 \
     _(StoreUnboxedObjectOrNullPolicy)           \
-    _(StoreUnboxedStringPolicy)                 \
     _(TestPolicy)                               \
     _(AllDoublePolicy)                          \
     _(ToDoublePolicy)                           \
     _(ToInt32Policy)                            \
     _(ToStringPolicy)                           \
     _(TypeBarrierPolicy)
 
 #define TEMPLATE_TYPE_POLICY_LIST(_)                                    \
--- a/js/src/jit/TypePolicy.h
+++ b/js/src/jit/TypePolicy.h
@@ -487,23 +487,16 @@ class StoreTypedArrayElementStaticPolicy
 
 class StoreUnboxedObjectOrNullPolicy final : public TypePolicy
 {
   public:
     EMPTY_DATA_;
     virtual MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) override;
 };
 
-class StoreUnboxedStringPolicy final : public TypePolicy
-{
-  public:
-    EMPTY_DATA_;
-    virtual MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) override;
-};
-
 // Accepts integers and doubles. Everything else is boxed.
 class ClampPolicy final : public TypePolicy
 {
   public:
     EMPTY_DATA_;
     virtual MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) override;
 };
 
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1330,26 +1330,26 @@ AutoDetectInvalidation::setReturnOverrid
 void
 AssertValidObjectPtr(JSContext* cx, JSObject* obj)
 {
     AutoUnsafeCallWithABI unsafe;
 #ifdef DEBUG
     // Check what we can, so that we'll hopefully assert/crash if we get a
     // bogus object (pointer).
     MOZ_ASSERT(obj->compartment() == cx->compartment());
-    MOZ_ASSERT(obj->zoneFromAnyThread() == cx->zone());
     MOZ_ASSERT(obj->runtimeFromActiveCooperatingThread() == cx->runtime());
 
     MOZ_ASSERT_IF(!obj->hasLazyGroup() && obj->maybeShape(),
                   obj->group()->clasp() == obj->maybeShape()->getObjectClass());
 
     if (obj->isTenured()) {
         MOZ_ASSERT(obj->isAligned());
         gc::AllocKind kind = obj->asTenured().getAllocKind();
         MOZ_ASSERT(gc::IsObjectAllocKind(kind));
+        MOZ_ASSERT(obj->asTenured().zone() == cx->zone());
     }
 #endif
 }
 
 void
 AssertValidObjectOrNullPtr(JSContext* cx, JSObject* obj)
 {
     AutoUnsafeCallWithABI unsafe;
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -4810,88 +4810,56 @@ MacroAssembler::moveValue(const Value& s
     else
         ma_mov(Imm32(src.toNunboxPayload()), dest.payloadReg());
 }
 
 // ===============================================================
 // Branch functions
 
 void
-MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
-{
-    ma_lsr(Imm32(gc::ChunkShift), ptr, buffer);
-    ma_lsl(Imm32(gc::ChunkShift), buffer, buffer);
-    load32(Address(buffer, gc::ChunkStoreBufferOffset), buffer);
-}
-
-void
 MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
                                         Label* label)
 {
-    Maybe<SecondScratchRegisterScope> scratch2;
-    if (temp == Register::Invalid()) {
-        scratch2.emplace(*this);
-        temp = scratch2.ref();
-    }
+    SecondScratchRegisterScope scratch2(*this);
 
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
-
-    ma_lsr(Imm32(gc::ChunkShift), ptr, temp);
-    ma_lsl(Imm32(gc::ChunkShift), temp, temp);
-    load32(Address(temp, gc::ChunkLocationOffset), temp);
-    branch32(cond, temp, Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address,
-                                         Register temp, Label* label)
+    MOZ_ASSERT(ptr != scratch2);
+
+    ma_lsr(Imm32(gc::ChunkShift), ptr, scratch2);
+    ma_lsl(Imm32(gc::ChunkShift), scratch2, scratch2);
+    load32(Address(scratch2, gc::ChunkLocationOffset), scratch2);
+    branch32(cond, scratch2, Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
+}
+
+void
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address,
+                                           Register temp, Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    Label done, checkAddress;
-
-    Register tag = temp;
-    extractTag(address, tag);
-    branchTestObject(Assembler::Equal, tag, &checkAddress);
-    branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    loadPtr(ToPayload(address), temp);
-    SecondScratchRegisterScope scratch2(*this);
-    branchPtrInNurseryChunk(cond, temp, scratch2, label);
-
-    bind(&done);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
-                                         Register temp, Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    Label done, checkAddress;
-
-    branchTestObject(Assembler::Equal, value.typeReg(), &checkAddress);
-    branchTestString(Assembler::NotEqual, value.typeReg(),
-                     cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
+
+    Label done;
+    branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
+
+    loadPtr(address, temp);
+    branchPtrInNurseryChunk(cond, temp, InvalidReg, label);
 
     bind(&done);
 }
 
 void
 MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
                                            Register temp, Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
+
     Label done;
-
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
-    branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
+
+    branchPtrInNurseryChunk(cond, value.payloadReg(), InvalidReg, label);
 
     bind(&done);
 }
 
 void
 MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
                                 const Value& rhs, Label* label)
 {
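
    [Editor's note: a minimal sketch of the restored branchValueIsNurseryObject
    logic in the ARM hunk above. FakeValue, ValueIsNurseryObject and BranchTaken
    are assumed stand-ins; the point is the short-circuit: a value that is not
    an object can never be a nursery object, so for cond == Equal the non-object
    case falls through to "false", and for cond == NotEqual it is already
    "true" and can jump straight to the label.]

    struct FakeValue {
        bool isObject;                    // what branchTestObject checks
        bool payloadIsInNurseryChunk;     // what branchPtrInNurseryChunk checks
    };

    inline bool ValueIsNurseryObject(const FakeValue& v) {
        if (!v.isObject)
            return false;
        return v.payloadIsInNurseryChunk;
    }

    inline bool BranchTaken(bool equalCond, const FakeValue& v) {
        // equalCond == true models Assembler::Equal ("branch if it IS a
        // nursery object"); false models Assembler::NotEqual.
        return equalCond ? ValueIsNurseryObject(v) : !ValueIsNurseryObject(v);
    }
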
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -737,79 +737,49 @@ MacroAssembler::moveValue(const Value& s
     BufferOffset load = movePatchablePtr(ImmPtr(src.bitsAsPunboxPointer()), dest.valueReg());
     writeDataRelocation(src, load);
 }
 
 // ===============================================================
 // Branch functions
 
 void
-MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
-{
-    if (ptr != buffer)
-        movePtr(ptr, buffer);
-    orPtr(Imm32(gc::ChunkMask), buffer);
-    loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
-}
-
-void
 MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
                                         Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(ptr != ScratchReg && ptr != ScratchReg2); // Both may be used internally.
     MOZ_ASSERT(temp != ScratchReg && temp != ScratchReg2);
 
     movePtr(ptr, temp);
     orPtr(Imm32(gc::ChunkMask), temp);
     branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
              Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
 }
 
 void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
-                                         Label* label)
-{
-    branchValueIsNurseryCellImpl(cond, address, temp, label);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
-                                         Label* label)
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
+                                           Label* label)
 {
-    branchValueIsNurseryCellImpl(cond, value, temp, label);
-}
-
-template <typename T>
-void
-MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
-                                             Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    MOZ_ASSERT(temp != ScratchReg && temp != ScratchReg2); // Both may be used internally.
-
-    Label done, checkAddress;
-    bool testNursery = (cond == Assembler::Equal);
-    branchTestObject(Assembler::Equal, value, &checkAddress);
-    branchTestString(Assembler::NotEqual, value, testNursery ? &done : label);
-    bind(&checkAddress);
-
-    unboxNonDouble(value, temp);
-    orPtr(Imm32(gc::ChunkMask), temp);
-    branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
-             Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
-
-    bind(&done);
+    branchValueIsNurseryObjectImpl(cond, address, temp, label);
 }
 
 void
 MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
                                            Label* label)
 {
+    branchValueIsNurseryObjectImpl(cond, value, temp, label);
+}
+
+template <typename T>
+void
+MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
+                                               Label* label)
+{
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(temp != ScratchReg && temp != ScratchReg2); // Both may be used internally.
 
     Label done;
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
 
     extractObject(value, temp);
     orPtr(Imm32(gc::ChunkMask), temp);
--- a/js/src/jit/mips32/MacroAssembler-mips32.cpp
+++ b/js/src/jit/mips32/MacroAssembler-mips32.cpp
@@ -2364,43 +2364,39 @@ MacroAssembler::moveValue(const Value& s
     else
         move32(Imm32(src.toNunboxPayload()), dest.payloadReg());
 }
 
 // ===============================================================
 // Branch functions
 
 void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address,
-                                         Register temp, Label* label)
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address,
+                                           Register temp, Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    Label done, checkAddress;
 
-    branchTestObject(Assembler::Equal, address, &checkAddress);
-    branchTestString(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
+    Label done;
 
-    bind(&checkAddress);
+    branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
     loadPtr(address, temp);
     branchPtrInNurseryChunk(cond, temp, InvalidReg, label);
 
     bind(&done);
 }
 
 void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
-                                         Register temp, Label* label)
+MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
+                                           Register temp, Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
 
-    Label done, checkAddress;
-    branchTestObject(Assembler::Equal, value, &checkAddress);
-    branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
+    Label done;
 
-    bind(&checkAddress);
+    branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
 
 void
 MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
                                 const Value& rhs, Label* label)
--- a/js/src/jit/mips64/MacroAssembler-mips64.cpp
+++ b/js/src/jit/mips64/MacroAssembler-mips64.cpp
@@ -2489,67 +2489,48 @@ MacroAssembler::moveValue(const Value& s
     writeDataRelocation(src);
     movWithPatch(ImmWord(src.asRawBits()), dest.valueReg());
 }
 
 // ===============================================================
 // Branch functions
 
 void
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
+                                           Label* label)
+{
+    branchValueIsNurseryObjectImpl(cond, address, temp, label);
+}
+
+void
 MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
                                            Register temp, Label* label)
 {
+    branchValueIsNurseryObjectImpl(cond, value, temp, label);
+}
+
+template <typename T>
+void
+MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
+                                               Label* label)
+{
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
 
     Label done;
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
 
     extractObject(value, SecondScratchReg);
     orPtr(Imm32(gc::ChunkMask), SecondScratchReg);
     branch32(cond, Address(SecondScratchReg, gc::ChunkLocationOffsetFromLastByte),
              Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
 
     bind(&done);
 }
 
 void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
-                                         Label* label)
-{
-    branchValueIsNurseryCellImpl(cond, address, temp, label);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
-                                         Register temp, Label* label)
-{
-    branchValueIsNurseryCellImpl(cond, value, temp, label);
-}
-
-template <typename T>
-void
-MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
-                                             Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-
-    Label done, checkAddress;
-    branchTestObject(Assembler::Equal, value, &checkAddress);
-    branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    extractCell(value, SecondScratchReg);
-    orPtr(Imm32(gc::ChunkMask), SecondScratchReg);
-    branch32(cond, Address(SecondScratchReg, gc::ChunkLocationOffsetFromLastByte),
-             Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
-
-    bind(&done);
-}
-
-void
 MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
                                 const Value& rhs, Label* label)
 {
     MOZ_ASSERT(cond == Equal || cond == NotEqual);
     ScratchRegisterScope scratch(*this);
     MOZ_ASSERT(lhs.valueReg() != scratch);
     moveValue(rhs, ValueOperand(scratch));
     ma_b(lhs.valueReg(), scratch, label, cond);
--- a/js/src/jit/mips64/MacroAssembler-mips64.h
+++ b/js/src/jit/mips64/MacroAssembler-mips64.h
@@ -400,23 +400,16 @@ class MacroAssemblerMIPS64Compat : publi
     // Extended unboxing API. If the payload is already in a register, returns
     // that register. Otherwise, provides a move to the given scratch register,
     // and returns that.
     Register extractObject(const Address& address, Register scratch);
     Register extractObject(const ValueOperand& value, Register scratch) {
         unboxObject(value, scratch);
         return scratch;
     }
-    Register extractCell(const Address& address, Register scratch) {
-        return extractObject(address, scratch);
-    }
-    Register extractCell(const ValueOperand& value, Register scratch) {
-        unboxNonDouble(value, scratch);
-        return scratch;
-    }
     Register extractInt32(const ValueOperand& value, Register scratch) {
         unboxInt32(value, scratch);
         return scratch;
     }
     Register extractBoolean(const ValueOperand& value, Register scratch) {
         unboxBoolean(value, scratch);
         return scratch;
     }
--- a/js/src/jit/shared/CodeGenerator-shared.h
+++ b/js/src/jit/shared/CodeGenerator-shared.h
@@ -142,20 +142,16 @@ class CodeGeneratorShared : public LElem
 
     JSScript** nativeToBytecodeScriptList_;
     uint32_t nativeToBytecodeScriptListLength_;
 
     bool isProfilerInstrumentationEnabled() {
         return gen->isProfilerInstrumentationEnabled();
     }
 
-    bool stringsCanBeInNursery() const {
-        return gen->stringsCanBeInNursery();
-    }
-
     js::Vector<NativeToTrackedOptimizations, 0, SystemAllocPolicy> trackedOptimizations_;
     uint8_t* trackedOptimizationsMap_;
     uint32_t trackedOptimizationsMapSize_;
     uint32_t trackedOptimizationsRegionTableOffset_;
     uint32_t trackedOptimizationsTypesTableOffset_;
     uint32_t trackedOptimizationsAttemptsTableOffset_;
 
     bool isOptimizationTrackingEnabled() {
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -4061,36 +4061,32 @@ class LConcat : public LInstructionHelpe
         return this->getTemp(3);
     }
     const LDefinition* temp5() {
         return this->getTemp(4);
     }
 };
 
 // Get uint16 character code from a string.
-class LCharCodeAt : public LInstructionHelper<1, 2, 1>
+class LCharCodeAt : public LInstructionHelper<1, 2, 0>
 {
   public:
     LIR_HEADER(CharCodeAt)
 
-    LCharCodeAt(const LAllocation& str, const LAllocation& index, const LDefinition& temp) {
+    LCharCodeAt(const LAllocation& str, const LAllocation& index) {
         setOperand(0, str);
         setOperand(1, index);
-        setTemp(0, temp);
     }
 
     const LAllocation* str() {
         return this->getOperand(0);
     }
     const LAllocation* index() {
         return this->getOperand(1);
     }
-    const LDefinition* temp() {
-        return getTemp(0);
-    }
 };
 
 // Convert uint16 character code to a string.
 class LFromCharCode : public LInstructionHelper<1, 1, 0>
 {
   public:
     LIR_HEADER(FromCharCode)
 
@@ -7741,43 +7737,16 @@ class LPostWriteBarrierO : public LInstr
     const LAllocation* value() {
         return getOperand(1);
     }
     const LDefinition* temp() {
         return getTemp(0);
     }
 };
 
-// Generational write barrier used when writing a string to an object.
-class LPostWriteBarrierS : public LInstructionHelper<0, 2, 1>
-{
-  public:
-    LIR_HEADER(PostWriteBarrierS)
-
-    LPostWriteBarrierS(const LAllocation& obj, const LAllocation& value,
-                       const LDefinition& temp) {
-        setOperand(0, obj);
-        setOperand(1, value);
-        setTemp(0, temp);
-    }
-
-    const MPostWriteBarrier* mir() const {
-        return mir_->toPostWriteBarrier();
-    }
-    const LAllocation* object() {
-        return getOperand(0);
-    }
-    const LAllocation* value() {
-        return getOperand(1);
-    }
-    const LDefinition* temp() {
-        return getTemp(0);
-    }
-};
-
 // Generational write barrier used when writing a value to another object.
 class LPostWriteBarrierV : public LInstructionHelper<0, 1 + BOX_PIECES, 1>
 {
   public:
     LIR_HEADER(PostWriteBarrierV)
 
     LPostWriteBarrierV(const LAllocation& obj, const LBoxAllocation& value,
                        const LDefinition& temp) {
@@ -7830,52 +7799,16 @@ class LPostWriteElementBarrierO : public
         return getOperand(2);
     }
 
     const LDefinition* temp() {
         return getTemp(0);
     }
 };
 
-// Generational write barrier used when writing a string to an object's
-// elements.
-class LPostWriteElementBarrierS : public LInstructionHelper<0, 3, 1>
-{
-  public:
-    LIR_HEADER(PostWriteElementBarrierS)
-
-    LPostWriteElementBarrierS(const LAllocation& obj, const LAllocation& value,
-                              const LAllocation& index, const LDefinition& temp) {
-        setOperand(0, obj);
-        setOperand(1, value);
-        setOperand(2, index);
-        setTemp(0, temp);
-    }
-
-    const MPostWriteElementBarrier* mir() const {
-        return mir_->toPostWriteElementBarrier();
-    }
-
-    const LAllocation* object() {
-        return getOperand(0);
-    }
-
-    const LAllocation* value() {
-        return getOperand(1);
-    }
-
-    const LAllocation* index() {
-        return getOperand(2);
-    }
-
-    const LDefinition* temp() {
-        return getTemp(0);
-    }
-};
-
 // Generational write barrier used when writing a value to another object's
 // elements.
 class LPostWriteElementBarrierV : public LInstructionHelper<0, 2 + BOX_PIECES, 1>
 {
   public:
     LIR_HEADER(PostWriteElementBarrierV)
 
     LPostWriteElementBarrierV(const LAllocation& obj, const LAllocation& index,
--- a/js/src/jit/shared/LOpcodes-shared.h
+++ b/js/src/jit/shared/LOpcodes-shared.h
@@ -263,20 +263,18 @@
     _(GuardObjectIdentity)          \
     _(GuardClass)                   \
     _(GuardUnboxedExpando)          \
     _(LoadUnboxedExpando)           \
     _(TypeBarrierV)                 \
     _(TypeBarrierO)                 \
     _(MonitorTypes)                 \
     _(PostWriteBarrierO)            \
-    _(PostWriteBarrierS)            \
     _(PostWriteBarrierV)            \
     _(PostWriteElementBarrierO)     \
-    _(PostWriteElementBarrierS)     \
     _(PostWriteElementBarrierV)     \
     _(InitializedLength)            \
     _(SetInitializedLength)         \
     _(BoundsCheck)                  \
     _(BoundsCheckRange)             \
     _(BoundsCheckLower)             \
     _(LoadElementV)                 \
     _(LoadElementT)                 \
--- a/js/src/jit/x64/MacroAssembler-x64.cpp
+++ b/js/src/jit/x64/MacroAssembler-x64.cpp
@@ -612,94 +612,63 @@ MacroAssembler::moveValue(const Value& s
     movWithPatch(ImmWord(src.asRawBits()), dest.valueReg());
     writeDataRelocation(src);
 }
 
 // ===============================================================
 // Branch functions
 
 void
-MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
-{
-    if (ptr != buffer)
-        movePtr(ptr, buffer);
-    orPtr(Imm32(gc::ChunkMask), buffer);
-    loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
-}
-
-void
 MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp, Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
 
     ScratchRegisterScope scratch(*this);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(ptr != scratch);
 
     movePtr(ptr, scratch);
     orPtr(Imm32(gc::ChunkMask), scratch);
     branch32(cond, Address(scratch, gc::ChunkLocationOffsetFromLastByte),
              Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
 }
 
 void
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
+                                           Label* label)
+{
+    branchValueIsNurseryObjectImpl(cond, address, temp, label);
+}
+
+void
 MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
                                            Label* label)
 {
+    branchValueIsNurseryObjectImpl(cond, value, temp, label);
+}
+
+template <typename T>
+void
+MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
+                                               Label* label)
+{
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(temp != InvalidReg);
 
     Label done;
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
 
     extractObject(value, temp);
     orPtr(Imm32(gc::ChunkMask), temp);
     branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
              Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
 
     bind(&done);
 }
 
-template <typename T>
-void
-MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
-                                             Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    MOZ_ASSERT(temp != InvalidReg);
-    Label done, checkAddress;
-
-    Register tag = temp;
-    splitTag(value, tag);
-    branchTestObject(Assembler::Equal, tag, &checkAddress);
-    branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    unboxNonDouble(value, temp);
-    orPtr(Imm32(gc::ChunkMask), temp);
-    branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
-             Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
-
-    bind(&done);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
-                                         Label* label)
-{
-    branchValueIsNurseryCellImpl(cond, address, temp, label);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
-                                         Label* label)
-{
-    branchValueIsNurseryCellImpl(cond, value, temp, label);
-}
-
 void
 MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
                                 const Value& rhs, Label* label)
 {
     MOZ_ASSERT(cond == Equal || cond == NotEqual);
     ScratchRegisterScope scratch(*this);
     MOZ_ASSERT(lhs.valueReg() != scratch);
     moveValue(rhs, ValueOperand(scratch));
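
    [Editor's note: an illustration of the chunk test the x64/arm64 code above
    emits. The chunk size and the trailer offset used here are assumptions, not
    the real gc/Heap.h values. OR-ing a cell address with ChunkMask yields the
    address of the last byte of its chunk, and the chunk's location word sits
    at a fixed offset from that last byte; the ARM variant earlier reaches the
    same word by shifting the address down to the chunk base instead.]

    #include <cstdint>

    constexpr uintptr_t ChunkSize = uintptr_t(1) << 20;          // assumed 1 MiB
    constexpr uintptr_t ChunkMask = ChunkSize - 1;
    constexpr ptrdiff_t ChunkLocationOffsetFromLastByte = -15;   // assumed offset

    enum class ChunkLocation : uint32_t { Nursery = 1, TenuredHeap = 2 };

    inline bool CellIsInNurseryChunk(const void* cell) {
        // Round up to the last byte of the chunk, then read the location word.
        uintptr_t lastByte = reinterpret_cast<uintptr_t>(cell) | ChunkMask;
        auto loc = *reinterpret_cast<const uint32_t*>(
            lastByte + ChunkLocationOffsetFromLastByte);
        return loc == uint32_t(ChunkLocation::Nursery);
    }
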
--- a/js/src/jit/x86/MacroAssembler-x86.cpp
+++ b/js/src/jit/x86/MacroAssembler-x86.cpp
@@ -537,25 +537,16 @@ MacroAssembler::moveValue(const Value& s
     else
         movl(Imm32(src.toNunboxPayload()), dest.payloadReg());
 }
 
 // ===============================================================
 // Branch functions
 
 void
-MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
-{
-    if (ptr != buffer)
-        movePtr(ptr, buffer);
-    orPtr(Imm32(gc::ChunkMask), buffer);
-    loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
-}
-
-void
 MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
                                         Label* label)
 {
     MOZ_ASSERT(temp != InvalidReg);  // A temp register is required for x86.
     MOZ_ASSERT(ptr != temp);
     movePtr(ptr, temp);
     branchPtrInNurseryChunkImpl(cond, temp, label);
 }
@@ -575,64 +566,44 @@ MacroAssembler::branchPtrInNurseryChunkI
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
 
     orPtr(Imm32(gc::ChunkMask), ptr);
     branch32(cond, Address(ptr, gc::ChunkLocationOffsetFromLastByte),
              Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
 }
 
 void
+MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
+                                           Label* label)
+{
+    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
+
+    Label done;
+
+    branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
+    branchPtrInNurseryChunk(cond, address, temp, label);
+
+    bind(&done);
+}
+
+void
 MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
                                            Label* label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
 
     Label done;
 
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
 
 void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
-                                         Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    Label done, checkAddress;
-
-    Register tag = extractTag(address, temp);
-    MOZ_ASSERT(tag == temp);
-    branchTestObject(Assembler::Equal, tag, &checkAddress);
-    branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    branchPtrInNurseryChunk(cond, ToPayload(address), temp, label);
-
-    bind(&done);
-}
-
-void
-MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
-                                         Label* label)
-{
-    MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
-    Label done, checkAddress;
-
-    branchTestObject(Assembler::Equal, value, &checkAddress);
-    branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
-
-    bind(&checkAddress);
-    branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
-
-    bind(&done);
-}
-
-void
 MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
                                 const Value& rhs, Label* label)
 {
     MOZ_ASSERT(cond == Equal || cond == NotEqual);
     if (rhs.isGCThing())
         cmpPtr(lhs.payloadReg(), ImmGCPtr(rhs.toGCThing()));
     else
         cmpPtr(lhs.payloadReg(), ImmWord(rhs.toNunboxPayload()));
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -7708,23 +7708,16 @@ JS::CopyAsyncStack(JSContext* cx, JS::Ha
 }
 
 JS_PUBLIC_API(Zone*)
 JS::GetObjectZone(JSObject* obj)
 {
     return obj->zone();
 }
 
-JS_PUBLIC_API(Zone*)
-JS::GetNurseryStringZone(JSString* str)
-{
-    MOZ_ASSERT(!str->isTenured());
-    return str->zone();
-}
-
 JS_PUBLIC_API(JS::TraceKind)
 JS::GCThingTraceKind(void* thing)
 {
     MOZ_ASSERT(thing);
     return static_cast<js::gc::Cell*>(thing)->getTraceKind();
 }
 
 JS_PUBLIC_API(void)
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -821,17 +821,17 @@ JSCompartment::sweepAfterMinorGC(JSTrace
 {
     globalWriteBarriered = 0;
 
     InnerViewTable& table = innerViews.get();
     if (table.needsSweepAfterMinorGC())
         table.sweepAfterMinorGC();
 
     crossCompartmentWrappers.sweepAfterMinorGC(trc);
-    dtoaCache.purge();
+
     sweepMapAndSetObjectsAfterMinorGC();
 }
 
 void
 JSCompartment::sweepSavedStacks()
 {
     savedStacks_.sweep();
 }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -221,17 +221,17 @@ class CrossCompartmentKey
         }
     };
 
     bool isTenured() const {
         struct IsTenuredFunctor {
             using ReturnType = bool;
             ReturnType operator()(JSObject** tp) { return !IsInsideNursery(*tp); }
             ReturnType operator()(JSScript** tp) { return true; }
-            ReturnType operator()(JSString** tp) { return !IsInsideNursery(*tp); }
+            ReturnType operator()(JSString** tp) { return true; }
         };
         return const_cast<CrossCompartmentKey*>(this)->applyToWrapped(IsTenuredFunctor());
     }
 
     void trace(JSTracer* trc);
     bool needsSweep();
 
   private:
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -15,17 +15,16 @@
 
 #include "jsapi.h" // For JSAutoByteString.  See bug 1033916.
 #include "jsbytecode.h"
 #include "jspubtd.h"
 
 #include "js/CallArgs.h"
 #include "js/CallNonGenericMethod.h"
 #include "js/Class.h"
-#include "js/HeapAPI.h"
 #include "js/Utility.h"
 
 #if JS_STACK_GROWTH_DIRECTION > 0
 # define JS_CHECK_STACK_SIZE(limit, sp) (MOZ_LIKELY((uintptr_t)(sp) < (limit)))
 #else
 # define JS_CHECK_STACK_SIZE(limit, sp) (MOZ_LIKELY((uintptr_t)(sp) > (limit)))
 #endif
 
@@ -608,36 +607,30 @@ struct Function {
     /* Used only for natives */
     JSNative native;
     const JSJitInfo* jitinfo;
     void* _1;
 };
 
 struct String
 {
-    static const uint32_t NON_ATOM_BIT     = JS_BIT(0);
-    static const uint32_t INLINE_CHARS_BIT = JS_BIT(3);
+    static const uint32_t INLINE_CHARS_BIT = JS_BIT(2);
     static const uint32_t LATIN1_CHARS_BIT = JS_BIT(6);
-    static const uint32_t ROPE_FLAGS       = NON_ATOM_BIT;
-    static const uint32_t EXTERNAL_FLAGS   = NON_ATOM_BIT | JS_BIT(5);
+    static const uint32_t ROPE_FLAGS       = 0;
+    static const uint32_t EXTERNAL_FLAGS   = JS_BIT(5);
     static const uint32_t TYPE_FLAGS_MASK  = JS_BIT(6) - 1;
     uint32_t flags;
     uint32_t length;
     union {
         const JS::Latin1Char* nonInlineCharsLatin1;
         const char16_t* nonInlineCharsTwoByte;
         JS::Latin1Char inlineStorageLatin1[1];
         char16_t inlineStorageTwoByte[1];
     };
     const JSStringFinalizer* externalFinalizer;
-
-    static bool nurseryCellIsString(const js::gc::Cell* cell) {
-        MOZ_ASSERT(IsInsideNursery(cell));
-        return reinterpret_cast<const String*>(cell)->flags & NON_ATOM_BIT;
-    }
 };
 
 } /* namespace shadow */
 
 // This is equal to |&JSObject::class_|.  Use it in places where you don't want
 // to #include jsobj.h.
 extern JS_FRIEND_DATA(const js::Class* const) ObjectClassPtr;
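
    [Editor's note: a reader-side sketch of the string flag scheme this hunk
    restores. The constants are the shadow::String values shown above (ROPE_FLAGS
    back to 0 once NON_ATOM_BIT is gone); the helper functions themselves are
    illustrative, not real friend-API functions.]

    #include <cstdint>

    constexpr uint32_t ROPE_FLAGS       = 0;
    constexpr uint32_t EXTERNAL_FLAGS   = 1u << 5;            // JS_BIT(5)
    constexpr uint32_t TYPE_FLAGS_MASK  = (1u << 6) - 1;      // JS_BIT(6) - 1
    constexpr uint32_t LATIN1_CHARS_BIT = 1u << 6;            // JS_BIT(6)

    inline bool IsRope(uint32_t flags)     { return (flags & TYPE_FLAGS_MASK) == ROPE_FLAGS; }
    inline bool IsExternal(uint32_t flags) { return (flags & TYPE_FLAGS_MASK) == EXTERNAL_FLAGS; }
    inline bool IsLatin1(uint32_t flags)   { return (flags & LATIN1_CHARS_BIT) != 0; }
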
 
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3880,29 +3880,16 @@ class MOZ_RAII js::gc::AutoRunParallelTa
     }
 
     void run() override {
         func_(runtime());
     }
 };
 
 void
-GCRuntime::purgeRuntimeForMinorGC()
-{ 
-    // If external strings become nursery allocable, remember to call
-    // zone->externalStringCache().purge() (and delete this assert.)
-    MOZ_ASSERT(!IsNurseryAllocable(AllocKind::EXTERNAL_STRING));
-
-    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
-        zone->functionToStringCache().purge();
-
-    rt->caches().purgeForMinorGC(rt);
-}
-
-void
 GCRuntime::purgeRuntime(AutoLockForExclusiveAccess& lock)
 {
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PURGE);
 
     for (GCCompartmentsIter comp(rt); !comp.done(); comp.next())
         comp->purge();
 
     for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
@@ -3912,17 +3899,22 @@ GCRuntime::purgeRuntime(AutoLockForExclu
     }
 
     for (const CooperatingContext& target : rt->cooperatingContexts()) {
         freeUnusedLifoBlocksAfterSweeping(&target.context()->tempLifoAlloc());
         target.context()->interpreterStack().purge(rt);
         target.context()->frontendCollectionPool().purge();
     }
 
-    rt->caches().purge();
+    rt->caches().gsnCache.purge();
+    rt->caches().envCoordinateNameCache.purge();
+    rt->caches().newObjectCache.purge();
+    rt->caches().uncompressedSourceCache.purge();
+    if (rt->caches().evalCache.initialized())
+        rt->caches().evalCache.clear();
 
     if (auto cache = rt->maybeThisRuntimeSharedImmutableStrings())
         cache->purge();
 
     MOZ_ASSERT(unmarkGrayStack.empty());
     unmarkGrayStack.clearAndFree();
 }
 
@@ -6577,17 +6569,19 @@ GCRuntime::compactPhase(JS::gcreason::Re
     }
 
     if (ShouldProtectRelocatedArenas(reason))
         protectAndHoldArenas(relocatedArenas);
     else
         releaseRelocatedArenas(relocatedArenas);
 
     // Clear caches that can contain cell pointers.
-    rt->caches().purgeForCompaction();
+    rt->caches().newObjectCache.purge();
+    if (rt->caches().evalCache.initialized())
+        rt->caches().evalCache.clear();
 
 #ifdef DEBUG
     CheckHashTablesAfterMovingGC(rt);
 #endif
 
     return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
 }
 
@@ -8227,35 +8221,34 @@ JS_FRIEND_API(void)
 JS::AssertGCThingMustBeTenured(JSObject* obj)
 {
     MOZ_ASSERT(obj->isTenured() &&
                (!IsNurseryAllocable(obj->asTenured().getAllocKind()) ||
                 obj->getClass()->hasFinalize()));
 }
 
 JS_FRIEND_API(void)
-JS::AssertGCThingIsNotNurseryAllocable(Cell* cell)
+JS::AssertGCThingIsNotAnObjectSubclass(Cell* cell)
 {
     MOZ_ASSERT(cell);
-    MOZ_ASSERT(!cell->is<JSObject>() && !cell->is<JSString>());
+    MOZ_ASSERT(!cell->is<JSObject>());
 }
 
 JS_FRIEND_API(void)
 js::gc::AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind)
 {
     if (!cell) {
         MOZ_ASSERT(kind == JS::TraceKind::Null);
         return;
     }
 
     MOZ_ASSERT(IsCellPointerValid(cell));
 
     if (IsInsideNursery(cell)) {
-        MOZ_ASSERT(kind == (JSString::nurseryCellIsString(cell) ? JS::TraceKind::String
-                                                                : JS::TraceKind::Object));
+        MOZ_ASSERT(kind == JS::TraceKind::Object);
         return;
     }
 
     MOZ_ASSERT(MapAllocToTraceKind(cell->asTenured().getAllocKind()) == kind);
 }
 #endif
 
 #ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -162,17 +162,17 @@ class ArenaCellIterImpl
             ExposeGCThingToActiveJS(JS::GCCellPtr(cell, traceKind));
 
         return cell;
     }
 
     template<typename T> T* get() const {
         MOZ_ASSERT(!done());
         MOZ_ASSERT(JS::MapTypeToTraceKind<T>::kind == traceKind);
-        return reinterpret_cast<T*>(getCell());
+        return static_cast<T*>(getCell());
     }
 
     void next() {
         MOZ_ASSERT(!done());
         thing += thingSize;
         if (thing < ArenaSize)
             moveForwardIfFree();
     }
--- a/js/src/tests/lib/tests.py
+++ b/js/src/tests/lib/tests.py
@@ -36,19 +36,16 @@ JITFLAGS = {
     # handler (bug 1362239), so must avoid wasm/asmjs.
     'tsan': [
         ['--no-asmjs', '--no-wasm'],
         ['--no-asmjs', '--no-wasm',
          '--ion-eager', '--ion-offthread-compile=off', '--non-writable-jitcode',
          '--ion-check-range-analysis', '--ion-extra-checks', '--no-sse3', '--no-threads'],
         ['--no-asmjs', '--no-wasm', '--no-baseline', '--no-ion'],
     ],
-    'baseline': [
-        ['--no-ion'],
-    ],
     # Interpreter-only, for tools that cannot handle binary code generation.
     'interp': [
         ['--no-baseline', '--no-asmjs', '--no-wasm', '--no-native-regexp']
     ],
     'none': [
         [] # no flags, normal baseline and ion
     ]
 }
--- a/js/src/vm/Caches.h
+++ b/js/src/vm/Caches.h
@@ -59,25 +59,16 @@ struct EnvironmentCoordinateNameCache {
 };
 
 struct EvalCacheEntry
 {
     JSLinearString* str;
     JSScript* script;
     JSScript* callerScript;
     jsbytecode* pc;
-
-    // We sweep this cache before a nursery collection to remove entries with
-    // string keys in the nursery.
-    //
-    // The entire cache is purged on a major GC, so we don't need to sweep it
-    // then.
-    bool needsSweep() {
-        return !str->isTenured();
-    }
 };
 
 struct EvalCacheLookup
 {
     explicit EvalCacheLookup(JSContext* cx) : str(cx), callerScript(cx) {}
     RootedLinearString str;
     RootedScript callerScript;
     jsbytecode* pc;
@@ -86,17 +77,17 @@ struct EvalCacheLookup
 struct EvalCacheHashPolicy
 {
     typedef EvalCacheLookup Lookup;
 
     static HashNumber hash(const Lookup& l);
     static bool match(const EvalCacheEntry& entry, const EvalCacheLookup& l);
 };
 
-typedef GCHashSet<EvalCacheEntry, EvalCacheHashPolicy, SystemAllocPolicy> EvalCache;
+typedef HashSet<EvalCacheEntry, EvalCacheHashPolicy, SystemAllocPolicy> EvalCache;
 
 /*
  * Cache for speeding up repetitive creation of objects in the VM.
  * When an object is created which matches the criteria in the 'key' section
  * below, an entry is filled with the resulting object.
  */
 class NewObjectCache
 {
@@ -240,31 +231,13 @@ class RuntimeCaches
     bool init();
 
     js::MathCache* getMathCache(JSContext* cx) {
         return mathCache_ ? mathCache_.get() : createMathCache(cx);
     }
     js::MathCache* maybeGetMathCache() {
         return mathCache_.get();
     }
-
-    void purgeForMinorGC(JSRuntime* rt) {
-        newObjectCache.clearNurseryObjects(rt);
-        evalCache.sweep();
-    }
-
-    void purgeForCompaction() {
-        newObjectCache.purge();
-        if (evalCache.initialized())
-            evalCache.clear();
-    }
-
-    void purge() {
-        purgeForCompaction();
-        gsnCache.purge();
-        envCoordinateNameCache.purge();
-        uncompressedSourceCache.purge();
-    }
 };
 
 } // namespace js
 
 #endif /* vm_Caches_h */
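
    [Editor's note: an illustrative sweep loop for the needsSweep() hook that
    the Caches.h hunk above removes. The container and entry type here are
    assumed stand-ins (a std::vector, not the real GCHashSet/EvalCacheEntry);
    the idea is that after a minor GC any entry whose key string is still in
    the nursery is dropped, so the cache never retains a pointer that may have
    moved. With strings tenured-only again, the plain HashSet needs no sweep.]

    #include <algorithm>
    #include <vector>

    struct CacheEntry {
        bool keyIsTenured;   // stands in for str->isTenured()
    };

    inline void SweepAfterMinorGC(std::vector<CacheEntry>& cache) {
        cache.erase(std::remove_if(cache.begin(), cache.end(),
                                   [](const CacheEntry& e) { return !e.keyIsTenured; }),
                    cache.end());
    }
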
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -219,34 +219,32 @@ FinishOffThreadIonCompile(jit::IonBuilde
 
 static JSRuntime*
 GetSelectorRuntime(const CompilationSelector& selector)
 {
     struct Matcher
     {
         JSRuntime* match(JSScript* script)    { return script->runtimeFromActiveCooperatingThread(); }
         JSRuntime* match(JSCompartment* comp) { return comp->runtimeFromActiveCooperatingThread(); }
-        JSRuntime* match(Zone* zone)          { return zone->runtimeFromActiveCooperatingThread(); }
         JSRuntime* match(ZonesInState zbs)    { return zbs.runtime; }
         JSRuntime* match(JSRuntime* runtime)  { return runtime; }
         JSRuntime* match(AllCompilations all) { return nullptr; }
         JSRuntime* match(CompilationsUsingNursery cun) { return cun.runtime; }
     };
 
     return selector.match(Matcher());
 }
 
 static bool
 JitDataStructuresExist(const CompilationSelector& selector)
 {
     struct Matcher
     {
         bool match(JSScript* script)    { return !!script->compartment()->jitCompartment(); }
         bool match(JSCompartment* comp) { return !!comp->jitCompartment(); }
-        bool match(Zone* zone)          { return !!zone->jitZone(); }
         bool match(ZonesInState zbs)    { return zbs.runtime->hasJitRuntime(); }
         bool match(JSRuntime* runtime)  { return runtime->hasJitRuntime(); }
         bool match(AllCompilations all) { return true; }
         bool match(CompilationsUsingNursery cun) { return cun.runtime->hasJitRuntime(); }
     };
 
     return selector.match(Matcher());
 }
@@ -255,17 +253,16 @@ static bool
 IonBuilderMatches(const CompilationSelector& selector, jit::IonBuilder* builder)
 {
     struct BuilderMatches
     {
         jit::IonBuilder* builder_;
 
         bool match(JSScript* script)    { return script == builder_->script(); }
         bool match(JSCompartment* comp) { return comp == builder_->script()->compartment(); }
-        bool match(Zone* zone)          { return zone == builder_->script()->zone(); }
         bool match(JSRuntime* runtime)  { return runtime == builder_->script()->runtimeFromAnyThread(); }
         bool match(AllCompilations all) { return true; }
         bool match(ZonesInState zbs)    {
             return zbs.runtime == builder_->script()->runtimeFromAnyThread() &&
                    zbs.state == builder_->script()->zoneFromAnyThread()->gcState();
         }
         bool match(CompilationsUsingNursery cun) {
             return cun.runtime == builder_->script()->runtimeFromAnyThread() &&
--- a/js/src/vm/HelperThreads.h
+++ b/js/src/vm/HelperThreads.h
@@ -497,17 +497,16 @@ bool
 StartOffThreadIonFree(jit::IonBuilder* builder, const AutoLockHelperThreadState& lock);
 
 struct AllCompilations {};
 struct ZonesInState { JSRuntime* runtime; JS::Zone::GCState state; };
 struct CompilationsUsingNursery { JSRuntime* runtime; };
 
 using CompilationSelector = mozilla::Variant<JSScript*,
                                              JSCompartment*,
-                                             Zone*,
                                              ZonesInState,
                                              JSRuntime*,
                                              CompilationsUsingNursery,
                                              AllCompilations>;
 
 /*
  * Cancel scheduled or in progress Ion compilations.
  */
@@ -522,22 +521,16 @@ CancelOffThreadIonCompile(JSScript* scri
 
 inline void
 CancelOffThreadIonCompile(JSCompartment* comp)
 {
     CancelOffThreadIonCompile(CompilationSelector(comp), true);
 }
 
 inline void
-CancelOffThreadIonCompile(Zone* zone)
-{
-    CancelOffThreadIonCompile(CompilationSelector(zone), true);
-}
-
-inline void
 CancelOffThreadIonCompile(JSRuntime* runtime, JS::Zone::GCState state)
 {
     CancelOffThreadIonCompile(CompilationSelector(ZonesInState{runtime, state}), true);
 }
 
 inline void
 CancelOffThreadIonCompile(JSRuntime* runtime)
 {
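
    [Editor's note: a sketch of the selector/matcher pattern CancelOffThreadIonCompile
    uses, reduced to two assumed alternatives and written with std::variant as a
    stand-in for mozilla::Variant. Removing Zone* from CompilationSelector, as this
    backout does, amounts to removing one variant alternative, one inline overload,
    and one match() case in each visitor.]

    #include <variant>

    struct FakeScript      { void* runtime; };
    struct FakeCompartment { void* runtime; };

    using Selector = std::variant<FakeScript*, FakeCompartment*>;

    inline void* SelectorRuntime(const Selector& s) {
        return std::visit([](auto* p) -> void* { return p->runtime; }, s);
    }
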
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -9,17 +9,16 @@
 #include "mozilla/DebugOnly.h"
 
 #include "jscompartment.h"
 #include "jsgc.h"
 #include "jsobj.h"
 #include "jsscript.h"
 
 #include "gc/Heap.h"
-#include "gc/Nursery.h"
 #include "jit/BaselineJIT.h"
 #include "jit/Ion.h"
 #include "vm/ArrayObject.h"
 #include "vm/Runtime.h"
 #include "vm/Shape.h"
 #include "vm/String.h"
 #include "vm/Symbol.h"
 #include "vm/WrapperObject.h"
@@ -517,26 +516,23 @@ StatsCellCallback(JSRuntime* rt, void* d
                                    &cStats.baselineStubsFallback);
         cStats.ionData += jit::SizeOfIonData(script, rtStats->mallocSizeOf_);
         CollectScriptSourceStats<granularity>(closure, script->scriptSource());
         break;
       }
 
       case JS::TraceKind::String: {
         JSString* str = static_cast<JSString*>(thing);
-        size_t size = thingSize;
-        if (!str->isTenured())
-            size += Nursery::stringHeaderSize();
 
         JS::StringInfo info;
         if (str->hasLatin1Chars()) {
-            info.gcHeapLatin1 = size;
+            info.gcHeapLatin1 = thingSize;
             info.mallocHeapLatin1 = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
         } else {
-            info.gcHeapTwoByte = size;
+            info.gcHeapTwoByte = thingSize;
             info.mallocHeapTwoByte = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
         }
         info.numCopies = 1;
 
         zStats->stringInfo.add(info);
 
         // The primary use case for anonymization is automated crash submission
         // (to help detect OOM crashes). In that case, we don't want to pay the
--- a/js/src/vm/NativeObject-inl.h
+++ b/js/src/vm/NativeObject-inl.h
@@ -120,17 +120,17 @@ NativeObject::markDenseElementsNotPacked
     MarkObjectGroupFlags(cx, this, OBJECT_FLAG_NON_PACKED);
 }
 
 inline void
 NativeObject::elementsRangeWriteBarrierPost(uint32_t start, uint32_t count)
 {
     for (size_t i = 0; i < count; i++) {
         const Value& v = elements_[start + i];
-        if ((v.isObject() || v.isString()) && IsInsideNursery(v.toGCThing())) {
+        if (v.isObject() && IsInsideNursery(&v.toObject())) {
             zone()->group()->storeBuffer().putSlot(this, HeapSlot::Element,
                                                    unshiftedIndex(start + i),
                                                    count - i);
             return;
         }
     }
 }
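
    [Editor's note: a self-contained sketch, with assumed types, of the rule the
    NativeObject-inl.h hunk above restores: with nursery strings gone, only
    object-valued elements that point into the nursery need a store-buffer
    record after a write, and the loop stops at the first such element because
    one slot-range entry covers the rest of the written range.]

    #include <cstdint>

    struct FakeVal {
        bool isObject;
        bool pointsIntoNursery;
    };

    // Returns the index of the first written element that needs a post write
    // barrier, or -1 if the whole range [start, start + count) can be skipped.
    inline int FirstElementNeedingPostBarrier(const FakeVal* elements,
                                              uint32_t start, uint32_t count) {
        for (uint32_t i = 0; i < count; i++) {
            const FakeVal& v = elements[start + i];
            if (v.isObject && v.pointsIntoNursery)
                return int(start + i);
        }
        return -1;
    }
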
 
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -1386,17 +1386,18 @@ class NativeObject : public ShapedObject
     }
     void setPrivate(void* data) {
         void** pprivate = &privateRef(numFixedSlots());
         privateWriteBarrierPre(pprivate);
         *pprivate = data;
     }
 
     void setPrivateGCThing(gc::Cell* cell) {
-        MOZ_ASSERT_IF(IsMarkedBlack(this), !cell->isMarkedGray());
+        MOZ_ASSERT_IF(IsMarkedBlack(this),
+                      !JS::GCThingIsMarkedGray(JS::GCCellPtr(cell, cell->getTraceKind())));
         void** pprivate = &privateRef(numFixedSlots());
         privateWriteBarrierPre(pprivate);
         *pprivate = reinterpret_cast<void*>(cell);
         privateWriteBarrierPost(pprivate);
     }
 
     void setPrivateUnbarriered(void* data) {
         void** pprivate = &privateRef(numFixedSlots());
--- a/js/src/vm/Scope.h
+++ b/js/src/vm/Scope.h
@@ -219,43 +219,34 @@ class WrappedPtrOperations<Scope*, Wrapp
 //
 // The base class of all Scopes.
 //
 class Scope : public js::gc::TenuredCell
 {
     friend class GCMarker;
 
     // The kind determines data_.
-    //
-    // The memory here must be fully initialized, since otherwise the magic_
-    // value for gc::RelocationOverlay will land in the padding and may be
-    // stale.
-    union {
-        ScopeKind kind_;
-        uintptr_t paddedKind_;
-    };
+    ScopeKind kind_;
 
     // The enclosing scope or nullptr.
     GCPtrScope enclosing_;
 
     // If there are any aliased bindings, the shape for the
     // EnvironmentObject. Otherwise nullptr.
     GCPtrShape environmentShape_;
 
   protected:
     uintptr_t data_;
 
     Scope(ScopeKind kind, Scope* enclosing, Shape* environmentShape)
-      : enclosing_(enclosing),
+      : kind_(kind),
+        enclosing_(enclosing),
         environmentShape_(environmentShape),
         data_(0)
-    {
-        paddedKind_ = 0;
-        kind_ = kind;
-    }
+    { }
 
     static Scope* create(JSContext* cx, ScopeKind kind, HandleScope enclosing,
                          HandleShape envShape);
 
     template <typename T, typename D>
     static Scope* create(JSContext* cx, ScopeKind kind, HandleScope enclosing,
                          HandleShape envShape, mozilla::UniquePtr<T, D> data);
 
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -77,19 +77,23 @@ NewInlineString(JSContext* cx, HandleLin
 
     JS::AutoCheckCannotGC nogc;
     mozilla::PodCopy(chars, base->chars<CharT>(nogc) + start, length);
     chars[length] = 0;
     return s;
 }
 
 static inline void
-StringWriteBarrierPost(JSContext* maybecx, JSString** strp, JSString* prev, JSString* next)
+StringWriteBarrierPost(JSContext* maybecx, JSString** strp)
 {
-    js::BarrierMethods<JSString*>::postBarrier(strp, prev, next);
+}
+
+static inline void
+StringWriteBarrierPostRemove(JSContext* maybecx, JSString** strp)
+{
 }
 
 } /* namespace js */
 
 MOZ_ALWAYS_INLINE bool
 JSString::validateLength(JSContext* maybecx, size_t length)
 {
     if (MOZ_UNLIKELY(length > JSString::MAX_LENGTH)) {
@@ -104,30 +108,30 @@ MOZ_ALWAYS_INLINE void
 JSRope::init(JSContext* cx, JSString* left, JSString* right, size_t length)
 {
     d.u1.length = length;
     d.u1.flags = ROPE_FLAGS;
     if (left->hasLatin1Chars() && right->hasLatin1Chars())
         d.u1.flags |= LATIN1_CHARS_BIT;
     d.s.u2.left = left;
     d.s.u3.right = right;
-    js::BarrierMethods<JSString*>::postBarrier(&d.s.u2.left, nullptr, left);
-    js::BarrierMethods<JSString*>::postBarrier(&d.s.u3.right, nullptr, right);
+    js::StringWriteBarrierPost(cx, &d.s.u2.left);
+    js::StringWriteBarrierPost(cx, &d.s.u3.right);
 }
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSRope*
 JSRope::new_(JSContext* cx,
              typename js::MaybeRooted<JSString*, allowGC>::HandleType left,
              typename js::MaybeRooted<JSString*, allowGC>::HandleType right,
              size_t length)
 {
     if (!validateLength(cx, length))
         return nullptr;
-    JSRope* str = js::Allocate<JSRope, allowGC>(cx, js::gc::DefaultHeap);
+    JSRope* str = static_cast<JSRope*>(js::Allocate<JSString, allowGC>(cx));
     if (!str)
         return nullptr;
     str->init(cx, left, right, length);
     return str;
 }
 
 MOZ_ALWAYS_INLINE void
 JSDependentString::init(JSContext* cx, JSLinearString* base, size_t start,
@@ -139,17 +143,17 @@ JSDependentString::init(JSContext* cx, J
     if (base->hasLatin1Chars()) {
         d.u1.flags = DEPENDENT_FLAGS | LATIN1_CHARS_BIT;
         d.s.u2.nonInlineCharsLatin1 = base->latin1Chars(nogc) + start;
     } else {
         d.u1.flags = DEPENDENT_FLAGS;
         d.s.u2.nonInlineCharsTwoByte = base->twoByteChars(nogc) + start;
     }
     d.s.u3.base = base;
-    js::BarrierMethods<JSString*>::postBarrier(reinterpret_cast<JSString**>(&d.s.u3.base), nullptr, base);
+    js::StringWriteBarrierPost(cx, reinterpret_cast<JSString**>(&d.s.u3.base));
 }
 
 MOZ_ALWAYS_INLINE JSLinearString*
 JSDependentString::new_(JSContext* cx, JSLinearString* baseArg, size_t start,
                         size_t length)
 {
     /*
      * Try to avoid long chains of dependent strings. We can't avoid these
@@ -177,78 +181,64 @@ JSDependentString::new_(JSContext* cx, J
         return baseArg->hasLatin1Chars()
                ? js::NewInlineString<JS::Latin1Char>(cx, base, start, length)
                : js::NewInlineString<char16_t>(cx, base, start, length);
     }
 
     if (baseArg->isExternal() && !baseArg->ensureFlat(cx))
         return nullptr;
 
-    JSDependentString* str = js::Allocate<JSDependentString, js::NoGC>(cx, js::gc::DefaultHeap);
+    JSDependentString* str = static_cast<JSDependentString*>(js::Allocate<JSString, js::NoGC>(cx));
     if (str) {
         str->init(cx, baseArg, start, length);
         return str;
     }
 
     js::RootedLinearString base(cx, baseArg);
 
-    str = js::Allocate<JSDependentString>(cx, js::gc::DefaultHeap);
+    str = static_cast<JSDependentString*>(js::Allocate<JSString>(cx));
     if (!str)
         return nullptr;
     str->init(cx, base, start, length);
     return str;
 }
 
 MOZ_ALWAYS_INLINE void
 JSFlatString::init(const char16_t* chars, size_t length)
 {
     d.u1.length = length;
-    d.u1.flags = FLAT_FLAGS;
+    d.u1.flags = FLAT_BIT;
     d.s.u2.nonInlineCharsTwoByte = chars;
 }
 
 MOZ_ALWAYS_INLINE void
 JSFlatString::init(const JS::Latin1Char* chars, size_t length)
 {
     d.u1.length = length;
-    d.u1.flags = FLAT_FLAGS | LATIN1_CHARS_BIT;
+    d.u1.flags = FLAT_BIT | LATIN1_CHARS_BIT;
     d.s.u2.nonInlineCharsLatin1 = chars;
 }
 
 template <js::AllowGC allowGC, typename CharT>
 MOZ_ALWAYS_INLINE JSFlatString*
 JSFlatString::new_(JSContext* cx, const CharT* chars, size_t length)
 {
     MOZ_ASSERT(chars[length] == CharT(0));
 
     if (!validateLength(cx, length))
         return nullptr;
 
     JSFlatString* str;
     if (cx->compartment()->isAtomsCompartment())
         str = js::Allocate<js::NormalAtom, allowGC>(cx);
     else
-        str = js::Allocate<JSFlatString, allowGC>(cx, js::gc::DefaultHeap);
+        str = static_cast<JSFlatString*>(js::Allocate<JSString, allowGC>(cx));
     if (!str)
         return nullptr;
 
-    if (!str->isTenured()) {
-        // The chars pointer is only considered to be handed over to this
-        // function on a successful return. If the following registration
-        // fails, the string is partially initialized and must be made valid,
-        // or its finalizer may attempt to free uninitialized memory.
-        void* ptr = const_cast<void*>(static_cast<const void*>(chars));
-        if (!cx->runtime()->gc.nursery().registerMallocedBuffer(ptr)) {
-            str->init((JS::Latin1Char*)nullptr, 0);
-            if (allowGC)
-                ReportOutOfMemory(cx);
-            return nullptr;
-        }
-    }
-
     str->init(chars, length);
     return str;
 }
 
 inline js::PropertyName*
 JSFlatString::toPropertyName(JSContext* cx)
 {
 #ifdef DEBUG
@@ -265,27 +255,27 @@ JSFlatString::toPropertyName(JSContext* 
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSThinInlineString*
 JSThinInlineString::new_(JSContext* cx)
 {
     if (cx->compartment()->isAtomsCompartment())
         return (JSThinInlineString*)(js::Allocate<js::NormalAtom, allowGC>(cx));
 
-    return js::Allocate<JSThinInlineString, allowGC>(cx, js::gc::DefaultHeap);
+    return static_cast<JSThinInlineString*>(js::Allocate<JSString, allowGC>(cx));
 }
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSFatInlineString*
 JSFatInlineString::new_(JSContext* cx)
 {
     if (cx->compartment()->isAtomsCompartment())
         return (JSFatInlineString*)(js::Allocate<js::FatInlineAtom, allowGC>(cx));
 
-    return js::Allocate<JSFatInlineString, allowGC>(cx, js::gc::DefaultHeap);
+    return js::Allocate<JSFatInlineString, allowGC>(cx);
 }
 
 template<>
 MOZ_ALWAYS_INLINE JS::Latin1Char*
 JSThinInlineString::init<JS::Latin1Char>(size_t length)
 {
     MOZ_ASSERT(lengthFits<JS::Latin1Char>(length));
     d.u1.length = length;
--- a/js/src/vm/String.cpp
+++ b/js/src/vm/String.cpp
@@ -9,17 +9,16 @@
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/MemoryReporting.h"
 #include "mozilla/PodOperations.h"
 #include "mozilla/RangedPtr.h"
 #include "mozilla/TypeTraits.h"
 #include "mozilla/Unused.h"
 
 #include "gc/Marking.h"
-#include "gc/Nursery.h"
 #include "js/UbiNode.h"
 #include "vm/GeckoProfiler.h"
 
 #include "jscntxtinlines.h"
 #include "jscompartmentinlines.h"
 
 #include "vm/GeckoProfiler-inl.h"
 
@@ -86,19 +85,19 @@ JS::ubi::Concrete<JSString>::size(mozill
 {
     JSString& str = get();
     size_t size;
     if (str.isAtom())
         size = str.isFatInline() ? sizeof(js::FatInlineAtom) : sizeof(js::NormalAtom);
     else
         size = str.isFatInline() ? sizeof(JSFatInlineString) : sizeof(JSString);
 
-    if (IsInsideNursery(&str))
-        size += Nursery::stringHeaderSize();
-
+    // We can't use mallocSizeOf on things in the nursery. At the moment,

+    // strings are never in the nursery, but that may change.
+    MOZ_ASSERT(!IsInsideNursery(&str));
     size += str.sizeOfExcludingThis(mallocSizeOf);
 
     return size;
 }
 
 const char16_t JS::ubi::Concrete<JSString>::concreteTypeName[] = u"JSString";
 
 #ifdef DEBUG
@@ -204,21 +203,20 @@ JSString::dumpRepresentationHeader(js::G
 {
     uint32_t flags = d.u1.flags;
     // Print the string's address as an actual C++ expression, to facilitate
     // copy-and-paste into a debugger.
     out.printf("((%s*) %p) length: %zu  flags: 0x%x", subclass, this, length(), flags);
     if (flags & FLAT_BIT)               out.put(" FLAT");
     if (flags & HAS_BASE_BIT)           out.put(" HAS_BASE");
     if (flags & INLINE_CHARS_BIT)       out.put(" INLINE_CHARS");
-    if (flags & NON_ATOM_BIT)           out.put(" NON_ATOM");
+    if (flags & ATOM_BIT)               out.put(" ATOM");
     if (isPermanentAtom())              out.put(" PERMANENT");
     if (flags & LATIN1_CHARS_BIT)       out.put(" LATIN1");
     if (flags & INDEX_VALUE_BIT)        out.put(" INDEX_VALUE(%u)", getIndexValue());
-    if (!isTenured())                   out.put(" NURSERY");
     out.putChar('\n');
 }
 
 void
 JSLinearString::dumpRepresentationChars(js::GenericPrinter& out, int indent) const
 {
     if (hasLatin1Chars()) {
         out.printf("%*schars: ((Latin1Char*) %p) ", indent, "", rawLatin1Chars());
@@ -481,110 +479,97 @@ JSRope::flattenInternal(JSContext* maybe
              */
             MOZ_ASSERT(str->isRope());
             while (str != leftMostRope) {
                 if (b == WithIncrementalBarrier) {
                     JSString::writeBarrierPre(str->d.s.u2.left);
                     JSString::writeBarrierPre(str->d.s.u3.right);
                 }
                 JSString* child = str->d.s.u2.left;
-                js::BarrierMethods<JSString*>::postBarrier(&str->d.s.u2.left, child, nullptr);
                 MOZ_ASSERT(child->isRope());
                 str->setNonInlineChars(left.nonInlineChars<CharT>(nogc));
                 child->d.u1.flattenData = uintptr_t(str) | Tag_VisitRightChild;
                 str = child;
             }
             if (b == WithIncrementalBarrier) {
                 JSString::writeBarrierPre(str->d.s.u2.left);
                 JSString::writeBarrierPre(str->d.s.u3.right);
             }
             str->setNonInlineChars(left.nonInlineChars<CharT>(nogc));
             wholeCapacity = capacity;
             wholeChars = const_cast<CharT*>(left.nonInlineChars<CharT>(nogc));
             pos = wholeChars + left.d.u1.length;
-            static_assert((EXTENSIBLE_FLAGS & DEPENDENT_FLAGS) == NON_ATOM_BIT,
-                          "extensible and dependent flags must only overlap on NON_ATOM_BIT");
-            left.d.u1.flags ^= (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS) & ~NON_ATOM_BIT;
+            JS_STATIC_ASSERT(!(EXTENSIBLE_FLAGS & DEPENDENT_FLAGS));
+            left.d.u1.flags ^= (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS);
             left.d.s.u3.base = (JSLinearString*)this;  /* will be true on exit */
-            MOZ_ASSERT(!static_cast<JSString&>(left).isExtensible());
-            MOZ_ASSERT(left.isDependent());
-            MOZ_ASSERT(!left.isAtom());
-            BarrierMethods<JSString*>::postBarrier((JSString**)&left.d.s.u3.base, nullptr, this);
+            StringWriteBarrierPostRemove(maybecx, &left.d.s.u2.left);
+            StringWriteBarrierPost(maybecx, (JSString**)&left.d.s.u3.base);
             goto visit_right_child;
         }
-   }
+    }
 
     if (!AllocChars(this, wholeLength, &wholeChars, &wholeCapacity)) {
         if (maybecx)
             ReportOutOfMemory(maybecx);
         return nullptr;
     }
 
-    if (!isTenured() && maybecx) {
-        JSRuntime* rt = maybecx->runtime();
-        if (!rt->gc.nursery().registerMallocedBuffer(wholeChars)) {
-            js_free(wholeChars);
-            ReportOutOfMemory(maybecx);
-            return nullptr;
-        }
-    }
-
     pos = wholeChars;
     first_visit_node: {
         if (b == WithIncrementalBarrier) {
             JSString::writeBarrierPre(str->d.s.u2.left);
             JSString::writeBarrierPre(str->d.s.u3.right);
         }
 
         JSString& left = *str->d.s.u2.left;
-        js::BarrierMethods<JSString*>::postBarrier(&str->d.s.u2.left, &left, nullptr);
         str->setNonInlineChars(pos);
+        StringWriteBarrierPostRemove(maybecx, &str->d.s.u2.left);
         if (left.isRope()) {
             /* Return to this node when 'left' done, then goto visit_right_child. */
             left.d.u1.flattenData = uintptr_t(str) | Tag_VisitRightChild;
             str = &left;
             goto first_visit_node;
         }
         CopyChars(pos, left.asLinear());
         pos += left.length();
     }
     visit_right_child: {
         JSString& right = *str->d.s.u3.right;
-        BarrierMethods<JSString*>::postBarrier(&str->d.s.u3.right, &right, nullptr);
         if (right.isRope()) {
             /* Return to this node when 'right' done, then goto finish_node. */
             right.d.u1.flattenData = uintptr_t(str) | Tag_FinishNode;
             str = &right;
             goto first_visit_node;
         }
         CopyChars(pos, right.asLinear());
         pos += right.length();
     }
-
     finish_node: {
         if (str == this) {
             MOZ_ASSERT(pos == wholeChars + wholeLength);
             *pos = '\0';
             str->d.u1.length = wholeLength;
             if (IsSame<CharT, char16_t>::value)
                 str->d.u1.flags = EXTENSIBLE_FLAGS;
             else
                 str->d.u1.flags = EXTENSIBLE_FLAGS | LATIN1_CHARS_BIT;
             str->setNonInlineChars(wholeChars);
             str->d.s.u3.capacity = wholeCapacity;
+            StringWriteBarrierPostRemove(maybecx, &str->d.s.u2.left);
+            StringWriteBarrierPostRemove(maybecx, &str->d.s.u3.right);
             return &this->asFlat();
         }
         uintptr_t flattenData = str->d.u1.flattenData;
         if (IsSame<CharT, char16_t>::value)
             str->d.u1.flags = DEPENDENT_FLAGS;
         else
             str->d.u1.flags = DEPENDENT_FLAGS | LATIN1_CHARS_BIT;
         str->d.u1.length = pos - str->asLinear().nonInlineChars<CharT>(nogc);
         str->d.s.u3.base = (JSLinearString*)this;       /* will be true on exit */
-        BarrierMethods<JSString*>::postBarrier((JSString**)&str->d.s.u3.base, nullptr, this);
+        StringWriteBarrierPost(maybecx, (JSString**)&str->d.s.u3.base);
         str = (JSString*)(flattenData & ~Tag_Mask);
         if ((flattenData & Tag_Mask) == Tag_VisitRightChild)
             goto visit_right_child;
         MOZ_ASSERT((flattenData & Tag_Mask) == Tag_FinishNode);
         goto finish_node;
     }
 }
 
@@ -1126,21 +1111,19 @@ JSExternalString::ensureFlat(JSContext* 
         AutoCheckCannotGC nogc;
         PodCopy(s, nonInlineChars<char16_t>(nogc), n);
         s[n] = '\0';
     }
 
     // Release the external chars.
     finalize(cx->runtime()->defaultFreeOp());
 
-    // Transform the string into a non-external, flat string. Note that the
-    // resulting string will still be in an AllocKind::EXTERNAL_STRING arena,
-    // but will no longer be an external string.
+    // Transform the string into a non-external, flat string.
     setNonInlineChars<char16_t>(s);
-    d.u1.flags = FLAT_FLAGS;
+    d.u1.flags = FLAT_BIT;
 
     return &this->asFlat();
 }
 
 #ifdef DEBUG
 void
 JSAtom::dump(js::GenericPrinter& out)
 {
--- a/js/src/vm/String.h
+++ b/js/src/vm/String.h
@@ -11,19 +11,17 @@
 #include "mozilla/PodOperations.h"
 #include "mozilla/Range.h"
 
 #include "jsapi.h"
 #include "jsfriendapi.h"
 #include "jsstr.h"
 
 #include "gc/Barrier.h"
-#include "gc/Cell.h"
 #include "gc/Heap.h"
-#include "gc/Nursery.h"
 #include "gc/Rooting.h"
 #include "js/CharacterEncoding.h"
 #include "js/RootingAPI.h"
 
 #include "vm/Printer.h"
 
 class JSDependentString;
 class JSExtensibleString;
@@ -150,17 +148,17 @@ static const size_t UINT32_CHAR_BUFFER_L
  *
  * Derived string types can be queried from ancestor types via isX() and
  * retrieved with asX() debug-only-checked casts.
  *
  * The ensureX() operations mutate 'this' in place to effectively make the type be
  * at least X (e.g., ensureLinear will change a JSRope to be a JSFlatString).
  */
 
-class JSString : public js::gc::Cell
+class JSString : public js::gc::TenuredCell
 {
   protected:
     static const size_t NUM_INLINE_CHARS_LATIN1   = 2 * sizeof(void*) / sizeof(JS::Latin1Char);
     static const size_t NUM_INLINE_CHARS_TWO_BYTE = 2 * sizeof(void*) / sizeof(char16_t);
 
     /* Fields only apply to string types commented on the right. */
     struct Data
     {
@@ -217,76 +215,66 @@ class JSString : public js::gc::Cell
      * string instance of that type. Abstract types have no instances and thus
      * have no such entry. The "subtype predicate" entry for a type specifies
      * the predicate used to query whether a JSString instance is subtype
      * (reflexively) of that type.
      *
      *   String        Instance     Subtype
      *   type          encoding     predicate
      *   ------------------------------------
-     *   Rope          000001       000001
-     *   Linear        -           !000001
-     *   HasBase       -            xxx1xx
-     *   Dependent     000101       000101
-     *   External      100001       100001
-     *   Flat          -            xxxx1x
-     *   Undepended    000111       000111
-     *   Extensible    010011       010011
-     *   Inline        001011       xx1xxx
-     *   FatInline     011011       x11xxx
-     *   Atom          000000       xxxxx0
-     *   PermanentAtom 100000       1xxxx0
-     *   InlineAtom    -            xx1xx0
-     *   FatInlineAtom -            x11xx0
+     *   Rope          000000       000000
+     *   Linear        -           !000000
+     *   HasBase       -            xxxx1x
+     *   Dependent     000010       000010
+     *   External      100000       100000
+     *   Flat          -            xxxxx1
+     *   Undepended    000011       000011
+     *   Extensible    010001       010001
+     *   Inline        000101       xxx1xx
+     *   FatInline     010101       x1x1xx
+     *   Atom          001001       xx1xxx
+     *   PermanentAtom 101001       1x1xxx
+     *   InlineAtom    -            xx11xx
+     *   FatInlineAtom -            x111xx
      *
      * Note that the first 4 flag bits (from right to left in the previous table)
      * have the following meaning and can be used for some hot queries:
      *
-     *   Bit 0: !IsAtom (Atom, PermanentAtom)
-     *   Bit 1: IsFlat
-     *   Bit 2: HasBase (Dependent, Undepended)
-     *   Bit 3: IsInline (Inline, FatInline)
+     *   Bit 0: IsFlat
+     *   Bit 1: HasBase (Dependent, Undepended)
+     *   Bit 2: IsInline (Inline, FatInline)
+     *   Bit 3: IsAtom (Atom, PermanentAtom)
      *
      *  "HasBase" here refers to the two string types that have a 'base' field:
      *  JSDependentString and JSUndependedString.
      *  A JSUndependedString is a JSDependentString which has been 'fixed' (by ensureFixed)
      *  to be null-terminated.  In such cases, the string must keep marking its base since
      *  there may be any number of *other* JSDependentStrings transitively depending on it.
      *
-     * The atom bit (NON_ATOM_BIT) is inverted so that objects and strings can
-     * be differentiated in the nursery: atoms are never in the nursery, so
-     * this bit is always 1 for a nursery string. For an object on a
-     * little-endian architecture, this is the low-order bit of the ObjectGroup
-     * pointer in a JSObject, which will always be zero. A 64-bit big-endian
-     * architecture will need to do something else (the ObjectGroup* is in the
-     * same place as a string's struct { uint32_t flags; uint32_t length; }).
-     *
      * If the INDEX_VALUE_BIT is set the upper 16 bits of the flag word hold the integer
      * index.
      */
 
-    static const uint32_t NON_ATOM_BIT           = JS_BIT(0);
-    static const uint32_t FLAT_BIT               = JS_BIT(1);
-    static const uint32_t HAS_BASE_BIT           = JS_BIT(2);
-    static const uint32_t INLINE_CHARS_BIT       = JS_BIT(3);
+    static const uint32_t FLAT_BIT               = JS_BIT(0);
+    static const uint32_t HAS_BASE_BIT           = JS_BIT(1);
+    static const uint32_t INLINE_CHARS_BIT       = JS_BIT(2);
+    static const uint32_t ATOM_BIT               = JS_BIT(3);
 
-    static const uint32_t ROPE_FLAGS             = NON_ATOM_BIT;
-    static const uint32_t DEPENDENT_FLAGS        = NON_ATOM_BIT | HAS_BASE_BIT;
-    static const uint32_t FLAT_FLAGS             = NON_ATOM_BIT | FLAT_BIT;
-    static const uint32_t UNDEPENDED_FLAGS       = NON_ATOM_BIT | FLAT_BIT | HAS_BASE_BIT;
-    static const uint32_t EXTENSIBLE_FLAGS       = NON_ATOM_BIT | FLAT_BIT | JS_BIT(4);
-    static const uint32_t EXTERNAL_FLAGS         = NON_ATOM_BIT | JS_BIT(5);
+    static const uint32_t ROPE_FLAGS             = 0;
+    static const uint32_t DEPENDENT_FLAGS        = HAS_BASE_BIT;
+    static const uint32_t UNDEPENDED_FLAGS       = FLAT_BIT | HAS_BASE_BIT;
+    static const uint32_t EXTENSIBLE_FLAGS       = FLAT_BIT | JS_BIT(4);
+    static const uint32_t EXTERNAL_FLAGS         = JS_BIT(5);
 
     static const uint32_t FAT_INLINE_MASK        = INLINE_CHARS_BIT | JS_BIT(4);
-    static const uint32_t PERMANENT_ATOM_MASK    = NON_ATOM_BIT | JS_BIT(5);
-    static const uint32_t PERMANENT_ATOM         = JS_BIT(5);
+    static const uint32_t PERMANENT_ATOM_MASK    = ATOM_BIT | JS_BIT(5);
 
     /* Initial flags for thin inline and fat inline strings. */
-    static const uint32_t INIT_THIN_INLINE_FLAGS = NON_ATOM_BIT | FLAT_BIT | INLINE_CHARS_BIT;
-    static const uint32_t INIT_FAT_INLINE_FLAGS  = NON_ATOM_BIT | FLAT_BIT | FAT_INLINE_MASK;
+    static const uint32_t INIT_THIN_INLINE_FLAGS = FLAT_BIT | INLINE_CHARS_BIT;
+    static const uint32_t INIT_FAT_INLINE_FLAGS  = FLAT_BIT | FAT_INLINE_MASK;
 
     static const uint32_t TYPE_FLAGS_MASK        = JS_BIT(6) - 1;
 
     static const uint32_t LATIN1_CHARS_BIT       = JS_BIT(6);
 
     static const uint32_t INDEX_VALUE_BIT        = JS_BIT(7);
     static const uint32_t INDEX_VALUE_SHIFT      = 16;
 
@@ -478,38 +466,30 @@ class JSString : public js::gc::Cell
 
     MOZ_ALWAYS_INLINE
     bool isUndepended() const {
         return (d.u1.flags & TYPE_FLAGS_MASK) == UNDEPENDED_FLAGS;
     }
 
     MOZ_ALWAYS_INLINE
     bool isAtom() const {
-        return !(d.u1.flags & NON_ATOM_BIT);
+        return d.u1.flags & ATOM_BIT;
     }
 
     MOZ_ALWAYS_INLINE
     bool isPermanentAtom() const {
-        return (d.u1.flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM;
+        return (d.u1.flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM_MASK;
     }
 
     MOZ_ALWAYS_INLINE
     JSAtom& asAtom() const {
         MOZ_ASSERT(isAtom());
         return *(JSAtom*)this;
     }
 
-    // Used for distinguishing strings from objects in the nursery. The caller
-    // must ensure that cell is in the nursery (and not forwarded).
-    MOZ_ALWAYS_INLINE
-    static bool nurseryCellIsString(js::gc::Cell* cell) {
-        MOZ_ASSERT(!cell->isTenured());
-        return !static_cast<JSString*>(cell)->isAtom();
-    }
-
     // Fills |array| with various strings that represent the different string
     // kinds and character encodings.
     static bool fillWithRepresentatives(JSContext* cx, js::HandleArrayObject array);
 
     /* Only called by the GC for dependent or undepended strings. */
 
     inline bool hasBase() const {
         return d.u1.flags & HAS_BASE_BIT;
@@ -540,64 +520,16 @@ class JSString : public js::gc::Cell
         static_assert(offsetof(JSString, d.s.u2.nonInlineCharsTwoByte) ==
                       offsetof(JSString, d.s.u2.nonInlineCharsLatin1),
                       "nonInlineCharsTwoByte and nonInlineCharsLatin1 must have same offset");
         return offsetof(JSString, d.s.u2.nonInlineCharsTwoByte);
     }
 
     static const JS::TraceKind TraceKind = JS::TraceKind::String;
 
-    JS::Zone* zone() const {
-        if (isTenured()) {
-            // Allow permanent atoms to be accessed across zones and runtimes.
-            if (isPermanentAtom())
-                return zoneFromAnyThread();
-            return asTenured().zone();
-        }
-        return js::Nursery::getStringZone(this);
-    }
-
-    // Implement TenuredZone members needed for template instantiations.
-
-    JS::Zone* zoneFromAnyThread() const {
-        if (isTenured())
-            return asTenured().zoneFromAnyThread();
-        return js::Nursery::getStringZone(this);
-    }
-
-    void fixupAfterMovingGC() {}
-
-    js::gc::AllocKind getAllocKind() const {
-        using js::gc::AllocKind;
-        AllocKind kind;
-        if (isAtom())
-            if (isFatInline())
-                kind = AllocKind::FAT_INLINE_ATOM;
-            else
-                kind = AllocKind::ATOM;
-        else if (isFatInline())
-            kind = AllocKind::FAT_INLINE_STRING;
-        else if (isExternal())
-            kind = AllocKind::EXTERNAL_STRING;
-        else
-            kind = AllocKind::STRING;
-
-#if DEBUG
-        if (isTenured()) {
-            // Normally, the kinds should match, but an EXTERNAL_STRING arena
-            // may contain strings that have been flattened (see
-            // JSExternalString::ensureFlat).
-            AllocKind tenuredKind = asTenured().getAllocKind();
-            MOZ_ASSERT(kind == tenuredKind ||
-                       (tenuredKind == AllocKind::EXTERNAL_STRING && kind == AllocKind::STRING));
-        }
-#endif
-        return kind;
-    }
-
 #ifdef DEBUG
     void dump(); // Debugger-friendly stderr dump.
     void dump(js::GenericPrinter& out);
     void dumpNoNewline(js::GenericPrinter& out);
     void dumpCharsNoNewline(js::GenericPrinter& out);
     void dumpRepresentation(js::GenericPrinter& out, int indent) const;
     void dumpRepresentationHeader(js::GenericPrinter& out, int indent, const char* subclass) const;
 
@@ -605,52 +537,27 @@ class JSString : public js::gc::Cell
     static void dumpChars(const CharT* s, size_t len, js::GenericPrinter& out);
 
     bool equals(const char* s);
 #endif
 
     void traceChildren(JSTracer* trc);
 
     static MOZ_ALWAYS_INLINE void readBarrier(JSString* thing) {
-        if (thing->isPermanentAtom() || js::gc::IsInsideNursery(thing))
+        if (thing->isPermanentAtom())
             return;
-        js::gc::TenuredCell::readBarrier(&thing->asTenured());
+
+        TenuredCell::readBarrier(thing);
     }
 
     static MOZ_ALWAYS_INLINE void writeBarrierPre(JSString* thing) {
-        if (!thing || thing->isPermanentAtom() || js::gc::IsInsideNursery(thing))
+        if (!thing || thing->isPermanentAtom())
             return;
 
-        js::gc::TenuredCell::writeBarrierPre(&thing->asTenured());
-    }
-
-    static void addCellAddressToStoreBuffer(js::gc::StoreBuffer* buffer, js::gc::Cell** cellp)
-    {
-        buffer->putCell(cellp);
-    }
-
-    static void removeCellAddressFromStoreBuffer(js::gc::StoreBuffer* buffer, js::gc::Cell** cellp)
-    {
-        buffer->unputCell(cellp);
-    }
-
-    static void writeBarrierPost(void* cellp, JSString* prev, JSString* next) {
-        // See JSObject::writeBarrierPost for a description of the logic here.
-        MOZ_ASSERT(cellp);
-
-        js::gc::StoreBuffer* buffer;
-        if (next && (buffer = next->storeBuffer())) {
-            if (prev && prev->storeBuffer())
-                return;
-            buffer->putCell(static_cast<js::gc::Cell**>(cellp));
-            return;
-        }
-
-        if (prev && (buffer = prev->storeBuffer()))
-            buffer->unputCell(static_cast<js::gc::Cell**>(cellp));
+        TenuredCell::writeBarrierPre(thing);
     }
 
   private:
     JSString() = delete;
     JSString(const JSString& other) = delete;
     void operator=(const JSString& other) = delete;
 };
 
@@ -717,17 +624,16 @@ class JSRope : public JSString
 
 static_assert(sizeof(JSRope) == sizeof(JSString),
               "string subclasses must be binary-compatible with JSString");
 
 class JSLinearString : public JSString
 {
     friend class JSString;
     friend class js::AutoStableStringChars;
-    friend class js::TenuringTracer;
 
     /* Vacuous and therefore unimplemented. */
     JSLinearString* ensureLinear(JSContext* cx) = delete;
     bool isLinear() const = delete;
     JSLinearString& asLinear() const = delete;
 
   protected:
     /* Returns void pointer to latin1/twoByte chars, for finalizers. */
@@ -1085,21 +991,16 @@ class JSExternalString : public JSLinear
     const char16_t* twoByteChars() const {
         return rawTwoByteChars();
     }
 
     /* Only called by the GC for strings with the AllocKind::EXTERNAL_STRING kind. */
 
     inline void finalize(js::FreeOp* fop);
 
-    /*
-     * Free the external chars and allocate a new buffer, converting this to a
-     * flat string (which still lives in an AllocKind::EXTERNAL_STRING
-     * arena).
-     */
     JSFlatString* ensureFlat(JSContext* cx);
 
 #ifdef DEBUG
     void dumpRepresentation(js::GenericPrinter& out, int indent) const;
 #endif
 };
 
 static_assert(sizeof(JSExternalString) == sizeof(JSString),
@@ -1132,18 +1033,17 @@ class JSAtom : public JSFlatString
     MOZ_ALWAYS_INLINE
     bool isPermanent() const {
         return JSString::isPermanentAtom();
     }
 
     // Transform this atom into a permanent atom. This is only done during
     // initialization of the runtime.
     MOZ_ALWAYS_INLINE void morphIntoPermanentAtom() {
-        MOZ_ASSERT(static_cast<JSString*>(this)->isAtom());
-        d.u1.flags = (d.u1.flags & ~PERMANENT_ATOM_MASK) | PERMANENT_ATOM;
+        d.u1.flags |= PERMANENT_ATOM_MASK;
     }
 
     inline js::HashNumber hash() const;
     inline void initHash(js::HashNumber hash);
 
 #ifdef DEBUG
     void dump(js::GenericPrinter& out);
     void dump();
@@ -1210,28 +1110,26 @@ JSAtom::initHash(js::HashNumber hash)
     if (isFatInline())
         return static_cast<js::FatInlineAtom*>(this)->initHash(hash);
     return static_cast<js::NormalAtom*>(this)->initHash(hash);
 }
 
 MOZ_ALWAYS_INLINE JSAtom*
 JSFlatString::morphAtomizedStringIntoAtom(js::HashNumber hash)
 {
-    MOZ_ASSERT(!isAtom());
-    d.u1.flags &= ~NON_ATOM_BIT;
+    d.u1.flags |= ATOM_BIT;
     JSAtom* atom = &asAtom();
     atom->initHash(hash);
     return atom;
 }
 
 MOZ_ALWAYS_INLINE JSAtom*
 JSFlatString::morphAtomizedStringIntoPermanentAtom(js::HashNumber hash)
 {
-    MOZ_ASSERT(!isAtom());
-    d.u1.flags = (d.u1.flags & ~PERMANENT_ATOM_MASK) | PERMANENT_ATOM;
+    d.u1.flags |= PERMANENT_ATOM_MASK;
     JSAtom* atom = &asAtom();
     atom->initHash(hash);
     return atom;
 }
 
 namespace js {
 
 class StaticStrings
@@ -1664,27 +1562,9 @@ JSAtom::asPropertyName()
 {
 #ifdef DEBUG
     uint32_t dummy;
     MOZ_ASSERT(!isIndex(&dummy));
 #endif
     return static_cast<js::PropertyName*>(this);
 }
 
-namespace js {
-namespace gc {
-template<>
-inline JSString*
-Cell::as<JSString>() {
-    MOZ_ASSERT(is<JSString>());
-    return reinterpret_cast<JSString*>(this);
-}
-
-template<>
-inline JSString*
-TenuredCell::as<JSString>() {
-    MOZ_ASSERT(is<JSString>());
-    return reinterpret_cast<JSString*>(this);
-}
-}
-}
-
 #endif /* vm_String_h */
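The type-flag table restored in the String.h hunk above boils down to simple bit tests. A small standalone sketch of that restored encoding, with the constants copied from the hunk and the free-function predicates named for illustration only (the real checks are JSString member functions):

#include <cassert>
#include <cstdint>

// Flag bits and masks as restored by this backout (see the String.h hunk).
constexpr uint32_t FLAT_BIT            = 1u << 0;
constexpr uint32_t HAS_BASE_BIT        = 1u << 1;
constexpr uint32_t INLINE_CHARS_BIT    = 1u << 2;
constexpr uint32_t ATOM_BIT            = 1u << 3;
constexpr uint32_t ROPE_FLAGS          = 0;
constexpr uint32_t DEPENDENT_FLAGS     = HAS_BASE_BIT;
constexpr uint32_t EXTENSIBLE_FLAGS    = FLAT_BIT | (1u << 4);
constexpr uint32_t EXTERNAL_FLAGS      = 1u << 5;
constexpr uint32_t PERMANENT_ATOM_MASK = ATOM_BIT | (1u << 5);
constexpr uint32_t TYPE_FLAGS_MASK     = (1u << 6) - 1;

// Illustrative stand-ins for the restored isRope()/isAtom()/isPermanentAtom().
constexpr bool isRope(uint32_t flags)          { return (flags & TYPE_FLAGS_MASK) == ROPE_FLAGS; }
constexpr bool isAtom(uint32_t flags)          { return (flags & ATOM_BIT) != 0; }
constexpr bool isPermanentAtom(uint32_t flags) { return (flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM_MASK; }

int main() {
    // The restored JS_STATIC_ASSERT in JSRope::flattenInternal relies on the
    // extensible and dependent flag sets being disjoint again.
    static_assert(!(EXTENSIBLE_FLAGS & DEPENDENT_FLAGS), "disjoint flag sets");
    assert(isRope(ROPE_FLAGS) && !isRope(DEPENDENT_FLAGS));
    assert(isAtom(ATOM_BIT) && !isAtom(EXTERNAL_FLAGS));
    assert(isPermanentAtom(PERMANENT_ATOM_MASK) && !isPermanentAtom(ATOM_BIT));
    return 0;
}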
--- a/js/src/vm/UnboxedObject-inl.h
+++ b/js/src/vm/UnboxedObject-inl.h
@@ -118,20 +118,18 @@ SetUnboxedValue(JSContext* cx, JSObject*
         if (v.isNumber()) {
             *reinterpret_cast<double*>(p) = v.toNumber();
             return true;
         }
         return false;
 
       case JSVAL_TYPE_STRING:
         if (v.isString()) {
+            MOZ_ASSERT(!IsInsideNursery(v.toString()));
             JSString** np = reinterpret_cast<JSString**>(p);
-            if (IsInsideNursery(v.toString()) && !IsInsideNursery(unboxedObject))
-                unboxedObject->zone()->group()->storeBuffer().putWholeCell(unboxedObject);
-
             if (preBarrier)
                 JSString::writeBarrierPre(*np);
             *np = v.toString();
             return true;
         }
         return false;
 
       case JSVAL_TYPE_OBJECT:
--- a/js/src/vm/UnboxedObject.cpp
+++ b/js/src/vm/UnboxedObject.cpp
@@ -132,33 +132,23 @@ UnboxedLayout::makeConstructorCode(JSCon
     masm.createGCObject(object, scratch1, templateObject, gc::TenuredHeap, &failure,
                         /* initFixedSlots = */ false);
 
     // If any of the properties being stored are in the nursery, add a store
     // buffer entry for the new object.
     Label postBarrier;
     for (size_t i = 0; i < layout.properties().length(); i++) {
         const UnboxedLayout::Property& property = layout.properties()[i];
-        if (!UnboxedTypeNeedsPostBarrier(property.type))
-            continue;
-
-        Address valueAddress(propertiesReg, i * sizeof(IdValuePair) + offsetof(IdValuePair, value));
         if (property.type == JSVAL_TYPE_OBJECT) {
+            Address valueAddress(propertiesReg, i * sizeof(IdValuePair) + offsetof(IdValuePair, value));
             Label notObject;
             masm.branchTestObject(Assembler::NotEqual, valueAddress, &notObject);
             Register valueObject = masm.extractObject(valueAddress, scratch1);
             masm.branchPtrInNurseryChunk(Assembler::Equal, valueObject, scratch2, &postBarrier);
             masm.bind(&notObject);
-        } else {
-            MOZ_ASSERT(property.type == JSVAL_TYPE_STRING);
-            Label notString;
-            masm.branchTestString(Assembler::NotEqual, valueAddress, &notString);
-            Register valueString = masm.extractString(valueAddress, scratch1);
-            masm.branchPtrInNurseryChunk(Assembler::Equal, valueString, scratch2, &postBarrier);
-            masm.bind(&notString);
         }
     }
 
     masm.jump(&allocated);
     masm.bind(&postBarrier);
 
     LiveGeneralRegisterSet liveVolatileRegisters;
     liveVolatileRegisters.add(propertiesReg);
--- a/js/src/vm/UnboxedObject.h
+++ b/js/src/vm/UnboxedObject.h
@@ -35,17 +35,17 @@ static inline bool
 UnboxedTypeNeedsPreBarrier(JSValueType type)
 {
     return type == JSVAL_TYPE_STRING || type == JSVAL_TYPE_OBJECT;
 }
 
 static inline bool
 UnboxedTypeNeedsPostBarrier(JSValueType type)
 {
-    return type == JSVAL_TYPE_STRING || type == JSVAL_TYPE_OBJECT;
+    return type == JSVAL_TYPE_OBJECT;
 }
 
 // Class tracking information specific to unboxed objects.
 class UnboxedLayout : public mozilla::LinkedListElement<UnboxedLayout>
 {
   public:
     struct Property {
         PropertyName* name;
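The UnboxedObject.h hunk above restores the asymmetry between the two barrier predicates: strings still need a pre barrier for incremental marking, but once they are tenured-only again they need no generational post barrier. A tiny illustrative sketch of that distinction, using a hypothetical ToyValueType stand-in rather than the real JSValueType:

// Hypothetical stand-in for JSValueType; only the cases relevant here.
enum class ToyValueType { Double, String, Object };

// Pre barrier: any GC-thing type, for incremental marking.
constexpr bool needsPreBarrier(ToyValueType t) {
    return t == ToyValueType::String || t == ToyValueType::Object;
}

// Post barrier: only objects, since only objects can be nursery-allocated
// once strings are tenured again.
constexpr bool needsPostBarrier(ToyValueType t) {
    return t == ToyValueType::Object;
}

static_assert(needsPreBarrier(ToyValueType::String), "strings still need a pre barrier");
static_assert(!needsPostBarrier(ToyValueType::String), "but no post barrier after the backout");
static_assert(needsPostBarrier(ToyValueType::Object), "objects need both");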
--- a/toolkit/components/aboutmemory/tests/test_memoryReporters.xul
+++ b/toolkit/components/aboutmemory/tests/test_memoryReporters.xul
@@ -68,20 +68,16 @@
   // "@)(*&".  We'll check that these strings are reported in at least
   // one of the memory reporters.
   let shortStrings = [];
   for (let i = 0; i < 10000; i++) {
     let str = (Math.random() > 0.5 ? "!" : "@") + ")(*&";
     shortStrings.push(str);
   }
 
-  // Strings in the nursery are not reported, so make sure the above test
-  // strings are tenured.
-  Components.utils.forceGC();
-
   let mySandbox = Components.utils.Sandbox(document.nodePrincipal,
                     { sandboxName: "this-is-a-sandbox-name" });
 
   function handleReportNormal(aProcess, aPath, aKind, aUnits, aAmount,
                               aDescription)
   {
     // Record the values of some notable reporters.
     if (aPath === "vsize") {