Bug 849273 - Investigate splitting the js and JS namespaces r=terrence
authorJon Coppeard <jcoppeard@mozilla.com>
Tue, 19 Mar 2013 10:35:41 +0000
changeset 131859 fa78767a3e78c3e18f0b3afeea472bcab65957b2
parent 131858 a5447c2831e7a0822680f01667d97ad40ed94a29
child 131860 b7c5623df2283f89b799e24a3933f6e6164c7556
push id: unknown
push user: unknown
push date: unknown
reviewers: terrence
bugs: 849273
milestone: 22.0a1
Bug 849273 - Investigate splitting the js and JS namespaces r=terrence
dom/base/nsDOMWindowUtils.cpp
dom/base/nsGlobalWindow.cpp
dom/base/nsJSEnvironment.cpp
dom/bindings/BindingUtils.h
dom/ipc/ContentChild.cpp
dom/workers/WorkerPrivate.cpp
js/public/HashTable.h
js/public/RootingAPI.h
js/public/Utility.h
js/public/Value.h
js/src/gc/Marking.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/gc/Verifier.cpp
js/src/jsapi-tests/testGCFinalizeCallback.cpp
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jsarray.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsdbgapi.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsinfer.cpp
js/src/jsinferinlines.h
js/src/jsmemorymetrics.cpp
js/src/jsscript.cpp
js/src/jsstr.cpp
js/src/jswatchpoint.cpp
js/src/methodjit/Compiler.cpp
js/src/vm/Debugger.cpp
js/src/vm/ForkJoin.cpp
js/src/vm/ForkJoin.h
js/src/vm/String-inl.h
js/src/vm/StringBuffer.h
js/xpconnect/src/XPCComponents.cpp
js/xpconnect/src/XPCJSRuntime.cpp
js/xpconnect/src/XPCWrappedNative.cpp
js/xpconnect/src/nsXPConnect.cpp
js/xpconnect/src/xpcprivate.h
js/xpconnect/src/xpcpublic.h
layout/base/nsDocumentViewer.cpp
xpcom/base/nsCycleCollector.cpp
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -1142,17 +1142,17 @@ nsDOMWindowUtils::GarbageCollect(nsICycl
   SAMPLE_LABEL("GC", "GarbageCollect");
   // Always permit this in debug builds.
 #ifndef DEBUG
   if (!nsContentUtils::IsCallerChrome()) {
     return NS_ERROR_DOM_SECURITY_ERR;
   }
 #endif
 
-  nsJSContext::GarbageCollectNow(js::gcreason::DOM_UTILS);
+  nsJSContext::GarbageCollectNow(JS::gcreason::DOM_UTILS);
   nsJSContext::CycleCollectNow(aListener, aExtraForgetSkippableCalls);
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::CycleCollect(nsICycleCollectorListener *aListener,
                                int32_t aExtraForgetSkippableCalls)
@@ -2791,17 +2791,17 @@ nsDOMWindowUtils::GetFileReferences(cons
 
 NS_IMETHODIMP
 nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
 {
   if (!nsContentUtils::IsCallerChrome()) {
     return NS_ERROR_DOM_SECURITY_ERR;
   }
 
-  *aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx));
+  *aResult = JS::IsIncrementalGCEnabled(JS_GetRuntime(cx));
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
 {
   if (!nsContentUtils::IsCallerChrome()) {
     return NS_ERROR_DOM_SECURITY_ERR;
--- a/dom/base/nsGlobalWindow.cpp
+++ b/dom/base/nsGlobalWindow.cpp
@@ -2369,17 +2369,17 @@ nsGlobalWindow::SetNewDocument(nsIDocume
       mArgumentsOrigin = nullptr;
     }
 
     // Give the new inner window our chrome event handler (since it
     // doesn't have one).
     newInnerWindow->mChromeEventHandler = mChromeEventHandler;
   }
 
-  mContext->GC(js::gcreason::SET_NEW_DOCUMENT);
+  mContext->GC(JS::gcreason::SET_NEW_DOCUMENT);
   mContext->DidInitializeContext();
 
   if (newInnerWindow && !newInnerWindow->mHasNotifiedGlobalCreated && mDoc) {
     // We should probably notify. However if this is the, arguably bad,
     // situation when we're creating a temporary non-chrome-about-blank
     // document in a chrome docshell, don't notify just yet. Instead wait
     // until we have a real chrome doc.
     int32_t itemType = nsIDocShellTreeItem::typeContent;
@@ -2564,17 +2564,17 @@ nsGlobalWindow::DetachFromDocShell()
     // We got no new document after someone called
     // SetArguments(), drop our reference to the arguments.
     mArguments = nullptr;
     mArgumentsLast = nullptr;
     mArgumentsOrigin = nullptr;
   }
 
   if (mContext) {
-    mContext->GC(js::gcreason::SET_DOC_SHELL);
+    mContext->GC(JS::gcreason::SET_DOC_SHELL);
     mContext = nullptr;
   }
 
   mDocShell = nullptr; // Weak Reference
 
   NS_ASSERTION(!mNavigator, "Non-null mNavigator in outer window!");
 
   if (mFrames) {
--- a/dom/base/nsJSEnvironment.cpp
+++ b/dom/base/nsJSEnvironment.cpp
@@ -2591,17 +2591,17 @@ nsJSContext::GarbageCollectNow(JS::gcrea
   // NS_MAX_COMPARTMENT_GC_COUNT times after the previous global GC.
   if (!sDisableExplicitCompartmentGC &&
       aShrinking != ShrinkingGC && aCompartment != NonCompartmentGC &&
       sCompartmentGCCount < NS_MAX_COMPARTMENT_GC_COUNT) {
     JS::PrepareForFullGC(nsJSRuntime::sRuntime);
     for (nsJSContext* cx = sContextList; cx; cx = cx->mNext) {
       if (!cx->mActive && cx->mContext) {
         if (JSObject* global = cx->GetNativeGlobal()) {
-          JS::SkipZoneForGC(js::GetObjectZone(global));
+          JS::SkipZoneForGC(JS::GetObjectZone(global));
         }
       }
       cx->mActive = false;
     }
     if (JS::IsGCScheduled(nsJSRuntime::sRuntime)) {
       if (aIncremental == IncrementalGC) {
         JS::IncrementalGC(nsJSRuntime::sRuntime, aReason, aSliceMillis);
       } else {
--- a/dom/bindings/BindingUtils.h
+++ b/dom/bindings/BindingUtils.h
@@ -511,17 +511,17 @@ SetSystemOnlyWrapper(JSObject* obj, nsWr
 // If *vp is a gcthing and is not in the compartment of cx, wrap *vp
 // into the compartment of cx (typically by replacing it with an Xray or
 // cross-compartment wrapper around the original object).
 MOZ_ALWAYS_INLINE bool
 MaybeWrapValue(JSContext* cx, JS::Value* vp)
 {
   if (vp->isString()) {
     JSString* str = vp->toString();
-    if (js::GetGCThingZone(str) != js::GetContextZone(cx)) {
+    if (JS::GetGCThingZone(str) != js::GetContextZone(cx)) {
       return JS_WrapValue(cx, vp);
     }
     return true;
   }
 
   if (vp->isObject()) {
     JSObject* obj = &vp->toObject();
     if (js::GetObjectCompartment(obj) != js::GetContextCompartment(cx)) {
--- a/dom/ipc/ContentChild.cpp
+++ b/dom/ipc/ContentChild.cpp
@@ -1111,24 +1111,24 @@ ContentChild::RecvActivateA11y()
         do_GetService("@mozilla.org/accessibilityService;1");
 #endif
     return true;
 }
 
 bool
 ContentChild::RecvGarbageCollect()
 {
-    nsJSContext::GarbageCollectNow(js::gcreason::DOM_IPC);
+    nsJSContext::GarbageCollectNow(JS::gcreason::DOM_IPC);
     return true;
 }
 
 bool
 ContentChild::RecvCycleCollect()
 {
-    nsJSContext::GarbageCollectNow(js::gcreason::DOM_IPC);
+    nsJSContext::GarbageCollectNow(JS::gcreason::DOM_IPC);
     nsJSContext::CycleCollectNow();
     return true;
 }
 
 static void
 PreloadSlowThings()
 {
     // This fetches and creates all the built-in stylesheets.
--- a/dom/workers/WorkerPrivate.cpp
+++ b/dom/workers/WorkerPrivate.cpp
@@ -4137,22 +4137,22 @@ WorkerPrivate::UpdateGCZealInternal(JSCo
 
 void
 WorkerPrivate::GarbageCollectInternal(JSContext* aCx, bool aShrinking,
                                       bool aCollectChildren)
 {
   AssertIsOnWorkerThread();
 
   JSRuntime *rt = JS_GetRuntime(aCx);
-  js::PrepareForFullGC(rt);
+  JS::PrepareForFullGC(rt);
   if (aShrinking) {
-    js::ShrinkingGC(rt, js::gcreason::DOM_WORKER);
+    JS::ShrinkingGC(rt, JS::gcreason::DOM_WORKER);
   }
   else {
-    js::GCForReason(rt, js::gcreason::DOM_WORKER);
+    JS::GCForReason(rt, JS::gcreason::DOM_WORKER);
   }
 
   if (aCollectChildren) {
     for (uint32_t index = 0; index < mChildWorkers.Length(); index++) {
       mChildWorkers[index]->GarbageCollect(aCx, aShrinking);
     }
   }
 }
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -481,29 +481,29 @@ class HashSet
 
 // Pointer hashing policy that strips the lowest zeroBits when calculating the
 // hash to improve key distribution.
 template <typename Key, size_t zeroBits>
 struct PointerHasher
 {
     typedef Key Lookup;
     static HashNumber hash(const Lookup &l) {
-        JS_ASSERT(!js::IsPoisonedPtr(l));
+        JS_ASSERT(!JS::IsPoisonedPtr(l));
         size_t word = reinterpret_cast<size_t>(l) >> zeroBits;
         JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
 #if JS_BYTES_PER_WORD == 4
         return HashNumber(word);
 #else
         JS_STATIC_ASSERT(sizeof word == 8);
         return HashNumber((word >> 32) ^ word);
 #endif
     }
     static bool match(const Key &k, const Lookup &l) {
-        JS_ASSERT(!js::IsPoisonedPtr(k));
-        JS_ASSERT(!js::IsPoisonedPtr(l));
+        JS_ASSERT(!JS::IsPoisonedPtr(k));
+        JS_ASSERT(!JS::IsPoisonedPtr(l));
         return k == l;
     }
 };
 
 // Default hash policy: just use the 'lookup' value. This of course only
 // works if the lookup value is integral. HashTable applies ScrambleHashCode to
 // the result of the 'hash' which means that it is 'ok' if the lookup value is
 // not well distributed over the HashNumber domain.
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -324,17 +324,17 @@ class InternalHandle<T*>
     InternalHandle(const JS::Handle<H> &handle, T *field)
       : holder((void**)handle.address()), offset(uintptr_t(field) - uintptr_t(handle.get()))
     {}
 
     /*
      * Create an InternalHandle to a field within a Rooted<>.
      */
     template<typename R>
-    InternalHandle(const Rooted<R> &root, T *field)
+    InternalHandle(const JS::Rooted<R> &root, T *field)
       : holder((void**)root.address()), offset(uintptr_t(field) - uintptr_t(root.get()))
     {}
 
     T *get() const { return reinterpret_cast<T*>(uintptr_t(*holder) + offset); }
 
     const T &operator*() const { return *get(); }
     T *operator->() const { return get(); }
 
@@ -383,17 +383,17 @@ struct RootKind<T *>
     static ThingRootKind rootKind() { return T::rootKind(); }
 };
 
 template <typename T>
 struct RootMethods<T *>
 {
     static T *initial() { return NULL; }
     static ThingRootKind kind() { return RootKind<T *>::rootKind(); }
-    static bool poisoned(T *v) { return IsPoisonedPtr(v); }
+    static bool poisoned(T *v) { return JS::IsPoisonedPtr(v); }
 };
 
 } /* namespace js */
 
 namespace JS {
 
 /*
  * Local variable of type T whose value is always rooted. This is typically
@@ -702,44 +702,44 @@ enum AllowGC {
 template <typename T, AllowGC allowGC>
 class MaybeRooted
 {
 };
 
 template <typename T> class MaybeRooted<T, CanGC>
 {
   public:
-    typedef Handle<T> HandleType;
-    typedef Rooted<T> RootType;
-    typedef MutableHandle<T> MutableHandleType;
+    typedef JS::Handle<T> HandleType;
+    typedef JS::Rooted<T> RootType;
+    typedef JS::MutableHandle<T> MutableHandleType;
 
-    static inline Handle<T> toHandle(HandleType v) {
+    static inline JS::Handle<T> toHandle(HandleType v) {
         return v;
     }
 
-    static inline MutableHandle<T> toMutableHandle(MutableHandleType v) {
+    static inline JS::MutableHandle<T> toMutableHandle(MutableHandleType v) {
         return v;
     }
 };
 
 template <typename T> class MaybeRooted<T, NoGC>
 {
   public:
     typedef T HandleType;
     typedef FakeRooted<T> RootType;
     typedef FakeMutableHandle<T> MutableHandleType;
 
-    static inline Handle<T> toHandle(HandleType v) {
+    static inline JS::Handle<T> toHandle(HandleType v) {
         JS_NOT_REACHED("Bad conversion");
-        return Handle<T>::fromMarkedLocation(NULL);
+        return JS::Handle<T>::fromMarkedLocation(NULL);
     }
 
-    static inline MutableHandle<T> toMutableHandle(MutableHandleType v) {
+    static inline JS::MutableHandle<T> toMutableHandle(MutableHandleType v) {
         JS_NOT_REACHED("Bad conversion");
-        return MutableHandle<T>::fromMarkedLocation(NULL);
+        return JS::MutableHandle<T>::fromMarkedLocation(NULL);
     }
 };
 
 } /* namespace js */
 
 namespace JS {
 
 template <typename T> template <typename S>
@@ -773,19 +773,19 @@ namespace js {
 
 /*
  * Hook for dynamic root analysis. Checks the native stack and poisons
  * references to GC things which have not been rooted.
  */
 inline void MaybeCheckStackRoots(JSContext *cx, bool relax = true)
 {
 #if defined(DEBUG) && defined(JS_GC_ZEAL) && defined(JSGC_ROOT_ANALYSIS) && !defined(JS_THREADSAFE)
-    if (relax && NeedRelaxedRootChecks())
+    if (relax && JS::NeedRelaxedRootChecks())
         return;
-    CheckStackRoots(cx);
+    JS::CheckStackRoots(cx);
 #endif
 }
 
 namespace gc {
 struct Cell;
 } /* namespace gc */
 
 /* Base class for automatic read-only object rooting during compilation. */
--- a/js/public/Utility.h
+++ b/js/public/Utility.h
@@ -26,22 +26,17 @@
 
 /* The public JS engine namespace. */
 namespace JS {}
 
 /* The mozilla-shared reusable template/utility namespace. */
 namespace mozilla {}
 
 /* The private JS engine namespace. */
-namespace js {
-
-/* The private namespace is a superset of the public/shared namespaces. */
-using namespace JS;
-
-}  /* namespace js */
+namespace js {}
 
 /*
  * Pattern used to overwrite freed memory. If you are accessing an object with
  * this pattern, you probably have a dangling pointer.
  */
 #define JS_FREE_PATTERN 0xDA
 
 #define JS_ASSERT(expr)           MOZ_ASSERT(expr)
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1392,26 +1392,26 @@ SameType(const Value &lhs, const Value &
 } // namespace JS
 
 /************************************************************************/
 
 namespace js {
 
 template <> struct RootMethods<const JS::Value>
 {
-    static JS::Value initial() { return UndefinedValue(); }
+    static JS::Value initial() { return JS::UndefinedValue(); }
     static ThingRootKind kind() { return THING_ROOT_VALUE; }
-    static bool poisoned(const JS::Value &v) { return IsPoisonedValue(v); }
+    static bool poisoned(const JS::Value &v) { return JS::IsPoisonedValue(v); }
 };
 
 template <> struct RootMethods<JS::Value>
 {
-    static JS::Value initial() { return UndefinedValue(); }
+    static JS::Value initial() { return JS::UndefinedValue(); }
     static ThingRootKind kind() { return THING_ROOT_VALUE; }
-    static bool poisoned(const JS::Value &v) { return IsPoisonedValue(v); }
+    static bool poisoned(const JS::Value &v) { return JS::IsPoisonedValue(v); }
 };
 
 template <class Outer> class MutableValueOperations;
 
 /*
  * A class designed for CRTP use in implementing the non-mutating parts of the
  * Value interface in Value-like classes.  Outer must be a class inheriting
  * ValueOperations<Outer> with a visible extract() method returning the
@@ -1483,57 +1483,57 @@ class MutableValueOperations : public Va
     void setObjectOrNull(JSObject *arg) { value()->setObjectOrNull(arg); }
 };
 
 /*
  * Augment the generic Handle<T> interface when T = Value with type-querying
  * and value-extracting operations.
  */
 template <>
-class HandleBase<JS::Value> : public ValueOperations<Handle<JS::Value> >
+class HandleBase<JS::Value> : public ValueOperations<JS::Handle<JS::Value> >
 {
-    friend class ValueOperations<Handle<JS::Value> >;
+    friend class ValueOperations<JS::Handle<JS::Value> >;
     const JS::Value * extract() const {
-        return static_cast<const Handle<JS::Value>*>(this)->address();
+        return static_cast<const JS::Handle<JS::Value>*>(this)->address();
     }
 };
 
 /*
  * Augment the generic MutableHandle<T> interface when T = Value with
  * type-querying, value-extracting, and mutating operations.
  */
 template <>
-class MutableHandleBase<JS::Value> : public MutableValueOperations<MutableHandle<JS::Value> >
+class MutableHandleBase<JS::Value> : public MutableValueOperations<JS::MutableHandle<JS::Value> >
 {
-    friend class ValueOperations<MutableHandle<JS::Value> >;
+    friend class ValueOperations<JS::MutableHandle<JS::Value> >;
     const JS::Value * extract() const {
-        return static_cast<const MutableHandle<JS::Value>*>(this)->address();
+        return static_cast<const JS::MutableHandle<JS::Value>*>(this)->address();
     }
 
-    friend class MutableValueOperations<MutableHandle<JS::Value> >;
+    friend class MutableValueOperations<JS::MutableHandle<JS::Value> >;
     JS::Value * extractMutable() {
-        return static_cast<MutableHandle<JS::Value>*>(this)->address();
+        return static_cast<JS::MutableHandle<JS::Value>*>(this)->address();
     }
 };
 
 /*
  * Augment the generic Rooted<T> interface when T = Value with type-querying,
  * value-extracting, and mutating operations.
  */
 template <>
-class RootedBase<JS::Value> : public MutableValueOperations<Rooted<JS::Value> >
+class RootedBase<JS::Value> : public MutableValueOperations<JS::Rooted<JS::Value> >
 {
-    friend class ValueOperations<Rooted<JS::Value> >;
+    friend class ValueOperations<JS::Rooted<JS::Value> >;
     const JS::Value * extract() const {
-        return static_cast<const Rooted<JS::Value>*>(this)->address();
+        return static_cast<const JS::Rooted<JS::Value>*>(this)->address();
     }
 
-    friend class MutableValueOperations<Rooted<JS::Value> >;
+    friend class MutableValueOperations<JS::Rooted<JS::Value> >;
     JS::Value * extractMutable() {
-        return static_cast<Rooted<JS::Value>*>(this)->address();
+        return static_cast<JS::Rooted<JS::Value>*>(this)->address();
     }
 };
 
 } // namespace js
 
 inline jsval_layout
 JSVAL_TO_IMPL(JS::Value v)
 {
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1569,17 +1569,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
         /*
          * If we run out of stack, we take a more drastic measure: require that
          * we GC again before the next CC.
          */
         trc->runtime->gcGrayBitsValid = false;
         return;
     }
 
-    if (!GCThingIsMarkedGray(thing))
+    if (!JS::GCThingIsMarkedGray(thing))
         return;
 
     UnmarkGrayGCThing(thing);
 
     /*
      * Trace children of |thing|. If |thing| and its parent are both shapes,
      * |thing| will get saved to mPreviousShape without being traced. The parent
      * will later trace |thing|. This is done to avoid increasing the stack
@@ -1597,29 +1597,29 @@ UnmarkGrayChildren(JSTracer *trc, void *
 
     if (tracer->tracingShape) {
         JS_ASSERT(!tracer->previousShape);
         tracer->previousShape = thing;
         return;
     }
 
     do {
-        JS_ASSERT(!GCThingIsMarkedGray(thing));
+        JS_ASSERT(!JS::GCThingIsMarkedGray(thing));
         JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
         thing = childTracer.previousShape;
         childTracer.previousShape = NULL;
     } while (thing);
 }
 
 JS_FRIEND_API(void)
 JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
 {
     JS_ASSERT(kind != JSTRACE_SHAPE);
 
-    if (!GCThingIsMarkedGray(thing))
+    if (!JS::GCThingIsMarkedGray(thing))
         return;
 
     UnmarkGrayGCThing(thing);
 
     JSRuntime *rt = static_cast<Cell *>(thing)->zone()->rt;
     UnmarkGrayTracer trc(rt);
     JS_TraceChildren(&trc, thing, kind);
 }
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -371,17 +371,17 @@ ConservativeGCData::recordStackTop()
 #endif
     (void) setjmp(registerSnapshot.jmpbuf);
 #if defined(_MSC_VER)
 # pragma warning(pop)
 #endif
 }
 
 void
-AutoIdArray::trace(JSTracer *trc)
+JS::AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag_ == IDARRAY);
     gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
 }
 
 inline void
 AutoGCRooter::trace(JSTracer *trc)
 {
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -236,24 +236,24 @@ class gcstats::StatisticsSerializer
     }
 };
 
 /*
  * If this fails, then you can either delete this assertion and allow all
  * larger-numbered reasons to pile up in the last telemetry bucket, or switch
  * to GC_REASON_3 and bump the max value.
  */
-JS_STATIC_ASSERT(gcreason::NUM_TELEMETRY_REASONS >= gcreason::NUM_REASONS);
+JS_STATIC_ASSERT(JS::gcreason::NUM_TELEMETRY_REASONS >= JS::gcreason::NUM_REASONS);
 
 static const char *
-ExplainReason(gcreason::Reason reason)
+ExplainReason(JS::gcreason::Reason reason)
 {
     switch (reason) {
 #define SWITCH_REASON(name)                     \
-        case gcreason::name:                    \
+        case JS::gcreason::name:                    \
           return #name;
         GCREASONS(SWITCH_REASON)
 
         default:
           JS_NOT_REACHED("bad GC reason");
           return "?";
 #undef SWITCH_REASON
     }
@@ -566,17 +566,17 @@ Statistics::endGC()
     }
 
     if (fp)
         printStats();
 }
 
 void
 Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
-                       gcreason::Reason reason)
+                       JS::gcreason::Reason reason)
 {
     this->collectedCount = collectedCount;
     this->zoneCount = zoneCount;
     this->compartmentCount = compartmentCount;
 
     bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (first)
         beginGC();
@@ -585,18 +585,19 @@ Statistics::beginSlice(int collectedCoun
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
 
     // Slice callbacks should only fire for the outermost level
     if (++gcDepth == 1) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (GCSliceCallback cb = runtime->gcSliceCallback)
-            (*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
+        if (JS::GCSliceCallback cb = runtime->gcSliceCallback)
+            (*cb)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
+                  JS::GCDescription(!wasFullGC));
     }
 }
 
 void
 Statistics::endSlice()
 {
     slices.back().end = PRMJ_Now();
     slices.back().endFaults = gc::GetPageFaultCount();
@@ -608,18 +609,19 @@ Statistics::endSlice()
 
     bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
         bool wasFullGC = collectedCount == zoneCount;
-        if (GCSliceCallback cb = runtime->gcSliceCallback)
-            (*cb)(runtime, last ? GC_CYCLE_END : GC_SLICE_END, GCDescription(!wasFullGC));
+        if (JS::GCSliceCallback cb = runtime->gcSliceCallback)
+            (*cb)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
+                  JS::GCDescription(!wasFullGC));
     }
 
     /* Do this after the slice callback since it uses these values. */
     if (last)
         PodArrayZero(counts);
 }
 
 void
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -81,17 +81,17 @@ class StatisticsSerializer;
 
 struct Statistics {
     Statistics(JSRuntime *rt);
     ~Statistics();
 
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
-    void beginSlice(int collectedCount, int zoneCount, int compartmentCount, gcreason::Reason reason);
+    void beginSlice(int collectedCount, int zoneCount, int compartmentCount, JS::gcreason::Reason reason);
     void endSlice();
 
     void reset(const char *reason) { slices.back().resetReason = reason; }
     void nonincremental(const char *reason) { nonincrementalReason = reason; }
 
     void count(Stat s) {
         JS_ASSERT(s < STAT_LIMIT);
         counts[s]++;
@@ -118,23 +118,23 @@ struct Statistics {
     int gcDepth;
 
     int collectedCount;
     int zoneCount;
     int compartmentCount;
     const char *nonincrementalReason;
 
     struct SliceData {
-        SliceData(gcreason::Reason reason, int64_t start, size_t startFaults)
+        SliceData(JS::gcreason::Reason reason, int64_t start, size_t startFaults)
           : reason(reason), resetReason(NULL), start(start), startFaults(startFaults)
         {
             PodArrayZero(phaseTimes);
         }
 
-        gcreason::Reason reason;
+        JS::gcreason::Reason reason;
         const char *resetReason;
         int64_t start, end;
         size_t startFaults, endFaults;
         int64_t phaseTimes[PHASE_LIMIT];
 
         int64_t duration() const { return end - start; }
     };
 
@@ -174,17 +174,17 @@ struct Statistics {
     bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
 
     double computeMMU(int64_t resolution);
 };
 
 struct AutoGCSlice
 {
     AutoGCSlice(Statistics &stats, int collectedCount, int zoneCount, int compartmentCount,
-                gcreason::Reason reason
+                JS::gcreason::Reason reason
                 MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
       : stats(stats)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         stats.beginSlice(collectedCount, zoneCount, compartmentCount, reason);
     }
     ~AutoGCSlice() { stats.endSlice(); }
 
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -88,17 +88,17 @@ CheckStackRoot(JSRuntime *rt, uintptr_t 
     }
 
     /*
      * Only poison the last byte in the word. It is easy to get accidental
      * collisions when a value that does not occupy a full word is used to
      * overwrite a now-dead GC thing pointer. In this case we want to avoid
      * damaging the smaller value.
      */
-    PoisonPtr(w);
+    JS::PoisonPtr(w);
 }
 
 static void
 CheckStackRootsRange(JSRuntime *rt, uintptr_t *begin, uintptr_t *end, Rooter *rbegin, Rooter *rend)
 {
     JS_ASSERT(begin <= end);
     for (uintptr_t *i = begin; i != end; ++i)
         CheckStackRoot(rt, i, rbegin, rend);
--- a/js/src/jsapi-tests/testGCFinalizeCallback.cpp
+++ b/js/src/jsapi-tests/testGCFinalizeCallback.cpp
@@ -34,80 +34,80 @@ BEGIN_TEST(testGCFinalizeCallback)
     JS_GC(rt);
     CHECK(rt->gcIsFull);
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     /* Full GC, incremental. */
     FinalizeCalls = 0;
-    js::PrepareForFullGC(rt);
-    js::IncrementalGC(rt, js::gcreason::API, 1000000);
+    JS::PrepareForFullGC(rt);
+    JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
     CHECK(rt->gcIsFull);
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(false));
 
     JS::RootedObject global1(cx, createGlobal());
     JS::RootedObject global2(cx, createGlobal());
     JS::RootedObject global3(cx, createGlobal());
     CHECK(global1);
     CHECK(global2);
     CHECK(global3);
 
     /* Compartment GC, non-incremental, single compartment. */
     FinalizeCalls = 0;
-    js::PrepareZoneForGC(global1->zone());
-    js::GCForReason(rt, js::gcreason::API);
+    JS::PrepareZoneForGC(global1->zone());
+    JS::GCForReason(rt, JS::gcreason::API);
     CHECK(!rt->gcIsFull);
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, non-incremental, multiple compartments. */
     FinalizeCalls = 0;
-    js::PrepareZoneForGC(global1->zone());
-    js::PrepareZoneForGC(global2->zone());
-    js::PrepareZoneForGC(global3->zone());
-    js::GCForReason(rt, js::gcreason::API);
+    JS::PrepareZoneForGC(global1->zone());
+    JS::PrepareZoneForGC(global2->zone());
+    JS::PrepareZoneForGC(global3->zone());
+    JS::GCForReason(rt, JS::gcreason::API);
     CHECK(!rt->gcIsFull);
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, single compartment. */
     FinalizeCalls = 0;
-    js::PrepareZoneForGC(global1->zone());
-    js::IncrementalGC(rt, js::gcreason::API, 1000000);
+    JS::PrepareZoneForGC(global1->zone());
+    JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gcIsFull);
     CHECK(checkSingleGroup());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
     /* Compartment GC, incremental, multiple compartments. */
     FinalizeCalls = 0;
-    js::PrepareZoneForGC(global1->zone());
-    js::PrepareZoneForGC(global2->zone());
-    js::PrepareZoneForGC(global3->zone());
-    js::IncrementalGC(rt, js::gcreason::API, 1000000);
+    JS::PrepareZoneForGC(global1->zone());
+    JS::PrepareZoneForGC(global2->zone());
+    JS::PrepareZoneForGC(global3->zone());
+    JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
     CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gcIsFull);
     CHECK(checkMultipleGroups());
     CHECK(checkFinalizeStatus());
     CHECK(checkFinalizeIsCompartmentGC(true));
 
 #ifdef JS_GC_ZEAL
 
     /* Full GC with reset due to new compartment, becoming compartment GC. */
 
     FinalizeCalls = 0;
     JS_SetGCZeal(cx, 9, 1000000);
-    js::PrepareForFullGC(rt);
+    JS::PrepareForFullGC(rt);
     js::GCDebugSlice(rt, true, 1);
     CHECK(rt->gcIncrementalState == js::gc::MARK);
     CHECK(rt->gcIsFull);
 
     JS::RootedObject global4(cx, createGlobal());
     js::GCDebugSlice(rt, true, 1);
     CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
     CHECK(!rt->gcIsFull);
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -780,17 +780,17 @@ JSRuntime::JSRuntime(JSUseHelperThreads 
     gcDynamicMarkSlice(false),
     gcShouldCleanUpEverything(false),
     gcGrayBitsValid(false),
     gcIsNeeded(0),
     gcStats(thisFromCtor()),
     gcNumber(0),
     gcStartNumber(0),
     gcIsFull(false),
-    gcTriggerReason(gcreason::NO_REASON),
+    gcTriggerReason(JS::gcreason::NO_REASON),
     gcStrictCompartmentChecking(false),
     gcDisableStrictProxyCheckingCount(0),
     gcIncrementalState(gc::NO_INCREMENTAL),
     gcLastMarkSlice(false),
     gcSweepOnBackgroundThread(false),
     gcFoundBlackGrayEdges(false),
     gcSweepingZones(NULL),
     gcZoneGroupIndex(0),
@@ -2830,18 +2830,18 @@ JS_IsGCMarkingTracer(JSTracer *trc)
 {
     return IS_GC_MARKING_TRACER(trc);
 }
 
 JS_PUBLIC_API(void)
 JS_GC(JSRuntime *rt)
 {
     AssertHeapIsIdle(rt);
-    PrepareForFullGC(rt);
-    GC(rt, GC_NORMAL, gcreason::API);
+    JS::PrepareForFullGC(rt);
+    GC(rt, GC_NORMAL, JS::gcreason::API);
 }
 
 JS_PUBLIC_API(void)
 JS_MaybeGC(JSContext *cx)
 {
     MaybeGC(cx);
 }
 
@@ -3301,37 +3301,37 @@ class AutoHoldZone
     }
 
   private:
     bool *holdp;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 JS_PUBLIC_API(JSObject *)
-JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals, ZoneSpecifier zoneSpec)
+JS_NewGlobalObject(JSContext *cx, JSClass *clasp, JSPrincipals *principals, JS::ZoneSpecifier zoneSpec)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
     JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
 
     JSRuntime *rt = cx->runtime;
 
     Zone *zone;
-    if (zoneSpec == SystemZone)
+    if (zoneSpec == JS::SystemZone)
         zone = rt->systemZone;
-    else if (zoneSpec == FreshZone)
+    else if (zoneSpec == JS::FreshZone)
         zone = NULL;
     else
         zone = ((JSObject *)zoneSpec)->zone();
 
     JSCompartment *compartment = NewCompartment(cx, zone, principals);
     if (!compartment)
         return NULL;
 
-    if (zoneSpec == SystemZone) {
+    if (zoneSpec == JS::SystemZone) {
         rt->systemZone = compartment->zone();
         rt->systemZone->isSystem = true;
     }
 
     AutoHoldZone hold(compartment->zone());
 
     JSCompartment *saved = cx->compartment;
     cx->setCompartment(compartment);
@@ -6243,17 +6243,17 @@ JS_Stringify(JSContext *cx, jsval *vp, J
 
 JS_PUBLIC_API(JSBool)
 JS_ParseJSON(JSContext *cx, const jschar *chars, uint32_t len, jsval *vp)
 {
     AssertHeapIsIdle(cx);
     CHECK_REQUEST(cx);
 
     RootedValue reviver(cx, NullValue()), value(cx);
-    if (!ParseJSONWithReviver(cx, StableCharPtr(chars, len), len, reviver, &value))
+    if (!ParseJSONWithReviver(cx, JS::StableCharPtr(chars, len), len, reviver, &value))
         return false;
 
     *vp = value;
     return true;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_ParseJSONWithReviver(JSContext *cx, const jschar *chars, uint32_t len, jsval reviverArg, jsval *vp)
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1646,82 +1646,82 @@ ToInt64Slow(JSContext *cx, const JS::Val
 /* DO NOT CALL THIS. Use JS::ToUint64. */
 extern JS_PUBLIC_API(bool)
 ToUint64Slow(JSContext *cx, const JS::Value &v, uint64_t *out);
 } /* namespace js */
 
 namespace JS {
 
 JS_ALWAYS_INLINE bool
-ToUint16(JSContext *cx, const js::Value &v, uint16_t *out)
+ToUint16(JSContext *cx, const JS::Value &v, uint16_t *out)
 {
     AssertArgumentsAreSane(cx, v);
     {
         js::SkipRoot skip(cx, &v);
         js::MaybeCheckStackRoots(cx);
     }
 
     if (v.isInt32()) {
         *out = uint16_t(v.toInt32());
         return true;
     }
     return js::ToUint16Slow(cx, v, out);
 }
 
 JS_ALWAYS_INLINE bool
-ToInt32(JSContext *cx, const js::Value &v, int32_t *out)
+ToInt32(JSContext *cx, const JS::Value &v, int32_t *out)
 {
     AssertArgumentsAreSane(cx, v);
     {
         js::SkipRoot root(cx, &v);
         js::MaybeCheckStackRoots(cx);
     }
 
     if (v.isInt32()) {
         *out = v.toInt32();
         return true;
     }
     return js::ToInt32Slow(cx, v, out);
 }
 
 JS_ALWAYS_INLINE bool
-ToUint32(JSContext *cx, const js::Value &v, uint32_t *out)
+ToUint32(JSContext *cx, const JS::Value &v, uint32_t *out)
 {
     AssertArgumentsAreSane(cx, v);
     {
         js::SkipRoot root(cx, &v);
         js::MaybeCheckStackRoots(cx);
     }
 
     if (v.isInt32()) {
         *out = uint32_t(v.toInt32());
         return true;
     }
     return js::ToUint32Slow(cx, v, out);
 }
 
 JS_ALWAYS_INLINE bool
-ToInt64(JSContext *cx, const js::Value &v, int64_t *out)
+ToInt64(JSContext *cx, const JS::Value &v, int64_t *out)
 {
     AssertArgumentsAreSane(cx, v);
     {
         js::SkipRoot skip(cx, &v);
         js::MaybeCheckStackRoots(cx);
     }
 
     if (v.isInt32()) {
         *out = int64_t(v.toInt32());
         return true;
     }
 
     return js::ToInt64Slow(cx, v, out);
 }
 
 JS_ALWAYS_INLINE bool
-ToUint64(JSContext *cx, const js::Value &v, uint64_t *out)
+ToUint64(JSContext *cx, const JS::Value &v, uint64_t *out)
 {
     AssertArgumentsAreSane(cx, v);
     {
         js::SkipRoot skip(cx, &v);
         js::MaybeCheckStackRoots(cx);
     }
 
     if (v.isInt32()) {
@@ -1841,17 +1841,17 @@ IsPoisonedId(jsid iden)
 } /* namespace JS */
 
 namespace js {
 
 template <> struct RootMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
     static ThingRootKind kind() { return THING_ROOT_ID; }
-    static bool poisoned(jsid id) { return IsPoisonedId(id); }
+    static bool poisoned(jsid id) { return JS::IsPoisonedId(id); }
 };
 
 } /* namespace js */
 
 class JSAutoRequest
 {
   public:
     JSAutoRequest(JSContext *cx
@@ -4948,9 +4948,90 @@ JS_EncodeInterpretedFunction(JSContext *
 extern JS_PUBLIC_API(JSScript *)
 JS_DecodeScript(JSContext *cx, const void *data, uint32_t length,
                 JSPrincipals *principals, JSPrincipals *originPrincipals);
 
 extern JS_PUBLIC_API(JSObject *)
 JS_DecodeInterpretedFunction(JSContext *cx, const void *data, uint32_t length,
                              JSPrincipals *principals, JSPrincipals *originPrincipals);
 
+namespace js {
+
+/*
+ * Import some JS:: names into the js namespace so we can make unqualified
+ * references to them.
+ */
+
+using JS::Value;
+using JS::IsPoisonedValue;
+using JS::NullValue;
+using JS::UndefinedValue;
+using JS::Int32Value;
+using JS::DoubleValue;
+using JS::StringValue;
+using JS::BooleanValue;
+using JS::ObjectValue;
+using JS::MagicValue;
+using JS::NumberValue;
+using JS::ObjectOrNullValue;
+using JS::PrivateValue;
+using JS::PrivateUint32Value;
+
+using JS::IsPoisonedPtr;
+using JS::IsPoisonedId;
+
+using JS::StableCharPtr;
+using JS::TwoByteChars;
+using JS::Latin1CharsZ;
+
+using JS::AutoIdVector;
+using JS::AutoValueVector;
+using JS::AutoScriptVector;
+using JS::AutoIdArray;
+
+using JS::AutoGCRooter;
+using JS::AutoValueRooter;
+using JS::AutoObjectRooter;
+using JS::AutoArrayRooter;
+using JS::AutoVectorRooter;
+using JS::AutoHashMapRooter;
+using JS::AutoHashSetRooter;
+
+using JS::CallArgs;
+using JS::IsAcceptableThis;
+using JS::NativeImpl;
+using JS::CallReceiver;
+using JS::CompileOptions;
+using JS::CallNonGenericMethod;
+
+using JS::Rooted;
+using JS::RootedObject;
+using JS::RootedModule;
+using JS::RootedFunction;
+using JS::RootedScript;
+using JS::RootedString;
+using JS::RootedId;
+using JS::RootedValue;
+
+using JS::Handle;
+using JS::HandleObject;
+using JS::HandleModule;
+using JS::HandleFunction;
+using JS::HandleScript;
+using JS::HandleString;
+using JS::HandleId;
+using JS::HandleValue;
+
+using JS::MutableHandle;
+using JS::MutableHandleObject;
+using JS::MutableHandleFunction;
+using JS::MutableHandleScript;
+using JS::MutableHandleString;
+using JS::MutableHandleId;
+using JS::MutableHandleValue;
+
+using JS::NullPtr;  /* To be removed by bug 781070. */
+
+using JS::Zone;
+
+}  /* namespace js */
+
 #endif /* jsapi_h___ */
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -2674,17 +2674,17 @@ NewArray(JSContext *cx, uint32_t length,
             if (allocateCapacity && !EnsureNewArrayElements(cx, obj, length))
                 return NULL;
             return obj;
         }
     }
 
     RootedObject proto(cx, protoArg);
     if (protoArg)
-        PoisonPtr(&protoArg);
+        JS::PoisonPtr(&protoArg);
 
     if (!proto && !FindProto(cx, &ArrayClass, &proto))
         return NULL;
 
     RootedTypeObject type(cx, proto->getNewType(cx, &ArrayClass));
     if (!type)
         return NULL;
 
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -105,17 +105,17 @@ js::TraceCycleDetectionSet(JSTracer *trc
         JSObject *prior = e.front();
         MarkObjectRoot(trc, const_cast<JSObject **>(&e.front()), "cycle detector table entry");
         if (prior != e.front())
             e.rekeyFront(e.front());
     }
 }
 
 void
-JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, RuntimeSizes *rtSizes)
+JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, JS::RuntimeSizes *rtSizes)
 {
     rtSizes->object = mallocSizeOf(this);
 
     rtSizes->atomsTable = atoms.sizeOfExcludingThis(mallocSizeOf);
 
     rtSizes->contexts = 0;
     for (ContextIter acx(this); !acx.done(); acx.next())
         rtSizes->contexts += acx->sizeOfIncludingThis(mallocSizeOf);
@@ -421,22 +421,22 @@ js::DestroyContext(JSContext *cx, Destro
         /* Clear debugging state to remove GC roots. */
         for (CompartmentsIter c(rt); !c.done(); c.next())
             c->clearTraps(rt->defaultFreeOp());
         JS_ClearAllWatchPoints(cx);
 
         /* Clear the statics table to remove GC roots. */
         rt->staticStrings.finish();
 
-        PrepareForFullGC(rt);
-        GC(rt, GC_NORMAL, gcreason::LAST_CONTEXT);
+        JS::PrepareForFullGC(rt);
+        GC(rt, GC_NORMAL, JS::gcreason::LAST_CONTEXT);
     } else if (mode == DCM_FORCE_GC) {
         JS_ASSERT(!rt->isHeapBusy());
-        PrepareForFullGC(rt);
-        GC(rt, GC_NORMAL, gcreason::DESTROY_CONTEXT);
+        JS::PrepareForFullGC(rt);
+        GC(rt, GC_NORMAL, JS::gcreason::DESTROY_CONTEXT);
     }
     js_delete(cx);
 }
 
 bool
 AutoResolving::alreadyStartedSlow() const
 {
     JS_ASSERT(link);
@@ -1367,17 +1367,17 @@ JSRuntime::updateMallocCounter(JS::Zone 
         onTooMuchMalloc();
     else if (zone)
         zone->updateMallocCounter(nbytes);
 }
 
 JS_FRIEND_API(void)
 JSRuntime::onTooMuchMalloc()
 {
-    TriggerGC(this, gcreason::TOO_MUCH_MALLOC);
+    TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC);
 }
 
 JS_FRIEND_API(void *)
 JSRuntime::onOutOfMemory(void *p, size_t nbytes)
 {
     return onOutOfMemory(p, nbytes, NULL);
 }
 
@@ -1386,17 +1386,17 @@ JSRuntime::onOutOfMemory(void *p, size_t
 {
     if (isHeapBusy())
         return NULL;
 
     /*
      * Retry when we are done with the background sweeping and have stopped
      * all the allocations and released the empty GC chunks.
      */
-    ShrinkGCBuffers(this);
+    JS::ShrinkGCBuffers(this);
     gcHelperThread.waitBackgroundSweepOrAllocEnd();
     if (!p)
         p = js_malloc(nbytes);
     else if (p == reinterpret_cast<void *>(1))
         p = js_calloc(nbytes);
     else
       p = js_realloc(p, nbytes);
     if (p)
@@ -1520,23 +1520,23 @@ JSContext::mark(JSTracer *trc)
 
     TraceCycleDetectionSet(trc, cycleDetectorSet);
 
     MarkValueRoot(trc, &iterValue, "iterValue");
 }
 
 #if defined JS_THREADSAFE && defined DEBUG
 
-AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext *cx)
+JS::AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext *cx)
     : cx(cx)
 {
     JS_ASSERT(cx->runtime->requestDepth || cx->runtime->isHeapBusy());
     cx->runtime->assertValidThread();
     cx->runtime->checkRequestDepth++;
 }
 
-AutoCheckRequestDepth::~AutoCheckRequestDepth()
+JS::AutoCheckRequestDepth::~AutoCheckRequestDepth()
 {
     JS_ASSERT(cx->runtime->checkRequestDepth != 0);
     cx->runtime->checkRequestDepth--;
 }
 
 #endif
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -891,17 +891,17 @@ struct JSRuntime : js::RuntimeFriendFiel
 
     /* Whether the currently running GC can finish in multiple slices. */
     int                 gcIsIncremental;
 
     /* Whether all compartments are being collected in first GC slice. */
     bool                gcIsFull;
 
     /* The reason that an interrupt-triggered GC should be called. */
-    js::gcreason::Reason gcTriggerReason;
+    JS::gcreason::Reason gcTriggerReason;
 
     /*
      * If this is true, all marked objects must belong to a compartment being
      * GCed. This is used to look for compartment bugs.
      */
     bool                gcStrictCompartmentChecking;
 
     /*
@@ -1057,17 +1057,17 @@ struct JSRuntime : js::RuntimeFriendFiel
     int gcZeal() { return 0; }
     bool needZealousGC() { return false; }
 #endif
 
     bool                gcValidate;
     bool                gcFullCompartmentChecks;
 
     JSGCCallback        gcCallback;
-    js::GCSliceCallback gcSliceCallback;
+    JS::GCSliceCallback gcSliceCallback;
     JSFinalizeCallback  gcFinalizeCallback;
 
     js::AnalysisPurgeCallback analysisPurgeCallback;
     uint64_t            analysisPurgeTriggerBytes;
 
   private:
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -803,17 +803,17 @@ JSCompartment::sweepBreakpoints(FreeOp *
                     bp->destroy(fop);
             }
         }
     }
 }
 
 void
 JSCompartment::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compartmentObject,
-                                   TypeInferenceSizes *tiSizes, size_t *shapesCompartmentTables,
+                                   JS::TypeInferenceSizes *tiSizes, size_t *shapesCompartmentTables,
                                    size_t *crossCompartmentWrappersArg, size_t *regexpCompartment,
                                    size_t *debuggeesSet)
 {
     *compartmentObject = mallocSizeOf(this);
     sizeOfTypeInferenceData(tiSizes, mallocSizeOf);
     *shapesCompartmentTables = baseShapes.sizeOfExcludingThis(mallocSizeOf)
                              + initialShapes.sizeOfExcludingThis(mallocSizeOf)
                              + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -388,17 +388,17 @@ class js::AutoDebugModeGC
     explicit AutoDebugModeGC(JSRuntime *rt) : rt(rt), needGC(false) {}
 
     ~AutoDebugModeGC() {
         // Under some circumstances (say, in the midst of an animation),
         // the garbage collector may try to retain JIT code and analyses.
         // The DEBUG_MODE_GC reason forces the collector to always throw
         // everything away, as required for debug mode transitions.
         if (needGC)
-            GC(rt, GC_NORMAL, gcreason::DEBUG_MODE_GC);
+            GC(rt, GC_NORMAL, JS::gcreason::DEBUG_MODE_GC);
     }
 
     void scheduleGC(Zone *zone) {
         JS_ASSERT(!rt->isHeapBusy());
         PrepareZoneForGC(zone);
         needGC = true;
     }
 };
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -1027,17 +1027,17 @@ js_CallContextDebugHandler(JSContext *cx
         return JS_FALSE;
       case JSTRAP_RETURN:
       case JSTRAP_CONTINUE:
       default:
         return JS_TRUE;
     }
 }
 
-JS_PUBLIC_API(StackDescription *)
+JS_PUBLIC_API(JS::StackDescription *)
 JS::DescribeStack(JSContext *cx, unsigned maxFrames)
 {
     Vector<FrameDescription> frames(cx);
 
     for (ScriptFrameIter i(cx); !i.done(); ++i) {
         if (i.script()->selfHosted)
             continue;
         FrameDescription desc;
@@ -1045,27 +1045,27 @@ JS::DescribeStack(JSContext *cx, unsigne
         desc.lineno = PCToLineNumber(i.script(), i.pc());
         desc.fun = i.maybeCallee();
         if (!frames.append(desc))
             return NULL;
         if (frames.length() == maxFrames)
             break;
     }
 
-    StackDescription *desc = js_new<StackDescription>();
+    JS::StackDescription *desc = js_new<JS::StackDescription>();
     if (!desc)
         return NULL;
 
     desc->nframes = frames.length();
     desc->frames = frames.extractRawBuffer();
     return desc;
 }
 
 JS_PUBLIC_API(void)
-JS::FreeStackDescription(JSContext *cx, StackDescription *desc)
+JS::FreeStackDescription(JSContext *cx, JS::StackDescription *desc)
 {
     js_delete(desc->frames);
     js_delete(desc);
 }
 
 class AutoPropertyDescArray
 {
     JSContext *cx_;
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -141,17 +141,17 @@ JS::PrepareForFullGC(JSRuntime *rt)
 {
     for (ZonesIter zone(rt); !zone.done(); zone.next())
         zone->scheduleGC();
 }
 
 JS_FRIEND_API(void)
 JS::PrepareForIncrementalGC(JSRuntime *rt)
 {
-    if (!IsIncrementalGCInProgress(rt))
+    if (!JS::IsIncrementalGCInProgress(rt))
         return;
 
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         if (zone->wasGCStarted())
             PrepareZoneForGC(zone);
     }
 }
 
@@ -778,20 +778,20 @@ js::SetActivityCallback(JSRuntime *rt, A
 }
 
 JS_FRIEND_API(bool)
 js::IsContextRunningJS(JSContext *cx)
 {
     return !cx->stack.empty();
 }
 
-JS_FRIEND_API(GCSliceCallback)
+JS_FRIEND_API(JS::GCSliceCallback)
 JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
 {
-    GCSliceCallback old = rt->gcSliceCallback;
+    JS::GCSliceCallback old = rt->gcSliceCallback;
     rt->gcSliceCallback = callback;
     return old;
 }
 
 JS_FRIEND_API(bool)
 JS::WasIncrementalGC(JSRuntime *rt)
 {
     return rt->gcIsIncremental;
@@ -831,18 +831,18 @@ JS::NotifyDidPaint(JSRuntime *rt)
     }
 
     if (rt->gcZeal() == gc::ZealFrameGCValue) {
         PrepareForFullGC(rt);
         GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
         return;
     }
 
-    if (IsIncrementalGCInProgress(rt) && !rt->gcInterFrameGC) {
-        PrepareForIncrementalGC(rt);
+    if (JS::IsIncrementalGCInProgress(rt) && !rt->gcInterFrameGC) {
+        JS::PrepareForIncrementalGC(rt);
         GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
     }
 
     rt->gcInterFrameGC = false;
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalGCEnabled(JSRuntime *rt)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -787,17 +787,17 @@ Chunk::allocateArena(Zone *zone, AllocKi
     aheader->init(zone, thingKind);
     if (JS_UNLIKELY(!hasAvailableArenas()))
         removeFromAvailableList();
 
     Probes::resizeHeap(zone, rt->gcBytes, rt->gcBytes + ArenaSize);
     rt->gcBytes += ArenaSize;
     zone->gcBytes += ArenaSize;
     if (zone->gcBytes >= zone->gcTriggerBytes)
-        TriggerZoneGC(zone, gcreason::ALLOC_TRIGGER);
+        TriggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER);
 
     return aheader;
 }
 
 inline void
 Chunk::addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader)
 {
     JS_ASSERT(!aheader->allocated());
@@ -1496,17 +1496,17 @@ RunLastDitchGC(JSContext *cx, JS::Zone *
     JS_ASSERT(!InParallelSection());
 
     PrepareZoneForGC(zone);
 
     JSRuntime *rt = cx->runtime;
 
     /* The last ditch GC preserves all atoms. */
     AutoKeepAtoms keep(rt);
-    GC(rt, GC_NORMAL, gcreason::LAST_DITCH);
+    GC(rt, GC_NORMAL, JS::gcreason::LAST_DITCH);
 
     /*
      * The JSGC_END callback can legitimately allocate new GC
      * things and populate the free list. If that happens, just
      * return that list head.
      */
     size_t thingSize = Arena::thingSize(thingKind);
     if (void *thing = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize))
@@ -1589,17 +1589,17 @@ js_LockThing(JSRuntime *rt, void *thing)
 
     /*
      * Sometimes Firefox will hold weak references to objects and then convert
      * them to strong references by calling AddRoot (e.g., via PreserveWrapper,
      * or ModifyBusyCount in workers). We need a read barrier to cover these
      * cases.
      */
     if (rt->gcIncrementalState != NO_INCREMENTAL)
-        IncrementalReferenceBarrier(thing, GetGCThingTraceKind(thing));
+        JS::IncrementalReferenceBarrier(thing, GetGCThingTraceKind(thing));
 
     if (GCLocks::Ptr p = rt->gcLocksHash.lookupWithDefault(thing, 0)) {
         p->value++;
         return true;
     }
 
     return false;
 }
@@ -1950,46 +1950,46 @@ js::SetMarkStackLimit(JSRuntime *rt, siz
 
 void
 js::MarkCompartmentActive(StackFrame *fp)
 {
     fp->script()->compartment()->zone()->active = true;
 }
 
 static void
-TriggerOperationCallback(JSRuntime *rt, gcreason::Reason reason)
+TriggerOperationCallback(JSRuntime *rt, JS::gcreason::Reason reason)
 {
     if (rt->gcIsNeeded)
         return;
 
     rt->gcIsNeeded = true;
     rt->gcTriggerReason = reason;
     rt->triggerOperationCallback();
 }
 
 void
-js::TriggerGC(JSRuntime *rt, gcreason::Reason reason)
+js::TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason)
 {
     /* Wait till end of parallel section to trigger GC. */
     if (ForkJoinSlice *slice = ForkJoinSlice::Current()) {
         slice->requestGC(reason);
         return;
     }
 
     rt->assertValidThread();
 
     if (rt->isHeapBusy())
         return;
 
-    PrepareForFullGC(rt);
+    JS::PrepareForFullGC(rt);
     TriggerOperationCallback(rt, reason);
 }
 
 void
-js::TriggerZoneGC(Zone *zone, gcreason::Reason reason)
+js::TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason)
 {
     /* Wait till end of parallel section to trigger GC. */
     if (ForkJoinSlice *slice = ForkJoinSlice::Current()) {
         slice->requestZoneGC(zone, reason);
         return;
     }
 
     JSRuntime *rt = zone->rt;
@@ -2015,51 +2015,51 @@ js::TriggerZoneGC(Zone *zone, gcreason::
 
 void
 js::MaybeGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
     rt->assertValidThread();
 
     if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
-        PrepareForFullGC(rt);
-        GC(rt, GC_NORMAL, gcreason::MAYBEGC);
+        JS::PrepareForFullGC(rt);
+        GC(rt, GC_NORMAL, JS::gcreason::MAYBEGC);
         return;
     }
 
     if (rt->gcIsNeeded) {
-        GCSlice(rt, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(rt, GC_NORMAL, JS::gcreason::MAYBEGC);
         return;
     }
 
     double factor = rt->gcHighFrequencyGC ? 0.85 : 0.9;
     Zone *zone = cx->zone();
     if (zone->gcBytes > 1024 * 1024 &&
         zone->gcBytes >= factor * zone->gcTriggerBytes &&
         rt->gcIncrementalState == NO_INCREMENTAL &&
         !rt->gcHelperThread.sweeping())
     {
         PrepareZoneForGC(zone);
-        GCSlice(rt, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(rt, GC_NORMAL, JS::gcreason::MAYBEGC);
         return;
     }
 
 #ifndef JS_MORE_DETERMINISTIC
     /*
      * Access to the counters and, on 32 bit, setting gcNextFullGCTime below
      * is not atomic and a race condition could trigger or suppress the GC. We
      * tolerate this.
      */
     int64_t now = PRMJ_Now();
     if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
         if (rt->gcChunkAllocationSinceLastGC ||
             rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
         {
-            PrepareForFullGC(rt);
-            GCSlice(rt, GC_SHRINK, gcreason::MAYBEGC);
+            JS::PrepareForFullGC(rt);
+            GCSlice(rt, GC_SHRINK, JS::gcreason::MAYBEGC);
         } else {
             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 #endif
 }
 
 static void
@@ -4099,17 +4099,17 @@ AutoCopyFreeListToArenas::~AutoCopyFreeL
 {
     for (ZonesIter zone(runtime); !zone.done(); zone.next())
         zone->allocator.arenas.clearFreeListsInArenas();
 }
 
 static void
 IncrementalCollectSlice(JSRuntime *rt,
                         int64_t budget,
-                        gcreason::Reason gcReason,
+                        JS::gcreason::Reason gcReason,
                         JSGCInvocationKind gcKind);
 
 static void
 ResetIncrementalGC(JSRuntime *rt, const char *reason)
 {
     switch (rt->gcIncrementalState) {
       case NO_INCREMENTAL:
         return;
@@ -4142,17 +4142,17 @@ ResetIncrementalGC(JSRuntime *rt, const 
       case SWEEP:
         rt->gcMarker.reset();
 
         for (ZonesIter zone(rt); !zone.done(); zone.next())
             zone->scheduledForDestruction = false;
 
         /* Finish sweeping the current zone group, then abort. */
         rt->gcAbortSweepAfterCurrentGroup = true;
-        IncrementalCollectSlice(rt, SliceBudget::Unlimited, gcreason::RESET, GC_NORMAL);
+        IncrementalCollectSlice(rt, SliceBudget::Unlimited, JS::gcreason::RESET, GC_NORMAL);
 
         {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
             rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
         }
         break;
 
       default:
@@ -4233,27 +4233,27 @@ PushZealSelectedObjects(JSRuntime *rt)
         MarkObjectUnbarriered(&rt->gcMarker, obj, "selected obj");
     }
 #endif
 }
 
 static void
 IncrementalCollectSlice(JSRuntime *rt,
                         int64_t budget,
-                        gcreason::Reason reason,
+                        JS::gcreason::Reason reason,
                         JSGCInvocationKind gckind)
 {
     AutoCopyFreeListToArenas copy(rt);
     AutoGCSlice slice(rt);
 
     gc::State initialState = rt->gcIncrementalState;
 
     int zeal = 0;
 #ifdef JS_GC_ZEAL
-    if (reason == gcreason::DEBUG_GC && budget != SliceBudget::Unlimited) {
+    if (reason == JS::gcreason::DEBUG_GC && budget != SliceBudget::Unlimited) {
         /*
          * Do the incremental collection type specified by zeal mode if the
          * collection was triggered by RunDebugGC() and incremental GC has not
          * been cancelled by ResetIncrementalGC.
          */
         zeal = rt->gcZeal();
     }
 #endif
@@ -4341,17 +4341,17 @@ IncrementalCollectSlice(JSRuntime *rt,
         /* fall through */
       }
 
       case SWEEP: {
         bool finished = SweepPhase(rt, sliceBudget);
         if (!finished)
             break;
 
-        EndSweepPhase(rt, gckind, reason == gcreason::LAST_CONTEXT);
+        EndSweepPhase(rt, gckind, reason == JS::gcreason::LAST_CONTEXT);
 
         if (rt->gcSweepOnBackgroundThread)
             rt->gcHelperThread.startBackgroundSweep(gckind == GC_SHRINK);
 
         rt->gcIncrementalState = NO_INCREMENTAL;
         break;
       }
 
@@ -4422,17 +4422,17 @@ BudgetIncrementalGC(JSRuntime *rt, int64
 
 /*
  * GC, repeatedly if necessary, until we think we have not created any new
  * garbage. We disable inlining to ensure that the bottom of the stack with
  * possible GC roots recorded in MarkRuntime excludes any pointers we use during
  * the marking implementation.
  */
 static JS_NEVER_INLINE void
-GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gckind, gcreason::Reason reason)
+GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     /* If we attempt to invoke the GC while we are running in the GC, assert. */
     JS_ASSERT(!rt->isHeapBusy());
 
 #ifdef DEBUG
     for (ZonesIter zone(rt); !zone.done(); zone.next())
         JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, zone->isGCScheduled());
 #endif
@@ -4461,50 +4461,50 @@ GCCycle(JSRuntime *rt, bool incremental,
         }
 
         IncrementalCollectSlice(rt, budget, reason, gckind);
     }
 }
 
 #ifdef JS_GC_ZEAL
 static bool
-IsDeterministicGCReason(gcreason::Reason reason)
-{
-    if (reason > gcreason::DEBUG_GC &&
-        reason != gcreason::CC_FORCED && reason != gcreason::SHUTDOWN_CC)
+IsDeterministicGCReason(JS::gcreason::Reason reason)
+{
+    if (reason > JS::gcreason::DEBUG_GC &&
+        reason != JS::gcreason::CC_FORCED && reason != JS::gcreason::SHUTDOWN_CC)
     {
         return false;
     }
 
-    if (reason == gcreason::MAYBEGC)
+    if (reason == JS::gcreason::MAYBEGC)
         return false;
 
     return true;
 }
 #endif
 
 static bool
-ShouldCleanUpEverything(JSRuntime *rt, gcreason::Reason reason, JSGCInvocationKind gckind)
+ShouldCleanUpEverything(JSRuntime *rt, JS::gcreason::Reason reason, JSGCInvocationKind gckind)
 {
     // During shutdown, we must clean everything up, for the sake of leak
     // detection. When a runtime has no contexts, or we're doing a GC before a
     // shutdown CC, those are strong indications that we're shutting down.
     //
     // DEBUG_MODE_GC indicates we're discarding code because the debug mode
     // has changed; debug mode affects the results of bytecode analysis, so
     // we need to clear everything away.
     return !rt->hasContexts() ||
-           reason == gcreason::SHUTDOWN_CC ||
-           reason == gcreason::DEBUG_MODE_GC ||
+           reason == JS::gcreason::SHUTDOWN_CC ||
+           reason == JS::gcreason::DEBUG_MODE_GC ||
            gckind == GC_SHRINK;
 }
 
 static void
 Collect(JSRuntime *rt, bool incremental, int64_t budget,
-        JSGCInvocationKind gckind, gcreason::Reason reason)
+        JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     /* GC shouldn't be running in parallel execution mode */
     JS_ASSERT(!InParallelSection());
 
     JS_AbortIfWrongThread(rt);
 
     if (rt->mainThread.suppressGC)
         return;
@@ -4522,17 +4522,17 @@ Collect(JSRuntime *rt, bool incremental,
 #ifdef JS_GC_ZEAL
     if (rt->gcDeterministicOnly && !IsDeterministicGCReason(reason))
         return;
 #endif
 
     JS_ASSERT_IF(!incremental || budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
 
 #ifdef JS_GC_ZEAL
-    bool isShutdown = reason == gcreason::SHUTDOWN_CC || !rt->hasContexts();
+    bool isShutdown = reason == JS::gcreason::SHUTDOWN_CC || !rt->hasContexts();
     struct AutoVerifyBarriers {
         JSRuntime *runtime;
         bool restartPreVerifier;
         bool restartPostVerifier;
         AutoVerifyBarriers(JSRuntime *rt, bool isShutdown)
           : runtime(rt)
         {
             restartPreVerifier = !isShutdown && rt->gcVerifyPreData;
@@ -4593,47 +4593,47 @@ Collect(JSRuntime *rt, bool incremental,
         if (rt->gcIncrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_END);
             if (JSGCCallback callback = rt->gcCallback)
                 callback(rt, JSGC_END);
         }
 
         /* Need to re-schedule all zones for GC. */
         if (rt->gcPoke && rt->gcShouldCleanUpEverything)
-            PrepareForFullGC(rt);
+            JS::PrepareForFullGC(rt);
 
         /*
          * On shutdown, iterate until finalizers or the JSGC_END callback
          * stop creating garbage.
          */
     } while (rt->gcPoke && rt->gcShouldCleanUpEverything);
 }
 
 void
-js::GC(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason reason)
+js::GC(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     Collect(rt, false, SliceBudget::Unlimited, gckind, reason);
 }
 
 void
-js::GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason reason, int64_t millis)
+js::GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis)
 {
     int64_t sliceBudget;
     if (millis)
         sliceBudget = SliceBudget::TimeBudget(millis);
     else if (rt->gcHighFrequencyGC && rt->gcDynamicMarkSlice)
         sliceBudget = rt->gcSliceBudget * IGC_MARK_SLICE_MULTIPLIER;
     else
         sliceBudget = rt->gcSliceBudget;
 
     Collect(rt, true, sliceBudget, gckind, reason);
 }
 
 void
-js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason reason)
+js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     Collect(rt, true, SliceBudget::Unlimited, gckind, reason);
 }
 
 static bool
 ZonesSelected(JSRuntime *rt)
 {
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
@@ -4643,30 +4643,30 @@ ZonesSelected(JSRuntime *rt)
     return false;
 }
 
 void
 js::GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount)
 {
     int64_t budget = limit ? SliceBudget::WorkBudget(objCount) : SliceBudget::Unlimited;
     if (!ZonesSelected(rt)) {
-        if (IsIncrementalGCInProgress(rt))
-            PrepareForIncrementalGC(rt);
+        if (JS::IsIncrementalGCInProgress(rt))
+            JS::PrepareForIncrementalGC(rt);
         else
-            PrepareForFullGC(rt);
+            JS::PrepareForFullGC(rt);
     }
-    Collect(rt, true, budget, GC_NORMAL, gcreason::DEBUG_GC);
+    Collect(rt, true, budget, GC_NORMAL, JS::gcreason::DEBUG_GC);
 }
 
 /* Schedule a full GC unless a zone will already be collected. */
 void
 js::PrepareForDebugGC(JSRuntime *rt)
 {
     if (!ZonesSelected(rt))
-        PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt);
 }
 
 JS_FRIEND_API(void)
 JS::ShrinkGCBuffers(JSRuntime *rt)
 {
     AutoLockGC lock(rt);
     JS_ASSERT(!rt->isHeapBusy());
 
@@ -4679,19 +4679,19 @@ JS::ShrinkGCBuffers(JSRuntime *rt)
 void
 js::gc::FinishBackgroundFinalize(JSRuntime *rt)
 {
     rt->gcHelperThread.waitBackgroundSweepEnd();
 }
 
 AutoFinishGC::AutoFinishGC(JSRuntime *rt)
 {
-    if (IsIncrementalGCInProgress(rt)) {
-        PrepareForIncrementalGC(rt);
-        FinishIncrementalGC(rt, gcreason::API);
+    if (JS::IsIncrementalGCInProgress(rt)) {
+        JS::PrepareForIncrementalGC(rt);
+        JS::FinishIncrementalGC(rt, JS::gcreason::API);
     }
 
     gc::FinishBackgroundFinalize(rt);
 }
 
 AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime *rt)
   : finish(rt),
     session(rt),
@@ -4775,31 +4775,31 @@ gc::RunDebugGC(JSContext *cx)
             else
                 rt->gcIncrementalLimit *= 2;
             budget = SliceBudget::WorkBudget(rt->gcIncrementalLimit);
         } else {
             // This triggers incremental GC but is actually ignored by IncrementalMarkSlice.
             budget = SliceBudget::WorkBudget(1);
         }
 
-        Collect(rt, true, budget, GC_NORMAL, gcreason::DEBUG_GC);
+        Collect(rt, true, budget, GC_NORMAL, JS::gcreason::DEBUG_GC);
 
         /*
          * For multi-slice zeal, reset the slice size when we get to the sweep
          * phase.
          */
         if (type == ZealIncrementalMultipleSlices &&
             initialState == MARK && rt->gcIncrementalState == SWEEP)
         {
             rt->gcIncrementalLimit = rt->gcZealFrequency / 2;
         }
     } else if (type == ZealPurgeAnalysisValue) {
         cx->compartment->types.maybePurgeAnalysis(cx, /* force = */ true);
     } else {
-        Collect(rt, false, SliceBudget::Unlimited, GC_NORMAL, gcreason::DEBUG_GC);
+        Collect(rt, false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC);
     }
 
 #endif
 }
 
 void
 gc::SetDeterministicGC(JSContext *cx, bool enabled)
 {
@@ -5030,32 +5030,32 @@ ArenaLists::containsArena(JSRuntime *rt,
     }
     return false;
 }
 
 
 AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSContext *cx)
   : runtime(cx->runtime),
     markCount(runtime->gcObjectsMarkedInDeadZones),
-    inIncremental(IsIncrementalGCInProgress(runtime)),
+    inIncremental(JS::IsIncrementalGCInProgress(runtime)),
     manipulatingDeadZones(runtime->gcManipulatingDeadZones)
 {
     runtime->gcManipulatingDeadZones = true;
 }
 
 AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj)
   : runtime(obj->compartment()->rt),
     markCount(runtime->gcObjectsMarkedInDeadZones),
-    inIncremental(IsIncrementalGCInProgress(runtime)),
+    inIncremental(JS::IsIncrementalGCInProgress(runtime)),
     manipulatingDeadZones(runtime->gcManipulatingDeadZones)
 {
     runtime->gcManipulatingDeadZones = true;
 }
 
 AutoMaybeTouchDeadZones::~AutoMaybeTouchDeadZones()
 {
     if (inIncremental && runtime->gcObjectsMarkedInDeadZones != markCount) {
-        PrepareForFullGC(runtime);
-        js::GC(runtime, GC_NORMAL, gcreason::TRANSPLANT);
+        JS::PrepareForFullGC(runtime);
+        js::GC(runtime, GC_NORMAL, JS::gcreason::TRANSPLANT);
     }
 
     runtime->gcManipulatingDeadZones = manipulatingDeadZones;
 }
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -584,21 +584,21 @@ namespace js {
 extern void
 MarkCompartmentActive(js::StackFrame *fp);
 
 extern void
 TraceRuntime(JSTracer *trc);
 
 /* Must be called with GC lock taken. */
 extern void
-TriggerGC(JSRuntime *rt, js::gcreason::Reason reason);
+TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason);
 
 /* Must be called with GC lock taken. */
 extern void
-TriggerZoneGC(Zone *zone, js::gcreason::Reason reason);
+TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason);
 
 extern void
 MaybeGC(JSContext *cx);
 
 extern void
 ReleaseAllJITCode(FreeOp *op);
 
 /*
@@ -608,23 +608,23 @@ typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
 
     /* Minimize GC triggers and release empty GC chunks right away. */
     GC_SHRINK             = 1
 } JSGCInvocationKind;
 
 extern void
-GC(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+GC(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);
 
 extern void
-GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason, int64_t millis = 0);
+GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);
 
 extern void
-GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);
 
 extern void
 GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount);
 
 extern void
 PrepareForDebugGC(JSRuntime *rt);
 
 #ifdef JS_GC_ZEAL
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -6853,17 +6853,17 @@ TypeCompartment::maybePurgeAnalysis(JSCo
             return;
         }
 
         cx->runtime->analysisPurgeCallback(cx->runtime, &desc->asFlat());
     }
 }
 
 static void
-SizeOfScriptTypeInferenceData(RawScript script, TypeInferenceSizes *sizes,
+SizeOfScriptTypeInferenceData(RawScript script, JS::TypeInferenceSizes *sizes,
                               JSMallocSizeOfFun mallocSizeOf)
 {
     TypeScript *typeScript = script->types;
     if (!typeScript)
         return;
 
     /* If TI is disabled, a single TypeScript is still present. */
     if (!script->compartment()->zone()->types.inferenceEnabled) {
@@ -6882,17 +6882,17 @@ SizeOfScriptTypeInferenceData(RawScript 
 
 void
 Zone::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *typePool)
 {
     *typePool += types.typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
 }
 
 void
-JSCompartment::sizeOfTypeInferenceData(TypeInferenceSizes *sizes, JSMallocSizeOfFun mallocSizeOf)
+JSCompartment::sizeOfTypeInferenceData(JS::TypeInferenceSizes *sizes, JSMallocSizeOfFun mallocSizeOf)
 {
     sizes->analysisPool += analysisLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
 
     /* Pending arrays are cleared on GC along with the analysis pool. */
     sizes->pendingArrays += mallocSizeOf(types.pendingArray);
 
     /* TypeCompartment::pendingRecompiles is non-NULL only while inference code is running. */
     JS_ASSERT(!types.pendingRecompiles);
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -287,17 +287,17 @@ IdToTypeId(RawId id)
         return JSID_VOID;
 
     /*
      * Check for numeric strings, as in js_StringIsIndex, but allow negative
      * and overflowing integers.
      */
     if (JSID_IS_STRING(id)) {
         JSFlatString *str = JSID_TO_FLAT_STRING(id);
-        TwoByteChars cp = str->range();
+        JS::TwoByteChars cp = str->range();
         if (JS7_ISDEC(cp[0]) || cp[0] == '-') {
             for (size_t i = 1; i < cp.length(); ++i) {
                 if (!JS7_ISDEC(cp[i]))
                     return id;
             }
             return JSID_VOID;
         }
         return id;
--- a/js/src/jsmemorymetrics.cpp
+++ b/js/src/jsmemorymetrics.cpp
@@ -22,16 +22,21 @@
 #include "vm/Shape.h"
 
 #include "jsobjinlines.h"
 
 using mozilla::DebugOnly;
 
 using namespace js;
 
+using JS::RuntimeStats;
+using JS::ObjectPrivateVisitor;
+using JS::ZoneStats;
+using JS::CompartmentStats;
+
 JS_FRIEND_API(size_t)
 js::MemoryReportingSundriesThreshold()
 {
     return 8 * 1024;
 }
 
 typedef HashSet<ScriptSource *, DefaultHasher<ScriptSource *>, SystemAllocPolicy> SourceSet;
 
@@ -165,17 +170,17 @@ StatsCellCallback(JSRuntime *rt, void *d
             cStats->gcHeapObjectsFunction += thingSize;
         else if (obj->isArray())
             cStats->gcHeapObjectsDenseArray += thingSize;
         else if (obj->isCrossCompartmentWrapper())
             cStats->gcHeapObjectsCrossCompartmentWrapper += thingSize;
         else
             cStats->gcHeapObjectsOrdinary += thingSize;
 
-        ObjectsExtraSizes objectsExtra;
+        JS::ObjectsExtraSizes objectsExtra;
         obj->sizeOfExcludingThis(rtStats->mallocSizeOf_, &objectsExtra);
         cStats->objectsExtra.add(objectsExtra);
 
         // JSObject::sizeOfExcludingThis() doesn't measure objectsExtraPrivate,
         // so we do it here.
         if (ObjectPrivateVisitor *opv = closure->opv) {
             nsISupports *iface;
             if (opv->getISupports_(obj, &iface) && iface) {
@@ -187,19 +192,19 @@ StatsCellCallback(JSRuntime *rt, void *d
 
       case JSTRACE_STRING: {
         JSString *str = static_cast<JSString *>(thing);
 
         size_t strSize = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
 
         // If we can't grow hugeStrings, let's just call this string non-huge.
         // We're probably about to OOM anyway.
-        if (strSize >= HugeStringInfo::MinSize() && zStats->hugeStrings.growBy(1)) {
+        if (strSize >= JS::HugeStringInfo::MinSize() && zStats->hugeStrings.growBy(1)) {
             zStats->gcHeapStringsNormal += thingSize;
-            HugeStringInfo &info = zStats->hugeStrings.back();
+            JS::HugeStringInfo &info = zStats->hugeStrings.back();
             info.length = str->length();
             info.size = strSize;
             PutEscapedString(info.buffer, sizeof(info.buffer), &str->asLinear(), 0);
         } else if (str->isShort()) {
             MOZ_ASSERT(strSize == 0);
             zStats->gcHeapStringsShort += thingSize;
         } else {
             zStats->gcHeapStringsNormal += thingSize;
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -1478,17 +1478,17 @@ js::SaveSharedScriptData(JSContext *cx, 
 
 #ifdef JSGC_INCREMENTAL
     /*
      * During the IGC we need to ensure that bytecode is marked whenever it is
      * accessed even if the bytecode was already in the table: at this point
      * old scripts or exceptions pointing to the bytecode may no longer be
      * reachable. This is effectively a read barrier.
      */
-    if (IsIncrementalGCInProgress(rt) && rt->gcIsFull)
+    if (JS::IsIncrementalGCInProgress(rt) && rt->gcIsFull)
         ssd->marked = true;
 #endif
 
     script->code = ssd->data;
     script->atoms = ssd->atoms(script->length, script->numNotes());
     return true;
 }
 
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -3588,17 +3588,17 @@ js_NewStringCopyN<CanGC>(JSContext *cx, 
 template JSFlatString *
 js_NewStringCopyN<NoGC>(JSContext *cx, const jschar *s, size_t n);
 
 template <AllowGC allowGC>
 JSFlatString *
 js_NewStringCopyN(JSContext *cx, const char *s, size_t n)
 {
     if (JSShortString::lengthFits(n))
-        return NewShortString<allowGC>(cx, Latin1Chars(s, n));
+        return NewShortString<allowGC>(cx, JS::Latin1Chars(s, n));
 
     jschar *chars = InflateString(cx, s, &n);
     if (!chars)
         return NULL;
     JSFlatString *str = js_NewString<allowGC>(cx, chars, n);
     if (!str)
         js_free(chars);
     return str;
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -91,17 +91,17 @@ WatchpointMap::unwatch(JSObject *obj, js
                        JSWatchPointHandler *handlerp, JSObject **closurep)
 {
     if (Map::Ptr p = map.lookup(WatchKey(obj, id))) {
         if (handlerp)
             *handlerp = p->value.handler;
         if (closurep) {
             // Read barrier to prevent an incorrectly gray closure from escaping the
             // watchpoint. See the comment before UnmarkGrayChildren in gc/Marking.cpp
-            ExposeGCThingToActiveJS(p->value.closure, JSTRACE_OBJECT);
+            JS::ExposeGCThingToActiveJS(p->value.closure, JSTRACE_OBJECT);
             *closurep = p->value.closure;
         }
         map.remove(p);
     }
 }
 
 void
 WatchpointMap::unwatchObject(JSObject *obj)
@@ -139,17 +139,17 @@ WatchpointMap::triggerWatchpoint(JSConte
         if (RawShape shape = obj->nativeLookup(cx, id)) {
             if (shape->hasSlot())
                 old = obj->nativeGetSlot(shape->slot());
         }
     }
 
     // Read barrier to prevent an incorrectly gray closure from escaping the
     // watchpoint. See the comment before UnmarkGrayChildren in gc/Marking.cpp
-    ExposeGCThingToActiveJS(closure, JSTRACE_OBJECT);
+    JS::ExposeGCThingToActiveJS(closure, JSTRACE_OBJECT);
 
     /* Call the handler. */
     return handler(cx, obj, id, old, vp.address(), closure);
 }
 
 bool
 WatchpointMap::markCompartmentIteratively(JSCompartment *c, JSTracer *trc)
 {
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -1311,17 +1311,17 @@ mjit::Compiler::markUndefinedLocals()
      */
     for (uint32_t i = 0; i < script_->nfixed; i++)
         markUndefinedLocal(0, i);
 
 #ifdef DEBUG
     uint32_t depth = ssa.getFrame(a->inlineIndex).depth;
     for (uint32_t i = script_->nfixed; i < script_->nslots; i++) {
         Address local(JSFrameReg, sizeof(StackFrame) + (depth + i) * sizeof(Value));
-        masm.storeValue(ObjectValueCrashOnTouch(), local);
+        masm.storeValue(JS::ObjectValueCrashOnTouch(), local);
     }
 #endif
 }
 
 CompileStatus
 mjit::Compiler::generateEpilogue()
 {
     return Compile_Okay;
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -2591,17 +2591,17 @@ Debugger::findAllGlobals(JSContext *cx, 
 
         GlobalObject *global = c->maybeGlobal();
         if (global) {
             /*
              * We pulled |global| out of nowhere, so it's possible that it was
              * marked gray by XPConnect. Since we're now exposing it to JS code,
              * we need to mark it black.
              */
-            ExposeGCThingToActiveJS(global, JSTRACE_OBJECT);
+            JS::ExposeGCThingToActiveJS(global, JSTRACE_OBJECT);
 
             RootedValue globalValue(cx, ObjectValue(*global));
             if (!dbg->wrapDebuggeeValue(cx, &globalValue))
                 return false;
             if (!js_NewbornArrayPush(cx, result, globalValue))
                 return false;
         }
     }
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -43,22 +43,22 @@ class js::ForkJoinShared : public TaskEx
 
     Vector<Allocator *, 16> allocators_;
 
     /////////////////////////////////////////////////////////////////////////
     // Locked Fields
     //
     // Only to be accessed while holding the lock.
 
-    uint32_t uncompleted_;         // Number of uncompleted worker threads
-    uint32_t blocked_;             // Number of threads that have joined rendezvous
-    uint32_t rendezvousIndex_;     // Number of rendezvous attempts
-    bool gcRequested_;             // True if a worker requested a GC
-    gcreason::Reason gcReason_;    // Reason given to request GC
-    Zone *gcZone_;                 // Zone for GC, or NULL for full
+    uint32_t uncompleted_;          // Number of uncompleted worker threads
+    uint32_t blocked_;              // Number of threads that have joined rendezvous
+    uint32_t rendezvousIndex_;      // Number of rendezvous attempts
+    bool gcRequested_;              // True if a worker requested a GC
+    JS::gcreason::Reason gcReason_; // Reason given to request GC
+    Zone *gcZone_;                  // Zone for GC, or NULL for full
 
     /////////////////////////////////////////////////////////////////////////
     // Asynchronous Flags
     //
     // These can be read without the lock (hence the |volatile| declaration).
     // All fields should be *written with the lock*, however.
 
     // Set to true when parallel execution should abort.
@@ -116,18 +116,18 @@ class js::ForkJoinShared : public TaskEx
     // be invoked on the main thread, either during a rendezvous or
     // after the workers have completed.
     void transferArenasToCompartmentAndProcessGCRequests();
 
     // Invoked during processing by worker threads to "check in".
     bool check(ForkJoinSlice &threadCx);
 
     // Requests a GC, either full or specific to a zone.
-    void requestGC(gcreason::Reason reason);
-    void requestZoneGC(JS::Zone *zone, gcreason::Reason reason);
+    void requestGC(JS::gcreason::Reason reason);
+    void requestZoneGC(JS::Zone *zone, JS::gcreason::Reason reason);
 
     // Requests that computation abort.
     void setAbortFlag(bool fatal);
 
     JSRuntime *runtime() { return cx_->runtime; }
 };
 
 class js::AutoRendezvous
@@ -173,17 +173,17 @@ ForkJoinShared::ForkJoinShared(JSContext
     op_(op),
     numSlices_(numSlices),
     rendezvousEnd_(NULL),
     allocators_(cx),
     uncompleted_(uncompleted),
     blocked_(0),
     rendezvousIndex_(0),
     gcRequested_(false),
-    gcReason_(gcreason::NUM_REASONS),
+    gcReason_(JS::gcreason::NUM_REASONS),
     gcZone_(NULL),
     abort_(false),
     fatal_(false),
     rendezvous_(false)
 { }
 
 bool
 ForkJoinShared::init()
@@ -442,27 +442,27 @@ ForkJoinShared::setAbortFlag(bool fatal)
 
     abort_ = true;
     fatal_ = fatal_ || fatal;
 
     cx_->runtime->triggerOperationCallback();
 }
 
 void
-ForkJoinShared::requestGC(gcreason::Reason reason)
+ForkJoinShared::requestGC(JS::gcreason::Reason reason)
 {
     AutoLockMonitor lock(*this);
 
     gcZone_ = NULL;
     gcReason_ = reason;
     gcRequested_ = true;
 }
 
 void
-ForkJoinShared::requestZoneGC(JS::Zone *zone, gcreason::Reason reason)
+ForkJoinShared::requestZoneGC(JS::Zone *zone, JS::gcreason::Reason reason)
 {
     AutoLockMonitor lock(*this);
 
     if (gcRequested_ && gcZone_ != zone) {
         // If a full GC has been requested, or a GC for another zone,
         // issue a request for a full GC.
         gcZone_ = NULL;
         gcReason_ = reason;
@@ -518,24 +518,24 @@ ForkJoinSlice::InitializeTLS()
         TLSInitialized = true;
         PRStatus status = PR_NewThreadPrivateIndex(&ThreadPrivateIndex, NULL);
         return status == PR_SUCCESS;
     }
     return true;
 }
 
 void
-ForkJoinSlice::requestGC(gcreason::Reason reason)
+ForkJoinSlice::requestGC(JS::gcreason::Reason reason)
 {
     shared->requestGC(reason);
     triggerAbort();
 }
 
 void
-ForkJoinSlice::requestZoneGC(JS::Zone *zone, gcreason::Reason reason)
+ForkJoinSlice::requestZoneGC(JS::Zone *zone, JS::gcreason::Reason reason)
 {
     shared->requestZoneGC(zone, reason);
     triggerAbort();
 }
 
 void
 ForkJoinSlice::triggerAbort()
 {
@@ -566,19 +566,19 @@ class AutoEnterParallelSection
       : cx_(cx),
         prevIonTop_(cx->mainThread().ionTop)
     {
         // Note: we do not allow GC during parallel sections.
         // Moreover, we do not wish to worry about making
         // write barriers thread-safe.  Therefore, we guarantee
         // that there is no incremental GC in progress.
 
-        if (IsIncrementalGCInProgress(cx->runtime)) {
-            PrepareForIncrementalGC(cx->runtime);
-            FinishIncrementalGC(cx->runtime, gcreason::API);
+        if (JS::IsIncrementalGCInProgress(cx->runtime)) {
+            JS::PrepareForIncrementalGC(cx->runtime);
+            JS::FinishIncrementalGC(cx->runtime, JS::gcreason::API);
         }
 
         cx->runtime->gcHelperThread.waitBackgroundSweepEnd();
     }
 
     ~AutoEnterParallelSection() {
         cx_->runtime->mainThread.ionTop = prevIonTop_;
     }
@@ -632,23 +632,23 @@ ForkJoinSlice::check()
 
 bool
 ForkJoinSlice::InitializeTLS()
 {
     return true;
 }
 
 void
-ForkJoinSlice::requestGC(gcreason::Reason reason)
+ForkJoinSlice::requestGC(JS::gcreason::Reason reason)
 {
     JS_NOT_REACHED("No threadsafe, no ion");
 }
 
 void
-ForkJoinSlice::requestZoneGC(JS::Zone *zone, gcreason::Reason reason)
+ForkJoinSlice::requestZoneGC(JS::Zone *zone, JS::gcreason::Reason reason)
 {
     JS_NOT_REACHED("No threadsafe, no ion");
 }
 
 uint32_t
 js::ForkJoinSlices(JSContext *cx)
 {
     return 1;
--- a/js/src/vm/ForkJoin.h
+++ b/js/src/vm/ForkJoin.h
@@ -171,18 +171,18 @@ struct ForkJoinSlice
     // When the code would normally trigger a GC, we don't trigger it
     // immediately but instead record that request here.  This will
     // cause |ExecuteForkJoinOp()| to invoke |TriggerGC()| or
     // |TriggerCompartmentGC()| as appropriate once the parallel
     // section is complete. This is done because those routines do
     // various preparations that are not thread-safe, and because the
     // full set of arenas is not available until the end of the
     // parallel section.
-    void requestGC(gcreason::Reason reason);
-    void requestZoneGC(JS::Zone *zone, gcreason::Reason reason);
+    void requestGC(JS::gcreason::Reason reason);
+    void requestZoneGC(JS::Zone *zone, JS::gcreason::Reason reason);
 
     // During the parallel phase, this method should be invoked
     // periodically, for example on every backedge, similar to the
     // interrupt check.  If it returns false, then the parallel phase
     // has been aborted and so you should bailout.  The function may
     // also rendesvous to perform GC or do other similar things.
     //
     // This function is guaranteed to have no effect if both
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -18,17 +18,17 @@
 #include "jsobjinlines.h"
 #include "gc/Barrier-inl.h"
 #include "gc/StoreBuffer.h"
 
 namespace js {
 
 template <AllowGC allowGC>
 static JS_ALWAYS_INLINE JSInlineString *
-NewShortString(JSContext *cx, Latin1Chars chars)
+NewShortString(JSContext *cx, JS::Latin1Chars chars)
 {
     size_t len = chars.length();
     JS_ASSERT(JSShortString::lengthFits(len));
     RawInlineString str = JSInlineString::lengthFits(len)
                           ? JSInlineString::new_<allowGC>(cx)
                           : JSShortString::new_<allowGC>(cx);
     if (!str)
         return NULL;
@@ -38,17 +38,17 @@ NewShortString(JSContext *cx, Latin1Char
         p[i] = static_cast<jschar>(chars[i]);
     p[len] = '\0';
     Probes::createString(cx, str, len);
     return str;
 }
 
 template <AllowGC allowGC>
 static JS_ALWAYS_INLINE JSInlineString *
-NewShortString(JSContext *cx, StableTwoByteChars chars)
+NewShortString(JSContext *cx, JS::StableTwoByteChars chars)
 {
     size_t len = chars.length();
 
     /*
      * Don't bother trying to find a static atom; measurement shows that not
      * many get here (for one, Atomize is catching them).
      */
     JS_ASSERT(JSShortString::lengthFits(len));
@@ -62,34 +62,34 @@ NewShortString(JSContext *cx, StableTwoB
     PodCopy(storage, chars.start().get(), len);
     storage[len] = 0;
     Probes::createString(cx, str, len);
     return str;
 }
 
 template <AllowGC allowGC>
 static JS_ALWAYS_INLINE JSInlineString *
-NewShortString(JSContext *cx, TwoByteChars chars)
+NewShortString(JSContext *cx, JS::TwoByteChars chars)
 {
     size_t len = chars.length();
 
     /*
      * Don't bother trying to find a static atom; measurement shows that not
      * many get here (for one, Atomize is catching them).
      */
     JS_ASSERT(JSShortString::lengthFits(len));
     JSInlineString *str = JSInlineString::lengthFits(len)
                           ? JSInlineString::new_<NoGC>(cx)
                           : JSShortString::new_<NoGC>(cx);
     if (!str) {
         if (!allowGC)
             return NULL;
         jschar tmp[JSShortString::MAX_SHORT_LENGTH];
         PodCopy(tmp, chars.start().get(), len);
-        return NewShortString<CanGC>(cx, StableTwoByteChars(tmp, len));
+        return NewShortString<CanGC>(cx, JS::StableTwoByteChars(tmp, len));
     }
 
     jschar *storage = str->init(len);
     PodCopy(storage, chars.start().get(), len);
     storage[len] = 0;
     Probes::createString(cx, str, len);
     return str;
 }
@@ -228,17 +228,17 @@ JSDependentString::new_(JSContext *cx, J
 #endif
 
     /*
      * Do not create a string dependent on inline chars from another string,
      * both to avoid the awkward moving-GC hazard this introduces and because it
      * is more efficient to immediately undepend here.
      */
     if (JSShortString::lengthFits(length))
-        return js::NewShortString<js::CanGC>(cx, js::TwoByteChars(chars, length));
+        return js::NewShortString<js::CanGC>(cx, JS::TwoByteChars(chars, length));
 
     JSDependentString *str = (JSDependentString *)js_NewGCString<js::NoGC>(cx);
     if (str) {
         str->init(baseArg, chars, length);
         return str;
     }
 
     JS::Rooted<JSLinearString*> base(cx, baseArg);
--- a/js/src/vm/StringBuffer.h
+++ b/js/src/vm/StringBuffer.h
@@ -44,17 +44,17 @@ class StringBuffer
 
   public:
     explicit StringBuffer(JSContext *cx) : cb(cx) { }
 
     inline bool reserve(size_t len) { return cb.reserve(len); }
     inline bool resize(size_t len) { return cb.resize(len); }
     inline bool append(const jschar c) { return cb.append(c); }
     inline bool append(const jschar *chars, size_t len) { return cb.append(chars, len); }
-    inline bool append(const CharPtr chars, size_t len) { return cb.append(chars.get(), len); }
+    inline bool append(const JS::CharPtr chars, size_t len) { return cb.append(chars.get(), len); }
     inline bool append(const jschar *begin, const jschar *end) { return cb.append(begin, end); }
     inline bool append(JSString *str);
     inline bool append(JSLinearString *str);
     inline bool appendN(const jschar c, size_t n) { return cb.appendN(c, n); }
     inline bool appendInflated(const char *cstr, size_t len);
 
     template <size_t ArrayLength>
     bool append(const char (&array)[ArrayLength]) {
@@ -63,17 +63,17 @@ class StringBuffer
 
     /* Infallible variants usable when the corresponding space is reserved. */
     void infallibleAppend(const jschar c) {
         cb.infallibleAppend(c);
     }
     void infallibleAppend(const jschar *chars, size_t len) {
         cb.infallibleAppend(chars, len);
     }
-    void infallibleAppend(const CharPtr chars, size_t len) {
+    void infallibleAppend(const JS::CharPtr chars, size_t len) {
         cb.infallibleAppend(chars.get(), len);
     }
     void infallibleAppend(const jschar *begin, const jschar *end) {
         cb.infallibleAppend(begin, end);
     }
     void infallibleAppendN(const jschar c, size_t n) {
         cb.infallibleAppendN(c, n);
     }
--- a/js/xpconnect/src/XPCComponents.cpp
+++ b/js/xpconnect/src/XPCComponents.cpp
@@ -4012,36 +4012,36 @@ nsXPCComponents_Utils::GetWeakReference(
     return NS_OK;
 }
 
 /* void forceGC (); */
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceGC()
 {
     JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->GetJSRuntime();
-    js::PrepareForFullGC(rt);
-    js::GCForReason(rt, js::gcreason::COMPONENT_UTILS);
+    JS::PrepareForFullGC(rt);
+    JS::GCForReason(rt, JS::gcreason::COMPONENT_UTILS);
     return NS_OK;
 }
 
 /* void forceCC (); */
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceCC()
 {
     nsJSContext::CycleCollectNow();
     return NS_OK;
 }
 
 /* void forceShrinkingGC (); */
 NS_IMETHODIMP
 nsXPCComponents_Utils::ForceShrinkingGC()
 {
     JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->GetJSRuntime();
-    js::PrepareForFullGC(rt);
-    js::ShrinkingGC(rt, js::gcreason::COMPONENT_UTILS);
+    JS::PrepareForFullGC(rt);
+    JS::ShrinkingGC(rt, JS::gcreason::COMPONENT_UTILS);
     return NS_OK;
 }
 
 class PreciseGCRunnable : public nsRunnable
 {
   public:
     PreciseGCRunnable(ScheduledGCCallback* aCallback, bool aShrinking)
     : mCallback(aCallback), mShrinking(aShrinking) {}
@@ -4053,21 +4053,21 @@ class PreciseGCRunnable : public nsRunna
         JSContext *cx;
         JSContext *iter = nullptr;
         while ((cx = JS_ContextIterator(rt, &iter)) != NULL) {
             if (JS_IsRunning(cx)) {
                 return NS_DispatchToMainThread(this);
             }
         }
 
-        js::PrepareForFullGC(rt);
+        JS::PrepareForFullGC(rt);
         if (mShrinking)
-            js::ShrinkingGC(rt, js::gcreason::COMPONENT_UTILS);
+            JS::ShrinkingGC(rt, JS::gcreason::COMPONENT_UTILS);
         else
-            js::GCForReason(rt, js::gcreason::COMPONENT_UTILS);
+            JS::GCForReason(rt, JS::gcreason::COMPONENT_UTILS);
 
         mCallback->Callback();
         return NS_OK;
     }
 
   private:
     nsRefPtr<ScheduledGCCallback> mCallback;
     bool mShrinking;
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -736,26 +736,26 @@ XPCJSRuntime::ReleaseIncrementally(nsTAr
 
     nsresult rv = NS_DispatchToMainThread(mReleaseRunnable);
     if (NS_FAILED(rv))
         mReleaseRunnable->ReleaseNow(false);
 }
 
 /* static */ void
 XPCJSRuntime::GCSliceCallback(JSRuntime *rt,
-                              js::GCProgress progress,
-                              const js::GCDescription &desc)
+                              JS::GCProgress progress,
+                              const JS::GCDescription &desc)
 {
     XPCJSRuntime *self = nsXPConnect::GetRuntimeInstance();
     if (!self)
         return;
 
 #ifdef MOZ_CRASHREPORTER
-    CrashReporter::SetGarbageCollecting(progress == js::GC_CYCLE_BEGIN ||
-                                        progress == js::GC_SLICE_BEGIN);
+    CrashReporter::SetGarbageCollecting(progress == JS::GC_CYCLE_BEGIN ||
+                                        progress == JS::GC_SLICE_BEGIN);
 #endif
 
     if (self->mPrevGCSliceCallback)
         (*self->mPrevGCSliceCallback)(rt, progress, desc);
 }
 
 /* static */ void
 XPCJSRuntime::GCCallback(JSRuntime *rt, JSGCStatus status)
@@ -784,17 +784,17 @@ XPCJSRuntime::GCCallback(JSRuntime *rt, 
              * don't want these to build up. We also don't want to allow any
              * existing incremental release runnables to run after a
              * non-incremental GC, since they are often used to detect leaks.
              */
             if (self->mReleaseRunnable)
                 self->mReleaseRunnable->ReleaseNow(false);
 
             // Do any deferred releases of native objects.
-            if (js::WasIncrementalGC(rt)) {
+            if (JS::WasIncrementalGC(rt)) {
                 self->ReleaseIncrementally(self->mNativesToReleaseArray);
             } else {
                 DoDeferredRelease(self->mNativesToReleaseArray);
                 for (uint32_t i = 0; i < self->mDeferredFinalizeFunctions.Length(); ++i) {
                     if (void *data = self->mDeferredFinalizeFunctions[i].start())
                         self->mDeferredFinalizeFunctions[i].run(UINT32_MAX, data);
                 }
             }
@@ -1222,17 +1222,17 @@ void XPCJSRuntime::SystemIsBeingShutDown
         mDetachedWrappedNativeProtoMap->
             Enumerate(DetachedWrappedNativeProtoShutdownMarker, nullptr);
 }
 
 XPCJSRuntime::~XPCJSRuntime()
 {
     MOZ_ASSERT(!mReleaseRunnable);
 
-    js::SetGCSliceCallback(mJSRuntime, mPrevGCSliceCallback);
+    JS::SetGCSliceCallback(mJSRuntime, mPrevGCSliceCallback);
 
     xpc_DelocalizeRuntime(mJSRuntime);
 
     if (mWatchdogWakeup) {
         // If the watchdog thread is running, tell it to terminate waking it
         // up if necessary and wait until it signals that it finished. As we
         // must release the lock before calling PR_DestroyCondVar, we use an
         // extra block here.
@@ -2648,17 +2648,17 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* 
     JS_SetNativeStackQuota(mJSRuntime, 2 * 128 * sizeof(size_t) * 1024);
 #else
     JS_SetNativeStackQuota(mJSRuntime, 128 * sizeof(size_t) * 1024);
 #endif
     JS_SetContextCallback(mJSRuntime, ContextCallback);
     JS_SetDestroyCompartmentCallback(mJSRuntime, CompartmentDestroyedCallback);
     JS_SetCompartmentNameCallback(mJSRuntime, CompartmentNameCallback);
     JS_SetGCCallback(mJSRuntime, GCCallback);
-    mPrevGCSliceCallback = js::SetGCSliceCallback(mJSRuntime, GCSliceCallback);
+    mPrevGCSliceCallback = JS::SetGCSliceCallback(mJSRuntime, GCSliceCallback);
     JS_SetFinalizeCallback(mJSRuntime, FinalizeCallback);
     JS_SetExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this);
     JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
     JS_SetWrapObjectCallbacks(mJSRuntime,
                               xpc::WrapperFactory::Rewrap,
                               xpc::WrapperFactory::WrapForSameCompartment,
                               xpc::WrapperFactory::PrepareForWrapping);
     js::SetPreserveWrapperCallback(mJSRuntime, PreserveWrapper);
@@ -2922,17 +2922,17 @@ XPCRootSetElem::AddToRootSet(XPCLock *lo
     }
     *listHead = this;
 }
 
 void
 XPCRootSetElem::RemoveFromRootSet(XPCLock *lock)
 {
     if (nsXPConnect *xpc = nsXPConnect::GetXPConnect())
-        js::PokeGC(xpc->GetRuntime()->GetJSRuntime());
+        JS::PokeGC(xpc->GetRuntime()->GetJSRuntime());
 
     NS_ASSERTION(mSelfp, "Must be linked");
 
     XPCAutoLock autoLock(lock);
 
     NS_ASSERTION(*mSelfp == this, "Link invariant");
     *mSelfp = mNext;
     if (mNext)
--- a/js/xpconnect/src/XPCWrappedNative.cpp
+++ b/js/xpconnect/src/XPCWrappedNative.cpp
@@ -939,34 +939,34 @@ XPCWrappedNative::Destroy()
 
     /*
      * The only time GetRuntime() will be NULL is if Destroy is called a second
      * time on a wrapped native. Since we already unregistered the pointer the
      * first time, there's no need to unregister again. Unregistration is safe
      * the first time because mWrapperWord isn't used afterwards.
      */
     if (XPCJSRuntime *rt = GetRuntime()) {
-        if (js::IsIncrementalBarrierNeeded(rt->GetJSRuntime()))
-            js::IncrementalObjectBarrier(GetWrapperPreserveColor());
+        if (JS::IsIncrementalBarrierNeeded(rt->GetJSRuntime()))
+            JS::IncrementalObjectBarrier(GetWrapperPreserveColor());
         mWrapperWord = WRAPPER_WORD_POISON;
     } else {
         MOZ_ASSERT(mWrapperWord == WRAPPER_WORD_POISON);
     }
 
     mMaybeScope = nullptr;
 }
 
 void
 XPCWrappedNative::UpdateScriptableInfo(XPCNativeScriptableInfo *si)
 {
     NS_ASSERTION(mScriptableInfo, "UpdateScriptableInfo expects an existing scriptable info");
 
     // Write barrier for incremental GC.
     JSRuntime* rt = GetRuntime()->GetJSRuntime();
-    if (js::IsIncrementalBarrierNeeded(rt))
+    if (JS::IsIncrementalBarrierNeeded(rt))
         mScriptableInfo->Mark();
 
     mScriptableInfo = si;
 }
 
 void
 XPCWrappedNative::SetProto(XPCWrappedNativeProto* p)
 {
--- a/js/xpconnect/src/nsXPConnect.cpp
+++ b/js/xpconnect/src/nsXPConnect.cpp
@@ -359,22 +359,22 @@ nsXPConnect::Collect(uint32_t reason)
     // will already be marked by the JS GC and will thus be colored black
     // themselves. Any C++ objects they hold will have a missing (untraversed)
     // edge from the JS object to the C++ object and so it will be marked black
     // too. This decreases the number of objects that the cycle collector has to
     // deal with.
     // To improve debugging, if WantAllTraces() is true all JS objects are
     // traversed.
 
-    MOZ_ASSERT(reason < js::gcreason::NUM_REASONS);
-    js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
+    MOZ_ASSERT(reason < JS::gcreason::NUM_REASONS);
+    JS::gcreason::Reason gcreason = (JS::gcreason::Reason)reason;
 
     JSRuntime *rt = GetRuntime()->GetJSRuntime();
-    js::PrepareForFullGC(rt);
-    js::GCForReason(rt, gcreason);
+    JS::PrepareForFullGC(rt);
+    JS::GCForReason(rt, gcreason);
 }
 
 NS_IMETHODIMP
 nsXPConnect::GarbageCollect(uint32_t reason)
 {
     Collect(reason);
     return NS_OK;
 }
@@ -490,17 +490,17 @@ struct FixWeakMappingGrayBitsTracer : pu
 
 private:
 
     static void
     FixWeakMappingGrayBits(js::WeakMapTracer *trc, JSObject *m,
                            void *k, JSGCTraceKind kkind,
                            void *v, JSGCTraceKind vkind)
     {
-        MOZ_ASSERT(!js::IsIncrementalGCInProgress(trc->runtime),
+        MOZ_ASSERT(!JS::IsIncrementalGCInProgress(trc->runtime),
                    "Don't call FixWeakMappingGrayBits during a GC.");
 
         FixWeakMappingGrayBitsTracer *tracer = static_cast<FixWeakMappingGrayBitsTracer*>(trc);
 
         // If nothing that could be held alive by this entry is marked gray, return.
         bool delegateMightNeedMarking = k && xpc_IsGrayGCThing(k);
         bool valueMightNeedMarking = v && xpc_IsGrayGCThing(v) && vkind != JSTRACE_STRING;
         if (!delegateMightNeedMarking && !valueMightNeedMarking)
@@ -2472,17 +2472,17 @@ nsXPConnect::GetTelemetryValue(JSContext
 
     *rval = OBJECT_TO_JSVAL(obj);
     return NS_OK;
 }
 
 NS_IMETHODIMP
 nsXPConnect::NotifyDidPaint()
 {
-    js::NotifyDidPaint(GetRuntime()->GetJSRuntime());
+    JS::NotifyDidPaint(GetRuntime()->GetJSRuntime());
     return NS_OK;
 }
 
 const uint8_t HAS_PRINCIPALS_FLAG               = 1;
 const uint8_t HAS_ORIGIN_PRINCIPALS_FLAG        = 2;
 
 static nsresult
 WriteScriptOrFunction(nsIObjectOutputStream *stream, JSContext *cx,
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -797,18 +797,18 @@ public:
     static void TraceBlackJS(JSTracer* trc, void* data);
     static void TraceGrayJS(JSTracer* trc, void* data);
     void TraceXPConnectRoots(JSTracer *trc);
     void AddXPConnectRoots(nsCycleCollectionTraversalCallback& cb);
     void UnmarkSkippableJSHolders();
 
     static void GCCallback(JSRuntime *rt, JSGCStatus status);
     static void GCSliceCallback(JSRuntime *rt,
-                                js::GCProgress progress,
-                                const js::GCDescription &desc);
+                                JS::GCProgress progress,
+                                const JS::GCDescription &desc);
     static void FinalizeCallback(JSFreeOp *fop, JSFinalizeStatus status, JSBool isCompartmentGC);
 
     inline void AddVariantRoot(XPCTraceableVariant* variant);
     inline void AddWrappedJSRoot(nsXPCWrappedJS* wrappedJS);
     inline void AddObjectHolderRoot(XPCJSObjectHolder* holder);
 
     void AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer);
     void RemoveJSHolder(void* aHolder);
@@ -952,17 +952,17 @@ private:
     nsDataHashtable<nsPtrHashKey<void>, nsScriptObjectTracer*> mJSHolders;
     PRLock *mWatchdogLock;
     PRCondVar *mWatchdogWakeup;
     PRThread *mWatchdogThread;
     nsTArray<JSGCCallback> extraGCCallbacks;
     bool mWatchdogHibernating;
     PRTime mLastActiveTime; // -1 if active NOW
     nsRefPtr<XPCIncrementalReleaseRunnable> mReleaseRunnable;
-    js::GCSliceCallback mPrevGCSliceCallback;
+    JS::GCSliceCallback mPrevGCSliceCallback;
 
     nsCOMPtr<nsIException>   mPendingException;
     nsCOMPtr<nsIExceptionManager> mExceptionManager;
     bool mExceptionManagerNotAvailable;
 
 #define XPCCCX_STRING_CACHE_SIZE 2
 
     // String wrapper entry, holds a string, and a boolean that tells
@@ -1720,22 +1720,22 @@ private:
     ClassInfo2WrappedNativeProtoMap* mWrappedNativeProtoMap;
     ClassInfo2WrappedNativeProtoMap* mMainThreadWrappedNativeProtoMap;
     nsRefPtr<nsXPCComponents>        mComponents;
     XPCWrappedNativeScope*           mNext;
     // The JS global object for this scope.  If non-null, this will be the
     // default parent for the XPCWrappedNatives that have us as the scope,
     // unless a PreCreate hook overrides it.  Note that this _may_ be null (see
     // constructor).
-    js::ObjectPtr                    mGlobalJSObject;
+    JS::ObjectPtr                    mGlobalJSObject;
 
     // XBL Scope. This is is a lazily-created sandbox for non-system scopes.
     // EnsureXBLScope() decides whether it needs to be created or not.
     // This reference is wrapped into the compartment of mGlobalJSObject.
-    js::ObjectPtr                    mXBLScope;
+    JS::ObjectPtr                    mXBLScope;
 
     // Prototype to use for wrappers with no helper.
     JSObject*                        mPrototypeNoHelper;
 
     XPCContext*                      mContext;
 
     nsAutoPtr<DOMExpandoMap> mDOMExpandoMap;
 
@@ -2434,17 +2434,17 @@ public:
 
     void TraceJS(JSTracer *trc) {
         TraceSelf(trc);
         TraceInside(trc);
     }
 
     void WriteBarrierPre(JSRuntime* rt)
     {
-        if (js::IsIncrementalBarrierNeeded(rt) && mJSProtoObject)
+        if (JS::IsIncrementalBarrierNeeded(rt) && mJSProtoObject)
             mJSProtoObject.writeBarrierPre(rt);
     }
 
     // NOP. This is just here to make the AutoMarkingPtr code compile.
     inline void AutoTrace(JSTracer* trc) {}
 
     // Yes, we *do* need to mark the mScriptableInfo in both cases.
     void Mark() const
@@ -2481,17 +2481,17 @@ private:
 private:
     bool
     InitedOffsets()
     {
         return mOffsets != UNKNOWN_OFFSETS;
     }
 
     XPCWrappedNativeScope*   mScope;
-    js::ObjectPtr            mJSProtoObject;
+    JS::ObjectPtr            mJSProtoObject;
     nsCOMPtr<nsIClassInfo>   mClassInfo;
     uint32_t                 mClassInfoFlags;
     XPCNativeSet*            mSet;
     void*                    mSecurityInfo;
     XPCNativeScriptableInfo* mScriptableInfo;
     QITableEntry*            mOffsets;
 };
 
@@ -2888,17 +2888,17 @@ public:
             xpc_UnmarkGrayObject(wrapper);
             // Call this to unmark mFlatJSObject.
             GetFlatJSObject();
         }
         return wrapper;
     }
     void SetWrapper(JSObject *obj)
     {
-        js::IncrementalObjectBarrier(GetWrapperPreserveColor());
+        JS::IncrementalObjectBarrier(GetWrapperPreserveColor());
         intptr_t newval = intptr_t(obj) | (mWrapperWord & FLAG_MASK);
         mWrapperWord = newval;
     }
 
     // Returns the relevant same-compartment security if applicable, or
     // mFlatJSObject otherwise.
     //
     // This takes care of checking mWrapperWord to see if we already have such
--- a/js/xpconnect/src/xpcpublic.h
+++ b/js/xpconnect/src/xpcpublic.h
@@ -231,17 +231,17 @@ public:
                                  nsStringBuffer** sharedBuffer);
 
     // Convert the given stringbuffer/length pair to a jsval
     static MOZ_ALWAYS_INLINE bool
     StringBufferToJSVal(JSContext* cx, nsStringBuffer* buf, uint32_t length,
                         JS::Value* rval, bool* sharedBuffer)
     {
         if (buf == sCachedBuffer &&
-            js::GetGCThingZone(sCachedString) == js::GetContextZone(cx))
+            JS::GetGCThingZone(sCachedString) == js::GetContextZone(cx))
         {
             *rval = JS::StringValue(sCachedString);
             *sharedBuffer = false;
             return true;
         }
 
         JSString *str = JS_NewExternalString(cx,
                                              static_cast<jschar*>(buf->Data()),
--- a/layout/base/nsDocumentViewer.cpp
+++ b/layout/base/nsDocumentViewer.cpp
@@ -1265,17 +1265,17 @@ nsDocumentViewer::PageHide(bool aIsUnloa
   // inform the window so that the focus state is reset.
   NS_ENSURE_STATE(mDocument);
   nsPIDOMWindow *window = mDocument->GetWindow();
   if (window)
     window->PageHidden();
 
   if (aIsUnload) {
     // Poke the GC. The window might be collectable garbage now.
-    nsJSContext::PokeGC(js::gcreason::PAGE_HIDE, NS_GC_DELAY * 2);
+    nsJSContext::PokeGC(JS::gcreason::PAGE_HIDE, NS_GC_DELAY * 2);
 
     // if Destroy() was called during OnPageHide(), mDocument is nullptr.
     NS_ENSURE_STATE(mDocument);
 
     // First, get the window from the document...
     nsPIDOMWindow *window = mDocument->GetWindow();
 
     if (!window) {
--- a/xpcom/base/nsCycleCollector.cpp
+++ b/xpcom/base/nsCycleCollector.cpp
@@ -1551,17 +1551,17 @@ private:
         }
         ++childPi->mInternalRefs;
     }
 
     JS::Zone *MergeZone(void *gcthing) {
         if (!mMergeZones) {
             return nullptr;
         }
-        JS::Zone *zone = js::GetGCThingZone(gcthing);
+        JS::Zone *zone = JS::GetGCThingZone(gcthing);
         if (js::IsSystemZone(zone)) {
             return nullptr;
         }
         return zone;
     }
 };
 
 GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,