Bug 784739 - Switch from NULL to nullptr in js/src/gc/; r=ehsan
author Birunthan Mohanathas <birunthan@mohanathas.com>
Tue, 24 Sep 2013 23:00:04 -0400
changeset 148622 ce4978329418c70b91c34560bf51b0691987183a
parent 148621 c4aa9fdddb72432391fbb394be4ae3ad9da64f10
child 148623 b01149a187a51f4b5f23e58df3d192eed02f5dda
push id 25349
push user ryanvm@gmail.com
push date Wed, 25 Sep 2013 18:52:12 +0000
treeherder mozilla-central@39f30376058c
reviewers ehsan
bugs 784739
milestone 27.0a1
Bug 784739 - Switch from NULL to nullptr in js/src/gc/; r=ehsan
js/src/gc/Barrier.h
js/src/gc/FindSCCs.h
js/src/gc/GCInternals.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Memory.cpp
js/src/gc/Nursery-inl.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/RootMarking.cpp
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Tracer.cpp
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -245,31 +245,31 @@ class EncapsulatedPtr
 {
   protected:
     union {
         T *value;
         Unioned other;
     };
 
   public:
-    EncapsulatedPtr() : value(NULL) {}
+    EncapsulatedPtr() : value(nullptr) {}
     EncapsulatedPtr(T *v) : value(v) {}
     explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v) : value(v.value) {}
 
     ~EncapsulatedPtr() { pre(); }
 
     void init(T *v) {
         JS_ASSERT(!IsPoisonedPtr<T>(v));
         this->value = v;
     }
 
-    /* Use to set the pointer to NULL. */
+    /* Use to set the pointer to nullptr. */
     void clear() {
         pre();
-        value = NULL;
+        value = nullptr;
     }
 
     EncapsulatedPtr<T, Unioned> &operator=(T *v) {
         pre();
         JS_ASSERT(!IsPoisonedPtr<T>(v));
         value = v;
         return *this;
     }
@@ -301,17 +301,17 @@ class EncapsulatedPtr
   protected:
     void pre() { T::writeBarrierPre(value); }
 };
 
 template <class T, class Unioned = uintptr_t>
 class HeapPtr : public EncapsulatedPtr<T, Unioned>
 {
   public:
-    HeapPtr() : EncapsulatedPtr<T>(NULL) {}
+    HeapPtr() : EncapsulatedPtr<T>(nullptr) {}
     explicit HeapPtr(T *v) : EncapsulatedPtr<T>(v) { post(); }
     explicit HeapPtr(const HeapPtr<T> &v)
       : EncapsulatedPtr<T>(v) { post(); }
 
     void init(T *v) {
         JS_ASSERT(!IsPoisonedPtr<T>(v));
         this->value = v;
         post();
@@ -373,17 +373,17 @@ class FixedHeapPtr
         value = ptr;
     }
 };
 
 template <class T>
 class RelocatablePtr : public EncapsulatedPtr<T>
 {
   public:
-    RelocatablePtr() : EncapsulatedPtr<T>(NULL) {}
+    RelocatablePtr() : EncapsulatedPtr<T>(nullptr) {}
     explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) {
         if (v)
             post();
     }
     RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
         if (this->value)
             post();
     }
@@ -1044,23 +1044,23 @@ class HeapId : public EncapsulatedId
  * this, we mark these empty shapes black whenever they get read out.
  */
 template<class T>
 class ReadBarriered
 {
     T *value;
 
   public:
-    ReadBarriered() : value(NULL) {}
+    ReadBarriered() : value(nullptr) {}
     ReadBarriered(T *value) : value(value) {}
     ReadBarriered(const Rooted<T*> &rooted) : value(rooted) {}
 
     T *get() const {
         if (!value)
-            return NULL;
+            return nullptr;
         T::readBarrier(value);
         return value;
     }
 
     operator T*() const { return get(); }
 
     T &operator*() const { return *get(); }
     T *operator->() const { return get(); }
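
The EncapsulatedPtr hierarchy above is SpiderMonkey's pre-write-barrier smart pointer: every overwrite first runs T::writeBarrierPre on the old value so incremental marking cannot lose an object that was reachable when marking started. A minimal standalone sketch of the idea (the class name is illustrative; only the T::writeBarrierPre hook mirrors the code in this diff):

template <class T>
class BarrieredPtr
{
    T *value;

  protected:
    // Run the pre-write barrier on the old value before it is overwritten,
    // so incremental marking can still visit whatever it pointed to.
    void pre() { T::writeBarrierPre(value); }

  public:
    BarrieredPtr() : value(nullptr) {}
    ~BarrieredPtr() { pre(); }

    // Clearing is just another write and goes through the same barrier.
    void clear() {
        pre();
        value = nullptr;
    }

    BarrieredPtr &operator=(T *v) {
        pre();
        value = v;
        return *this;
    }

    operator T *() const { return value; }
};
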
--- a/js/src/gc/FindSCCs.h
+++ b/js/src/gc/FindSCCs.h
@@ -16,27 +16,27 @@ template<class Node>
 struct GraphNodeBase
 {
     Node           *gcNextGraphNode;
     Node           *gcNextGraphComponent;
     unsigned       gcDiscoveryTime;
     unsigned       gcLowLink;
 
     GraphNodeBase()
-      : gcNextGraphNode(NULL),
-        gcNextGraphComponent(NULL),
+      : gcNextGraphNode(nullptr),
+        gcNextGraphComponent(nullptr),
         gcDiscoveryTime(0),
         gcLowLink(0) {}
 
     ~GraphNodeBase() {}
 
     Node *nextNodeInGroup() const {
         if (gcNextGraphNode && gcNextGraphNode->gcNextGraphComponent == gcNextGraphComponent)
             return gcNextGraphNode;
-        return NULL;
+        return nullptr;
     }
 
     Node *nextGroup() const {
         return gcNextGraphComponent;
     }
 };
 
 /*
@@ -60,19 +60,19 @@ struct GraphNodeBase
  * finder.addNode(v);
  */
 template<class Node>
 class ComponentFinder
 {
   public:
     ComponentFinder(uintptr_t sl)
       : clock(1),
-        stack(NULL),
-        firstComponent(NULL),
-        cur(NULL),
+        stack(nullptr),
+        firstComponent(nullptr),
+        cur(nullptr),
         stackLimit(sl),
         stackFull(false)
     {}
 
     ~ComponentFinder() {
         JS_ASSERT(!stack);
         JS_ASSERT(!firstComponent);
     }
@@ -101,29 +101,29 @@ class ComponentFinder
                 firstComponent = v;
             }
             stackFull = false;
         }
 
         JS_ASSERT(!stack);
 
         Node *result = firstComponent;
-        firstComponent = NULL;
+        firstComponent = nullptr;
 
         for (Node *v = result; v; v = v->gcNextGraphNode) {
             v->gcDiscoveryTime = Undefined;
             v->gcLowLink = Undefined;
         }
 
         return result;
     }
 
     static void mergeGroups(Node *first) {
         for (Node *v = first; v; v = v->gcNextGraphNode)
-            v->gcNextGraphComponent = NULL;
+            v->gcNextGraphComponent = nullptr;
     }
 
   public:
     /* Call from implementation of GraphNodeBase::findOutgoingEdges(). */
     void addEdgeTo(Node *w) {
         if (w->gcDiscoveryTime == Undefined) {
             processNode(w);
             cur->gcLowLink = Min(cur->gcLowLink, w->gcLowLink);
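
ComponentFinder is an iterative Tarjan strongly-connected-components pass: gcDiscoveryTime and gcLowLink are Tarjan's index and lowlink values. For reference, a compact recursive version of the same algorithm over a plain integer adjacency list (a sketch only; the GC version additionally handles stack exhaustion via stackLimit/stackFull):

#include <algorithm>
#include <cstddef>
#include <stack>
#include <vector>

struct Tarjan
{
    const std::vector<std::vector<int> > &adj;
    std::vector<int> index, lowlink;
    std::vector<bool> onStack;
    std::stack<int> stack;
    std::vector<std::vector<int> > sccs;
    int clock;

    explicit Tarjan(const std::vector<std::vector<int> > &g)
      : adj(g), index(g.size(), -1), lowlink(g.size(), 0),
        onStack(g.size(), false), clock(0) {}

    void visit(int v) {
        index[v] = lowlink[v] = clock++;
        stack.push(v);
        onStack[v] = true;

        for (size_t i = 0; i < adj[v].size(); i++) {
            int w = adj[v][i];
            if (index[w] == -1) {
                visit(w);                                    // tree edge
                lowlink[v] = std::min(lowlink[v], lowlink[w]);
            } else if (onStack[w]) {
                lowlink[v] = std::min(lowlink[v], index[w]); // back edge
            }
        }

        if (lowlink[v] == index[v]) {   // v roots a component: pop it off
            std::vector<int> scc;
            int w;
            do {
                w = stack.top();
                stack.pop();
                onStack[w] = false;
                scc.push_back(w);
            } while (w != v);
            sccs.push_back(scc);
        }
    }
};
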
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -64,24 +64,24 @@ struct AutoPrepareForTracing
 
 class IncrementalSafety
 {
     const char *reason_;
 
     IncrementalSafety(const char *reason) : reason_(reason) {}
 
   public:
-    static IncrementalSafety Safe() { return IncrementalSafety(NULL); }
+    static IncrementalSafety Safe() { return IncrementalSafety(nullptr); }
     static IncrementalSafety Unsafe(const char *reason) { return IncrementalSafety(reason); }
 
     typedef void (IncrementalSafety::* ConvertibleToBool)();
     void nonNull() {}
 
     operator ConvertibleToBool() const {
-        return reason_ == NULL ? &IncrementalSafety::nonNull : 0;
+        return reason_ == nullptr ? &IncrementalSafety::nonNull : 0;
     }
 
     const char *reason() {
         JS_ASSERT(reason_);
         return reason_;
     }
 };
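
The ConvertibleToBool machinery in IncrementalSafety is the pre-C++11 "safe bool" idiom: converting to a pointer-to-member function lets `if (safety)` compile while rejecting accidental arithmetic or comparison with int. A standalone sketch with a hypothetical Status class:

class Status
{
    const char *reason_;

  public:
    explicit Status(const char *reason) : reason_(reason) {}

    typedef void (Status::* ConvertibleToBool)();
    void nonNull() {}

    // A nullptr reason means "safe": convert to a non-null member-function
    // pointer (true in boolean context); otherwise convert to 0 (false).
    operator ConvertibleToBool() const {
        return reason_ == nullptr ? &Status::nonNull : 0;
    }
};

// if (Status(nullptr)) { ... }   // taken: no failure reason recorded
// if (Status("oom"))   { ... }   // not taken

With C++11 available, `explicit operator bool() const` achieves the same safety more directly.
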
 
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -278,17 +278,17 @@ struct FreeSpan
             first = thing + thingSize;
         } else if (JS_LIKELY(thing == last)) {
             /*
              * Move to the next span. We use JS_LIKELY as without PGO
              * compilers mis-predict == here as unlikely to succeed.
              */
             *this = *reinterpret_cast<FreeSpan *>(thing);
         } else {
-            return NULL;
+            return nullptr;
         }
         checkSpan();
         return reinterpret_cast<void *>(thing);
     }
 
     /* A version of allocate when we know that the span is not empty. */
     MOZ_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
         JS_ASSERT(thingSize % CellSize == 0);
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -20,19 +20,19 @@
 #include "gc/Nursery-inl.h"
 #include "vm/String-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 using mozilla::DebugOnly;
 
-void * const js::NullPtr::constNullValue = NULL;
+void * const js::NullPtr::constNullValue = nullptr;
 
-JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = NULL;
+JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
 
 /*
  * There are two mostly separate mark paths. The first is a fast path used
  * internally in the GC. The second is a slow path used for root marking and
  * for API consumers like the cycle collector or Class::trace implementations.
  *
  * The fast path uses explicit stacks. The basic marking process during a GC is
  * that all roots are pushed on to a mark stack, and then each item on the
@@ -189,18 +189,18 @@ MarkInternal(JSTracer *trc, T **thingp)
 
         PushMarkStack(AsGCMarker(trc), thing);
         thing->zone()->maybeAlive = true;
     } else {
         trc->callback(trc, (void **)thingp, MapTypeToTraceKind<T>::kind);
         JS_UNSET_TRACING_LOCATION(trc);
     }
 
-    trc->debugPrinter = NULL;
-    trc->debugPrintArg = NULL;
+    trc->debugPrinter = nullptr;
+    trc->debugPrintArg = nullptr;
 }
 
 #define JS_ROOT_MARKING_ASSERT(trc)                                     \
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),                             \
                  trc->runtime->gcIncrementalState == NO_INCREMENTAL ||  \
                  trc->runtime->gcIncrementalState == MARK_ROOTS);
 
 namespace js {
@@ -959,17 +959,17 @@ ScanRope(GCMarker *gcmarker, JSRope *rop
 {
     ptrdiff_t savedPos = gcmarker->stack.position();
     JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
     for (;;) {
         JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
         JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
         JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime, rope);
         JS_ASSERT(rope->isMarked());
-        JSRope *next = NULL;
+        JSRope *next = nullptr;
 
         JSString *right = rope->rightChild();
         if (right->markIfUnmarked()) {
             if (right->isLinear())
                 ScanLinearString(gcmarker, &right->asLinear());
             else
                 next = &right->asRope();
         }
@@ -1108,17 +1108,17 @@ MarkCycleCollectorChildren(JSTracer *trc
  * shapes, so those are not marked. Instead, any shapes or base shapes
  * that are encountered have their children marked. Stack space is
  * bounded. If two shapes in a row have the same parent pointer, the
  * parent pointer will only be marked once.
  */
 void
 gc::MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
 {
-    JSObject *prevParent = NULL;
+    JSObject *prevParent = nullptr;
     do {
         MarkCycleCollectorChildren(trc, shape->base(), &prevParent);
         MarkId(trc, &shape->propidRef(), "propid");
         shape = shape->previous();
     } while (shape);
 }
 
 static void
@@ -1606,33 +1606,33 @@ UnmarkGrayChildren(JSTracer *trc, void *
 
 struct UnmarkGrayTracer : public JSTracer
 {
     /*
      * We set eagerlyTraceWeakMaps to false because the cycle collector will fix
      * up any color mismatches involving weakmaps when it runs.
      */
     UnmarkGrayTracer(JSRuntime *rt)
-      : tracingShape(false), previousShape(NULL), unmarkedAny(false)
+      : tracingShape(false), previousShape(nullptr), unmarkedAny(false)
     {
         JS_TracerInit(this, rt, UnmarkGrayChildren);
         eagerlyTraceWeakMaps = DoNotTraceWeakMaps;
     }
 
     UnmarkGrayTracer(JSTracer *trc, bool tracingShape)
-      : tracingShape(tracingShape), previousShape(NULL), unmarkedAny(false)
+      : tracingShape(tracingShape), previousShape(nullptr), unmarkedAny(false)
     {
         JS_TracerInit(this, trc->runtime, UnmarkGrayChildren);
         eagerlyTraceWeakMaps = DoNotTraceWeakMaps;
     }
 
     /* True iff we are tracing the immediate children of a shape. */
     bool tracingShape;
 
-    /* If tracingShape, shape child or NULL. Otherwise, NULL. */
+    /* If tracingShape, shape child or nullptr. Otherwise, nullptr. */
     void *previousShape;
 
     /* Whether we unmarked anything. */
     bool unmarkedAny;
 };
 
 /*
  * The GC and CC are run independently. Consequently, the following sequence of
@@ -1708,17 +1708,17 @@ UnmarkGrayChildren(JSTracer *trc, void *
         tracer->previousShape = thing;
         return;
     }
 
     do {
         JS_ASSERT(!JS::GCThingIsMarkedGray(thing));
         JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
         thing = childTracer.previousShape;
-        childTracer.previousShape = NULL;
+        childTracer.previousShape = nullptr;
     } while (thing);
     tracer->unmarkedAny |= childTracer.unmarkedAny;
 }
 
 JS_FRIEND_API(bool)
 JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
 {
     JS_ASSERT(kind != JSTRACE_SHAPE);
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -110,17 +110,19 @@ DeclMarker(String, JSAtom)
 DeclMarker(String, JSString)
 DeclMarker(String, JSFlatString)
 DeclMarker(String, JSLinearString)
 DeclMarker(String, PropertyName)
 DeclMarker(TypeObject, types::TypeObject)
 
 #undef DeclMarker
 
-/* Return true if the pointer is NULL, or if it is a tagged pointer to NULL. */
+/* Return true if the pointer is nullptr, or if it is a tagged pointer to
+ * nullptr.
+ */
 JS_ALWAYS_INLINE bool
 IsNullTaggedPointer(void *p)
 {
     return uintptr_t(p) < 32;
 }
 
 /*** Externally Typed Marking ***/
 
@@ -361,17 +363,17 @@ IsAboutToBeFinalized(ReadBarriered<js::j
 }
 #endif
 
 inline Cell *
 ToMarkable(const Value &v)
 {
     if (v.isMarkable())
         return (Cell *)v.toGCThing();
-    return NULL;
+    return nullptr;
 }
 
 inline Cell *
 ToMarkable(Cell *cell)
 {
     return cell;
 }
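
IsNullTaggedPointer treats every address below 32 as null because, per its comment, a pointer may carry a small tag in its low bits, so a tagged null is a small nonzero address rather than 0. A hypothetical demonstration (TagPointer is illustrative, not from the tree):

#include <stdint.h>

inline bool
IsNullTaggedPointer(void *p)
{
    return uintptr_t(p) < 32;
}

inline void *
TagPointer(void *p, uintptr_t tag)
{
    return (void *)(uintptr_t(p) | tag);   // assumes tag fits in the low bits
}

// IsNullTaggedPointer(nullptr)                -> true
// IsNullTaggedPointer(TagPointer(nullptr, 3)) -> true  (tagged null)
// Real GC cells are heap allocations, so their addresses are never below 32.
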
 
--- a/js/src/gc/Memory.cpp
+++ b/js/src/gc/Memory.cpp
@@ -36,38 +36,38 @@ gc::MapAlignedPages(JSRuntime *rt, size_
 {
     JS_ASSERT(size >= alignment);
     JS_ASSERT(size % alignment == 0);
     JS_ASSERT(size % rt->gcSystemPageSize == 0);
     JS_ASSERT(alignment % rt->gcSystemAllocGranularity == 0);
 
     /* Special case: If we want allocation alignment, no further work is needed. */
     if (alignment == rt->gcSystemAllocGranularity) {
-        return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
+        return VirtualAlloc(nullptr, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
     }
 
     /*
      * Windows requires that there be a 1:1 mapping between VM allocation
      * and deallocation operations.  Therefore, take care here to acquire the
      * final result via one mapping operation.  This means unmapping any
      * preliminary result that is not correctly aligned.
      */
-    void *p = NULL;
+    void *p = nullptr;
     while (!p) {
         /*
          * Over-allocate in order to map a memory region that is
         * definitely large enough, then deallocate and allocate again the
         * correct size, within the over-sized mapping.
          *
          * Since we're going to unmap the whole thing anyway, the first
          * mapping doesn't have to commit pages.
          */
-        p = VirtualAlloc(NULL, size * 2, MEM_RESERVE, PAGE_READWRITE);
+        p = VirtualAlloc(nullptr, size * 2, MEM_RESERVE, PAGE_READWRITE);
         if (!p)
-            return NULL;
+            return nullptr;
         void *chunkStart = (void *)(uintptr_t(p) + (alignment - (uintptr_t(p) % alignment)));
         UnmapPages(rt, p, size * 2);
         p = VirtualAlloc(chunkStart, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
 
         /* Failure here indicates a race with another thread, so try again. */
     }
 
     JS_ASSERT(uintptr_t(p) % alignment == 0);
@@ -141,17 +141,17 @@ gc::UnmapPages(JSRuntime *rt, void *addr
 
     return;
 }
 
 static void *
 MapAlignedPagesRecursively(JSRuntime *rt, size_t size, size_t alignment, int& recursions)
 {
     if (++recursions >= OS2_MAX_RECURSIONS)
-        return NULL;
+        return nullptr;
 
     void *tmp;
     if (DosAllocMem(&tmp, size,
                     OBJ_ANY | PAG_COMMIT | PAG_READ | PAG_WRITE)) {
         JS_ALWAYS_TRUE(DosAllocMem(&tmp, size,
                                    PAG_COMMIT | PAG_READ | PAG_WRITE) == 0);
     }
     size_t offset = reinterpret_cast<uintptr_t>(tmp) & (alignment - 1);
@@ -262,17 +262,17 @@ gc::MapAlignedPages(JSRuntime *rt, size_
     JS_ASSERT(size % rt->gcSystemPageSize == 0);
     JS_ASSERT(alignment % rt->gcSystemAllocGranularity == 0);
 
     int prot = PROT_READ | PROT_WRITE;
     int flags = MAP_PRIVATE | MAP_ANON | MAP_ALIGN | MAP_NOSYNC;
 
     void *p = mmap((caddr_t)alignment, size, prot, flags, -1, 0);
     if (p == MAP_FAILED)
-        return NULL;
+        return nullptr;
     return p;
 }
 
 void
 gc::UnmapPages(JSRuntime *rt, void *p, size_t size)
 {
     JS_ALWAYS_TRUE(0 == munmap((caddr_t)p, size));
 }
@@ -313,17 +313,17 @@ static inline void *
 MapMemory(size_t length, int prot, int flags, int fd, off_t offset)
 {
 #if defined(__ia64__)
     /*
      * The JS engine assumes that all allocated pointers have their high 17 bits clear,
      * which ia64's mmap doesn't support directly. However, we can emulate it by passing
      * mmap an "addr" parameter with those bits clear. The mmap will return that address,
      * or the nearest available memory above that address, providing a near-guarantee
-     * that those bits are clear. If they are not, we return NULL below to indicate
+     * that those bits are clear. If they are not, we return nullptr below to indicate
      * out-of-memory.
      *
      * The addr is chosen as 0x0000070000000000, which still allows about 120TB of virtual
      * address space.
      *
      * See Bug 589735 for more information.
      */
     void *region = mmap((void*)0x0000070000000000, length, prot, flags, fd, offset);
@@ -334,17 +334,17 @@ MapMemory(size_t length, int prot, int f
      * as out of memory.
      */
     if ((uintptr_t(region) + (length - 1)) & 0xffff800000000000) {
         JS_ALWAYS_TRUE(0 == munmap(region, length));
         return MAP_FAILED;
     }
     return region;
 #else
-    return mmap(NULL, length, prot, flags, fd, offset);
+    return mmap(nullptr, length, prot, flags, fd, offset);
 #endif
 }
 
 void *
 gc::MapAlignedPages(JSRuntime *rt, size_t size, size_t alignment)
 {
     JS_ASSERT(size >= alignment);
     JS_ASSERT(size % alignment == 0);
@@ -353,25 +353,25 @@ gc::MapAlignedPages(JSRuntime *rt, size_
 
     int prot = PROT_READ | PROT_WRITE;
     int flags = MAP_PRIVATE | MAP_ANON;
 
     /* Special case: If we want page alignment, no further work is needed. */
     if (alignment == rt->gcSystemAllocGranularity) {
         void *region = MapMemory(size, prot, flags, -1, 0);
         if (region == MAP_FAILED)
-            return NULL;
+            return nullptr;
         return region;
     }
 
     /* Overallocate and unmap the region's edges. */
     size_t reqSize = Min(size + 2 * alignment, 2 * size);
     void *region = MapMemory(reqSize, prot, flags, -1, 0);
     if (region == MAP_FAILED)
-        return NULL;
+        return nullptr;
 
     uintptr_t regionEnd = uintptr_t(region) + reqSize;
     uintptr_t offset = uintptr_t(region) % alignment;
     JS_ASSERT(offset < reqSize - size);
 
     void *front = (void *)(uintptr_t(region) + (alignment - offset));
     void *end = (void *)(uintptr_t(front) + size);
     if (front != region)
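
The POSIX MapAlignedPages above over-allocates and then unmaps the region's misaligned edges. A reduced sketch of that strategy, assuming a power-of-two alignment that is a multiple of the page size and a platform that defines MAP_ANON (the real code also special-cases alignment == allocation granularity and the ia64 address-bit constraint):

#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>

static void *
MapAligned(size_t size, size_t alignment)
{
    // Reserve enough that an aligned start must fall inside the region.
    size_t reqSize = size + alignment;
    void *region = mmap(nullptr, reqSize, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANON, -1, 0);
    if (region == MAP_FAILED)
        return nullptr;

    uintptr_t base = uintptr_t(region);
    uintptr_t aligned = (base + alignment - 1) & ~(alignment - 1);

    // Trim the misaligned front and the unused tail, keeping exactly
    // [aligned, aligned + size). POSIX permits unmapping page-aligned
    // subranges of a mapping.
    if (aligned != base)
        munmap(region, aligned - base);
    size_t tail = (base + reqSize) - (aligned + size);
    if (tail)
        munmap((void *)(aligned + size), tail);

    return (void *)aligned;
}
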
--- a/js/src/gc/Nursery-inl.h
+++ b/js/src/gc/Nursery-inl.h
@@ -51,17 +51,17 @@ class RelocationOverlay
         JS_ASSERT(isForwarded());
         return newLocation_;
     }
 
     void forwardTo(Cell *cell) {
         JS_ASSERT(!isForwarded());
         magic_ = Relocated;
         newLocation_ = cell;
-        next_ = NULL;
+        next_ = nullptr;
     }
 
     RelocationOverlay *next() const {
         return next_;
     }
 };
 
 } /* namespace gc */
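
RelocationOverlay is the forwarding-pointer scheme of the moving nursery: once a cell is tenured, its old nursery memory is overwritten with a magic word, the new address, and a link used by the fixup pass. An illustrative layout (the magic value and field names here are hypothetical, not the real overlay):

#include <stdint.h>

struct Forwarded
{
    static const uintptr_t Relocated = 0xbad0bad1;   // hypothetical magic value

    uintptr_t magic_;
    void *newLocation_;
    Forwarded *next_;   // chains moved cells for the post-move fixup pass

    bool isForwarded() const { return magic_ == Relocated; }

    // Overwrite the dead nursery cell with a forwarding record.
    void forwardTo(void *cell) {
        magic_ = Relocated;
        newLocation_ = cell;
        next_ = nullptr;
    }

    void *forwardingAddress() const { return newLocation_; }
};
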
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -98,17 +98,17 @@ void *
 js::Nursery::allocate(size_t size)
 {
     JS_ASSERT(size % ThingAlignment == 0);
     JS_ASSERT(position() % ThingAlignment == 0);
     JS_ASSERT(!runtime()->isHeapBusy());
 
     if (position() + size > currentEnd()) {
         if (currentChunk_ + 1 == numActiveChunks_)
-            return NULL;
+            return nullptr;
         setCurrentChunk(currentChunk_ + 1);
     }
 
     void *thing = (void *)position();
     position_ = position() + size;
 
 #ifdef DEBUG
     JS_POISON(thing, AllocatedThing, size);
@@ -232,25 +232,25 @@ class MinorCollectionTracer : public JST
     /* Save and restore all of the runtime state we use during MinorGC. */
     bool savedRuntimeNeedBarrier;
     AutoDisableProxyCheck disableStrictProxyChecking;
 
     /* Insert the given relocation entry into the list of things to visit. */
     JS_ALWAYS_INLINE void insertIntoFixupList(RelocationOverlay *entry) {
         *tail = entry;
         tail = &entry->next_;
-        *tail = NULL;
+        *tail = nullptr;
     }
 
     MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
       : JSTracer(),
         nursery(nursery),
         session(rt, MinorCollecting),
         tenuredSize(0),
-        head(NULL),
+        head(nullptr),
         tail(&head),
         savedRuntimeNeedBarrier(rt->needsBarrier()),
         disableStrictProxyChecking(rt)
     {
         JS_TracerInit(this, rt, Nursery::MinorGCCallback);
         eagerlyTraceWeakMaps = TraceWeakMapKeysValues;
         rt->gcNumber++;
 
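
Nursery::allocate above is a plain bump allocator: advance a cursor through the active chunk and report failure with nullptr so the caller can tenure the allocation or trigger a minor GC. Stripped to its core (chunk chaining and DEBUG poisoning omitted; the class name is illustrative):

#include <stddef.h>
#include <stdint.h>

class BumpRegion
{
    uintptr_t position_;
    uintptr_t end_;

  public:
    BumpRegion(void *start, size_t bytes)
      : position_(uintptr_t(start)), end_(uintptr_t(start) + bytes) {}

    void *allocate(size_t size) {
        // Out of space: return nullptr so the caller can fall back to a
        // tenured allocation or schedule a minor GC, rather than throwing.
        if (position_ + size > end_)
            return nullptr;

        void *thing = (void *)position_;
        position_ += size;
        return thing;
    }
};
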
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -65,17 +65,17 @@ class Nursery
     bool isEnabled() const { return numActiveChunks_ != 0; }
 
     template <typename T>
     JS_ALWAYS_INLINE bool isInside(const T *p) const {
         return uintptr_t(p) >= start() && uintptr_t(p) < heapEnd();
     }
 
     /*
-     * Allocate and return a pointer to a new GC thing. Returns NULL if the
+     * Allocate and return a pointer to a new GC thing. Returns nullptr if the
      * Nursery is full.
      */
     void *allocate(size_t size);
 
     /* Allocate a slots array for the given object. */
     HeapSlot *allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots);
 
     /* Allocate an elements vector for the given object. */
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -186,17 +186,17 @@ void *
 js::gc::GetAddressableGCThing(JSRuntime *rt, uintptr_t w)
 {
     void *thing;
     ArenaHeader *aheader;
     AllocKind thingKind;
     ConservativeGCTest status =
         IsAddressableGCThing(rt, w, false, &thingKind, &aheader, &thing);
     if (status != CGCT_VALID)
-        return NULL;
+        return nullptr;
     return thing;
 }
 #endif
 
 /*
  * Returns CGCT_VALID and marks the thing if w can be a live GC thing, and sets
  * thingKind accordingly. Otherwise returns the reason for rejection.
  */
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -119,36 +119,36 @@ class gcstats::StatisticsSerializer
         needComma_ = false;
         pJSON("]");
         needComma_ = true;
     }
 
     jschar *finishJSString() {
         char *buf = finishCString();
         if (!buf)
-            return NULL;
+            return nullptr;
 
         size_t nchars = strlen(buf);
         jschar *out = js_pod_malloc<jschar>(nchars + 1);
         if (!out) {
             oom_ = true;
             js_free(buf);
-            return NULL;
+            return nullptr;
         }
 
         InflateStringToBuffer(buf, nchars, out);
         js_free(buf);
 
         out[nchars] = 0;
         return out;
     }
 
     char *finishCString() {
         if (oom_)
-            return NULL;
+            return nullptr;
 
         buf_.append('\0');
 
         char *buf = buf_.extractRawBuffer();
         if (!buf)
             oom_ = true;
 
         return buf;
@@ -304,17 +304,17 @@ static const PhaseInfo phases[] = {
     { PHASE_SWEEP_OBJECT, "Sweep Object", PHASE_SWEEP },
     { PHASE_SWEEP_STRING, "Sweep String", PHASE_SWEEP },
     { PHASE_SWEEP_SCRIPT, "Sweep Script", PHASE_SWEEP },
     { PHASE_SWEEP_SHAPE, "Sweep Shape", PHASE_SWEEP },
     { PHASE_SWEEP_IONCODE, "Sweep Ion code", PHASE_SWEEP },
     { PHASE_FINALIZE_END, "Finalize End Callback", PHASE_SWEEP },
     { PHASE_DESTROY, "Deallocate", PHASE_SWEEP },
     { PHASE_GC_END, "End Callback", PHASE_NO_PARENT },
-    { PHASE_LIMIT, NULL, PHASE_NO_PARENT }
+    { PHASE_LIMIT, nullptr, PHASE_NO_PARENT }
 };
 
 static void
 FormatPhaseTimes(StatisticsSerializer &ss, const char *name, int64_t *times)
 {
     ss.beginObject(name);
     for (unsigned i = 0; phases[i].name; i++)
         ss.appendIfNonzeroMS(phases[i].name, t(times[phases[i].index]));
@@ -349,17 +349,17 @@ Statistics::formatData(StatisticsSeriali
     gcDuration(&total, &longest);
 
     int64_t sccTotal, sccLongest;
     sccDurations(&sccTotal, &sccLongest);
 
     double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
     double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
 
-    ss.beginObject(NULL);
+    ss.beginObject(nullptr);
     if (ss.isJSON())
         ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
     if (slices.length() > 1 || ss.isJSON())
         ss.appendDecimal("Max Pause", "ms", t(longest));
     else
         ss.appendString("Reason", ExplainReason(slices[0].reason));
     ss.appendDecimal("Total Time", "ms", t(total));
     ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
@@ -383,17 +383,17 @@ Statistics::formatData(StatisticsSeriali
         for (size_t i = 0; i < slices.length(); i++) {
             int64_t width = slices[i].duration();
             if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME &&
                 !slices[i].resetReason && !ss.isJSON())
             {
                 continue;
             }
 
-            ss.beginObject(NULL);
+            ss.beginObject(nullptr);
             ss.extra("    ");
             ss.appendNumber("Slice", "%d", "", i);
             ss.appendDecimal("Pause", "", t(width));
             ss.extra(" (");
             ss.appendDecimal("When", "ms", t(slices[i].start - slices[0].start));
             ss.appendString("Reason", ExplainReason(slices[i].reason));
             if (ss.isJSON()) {
                 ss.appendDecimal("Page Faults", "",
@@ -432,32 +432,32 @@ Statistics::formatJSON(uint64_t timestam
     StatisticsSerializer ss(StatisticsSerializer::AsJSON);
     formatData(ss, timestamp);
     return ss.finishJSString();
 }
 
 Statistics::Statistics(JSRuntime *rt)
   : runtime(rt),
     startupTime(PRMJ_Now()),
-    fp(NULL),
+    fp(nullptr),
     fullFormat(false),
     gcDepth(0),
     collectedCount(0),
     zoneCount(0),
     compartmentCount(0),
-    nonincrementalReason(NULL),
+    nonincrementalReason(nullptr),
     preBytes(0),
     phaseNestingDepth(0)
 {
     PodArrayZero(phaseTotals);
     PodArrayZero(counts);
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
-        fp = NULL;
+        fp = nullptr;
         return;
     }
 
     if (strcmp(env, "stdout") == 0) {
         fullFormat = false;
         fp = stdout;
     } else if (strcmp(env, "stderr") == 0) {
         fullFormat = false;
@@ -514,17 +514,17 @@ Statistics::printStats()
 void
 Statistics::beginGC()
 {
     PodArrayZero(phaseStartTimes);
     PodArrayZero(phaseTimes);
 
     slices.clearAndFree();
     sccTimes.clearAndFree();
-    nonincrementalReason = NULL;
+    nonincrementalReason = nullptr;
 
     preBytes = runtime->gcBytes;
 }
 
 void
 Statistics::endGC()
 {
     crash::SnapshotGCStack();
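
The phases[] table relies on the nullptr name in its final entry as a sentinel, which is why FormatPhaseTimes can loop on `phases[i].name` without a separate length. A minimal standalone version of the pattern:

#include <stdio.h>

struct PhaseInfo
{
    int index;
    const char *name;
};

static const PhaseInfo kPhases[] = {
    { 0, "Mark" },
    { 1, "Sweep" },
    { 2, nullptr }    // sentinel: a nullptr name terminates iteration
};

static void
PrintPhases()
{
    for (unsigned i = 0; kPhases[i].name; i++)
        printf("%s\n", kPhases[i].name);
}
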
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -116,17 +116,17 @@ struct Statistics {
 
     int collectedCount;
     int zoneCount;
     int compartmentCount;
     const char *nonincrementalReason;
 
     struct SliceData {
         SliceData(JS::gcreason::Reason reason, int64_t start, size_t startFaults)
-          : reason(reason), resetReason(NULL), start(start), startFaults(startFaults)
+          : reason(reason), resetReason(nullptr), start(start), startFaults(startFaults)
         {
             mozilla::PodArrayZero(phaseTimes);
         }
 
         JS::gcreason::Reason reason;
         const char *resetReason;
         int64_t start, end;
         size_t startFaults, endFaults;
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -20,29 +20,29 @@ using mozilla::ReentrancyGuard;
 
 /*** SlotEdge ***/
 
 JS_ALWAYS_INLINE HeapSlot *
 StoreBuffer::SlotEdge::slotLocation() const
 {
     if (kind == HeapSlot::Element) {
         if (offset >= object->getDenseInitializedLength())
-            return NULL;
+            return nullptr;
         return (HeapSlot *)&object->getDenseElement(offset);
     }
     if (offset >= object->slotSpan())
-        return NULL;
+        return nullptr;
     return &object->getSlotRef(offset);
 }
 
 JS_ALWAYS_INLINE void *
 StoreBuffer::SlotEdge::deref() const
 {
     HeapSlot *loc = slotLocation();
-    return (loc && loc->isGCThing()) ? loc->toGCThing() : NULL;
+    return (loc && loc->isGCThing()) ? loc->toGCThing() : nullptr;
 }
 
 JS_ALWAYS_INLINE void *
 StoreBuffer::SlotEdge::location() const
 {
     return (void *)slotLocation();
 }
 
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -253,17 +253,17 @@ class StoreBuffer
         friend class StoreBuffer::RelocatableMonoTypeBuffer<ValueEdge>;
 
         JS::Value *edge;
 
         explicit ValueEdge(JS::Value *v) : edge(v) {}
         bool operator==(const ValueEdge &other) const { return edge == other.edge; }
         bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
 
-        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : NULL; }
+        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }
         void *location() const { return (void *)untagged().edge; }
 
         bool inRememberedSet(const Nursery &nursery) const {
             return !nursery.isInside(edge) && nursery.isInside(deref());
         }
 
         bool isNullEdge() const {
             return !deref();
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -121,17 +121,17 @@ CountDecimalDigits(size_t num)
 
     return numDigits;
 }
 
 JS_PUBLIC_API(void)
 JS_GetTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
                      JSGCTraceKind kind, bool details)
 {
-    const char *name = NULL; /* silence uninitialized warning */
+    const char *name = nullptr; /* silence uninitialized warning */
     size_t n;
 
     if (bufsize == 0)
         return;
 
     switch (kind) {
       case JSTRACE_OBJECT:
       {
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -263,17 +263,17 @@ JS::CheckStackRoots(JSContext *cx)
     }
 
     if (SuppressCheckRoots(rooters))
         return;
 
     // Truncate stackEnd to just after the address of the youngest
     // already-scanned rooter on the stack, to avoid re-scanning the rest of
     // the stack.
-    void *firstScanned = NULL;
+    void *firstScanned = nullptr;
     for (Rooter *p = rooters.begin(); p != rooters.end(); p++) {
         if (p->rooter->scanned) {
             uintptr_t *addr = reinterpret_cast<uintptr_t*>(p->rooter);
             if (stackEnd > addr) {
                 stackEnd = addr;
                 firstScanned = p->rooter;
             }
         }
@@ -373,17 +373,17 @@ struct VerifyPreTracer : JSTracer {
 
     /* This graph represents the initial GC "snapshot". */
     VerifyNode *curnode;
     VerifyNode *root;
     char *edgeptr;
     char *term;
     NodeMap nodemap;
 
-    VerifyPreTracer() : root(NULL) {}
+    VerifyPreTracer() : root(nullptr) {}
     ~VerifyPreTracer() { js_free(root); }
 };
 
 /*
  * This function builds up the heap snapshot by adding edges to the current
  * node.
  */
 static void
@@ -399,39 +399,39 @@ AccumulateEdge(JSTracer *jstrc, void **t
         return;
     }
 
     VerifyNode *node = trc->curnode;
     uint32_t i = node->count;
 
     node->edges[i].thing = *thingp;
     node->edges[i].kind = kind;
-    node->edges[i].label = trc->debugPrinter ? NULL : (char *)trc->debugPrintArg;
+    node->edges[i].label = trc->debugPrinter ? nullptr : (char *)trc->debugPrintArg;
     node->count++;
 }
 
 static VerifyNode *
 MakeNode(VerifyPreTracer *trc, void *thing, JSGCTraceKind kind)
 {
     NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing);
     if (!p) {
         VerifyNode *node = (VerifyNode *)trc->edgeptr;
         trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
         if (trc->edgeptr >= trc->term) {
             trc->edgeptr = trc->term;
-            return NULL;
+            return nullptr;
         }
 
         node->thing = thing;
         node->count = 0;
         node->kind = kind;
         trc->nodemap.add(p, thing, node);
         return node;
     }
-    return NULL;
+    return nullptr;
 }
 
 static VerifyNode *
 NextNode(VerifyNode *node)
 {
     if (node->count == 0)
         return (VerifyNode *)((char *)node + sizeof(VerifyNode) - sizeof(EdgeValue));
     else
@@ -469,17 +469,17 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
         goto oom;
     trc->edgeptr = (char *)trc->root;
     trc->term = trc->edgeptr + size;
 
     if (!trc->nodemap.init())
         goto oom;
 
     /* Create the root node. */
-    trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0));
+    trc->curnode = MakeNode(trc, nullptr, JSGCTraceKind(0));
 
     /* We want MarkRuntime to save the roots to gcSavedRoots. */
     rt->gcIncrementalState = MARK_ROOTS;
 
     /* Make all the roots be edges emanating from the root node. */
     MarkRuntime(trc);
 
     VerifyNode *node;
@@ -514,48 +514,48 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
         zone->allocator.arenas.purge();
     }
 
     return;
 
 oom:
     rt->gcIncrementalState = NO_INCREMENTAL;
     js_delete(trc);
-    rt->gcVerifyPreData = NULL;
+    rt->gcVerifyPreData = nullptr;
 }
 
 static bool
 IsMarkedOrAllocated(Cell *cell)
 {
     return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
 }
 
 static const uint32_t MAX_VERIFIER_EDGES = 1000;
 
 /*
  * This function is called by EndVerifyBarriers for every heap edge. If the edge
  * already existed in the original snapshot, we "cancel it out" by overwriting
- * it with NULL. EndVerifyBarriers later asserts that the remaining non-NULL
- * edges (i.e., the ones from the original snapshot that must have been
- * modified) must point to marked objects.
+ * it with nullptr. EndVerifyBarriers later asserts that the remaining
+ * non-nullptr edges (i.e., the ones from the original snapshot that must have
+ * been modified) must point to marked objects.
  */
 static void
 CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     VerifyPreTracer *trc = (VerifyPreTracer *)jstrc;
     VerifyNode *node = trc->curnode;
 
     /* Avoid n^2 behavior. */
     if (node->count > MAX_VERIFIER_EDGES)
         return;
 
     for (uint32_t i = 0; i < node->count; i++) {
         if (node->edges[i].thing == *thingp) {
             JS_ASSERT(node->edges[i].kind == kind);
-            node->edges[i].thing = NULL;
+            node->edges[i].thing = nullptr;
             return;
         }
     }
 }
 
 static void
 AssertMarkedOrAllocated(const EdgeValue &edge)
 {
@@ -594,17 +594,17 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
     JS_ASSERT(trc->number == rt->gcNumber);
     rt->gcNumber++;
 
-    rt->gcVerifyPreData = NULL;
+    rt->gcVerifyPreData = nullptr;
     rt->gcIncrementalState = NO_INCREMENTAL;
 
     if (!compartmentCreated && IsIncrementalGCSafe(rt)) {
         JS_TracerInit(trc, rt, CheckEdge);
 
         /* Start after the roots. */
         VerifyNode *node = NextNode(trc->root);
         while ((char *)node < trc->edgeptr) {
@@ -680,17 +680,17 @@ PostVerifierCollectStoreBufferEdges(JSTr
     if (trc->runtime->gcNursery.isInside(thingp) || !trc->runtime->gcNursery.isInside(dst))
         return;
 
     /*
      * Values will be unpacked to the stack before getting here. However, the
      * only things that enter this callback are marked by the store buffer. The
      * store buffer ensures that the real tracing location is set correctly.
      */
-    void **loc = trc->realLocation != NULL ? (void **)trc->realLocation : thingp;
+    void **loc = trc->realLocation != nullptr ? (void **)trc->realLocation : thingp;
 
     trc->edges->put(loc);
 }
 
 static void
 AssertStoreBufferContainsEdge(VerifyPostTracer::EdgeSet *edges, void **loc, JSObject *dst)
 {
     if (edges->has(loc))
@@ -719,17 +719,17 @@ PostVerifierVisitEdge(JSTracer *jstrc, v
         return;
 
     /*
      * Values will be unpacked to the stack before getting here. However, the
      * only things that enter this callback are marked by the JS_TraceChildren
      * below. Since ObjectImpl::markChildren handles this, the real trace
      * location will be set correctly in these cases.
      */
-    void **loc = trc->realLocation != NULL ? (void **)trc->realLocation : thingp;
+    void **loc = trc->realLocation != nullptr ? (void **)trc->realLocation : thingp;
 
     AssertStoreBufferContainsEdge(trc->edges, loc, dst);
 }
 #endif
 
 void
 js::gc::EndVerifyPostBarriers(JSRuntime *rt)
 {
@@ -754,17 +754,17 @@ js::gc::EndVerifyPostBarriers(JSRuntime 
                 Cell *src = cells.getCell();
                 JS_TraceChildren(trc, src, MapAllocToTraceKind(AllocKind(kind)));
             }
         }
     }
 
 oom:
     js_delete(trc);
-    rt->gcVerifyPostData = NULL;
+    rt->gcVerifyPostData = nullptr;
 #endif
 }
 
 /*** Barrier Verifier Scheduling ***/
 
 static void
 VerifyPreBarriers(JSRuntime *rt)
 {
@@ -835,19 +835,19 @@ js::gc::MaybeVerifyBarriers(JSContext *c
     MaybeVerifyPostBarriers(cx->runtime(), always);
 }
 
 void
 js::gc::FinishVerifier(JSRuntime *rt)
 {
     if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData) {
         js_delete(trc);
-        rt->gcVerifyPreData = NULL;
+        rt->gcVerifyPreData = nullptr;
     }
 #ifdef JSGC_GENERATIONAL
     if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData) {
         js_delete(trc);
-        rt->gcVerifyPostData = NULL;
+        rt->gcVerifyPostData = nullptr;
     }
 #endif
 }
 
 #endif /* JS_GC_ZEAL */
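
The pre-barrier verifier works by snapshotting every heap edge, re-tracing at the end, and "cancelling out" unchanged edges with nullptr, exactly as the CheckEdge comment above describes; whatever survives was modified during the window, so its old target must have been marked. A toy model of that cancel-out pass (isMarked stands in for IsMarkedOrAllocated; everything here is illustrative):

#include <assert.h>
#include <stddef.h>
#include <vector>

struct EdgeSnapshot
{
    std::vector<void *> edges;

    // Re-trace pass: an edge that still holds the same value existed in the
    // snapshot unchanged, so cancel it out with nullptr.
    void cancelIfPresent(void *thing) {
        for (size_t i = 0; i < edges.size(); i++) {
            if (edges[i] == thing) {
                edges[i] = nullptr;
                return;
            }
        }
    }

    // Whatever survives was overwritten during the verification window; the
    // pre-barrier must have marked its old target.
    template <typename IsMarked>
    void assertRemainingMarked(IsMarked isMarked) {
        for (size_t i = 0; i < edges.size(); i++) {
            if (edges[i])
                assert(isMarked(edges[i]));
        }
    }
};
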
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -46,17 +46,17 @@ JS::Zone::Zone(JSRuntime *rt)
               static_cast<JS::shadow::Zone *>(this));
 
     setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
 }
 
 Zone::~Zone()
 {
     if (this == runtimeFromMainThread()->systemZone)
-        runtimeFromMainThread()->systemZone = NULL;
+        runtimeFromMainThread()->systemZone = nullptr;
 }
 
 bool
 Zone::init(JSContext *cx)
 {
     types.init(cx);
     return true;
 }
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -284,17 +284,17 @@ struct Zone : public JS::shadow::Zone,
      }
 
     void onTooMuchMalloc();
 
     void *onOutOfMemory(void *p, size_t nbytes) {
         return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
     }
     void reportAllocationOverflow() {
-        js_ReportAllocationOverflow(NULL);
+        js_ReportAllocationOverflow(nullptr);
     }
 
     void markTypes(JSTracer *trc);
 
     js::types::TypeZone types;
 
     void sweep(js::FreeOp *fop, bool releaseTypes);