Bug 784739 - Switch from NULL to nullptr in js/src/jit/ (3/7); r=ehsan
author Birunthan Mohanathas <birunthan@mohanathas.com>
Fri, 27 Sep 2013 16:29:07 -0400
changeset 162947 e44494d17d37a5214aef9c47bb05b822ab478fb6
parent 162946 c102b5ec30df517228a565cdaf80dc64e399a8e2
child 162948 0c0ec323b6613098747fb1177b631eebf2efb763
push id 3066
push user akeybl@mozilla.com
push date Mon, 09 Dec 2013 19:58:46 +0000
treeherder mozilla-beta@a31a0dce83aa
reviewers ehsan
bugs 784739
milestone 27.0a1
Bug 784739 - Switch from NULL to nullptr in js/src/jit/ (3/7); r=ehsan
js/src/jit/Ion.h
js/src/jit/IonAllocPolicy.h
js/src/jit/IonAnalysis.cpp
js/src/jit/IonBuilder.cpp
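
For context on the change itself: NULL conventionally expands to the integer constant 0 (or 0L), so it participates in overload resolution and template deduction as an integer, whereas C++11's nullptr has its own type, std::nullptr_t, and converts only to pointer and pointer-to-member types. A minimal standalone sketch of the difference (the describe() overloads are hypothetical and not part of this patch):

    #include <cstdio>

    static void describe(int)         { std::puts("int overload"); }
    static void describe(const char*) { std::puts("pointer overload"); }

    int main() {
        // 0 is a null pointer constant, yet overload resolution treats it as an
        // int; NULL, conventionally 0 or 0L, behaves the same way (or is outright
        // ambiguous), so it can silently pick the wrong overload.
        describe(0);
        // nullptr converts only to pointer (and pointer-to-member) types, so the
        // pointer overload is selected unambiguously.
        describe(nullptr);
        return 0;
    }
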
--- a/js/src/jit/Ion.h
+++ b/js/src/jit/Ion.h
@@ -255,18 +255,18 @@ enum AbortReason {
     AbortReason_Inlining,
     AbortReason_Disable,
     AbortReason_Error,
     AbortReason_NoAbort
 };
 
 // An Ion context is needed to enter into either an Ion method or an instance
 // of the Ion compiler. It points to a temporary allocator and the active
-// JSContext, either of which may be NULL, and the active compartment, which
-// will not be NULL.
+// JSContext, either of which may be nullptr, and the active compartment, which
+// will not be nullptr.
 
 class IonContext
 {
   public:
     IonContext(JSContext *cx, TempAllocator *temp);
     IonContext(ExclusiveContext *cx, TempAllocator *temp);
     IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp);
     IonContext(JSRuntime *rt);
@@ -349,18 +349,18 @@ void ToggleBarriers(JS::Zone *zone, bool
 
 class IonBuilder;
 class MIRGenerator;
 class LIRGraph;
 class CodeGenerator;
 
 bool OptimizeMIR(MIRGenerator *mir);
 LIRGraph *GenerateLIR(MIRGenerator *mir);
-CodeGenerator *GenerateCode(MIRGenerator *mir, LIRGraph *lir, MacroAssembler *maybeMasm = NULL);
-CodeGenerator *CompileBackEnd(MIRGenerator *mir, MacroAssembler *maybeMasm = NULL);
+CodeGenerator *GenerateCode(MIRGenerator *mir, LIRGraph *lir, MacroAssembler *maybeMasm = nullptr);
+CodeGenerator *CompileBackEnd(MIRGenerator *mir, MacroAssembler *maybeMasm = nullptr);
 
 void AttachFinishedCompilations(JSContext *cx);
 void FinishOffThreadBuilder(IonBuilder *builder);
 
 static inline bool
 IsIonEnabled(JSContext *cx)
 {
     return cx->hasOption(JSOPTION_ION) &&
--- a/js/src/jit/IonAllocPolicy.h
+++ b/js/src/jit/IonAllocPolicy.h
@@ -23,31 +23,31 @@ class TempAllocator
     LifoAllocScope lifoScope_;
 
     // Linked list of GCThings rooted by this allocator.
     CompilerRootNode *rootList_;
 
   public:
     TempAllocator(LifoAlloc *lifoAlloc)
       : lifoScope_(lifoAlloc),
-        rootList_(NULL)
+        rootList_(nullptr)
     { }
 
     void *allocateInfallible(size_t bytes)
     {
         void *p = lifoScope_.alloc().allocInfallible(bytes);
         JS_ASSERT(p);
         return p;
     }
 
     void *allocate(size_t bytes)
     {
         void *p = lifoScope_.alloc().alloc(bytes);
         if (!ensureBallast())
-            return NULL;
+            return nullptr;
         return p;
     }
 
     LifoAlloc *lifoAlloc()
     {
         return &lifoScope_.alloc();
     }
 
--- a/js/src/jit/IonAnalysis.cpp
+++ b/js/src/jit/IonAnalysis.cpp
@@ -233,18 +233,18 @@ IsPhiObservable(MPhi *phi, Observability
 
 // Handles cases like:
 //    x is phi(a, x) --> a
 //    x is phi(a, a) --> a
 inline MDefinition *
 IsPhiRedundant(MPhi *phi)
 {
     MDefinition *first = phi->operandIfRedundant();
-    if (first == NULL)
-        return NULL;
+    if (first == nullptr)
+        return nullptr;
 
     // Propagate the Folded flag if |phi| is replaced with another phi.
     if (phi->isFolded())
         first->setFoldedUnchecked();
 
     return first;
 }
 
@@ -974,30 +974,30 @@ IntersectDominators(MBasicBlock *block1,
     JS_ASSERT(finger2);
 
     // In the original paper, the block ID comparisons are on the postorder index.
     // This implementation iterates in RPO, so the comparisons are reversed.
 
     // For this function to be called, the block must have multiple predecessors.
     // If a finger is then found to be self-dominating, it must therefore be
     // reachable from multiple roots through non-intersecting control flow.
-    // NULL is returned in this case, to denote an empty intersection.
+    // nullptr is returned in this case, to denote an empty intersection.
 
     while (finger1->id() != finger2->id()) {
         while (finger1->id() > finger2->id()) {
             MBasicBlock *idom = finger1->immediateDominator();
             if (idom == finger1)
-                return NULL; // Empty intersection.
+                return nullptr; // Empty intersection.
             finger1 = idom;
         }
 
         while (finger2->id() > finger1->id()) {
             MBasicBlock *idom = finger2->immediateDominator();
             if (idom == finger2)
-                return NULL; // Empty intersection.
+                return nullptr; // Empty intersection.
             finger2 = idom;
         }
     }
     return finger1;
 }
 
 static void
 ComputeImmediateDominators(MIRGraph &graph)
@@ -1025,40 +1025,40 @@ ComputeImmediateDominators(MIRGraph &gra
             if (block->immediateDominator() == *block)
                 continue;
 
             MBasicBlock *newIdom = block->getPredecessor(0);
 
             // Find the first common dominator.
             for (size_t i = 1; i < block->numPredecessors(); i++) {
                 MBasicBlock *pred = block->getPredecessor(i);
-                if (pred->immediateDominator() == NULL)
+                if (pred->immediateDominator() == nullptr)
                     continue;
 
                 newIdom = IntersectDominators(pred, newIdom);
 
                 // If there is no common dominator, the block self-dominates.
-                if (newIdom == NULL) {
+                if (newIdom == nullptr) {
                     block->setImmediateDominator(*block);
                     changed = true;
                     break;
                 }
             }
 
             if (newIdom && block->immediateDominator() != newIdom) {
                 block->setImmediateDominator(newIdom);
                 changed = true;
             }
         }
     }
 
 #ifdef DEBUG
     // Assert that all blocks have dominator information.
     for (MBasicBlockIterator block(graph.begin()); block != graph.end(); block++) {
-        JS_ASSERT(block->immediateDominator() != NULL);
+        JS_ASSERT(block->immediateDominator() != nullptr);
     }
 #endif
 }
 
 bool
 jit::BuildDominatorTree(MIRGraph &graph)
 {
     ComputeImmediateDominators(graph);
@@ -1315,23 +1315,23 @@ jit::AssertExtendedGraphCoherency(MIRGra
         }
 
         uint32_t successorWithPhis = 0;
         for (size_t i = 0; i < block->numSuccessors(); i++)
             if (!block->getSuccessor(i)->phisEmpty())
                 successorWithPhis++;
 
         JS_ASSERT(successorWithPhis <= 1);
-        JS_ASSERT_IF(successorWithPhis, block->successorWithPhis() != NULL);
+        JS_ASSERT_IF(successorWithPhis, block->successorWithPhis() != nullptr);
 
         // I'd like to assert this, but it's not necc. true.  Sometimes we set this
-        // flag to non-NULL just because a successor has multiple preds, even if it
+        // flag to non-nullptr just because a successor has multiple preds, even if it
         // does not actually have any phis.
         //
-        // JS_ASSERT_IF(!successorWithPhis, block->successorWithPhis() == NULL);
+        // JS_ASSERT_IF(!successorWithPhis, block->successorWithPhis() == nullptr);
     }
 #endif
 }
 
 
 struct BoundsCheckInfo
 {
     MBoundsCheck *check;
@@ -1361,17 +1361,17 @@ FindDominatingBoundsCheck(BoundsCheckMap
     BoundsCheckMap::Ptr p = checks.lookup(hash);
     if (!p || index > p->value.validUntil) {
         // We didn't find a dominating bounds check.
         BoundsCheckInfo info;
         info.check = check;
         info.validUntil = index + check->block()->numDominated();
 
         if(!checks.put(hash, info))
-            return NULL;
+            return nullptr;
 
         return check;
     }
 
     return p->value.check;
 }
 
 // Extract a linear sum from ins, if possible (otherwise giving the sum 'ins + 0').
@@ -1382,17 +1382,17 @@ jit::ExtractLinearSum(MDefinition *ins)
         ins = ins->getOperand(0);
 
     if (ins->type() != MIRType_Int32)
         return SimpleLinearSum(ins, 0);
 
     if (ins->isConstant()) {
         const Value &v = ins->toConstant()->value();
         JS_ASSERT(v.isInt32());
-        return SimpleLinearSum(NULL, v.toInt32());
+        return SimpleLinearSum(nullptr, v.toInt32());
     } else if (ins->isAdd() || ins->isSub()) {
         MDefinition *lhs = ins->getOperand(0);
         MDefinition *rhs = ins->getOperand(1);
         if (lhs->type() == MIRType_Int32 && rhs->type() == MIRType_Int32) {
             SimpleLinearSum lsum = ExtractLinearSum(lhs);
             SimpleLinearSum rsum = ExtractLinearSum(rhs);
 
             if (lsum.term && rsum.term)
@@ -1504,17 +1504,17 @@ TryEliminateBoundsCheck(BoundsCheckMap &
     // We found two bounds checks with the same hash number, but we still have
     // to make sure the lengths and index terms are equal.
     if (dominating->length() != dominated->length())
         return true;
 
     SimpleLinearSum sumA = ExtractLinearSum(dominating->index());
     SimpleLinearSum sumB = ExtractLinearSum(dominated->index());
 
-    // Both terms should be NULL or the same definition.
+    // Both terms should be nullptr or the same definition.
     if (sumA.term != sumB.term)
         return true;
 
     // This bounds check is redundant.
     *eliminated = true;
 
     // Normalize the ranges according to the constant offsets in the two indexes.
     int32_t minimumA, maximumA, minimumB, maximumB;
@@ -1712,33 +1712,33 @@ jit::EliminateRedundantChecks(MIRGraph &
         index++;
     }
 
     JS_ASSERT(index == graph.numBlocks());
     return true;
 }
 
 // If the given block contains a goto and nothing interesting before that,
-// return the goto. Return NULL otherwise.
+// return the goto. Return nullptr otherwise.
 static LGoto *
 FindLeadingGoto(LBlock *bb)
 {
     for (LInstructionIterator ins(bb->begin()); ins != bb->end(); ins++) {
         // Ignore labels.
         if (ins->isLabel())
             continue;
         // Ignore empty move groups.
         if (ins->isMoveGroup() && ins->toMoveGroup()->numMoves() == 0)
             continue;
         // If we have a goto, we're good to go.
         if (ins->isGoto())
             return ins->toGoto();
         break;
     }
-    return NULL;
+    return nullptr;
 }
 
 // Eliminate blocks containing nothing interesting besides gotos. These are
 // often created by the optimizer, which splits all critical edges. If these
 // splits end up being unused after optimization and register allocation,
 // fold them back away to avoid unnecessary branching.
 bool
 jit::UnsplitEdges(LIRGraph *lir)
@@ -1947,17 +1947,17 @@ AnalyzePoppedThis(JSContext *cx, types::
         if (!types::AddClearDefiniteGetterSetterForPrototypeChain(cx, type, id)) {
             // The prototype chain already contains a getter/setter for this
             // property, or type information is too imprecise.
             return true;
         }
 
         DebugOnly<unsigned> slotSpan = baseobj->slotSpan();
         RootedValue value(cx, UndefinedValue());
-        if (!DefineNativeProperty(cx, baseobj, id, value, NULL, NULL,
+        if (!DefineNativeProperty(cx, baseobj, id, value, nullptr, nullptr,
                                   JSPROP_ENUMERATE, 0, 0, DNP_SKIP_TYPE))
         {
             return false;
         }
         JS_ASSERT(baseobj->slotSpan() != slotSpan);
         JS_ASSERT(!baseobj->inDictionaryMode());
 
         Vector<MResumePoint *> callerResumePoints(cx);
@@ -2066,23 +2066,23 @@ jit::AnalyzeNewScriptProperties(JSContex
 
     types::AutoEnterAnalysis enter(cx);
 
     if (!fun->nonLazyScript()->ensureRanAnalysis(cx))
         return false;
 
     MIRGraph graph(&temp);
     CompileInfo info(fun->nonLazyScript(), fun,
-                     /* osrPc = */ NULL, /* constructing = */ false,
+                     /* osrPc = */ nullptr, /* constructing = */ false,
                      DefinitePropertiesAnalysis);
 
     AutoTempAllocatorRooter root(cx, &temp);
 
     BaselineInspector inspector(cx, fun->nonLazyScript());
-    IonBuilder builder(cx, &temp, &graph, &inspector, &info, /* baselineFrame = */ NULL);
+    IonBuilder builder(cx, &temp, &graph, &inspector, &info, /* baselineFrame = */ nullptr);
 
     if (!builder.build()) {
         if (builder.abortReason() == AbortReason_Alloc)
             return false;
         return true;
     }
 
     if (!SplitCriticalEdges(graph))
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -34,43 +34,43 @@ using namespace js;
 using namespace js::jit;
 
 using mozilla::DebugOnly;
 
 IonBuilder::IonBuilder(JSContext *cx, TempAllocator *temp, MIRGraph *graph,
                        BaselineInspector *inspector, CompileInfo *info, BaselineFrame *baselineFrame,
                        size_t inliningDepth, uint32_t loopDepth)
   : MIRGenerator(cx->compartment(), temp, graph, info),
-    backgroundCodegen_(NULL),
+    backgroundCodegen_(nullptr),
     recompileInfo(cx->compartment()->types.compiledInfo),
     cx(cx),
     baselineFrame_(baselineFrame),
     abortReason_(AbortReason_Disable),
     analysis_(info->script()),
     loopDepth_(loopDepth),
-    callerResumePoint_(NULL),
-    callerBuilder_(NULL),
+    callerResumePoint_(nullptr),
+    callerBuilder_(nullptr),
     inspector(inspector),
     inliningDepth_(inliningDepth),
     numLoopRestarts_(0),
     failedBoundsCheck_(info->script()->failedBoundsCheck),
     failedShapeGuard_(info->script()->failedShapeGuard),
     nonStringIteration_(false),
-    lazyArguments_(NULL),
-    inlineCallInfo_(NULL)
+    lazyArguments_(nullptr),
+    inlineCallInfo_(nullptr)
 {
     script_.init(info->script());
     pc = info->startPC();
 }
 
 void
 IonBuilder::clearForBackEnd()
 {
-    cx = NULL;
-    baselineFrame_ = NULL;
+    cx = nullptr;
+    baselineFrame_ = nullptr;
 }
 
 bool
 IonBuilder::abort(const char *message, ...)
 {
     // Don't call PCToLineNumber in release builds.
 #ifdef DEBUG
     va_list ap;
@@ -138,31 +138,31 @@ IonBuilder::CFGState::AndOr(jsbytecode *
 
 IonBuilder::CFGState
 IonBuilder::CFGState::TableSwitch(jsbytecode *exitpc, MTableSwitch *ins)
 {
     CFGState state;
     state.state = TABLE_SWITCH;
     state.stopAt = exitpc;
     state.tableswitch.exitpc = exitpc;
-    state.tableswitch.breaks = NULL;
+    state.tableswitch.breaks = nullptr;
     state.tableswitch.ins = ins;
     state.tableswitch.currentBlock = 0;
     return state;
 }
 
 JSFunction *
 IonBuilder::getSingleCallTarget(types::TemporaryTypeSet *calleeTypes)
 {
     if (!calleeTypes)
-        return NULL;
+        return nullptr;
 
     JSObject *obj = calleeTypes->getSingleton();
     if (!obj || !obj->is<JSFunction>())
-        return NULL;
+        return nullptr;
 
     return &obj->as<JSFunction>();
 }
 
 bool
 IonBuilder::getPolyCallTargets(types::TemporaryTypeSet *calleeTypes, bool constructing,
                                ObjectVector &targets, uint32_t maxTargets, bool *gotLambda)
 {
@@ -354,17 +354,17 @@ IonBuilder::analyzeNewLoopTypes(MBasicBl
             // new types flow to the phis and the loop is processed at least
             // three times.
             loopHeaders_[i].header = entry;
             return;
         }
     }
     loopHeaders_.append(LoopHeader(start, entry));
 
-    jsbytecode *last = NULL, *earlier = NULL;
+    jsbytecode *last = nullptr, *earlier = nullptr;
     for (jsbytecode *pc = start; pc != end; earlier = last, last = pc, pc += GetBytecodeLength(pc)) {
         uint32_t slot;
         if (*pc == JSOP_SETLOCAL)
             slot = info().localSlot(GET_SLOTNO(pc));
         else if (*pc == JSOP_SETARG)
             slot = info().argSlotUnchecked(GET_SLOTNO(pc));
         else
             continue;
@@ -448,17 +448,17 @@ IonBuilder::analyzeNewLoopTypes(MBasicBl
               case JSOP_DIV:
               case JSOP_MOD:
               case JSOP_NEG:
                 type = inspector->expectedResultType(last);
               default:
                 break;
             }
             if (type != MIRType_None)
-                phi->addBackedgeType(type, NULL);
+                phi->addBackedgeType(type, nullptr);
         }
     }
 }
 
 bool
 IonBuilder::pushLoop(CFGState::State initial, jsbytecode *stopAt, MBasicBlock *entry, bool osr,
                      jsbytecode *loopHead, jsbytecode *initialPc,
                      jsbytecode *bodyStart, jsbytecode *bodyEnd, jsbytecode *exitpc,
@@ -475,19 +475,19 @@ IonBuilder::pushLoop(CFGState::State ini
     state.state = initial;
     state.stopAt = stopAt;
     state.loop.bodyStart = bodyStart;
     state.loop.bodyEnd = bodyEnd;
     state.loop.exitpc = exitpc;
     state.loop.continuepc = continuepc;
     state.loop.entry = entry;
     state.loop.osr = osr;
-    state.loop.successor = NULL;
-    state.loop.breaks = NULL;
-    state.loop.continues = NULL;
+    state.loop.successor = nullptr;
+    state.loop.breaks = nullptr;
+    state.loop.continues = nullptr;
     state.loop.initialState = initial;
     state.loop.initialPc = initialPc;
     state.loop.initialStopAt = stopAt;
     state.loop.loopHead = loopHead;
     return cfgStack_.append(state);
 }
 
 bool
@@ -759,17 +759,17 @@ IonBuilder::rewriteParameter(uint32_t sl
 {
     JS_ASSERT(param->isParameter() || param->isGetArgumentsObjectArg());
 
     types::TemporaryTypeSet *types = param->resultTypeSet();
     JSValueType definiteType = types->getKnownTypeTag();
     if (definiteType == JSVAL_TYPE_UNKNOWN)
         return;
 
-    MInstruction *actual = NULL;
+    MInstruction *actual = nullptr;
     switch (definiteType) {
       case JSVAL_TYPE_UNDEFINED:
         param->setFoldedUnchecked();
         actual = MConstant::New(UndefinedValue());
         break;
 
       case JSVAL_TYPE_NULL:
         param->setFoldedUnchecked();
@@ -843,17 +843,17 @@ IonBuilder::initParameters()
     }
 
     return true;
 }
 
 bool
 IonBuilder::initScopeChain(MDefinition *callee)
 {
-    MInstruction *scope = NULL;
+    MInstruction *scope = nullptr;
 
     // If the script doesn't use the scopechain, then it's already initialized
     // from earlier.  However, always make a scope chain when |needsArgsObj| is true
     // for the script, since arguments object construction requires the scope chain
     // to be passed in.
     if (!info().needsArgsObj() && !analysis().usesScopeChain())
         return true;
 
@@ -910,17 +910,17 @@ bool
 IonBuilder::addOsrValueTypeBarrier(uint32_t slot, MInstruction **def_,
                                    MIRType type, types::TemporaryTypeSet *typeSet)
 {
     MInstruction *&def = *def_;
     MBasicBlock *osrBlock = def->block();
 
     // Clear bogus type information added in newOsrPreheader().
     def->setResultType(MIRType_Value);
-    def->setResultTypeSet(NULL);
+    def->setResultTypeSet(nullptr);
 
     if (typeSet && !typeSet->unknown()) {
         MInstruction *barrier = MTypeBarrier::New(def, typeSet);
         osrBlock->insertBefore(osrBlock->lastIns(), barrier);
         osrBlock->rewriteSlot(slot, barrier);
         def = barrier;
     } else if (type == MIRType_Null ||
                type == MIRType_Undefined ||
@@ -1455,17 +1455,17 @@ IonBuilder::inspectOpcode(JSOp op)
       case JSOP_POPN:
         for (uint32_t i = 0, n = GET_UINT16(pc); i < n; i++)
             current->pop();
         return true;
 
       case JSOP_NEWINIT:
         if (GET_UINT8(pc) == JSProto_Array)
             return jsop_newarray(0);
-        return jsop_newobject(NULL);
+        return jsop_newobject(nullptr);
 
       case JSOP_NEWARRAY:
         return jsop_newarray(GET_UINT24(pc));
 
       case JSOP_NEWOBJECT:
         return jsop_newobject(info().getObject(pc));
 
       case JSOP_INITELEM:
@@ -1684,17 +1684,17 @@ IonBuilder::inspectOpcode(JSOp op)
 // every outer loop at once, if there are no intervening conditionals:
 //
 // for (...) {
 //   for (...) {
 //     return x;
 //   }
 // }
 //
-// If |current| is NULL when this function returns, then there is no more
+// If |current| is nullptr when this function returns, then there is no more
 // control flow to be processed.
 IonBuilder::ControlStatus
 IonBuilder::processControlEnd()
 {
     JS_ASSERT(!current);
 
     if (cfgStack_.empty()) {
         // If there is no more control flow to process, then this is the
@@ -2129,17 +2129,17 @@ IonBuilder::processForCondEnd(CFGState &
 
 IonBuilder::ControlStatus
 IonBuilder::processForBodyEnd(CFGState &state)
 {
     if (!processDeferredContinues(state))
         return ControlStatus_Error;
 
     // If there is no updatepc, just go right to processing what would be the
-    // end of the update clause. Otherwise, |current| might be NULL; if this is
+    // end of the update clause. Otherwise, |current| might be nullptr; if this is
     // the case, the update is unreachable anyway.
     if (!state.loop.updatepc || !current)
         return processForUpdateEnd(state);
 
     pc = state.loop.updatepc;
 
     state.state = CFGState::FOR_LOOP_UPDATE;
     state.stopAt = state.loop.updateEnd;
@@ -2156,17 +2156,17 @@ IonBuilder::processForUpdateEnd(CFGState
 
     current->end(MGoto::New(state.loop.entry));
     return finishLoop(state, state.loop.successor);
 }
 
 IonBuilder::DeferredEdge *
 IonBuilder::filterDeadDeferredEdges(DeferredEdge *edge)
 {
-    DeferredEdge *head = edge, *prev = NULL;
+    DeferredEdge *head = edge, *prev = nullptr;
 
     while (edge) {
         if (edge->block->isDead()) {
             if (prev)
                 prev->next = edge->next;
             else
                 head = edge->next;
         } else {
@@ -2208,44 +2208,44 @@ IonBuilder::processDeferredContinues(CFG
 
         // Remaining edges
         while (edge) {
             edge->block->end(MGoto::New(update));
             if (!update->addPredecessor(edge->block))
                 return ControlStatus_Error;
             edge = edge->next;
         }
-        state.loop.continues = NULL;
+        state.loop.continues = nullptr;
 
         setCurrentAndSpecializePhis(update);
     }
 
     return true;
 }
 
 MBasicBlock *
 IonBuilder::createBreakCatchBlock(DeferredEdge *edge, jsbytecode *pc)
 {
     edge = filterDeadDeferredEdges(edge);
 
     // Create block, using the first break statement as predecessor
     MBasicBlock *successor = newBlock(edge->block, pc);
     if (!successor)
-        return NULL;
+        return nullptr;
 
     // No need to use addPredecessor for first edge,
     // because it is already predecessor.
     edge->block->end(MGoto::New(successor));
     edge = edge->next;
 
     // Finish up remaining breaks.
     while (edge) {
         edge->block->end(MGoto::New(successor));
         if (!successor->addPredecessor(edge->block))
-            return NULL;
+            return nullptr;
         edge = edge->next;
     }
 
     return successor;
 }
 
 IonBuilder::ControlStatus
 IonBuilder::processNextTableSwitchCase(CFGState &state)
@@ -2382,17 +2382,17 @@ IonBuilder::processBreak(JSOp op, jssrcn
                 found = true;
                 break;
             }
         }
     }
 
     JS_ASSERT(found);
 
-    setCurrent(NULL);
+    setCurrent(nullptr);
     pc += js_CodeSpec[op].length;
     return processControlEnd();
 }
 
 static inline jsbytecode *
 EffectiveContinue(jsbytecode *pc)
 {
     if (JSOp(*pc) == JSOP_GOTO)
@@ -2401,17 +2401,17 @@ EffectiveContinue(jsbytecode *pc)
 }
 
 IonBuilder::ControlStatus
 IonBuilder::processContinue(JSOp op)
 {
     JS_ASSERT(op == JSOP_GOTO);
 
     // Find the target loop.
-    CFGState *found = NULL;
+    CFGState *found = nullptr;
     jsbytecode *target = pc + GetJumpOffset(pc);
     for (size_t i = loops_.length() - 1; i < loops_.length(); i--) {
         if (loops_[i].continuepc == target ||
             EffectiveContinue(loops_[i].continuepc) == target)
         {
             found = &cfgStack_[loops_[i].cfgEntry];
             break;
         }
@@ -2419,73 +2419,73 @@ IonBuilder::processContinue(JSOp op)
 
     // There must always be a valid target loop structure. If not, there's
     // probably an off-by-something error in which pc we track.
     JS_ASSERT(found);
     CFGState &state = *found;
 
     state.loop.continues = new DeferredEdge(current, state.loop.continues);
 
-    setCurrent(NULL);
+    setCurrent(nullptr);
     pc += js_CodeSpec[op].length;
     return processControlEnd();
 }
 
 IonBuilder::ControlStatus
 IonBuilder::processSwitchBreak(JSOp op)
 {
     JS_ASSERT(op == JSOP_GOTO);
 
     // Find the target switch.
-    CFGState *found = NULL;
+    CFGState *found = nullptr;
     jsbytecode *target = pc + GetJumpOffset(pc);
     for (size_t i = switches_.length() - 1; i < switches_.length(); i--) {
         if (switches_[i].continuepc == target) {
             found = &cfgStack_[switches_[i].cfgEntry];
             break;
         }
     }
 
     // There must always be a valid target loop structure. If not, there's
     // probably an off-by-something error in which pc we track.
     JS_ASSERT(found);
     CFGState &state = *found;
 
-    DeferredEdge **breaks = NULL;
+    DeferredEdge **breaks = nullptr;
     switch (state.state) {
       case CFGState::TABLE_SWITCH:
         breaks = &state.tableswitch.breaks;
         break;
       case CFGState::COND_SWITCH_BODY:
         breaks = &state.condswitch.breaks;
         break;
       default:
         MOZ_ASSUME_UNREACHABLE("Unexpected switch state.");
     }
 
     *breaks = new DeferredEdge(current, *breaks);
 
-    setCurrent(NULL);
+    setCurrent(nullptr);
     pc += js_CodeSpec[op].length;
     return processControlEnd();
 }
 
 IonBuilder::ControlStatus
 IonBuilder::processSwitchEnd(DeferredEdge *breaks, jsbytecode *exitpc)
 {
     // No break statements, no current.
     // This means that control flow is cut-off from this point
     // (e.g. all cases have return statements).
     if (!breaks && !current)
         return ControlStatus_Ended;
 
     // Create successor block.
     // If there are breaks, create block with breaks as predecessor
     // Else create a block with current as predecessor
-    MBasicBlock *successor = NULL;
+    MBasicBlock *successor = nullptr;
     if (breaks)
         successor = createBreakCatchBlock(breaks, exitpc);
     else
         successor = newBlock(current, exitpc);
 
     if (!successor)
         return ControlStatus_Ended;
 
@@ -2778,18 +2778,18 @@ IonBuilder::forLoop(JSOp op, jssrcnote *
     analyzeNewLoopTypes(header, bodyStart, exitpc);
     if (!pushLoop(initial, stopAt, header, osr,
                   loopHead, pc, bodyStart, bodyEnd, exitpc, updatepc))
     {
         return ControlStatus_Error;
     }
 
     CFGState &state = cfgStack_.back();
-    state.loop.condpc = (condpc != ifne) ? condpc : NULL;
-    state.loop.updatepc = (updatepc != condpc) ? updatepc : NULL;
+    state.loop.condpc = (condpc != ifne) ? condpc : nullptr;
+    state.loop.updatepc = (updatepc != condpc) ? updatepc : nullptr;
     if (state.loop.updatepc)
         state.loop.updateEnd = condpc;
 
     setCurrentAndSpecializePhis(header);
     if (!jsop_loophead(loopHead))
         return ControlStatus_Error;
 
     return ControlStatus_Jumped;
@@ -2846,17 +2846,17 @@ IonBuilder::tableSwitch(JSOp op, jssrcno
     // Create default case
     MBasicBlock *defaultcase = newBlock(current, defaultpc);
     if (!defaultcase)
         return ControlStatus_Error;
     tableswitch->addDefault(defaultcase);
     tableswitch->addBlock(defaultcase);
 
     // Create cases
-    jsbytecode *casepc = NULL;
+    jsbytecode *casepc = nullptr;
     for (int i = 0; i < high-low+1; i++) {
         casepc = pc + GET_JUMP_OFFSET(pc2);
 
         JS_ASSERT(casepc >= pc && casepc <= exitpc);
 
         MBasicBlock *caseblock = newBlock(current, casepc);
         if (!caseblock)
             return ControlStatus_Error;
@@ -3011,34 +3011,34 @@ IonBuilder::jsop_condswitch()
     return cfgStack_.append(state);
 }
 
 IonBuilder::CFGState
 IonBuilder::CFGState::CondSwitch(jsbytecode *exitpc, jsbytecode *defaultTarget)
 {
     CFGState state;
     state.state = COND_SWITCH_CASE;
-    state.stopAt = NULL;
+    state.stopAt = nullptr;
     state.condswitch.bodies = (FixedList<MBasicBlock *> *)GetIonContext()->temp->allocate(
         sizeof(FixedList<MBasicBlock *>));
     state.condswitch.currentIdx = 0;
     state.condswitch.defaultTarget = defaultTarget;
     state.condswitch.defaultIdx = uint32_t(-1);
     state.condswitch.exitpc = exitpc;
-    state.condswitch.breaks = NULL;
+    state.condswitch.breaks = nullptr;
     return state;
 }
 
 IonBuilder::CFGState
 IonBuilder::CFGState::Label(jsbytecode *exitpc)
 {
     CFGState state;
     state.state = LABEL;
     state.stopAt = exitpc;
-    state.label.breaks = NULL;
+    state.label.breaks = nullptr;
     return state;
 }
 
 IonBuilder::CFGState
 IonBuilder::CFGState::Try(jsbytecode *exitpc, MBasicBlock *successor)
 {
     CFGState state;
     state.state = TRY;
@@ -3052,35 +3052,35 @@ IonBuilder::processCondSwitchCase(CFGSta
 {
     JS_ASSERT(state.state == CFGState::COND_SWITCH_CASE);
     JS_ASSERT(!state.condswitch.breaks);
     JS_ASSERT(current);
     JS_ASSERT(JSOp(*pc) == JSOP_CASE);
     FixedList<MBasicBlock *> &bodies = *state.condswitch.bodies;
     jsbytecode *defaultTarget = state.condswitch.defaultTarget;
     uint32_t &currentIdx = state.condswitch.currentIdx;
-    jsbytecode *lastTarget = currentIdx ? bodies[currentIdx - 1]->pc() : NULL;
+    jsbytecode *lastTarget = currentIdx ? bodies[currentIdx - 1]->pc() : nullptr;
 
     // Fetch the following case in which we will continue.
     jssrcnote *sn = info().getNote(gsn, pc);
     ptrdiff_t off = js_GetSrcNoteOffset(sn, 0);
     jsbytecode *casePc = off ? pc + off : GetNextPc(pc);
     bool caseIsDefault = JSOp(*casePc) == JSOP_DEFAULT;
     JS_ASSERT(JSOp(*casePc) == JSOP_CASE || caseIsDefault);
 
     // Allocate the block of the matching case.
     bool bodyIsNew = false;
-    MBasicBlock *bodyBlock = NULL;
+    MBasicBlock *bodyBlock = nullptr;
     jsbytecode *bodyTarget = pc + GetJumpOffset(pc);
     if (lastTarget < bodyTarget) {
         // If the default body is in the middle or aliasing the current target.
         if (lastTarget < defaultTarget && defaultTarget <= bodyTarget) {
             JS_ASSERT(state.condswitch.defaultIdx == uint32_t(-1));
             state.condswitch.defaultIdx = currentIdx;
-            bodies[currentIdx] = NULL;
+            bodies[currentIdx] = nullptr;
             // If the default body does not alias any and it would be allocated
             // later and stored in the defaultIdx location.
             if (defaultTarget < bodyTarget)
                 currentIdx++;
         }
 
         bodyIsNew = true;
         // Pop switch and case operands.
@@ -3096,32 +3096,32 @@ IonBuilder::processCondSwitchCase(CFGSta
     if (!bodyBlock)
         return ControlStatus_Error;
 
     lastTarget = bodyTarget;
 
     // Allocate the block of the non-matching case.  This can either be a normal
     // case or the default case.
     bool caseIsNew = false;
-    MBasicBlock *caseBlock = NULL;
+    MBasicBlock *caseBlock = nullptr;
     if (!caseIsDefault) {
         caseIsNew = true;
         // Pop the case operand.
         caseBlock = newBlockPopN(current, GetNextPc(pc), 1);
     } else {
         // The non-matching case is the default case, which jump directly to its
         // body. Skip the creation of a default case block and directly create
         // the default body if it does not alias any previous body.
 
         if (state.condswitch.defaultIdx == uint32_t(-1)) {
             // The default target is the last target.
             JS_ASSERT(lastTarget < defaultTarget);
             state.condswitch.defaultIdx = currentIdx++;
             caseIsNew = true;
-        } else if (bodies[state.condswitch.defaultIdx] == NULL) {
+        } else if (bodies[state.condswitch.defaultIdx] == nullptr) {
             // The default target is in the middle and it does not alias any
             // case target.
             JS_ASSERT(defaultTarget < lastTarget);
             caseIsNew = true;
         } else {
             // The default target is in the middle and it alias a case target.
             JS_ASSERT(defaultTarget <= lastTarget);
             caseBlock = bodies[state.condswitch.defaultIdx];
@@ -3182,17 +3182,17 @@ IonBuilder::processCondSwitchCase(CFGSta
         // Handle break statements in processSwitchBreak while processing
         // bodies.
         ControlFlowInfo breakInfo(cfgStack_.length() - 1, state.condswitch.exitpc);
         if (!switches_.append(breakInfo))
             return ControlStatus_Error;
 
         // Jump into the first body.
         currentIdx = 0;
-        setCurrent(NULL);
+        setCurrent(nullptr);
         state.state = CFGState::COND_SWITCH_BODY;
         return processCondSwitchBody(state);
     }
 
     // Continue until the case condition.
     setCurrentAndSpecializePhis(caseBlock);
     pc = current->pc();
     state.stopAt = casePc;
@@ -3426,17 +3426,17 @@ IonBuilder::jsop_try()
         if (!successor)
             return false;
 
         // Add MTest(true, tryBlock, successorBlock).
         MConstant *true_ = MConstant::New(BooleanValue(true));
         current->add(true_);
         current->end(MTest::New(true_, tryBlock, successor));
     } else {
-        successor = NULL;
+        successor = nullptr;
         current->end(MGoto::New(tryBlock));
     }
 
     if (!cfgStack_.append(CFGState::Try(endpc, successor)))
         return false;
 
     // The baseline compiler should not attempt to enter the catch block
     // via OSR.
@@ -3460,30 +3460,30 @@ IonBuilder::processReturn(JSOp op)
       {
         MInstruction *ins = MConstant::New(UndefinedValue());
         current->add(ins);
         def = ins;
         break;
       }
 
       default:
-        def = NULL;
+        def = nullptr;
         MOZ_ASSUME_UNREACHABLE("unknown return op");
     }
 
     if (instrumentedProfiling())
         current->add(MFunctionBoundary::New(script(), MFunctionBoundary::Exit));
     MReturn *ret = MReturn::New(def);
     current->end(ret);
 
     if (!graph().addExit(current))
         return ControlStatus_Error;
 
     // Make sure no one tries to use this block now.
-    setCurrent(NULL);
+    setCurrent(nullptr);
     return processControlEnd();
 }
 
 IonBuilder::ControlStatus
 IonBuilder::processThrow()
 {
     // JSOP_THROW can't be compiled within inlined frames.
     if (callerBuilder_)
@@ -3522,17 +3522,17 @@ IonBuilder::processThrow()
 
     MThrow *ins = MThrow::New(def);
     current->end(ins);
 
     if (!graph().addExit(current))
         return ControlStatus_Error;
 
     // Make sure no one tries to use this block now.
-    setCurrent(NULL);
+    setCurrent(nullptr);
     return processControlEnd();
 }
 
 bool
 IonBuilder::pushConstant(const Value &v)
 {
     MConstant *ins = MConstant::New(v);
     current->add(ins);
@@ -3778,27 +3778,27 @@ IonBuilder::inlineScriptedCall(CallInfo 
             current->add(barrier);
             callInfo.setThis(barrier);
         }
     }
 
     // Start inlining.
     LifoAlloc *alloc = GetIonContext()->temp->lifoAlloc();
     CompileInfo *info = alloc->new_<CompileInfo>(calleeScript, target,
-                                                 (jsbytecode *)NULL, callInfo.constructing(),
+                                                 (jsbytecode *)nullptr, callInfo.constructing(),
                                                  this->info().executionMode());
     if (!info)
         return false;
 
     MIRGraphExits saveExits;
     AutoAccumulateExits aae(graph(), saveExits);
 
     // Build the graph.
     JS_ASSERT(!cx->isExceptionPending());
-    IonBuilder inlineBuilder(cx, &temp(), &graph(), &inspector, info, NULL,
+    IonBuilder inlineBuilder(cx, &temp(), &graph(), &inspector, info, nullptr,
                              inliningDepth_ + 1, loopDepth_);
     if (!inlineBuilder.buildInline(this, outerResumePoint, callInfo)) {
         if (cx->isExceptionPending()) {
             IonSpew(IonSpew_Abort, "Inline builder raised exception.");
             abortReason_ = AbortReason_Error;
             return false;
         }
 
@@ -3811,24 +3811,24 @@ IonBuilder::inlineScriptedCall(CallInfo 
             abortReason_ = AbortReason_Inlining;
         }
 
         return false;
     }
 
     // Create return block.
     jsbytecode *postCall = GetNextPc(pc);
-    MBasicBlock *returnBlock = newBlock(NULL, postCall);
+    MBasicBlock *returnBlock = newBlock(nullptr, postCall);
     if (!returnBlock)
         return false;
     returnBlock->setCallerResumePoint(callerResumePoint_);
 
     // When profiling add Inline_Exit instruction to indicate end of inlined function.
     if (instrumentedProfiling())
-        returnBlock->add(MFunctionBoundary::New(NULL, MFunctionBoundary::Inline_Exit));
+        returnBlock->add(MFunctionBoundary::New(nullptr, MFunctionBoundary::Inline_Exit));
 
     // Inherit the slots from current and pop |fun|.
     returnBlock->inheritSlots(current);
     returnBlock->pop();
 
     // Accumulate return values.
     MIRGraphExits &exits = *inlineBuilder.graph().exitAccumulator();
     if (exits.length() == 0) {
@@ -3871,40 +3871,40 @@ IonBuilder::patchInlinedReturn(CallInfo 
     } else if (callInfo.isSetter()) {
         // Setters return their argument, not whatever value is returned.
         rdef = callInfo.getArg(0);
     }
 
     MGoto *replacement = MGoto::New(bottom);
     exit->end(replacement);
     if (!bottom->addPredecessorWithoutPhis(exit))
-        return NULL;
+        return nullptr;
 
     return rdef;
 }
 
 MDefinition *
 IonBuilder::patchInlinedReturns(CallInfo &callInfo, MIRGraphExits &exits, MBasicBlock *bottom)
 {
     // Replaces MReturns with MGotos, returning the MDefinition
-    // representing the return value, or NULL.
+    // representing the return value, or nullptr.
     JS_ASSERT(exits.length() > 0);
 
     if (exits.length() == 1)
         return patchInlinedReturn(callInfo, exits[0], bottom);
 
     // Accumulate multiple returns with a phi.
     MPhi *phi = MPhi::New(bottom->stackDepth());
     if (!phi->reserveLength(exits.length()))
-        return NULL;
+        return nullptr;
 
     for (size_t i = 0; i < exits.length(); i++) {
         MDefinition *rdef = patchInlinedReturn(callInfo, exits[i], bottom);
         if (!rdef)
-            return NULL;
+            return nullptr;
         phi->addInput(rdef);
     }
 
     bottom->addPhi(phi);
     return phi;
 }
 
 static bool
@@ -3916,17 +3916,17 @@ IsSmallFunction(JSScript *script)
 bool
 IonBuilder::makeInliningDecision(JSFunction *target, CallInfo &callInfo)
 {
     // Only inline when inlining is enabled.
     if (!inliningEnabled())
         return false;
 
     // When there is no target, inlining is impossible.
-    if (target == NULL)
+    if (target == nullptr)
         return false;
 
     // Native functions provide their own detection in inlineNativeCall().
     if (target->isNative())
         return true;
 
     // Determine whether inlining is possible at callee site
     if (!canInlineTarget(target, callInfo.constructing()))
@@ -4041,60 +4041,60 @@ CanInlineGetPropertyCache(MGetPropertyCa
         return false;
     return true;
 }
 
 MGetPropertyCache *
 IonBuilder::getInlineableGetPropertyCache(CallInfo &callInfo)
 {
     if (callInfo.constructing())
-        return NULL;
+        return nullptr;
 
     MDefinition *thisDef = callInfo.thisArg();
     if (thisDef->type() != MIRType_Object)
-        return NULL;
+        return nullptr;
 
     // Unwrap thisDef for pointer comparison purposes.
     if (thisDef->isPassArg())
         thisDef = thisDef->toPassArg()->getArgument();
 
     MDefinition *funcDef = callInfo.fun();
     if (funcDef->type() != MIRType_Object)
-        return NULL;
+        return nullptr;
 
     // MGetPropertyCache with no uses may be optimized away.
     if (funcDef->isGetPropertyCache()) {
         MGetPropertyCache *cache = funcDef->toGetPropertyCache();
         if (cache->hasUses())
-            return NULL;
+            return nullptr;
         if (!CanInlineGetPropertyCache(cache, thisDef))
-            return NULL;
+            return nullptr;
         return cache;
     }
 
     // Optimize away the following common pattern:
     // MTypeBarrier[MIRType_Object] <- MGetPropertyCache
     if (funcDef->isTypeBarrier()) {
         MTypeBarrier *barrier = funcDef->toTypeBarrier();
         if (barrier->hasUses())
-            return NULL;
+            return nullptr;
         if (barrier->type() != MIRType_Object)
-            return NULL;
+            return nullptr;
         if (!barrier->input()->isGetPropertyCache())
-            return NULL;
+            return nullptr;
 
         MGetPropertyCache *cache = barrier->input()->toGetPropertyCache();
         if (cache->hasUses() && !cache->hasOneUse())
-            return NULL;
+            return nullptr;
         if (!CanInlineGetPropertyCache(cache, thisDef))
-            return NULL;
+            return nullptr;
         return cache;
     }
 
-    return NULL;
+    return nullptr;
 }
 
 IonBuilder::InliningStatus
 IonBuilder::inlineSingleCall(CallInfo &callInfo, JSFunction *target)
 {
     // Expects formals to be popped and wrapped.
     if (target->isNative())
         return inlineNativeCall(callInfo, target->native());
@@ -4228,18 +4228,18 @@ IonBuilder::inlineTypeObjectFallback(Cal
     MBasicBlock *prepBlock = newBlock(dispatchBlock, pc);
     if (!prepBlock)
         return false;
     fallbackInfo.popFormals(prepBlock);
 
     // Construct a block into which the MGetPropertyCache can be moved.
     // This is subtle: the pc and resume point are those of the MGetPropertyCache!
     InlinePropertyTable *propTable = cache->propTable();
-    JS_ASSERT(propTable->pc() != NULL);
-    JS_ASSERT(propTable->priorResumePoint() != NULL);
+    JS_ASSERT(propTable->pc() != nullptr);
+    JS_ASSERT(propTable->priorResumePoint() != nullptr);
     MBasicBlock *getPropBlock = newBlock(prepBlock, propTable->pc(), propTable->priorResumePoint());
     if (!getPropBlock)
         return false;
 
     prepBlock->end(MGoto::New(getPropBlock));
 
     // Since the getPropBlock inherited the stack from right before the MGetPropertyCache,
     // the target of the MGetPropertyCache is still on the stack.
@@ -4264,17 +4264,17 @@ IonBuilder::inlineTypeObjectFallback(Cal
 
     // Construct an end block with the correct resume point.
     MBasicBlock *preCallBlock = newBlock(getPropBlock, pc, preCallResumePoint);
     if (!preCallBlock)
         return false;
     getPropBlock->end(MGoto::New(preCallBlock));
 
     // Now inline the MCallGeneric, using preCallBlock as the dispatch point.
-    if (!inlineGenericFallback(NULL, fallbackInfo, preCallBlock, false))
+    if (!inlineGenericFallback(nullptr, fallbackInfo, preCallBlock, false))
         return false;
 
     // inlineGenericFallback() set the return block as |current|.
     preCallBlock->end(MGoto::New(current));
     *fallbackTarget = prepBlock;
     return true;
 }
 
@@ -4303,31 +4303,31 @@ IonBuilder::inlineCalls(CallInfo &callIn
     // targets, as the entries should only be used for comparison.
     //
     // The InlinePropertyTable will also be patched at the end to exclude native functions
     // that vetoed inlining.
     if (maybeCache) {
         InlinePropertyTable *propTable = maybeCache->propTable();
         propTable->trimToTargets(originals);
         if (propTable->numEntries() == 0)
-            maybeCache = NULL;
+            maybeCache = nullptr;
     }
 
     // Generate a dispatch based on guard kind.
     MDispatchInstruction *dispatch;
     if (maybeCache) {
         dispatch = MTypeObjectDispatch::New(maybeCache->object(), maybeCache->propTable());
         callInfo.fun()->setFoldedUnchecked();
     } else {
         dispatch = MFunctionDispatch::New(callInfo.fun());
     }
 
     // Generate a return block to host the rval-collecting MPhi.
     jsbytecode *postCall = GetNextPc(pc);
-    MBasicBlock *returnBlock = newBlock(NULL, postCall);
+    MBasicBlock *returnBlock = newBlock(nullptr, postCall);
     if (!returnBlock)
         return false;
     returnBlock->setCallerResumePoint(callerResumePoint_);
 
     // Set up stack, used to manually create a post-call resume point.
     returnBlock->inheritSlots(dispatchBlock);
     callInfo.popFormals(returnBlock);
 
@@ -4346,17 +4346,17 @@ IonBuilder::inlineCalls(CallInfo &callIn
             count++;
     }
     retPhi->reserveLength(count);
 
     // During inlining the 'this' value is assigned a type set which is
     // specialized to the type objects which can generate that inlining target.
     // After inlining the original type set is restored.
     types::TemporaryTypeSet *cacheObjectTypeSet =
-        maybeCache ? maybeCache->object()->resultTypeSet() : NULL;
+        maybeCache ? maybeCache->object()->resultTypeSet() : nullptr;
 
     // Inline each of the inlineable targets.
     JS_ASSERT(targets.length() == originals.length());
     for (uint32_t i = 0; i < targets.length(); i++) {
         // When original != target, the target is a callsite clone. The
         // original should be used for guards, and the target should be the
         // actual function inlined.
         JSFunction *original = &originals[i]->as<JSFunction>();
@@ -4443,34 +4443,34 @@ IonBuilder::inlineCalls(CallInfo &callIn
         maybeCache->object()->setResultTypeSet(cacheObjectTypeSet);
 
         InlinePropertyTable *propTable = maybeCache->propTable();
         propTable->trimTo(originals, choiceSet);
 
         // If all paths were vetoed, output only a generic fallback path.
         if (propTable->numEntries() == 0) {
             JS_ASSERT(dispatch->numCases() == 0);
-            maybeCache = NULL;
+            maybeCache = nullptr;
         }
     }
 
     // If necessary, generate a fallback path.
     // MTypeObjectDispatch always uses a fallback path.
     if (maybeCache || dispatch->numCases() < targets.length()) {
         // Generate fallback blocks, and set |current| to the fallback return block.
         if (maybeCache) {
             MBasicBlock *fallbackTarget;
             if (!inlineTypeObjectFallback(callInfo, dispatchBlock, (MTypeObjectDispatch *)dispatch,
                                           maybeCache, &fallbackTarget))
             {
                 return false;
             }
             dispatch->addFallback(fallbackTarget);
         } else {
-            JSFunction *remaining = NULL;
+            JSFunction *remaining = nullptr;
             bool clonedAtCallsite = false;
 
             // If there is only 1 remaining case, we can annotate the fallback call
             // with the target information.
             if (dispatch->numCases() + 1 == originals.length()) {
                 for (uint32_t i = 0; i < originals.length(); i++) {
                     if (choiceSet[i])
                         continue;
@@ -4514,17 +4514,17 @@ IonBuilder::createDeclEnvObject(MDefinit
     // Create a template CallObject that we'll use to generate inline object
     // creation. Even though this template will get discarded at the end of
     // compilation, it is used by the background compilation thread and thus
     // cannot use the Nursery.
 
     RootedFunction fun(cx, info().fun());
     RootedObject templateObj(cx, DeclEnvObject::createTemplateObject(cx, fun, gc::TenuredHeap));
     if (!templateObj)
-        return NULL;
+        return nullptr;
 
     // Add dummy values on the slot of the template object such as we do not try
     // mark uninitialized values.
     templateObj->setFixedSlot(DeclEnvObject::enclosingScopeSlot(), MagicValue(JS_GENERIC_MAGIC));
     templateObj->setFixedSlot(DeclEnvObject::lambdaSlot(), MagicValue(JS_GENERIC_MAGIC));
 
     // One field is added to the function to handle its name.  This cannot be a
     // dynamic slot because there is still plenty of room on the DeclEnv object.
@@ -4552,17 +4552,17 @@ IonBuilder::createCallObject(MDefinition
     // Create a template CallObject that we'll use to generate inline object
     // creation. Even though this template will get discarded at the end of
     // compilation, it is used by the background compilation thread and thus
     // cannot use the Nursery.
 
     RootedScript scriptRoot(cx, script());
     RootedObject templateObj(cx, CallObject::createTemplateObject(cx, scriptRoot, gc::TenuredHeap));
     if (!templateObj)
-        return NULL;
+        return nullptr;
 
     // If the CallObject needs dynamic slots, allocate those now.
     MInstruction *slots;
     if (templateObj->hasDynamicSlots()) {
         size_t nslots = JSObject::dynamicSlotsCount(templateObj->numFixedSlots(),
                                                     templateObj->slotSpan());
         slots = MNewSlots::New(nslots);
     } else {
@@ -4633,52 +4633,52 @@ IonBuilder::createThisScripted(MDefiniti
 
     return createThis;
 }
 
 JSObject *
 IonBuilder::getSingletonPrototype(JSFunction *target)
 {
     if (!target || !target->hasSingletonType())
-        return NULL;
+        return nullptr;
     types::TypeObject *targetType = target->getType(cx);
     if (targetType->unknownProperties())
-        return NULL;
+        return nullptr;
 
     jsid protoid = NameToId(cx->names().classPrototype);
     types::HeapTypeSet *protoTypes = targetType->getProperty(cx, protoid);
     if (!protoTypes)
-        return NULL;
+        return nullptr;
 
     return protoTypes->getSingleton(cx);
 }
 
 MDefinition *
 IonBuilder::createThisScriptedSingleton(JSFunction *target, MDefinition *callee)
 {
     // Get the singleton prototype (if exists)
     JSObject *proto = getSingletonPrototype(target);
     if (!proto)
-        return NULL;
+        return nullptr;
 
     if (!target->nonLazyScript()->types)
-        return NULL;
+        return nullptr;
 
     // Generate an inline path to create a new |this| object with
     // the given singleton prototype.
     types::TypeObject *type = cx->getNewType(&JSObject::class_, proto, target);
     if (!type)
-        return NULL;
+        return nullptr;
     if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(type)))
-        return NULL;
+        return nullptr;
 
     RootedObject targetRoot(cx, target);
     JSObject *templateObject = CreateThisForFunctionWithProto(cx, targetRoot, proto, TenuredObject);
     if (!templateObject)
-        return NULL;
+        return nullptr;
 
     // Trigger recompilation if the templateObject changes.
     if (templateObject->type()->hasNewScript())
         types::HeapTypeSet::WatchObjectStateChange(cx, templateObject->type());
 
     MCreateThisWithTemplate *createThis = MCreateThisWithTemplate::New(templateObject);
     current->add(createThis);
 
@@ -4693,17 +4693,17 @@ IonBuilder::createThis(JSFunction *targe
         MCreateThis *createThis = MCreateThis::New(callee);
         current->add(createThis);
         return createThis;
     }
 
     // Native constructors build the new Object themselves.
     if (target->isNative()) {
         if (!target->isNativeConstructor())
-            return NULL;
+            return nullptr;
 
         MConstant *magic = MConstant::New(MagicValue(JS_IS_CONSTRUCTING));
         current->add(magic);
         return magic;
     }
 
     // Try baking in the prototype.
     MDefinition *createThis = createThisScriptedSingleton(target, callee);
@@ -4977,17 +4977,17 @@ IonBuilder::jsop_call(uint32_t argc, boo
     // Try inlining
     InliningStatus status = inlineCallsite(targets, originals, gotLambda, callInfo);
     if (status == InliningStatus_Inlined)
         return true;
     if (status == InliningStatus_Error)
         return false;
 
     // No inline, just make the call.
-    JSFunction *target = NULL;
+    JSFunction *target = nullptr;
     if (targets.length() == 1)
         target = &targets[0]->as<JSFunction>();
 
     return makeCall(target, callInfo, hasClones);
 }
 
 MDefinition *
 IonBuilder::makeCallsiteClone(JSFunction *target, MDefinition *fun)
@@ -5145,17 +5145,17 @@ IonBuilder::makeCallHelper(JSFunction *t
     // Collect number of missing arguments provided that the target is
     // scripted. Native functions are passed an explicit 'argc' parameter.
     if (target && !target->isNative())
         targetArgs = Max<uint32_t>(target->nargs, callInfo.argc());
 
     MCall *call =
         MCall::New(target, targetArgs + 1, callInfo.argc(), callInfo.constructing());
     if (!call)
-        return NULL;
+        return nullptr;
 
     // Explicitly pad any missing arguments with |undefined|.
     // This permits skipping the argumentsRectifier.
     for (int i = targetArgs; i > (int)callInfo.argc(); i--) {
         JS_ASSERT_IF(target, !target->isNative());
         MConstant *undef = MConstant::New(UndefinedValue());
         current->add(undef);
         MPassArg *pass = MPassArg::New(undef);
@@ -5178,17 +5178,17 @@ IonBuilder::makeCallHelper(JSFunction *t
     thisArg->block()->insertBefore(thisArg, start);
     call->initPrepareCall(start);
 
     // Inline the constructor on the caller-side.
     if (callInfo.constructing()) {
         MDefinition *create = createThis(target, callInfo.fun());
         if (!create) {
             abort("Failure inlining constructor for call.");
-            return NULL;
+            return nullptr;
         }
 
         // Unwrap the MPassArg before discarding: it may have been captured by an MResumePoint.
         thisArg->replaceAllUsesWith(thisArg->getArgument());
         thisArg->block()->discard(thisArg);
 
         MPassArg *newThis = MPassArg::New(create);
         current->add(newThis);
@@ -5341,17 +5341,17 @@ IonBuilder::jsop_eval(uint32_t argc)
 
                 current->push(dynamicName);
                 current->push(thisv);
 
                 CallInfo evalCallInfo(/* constructing = */ false);
                 if (!evalCallInfo.init(current, /* argc = */ 0))
                     return false;
 
-                return makeCall(NULL, evalCallInfo, false);
+                return makeCall(nullptr, evalCallInfo, false);
             }
         }
 
         MInstruction *filterArguments = MFilterArguments::New(string);
         current->add(filterArguments);
 
         MInstruction *ins = MCallDirectEval::New(scopeChain, string, thisValue, pc);
         current->add(ins);
@@ -5385,24 +5385,24 @@ JSObject *
 IonBuilder::getNewArrayTemplateObject(uint32_t count)
 {
     NewObjectKind newKind = types::UseNewTypeForInitializer(cx, script(), pc, JSProto_Array);
 
     // Do not allocate template objects in the nursery.
     if (newKind == GenericObject)
         newKind = TenuredObject;
 
-    JSObject *templateObject = NewDenseUnallocatedArray(cx, count, NULL, newKind);
+    JSObject *templateObject = NewDenseUnallocatedArray(cx, count, nullptr, newKind);
     if (!templateObject)
-        return NULL;
+        return nullptr;
 
     if (newKind != SingletonObject) {
         types::TypeObject *type = types::TypeScript::InitObject(cx, script(), pc, JSProto_Array);
         if (!type)
-            return NULL;
+            return nullptr;
         templateObject->setType(type);
     }
 
     return templateObject;
 }
 
 bool
 IonBuilder::jsop_newarray(uint32_t count)
@@ -5667,17 +5667,17 @@ IonBuilder::jsop_initelem_getter_setter(
     current->add(init);
     return resumeAfter(init);
 }
 
 MBasicBlock *
 IonBuilder::addBlock(MBasicBlock *block, uint32_t loopDepth)
 {
     if (!block)
-        return NULL;
+        return nullptr;
     graph().addBlock(block);
     block->setLoopDepth(loopDepth);
     return block;
 }
 
 MBasicBlock *
 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc)
 {
@@ -5702,17 +5702,17 @@ IonBuilder::newBlockPopN(MBasicBlock *pr
 }
 
 MBasicBlock *
 IonBuilder::newBlockAfter(MBasicBlock *at, MBasicBlock *predecessor, jsbytecode *pc)
 {
     MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
                                           predecessor, pc, MBasicBlock::NORMAL);
     if (!block)
-        return NULL;
+        return nullptr;
     graph().insertBlockAfter(at, block);
     return block;
 }
 
 MBasicBlock *
 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc, uint32_t loopDepth)
 {
     MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
@@ -5727,17 +5727,17 @@ IonBuilder::newOsrPreheader(MBasicBlock 
     JS_ASSERT(loopEntry == info().osrPc());
 
     // Create two blocks: one for the OSR entry with no predecessors, one for
     // the preheader, which has the OSR entry block as a predecessor. The
     // OSR block is always the second block (with id 1).
     MBasicBlock *osrBlock  = newBlockAfter(*graph().begin(), loopEntry);
     MBasicBlock *preheader = newBlock(predecessor, loopEntry);
     if (!osrBlock || !preheader)
-        return NULL;
+        return nullptr;
 
     MOsrEntry *entry = MOsrEntry::New();
     osrBlock->add(entry);
 
     // Initialize |scopeChain|.
     {
         uint32_t slot = info().scopeChainSlot();
 
@@ -5752,17 +5752,17 @@ IonBuilder::newOsrPreheader(MBasicBlock 
         }
 
         osrBlock->add(scopev);
         osrBlock->initSlot(slot, scopev);
     }
 
     // Initialize arguments object.
     bool needsArgsObj = info().needsArgsObj();
-    MInstruction *argsObj = NULL;
+    MInstruction *argsObj = nullptr;
     if (info().hasArguments()) {
         if (needsArgsObj)
             argsObj = MOsrArgumentsObject::New(entry);
         else
             argsObj = MConstant::New(UndefinedValue());
         osrBlock->add(argsObj);
         osrBlock->initSlot(info().argsObjSlot(), argsObj);
     }
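
The block layout described in the comment above can be pictured with a toy stand-in for MBasicBlock; Block, its id field, and the predecessor list are all invented names for this sketch:

    #include <cassert>
    #include <vector>

    // Toy stand-in for MBasicBlock: just an id and a predecessor list.
    struct Block {
        int id;
        std::vector<Block*> predecessors;
    };

    int main()
    {
        Block scriptEntry; scriptEntry.id = 0;
        Block osrEntry;    osrEntry.id    = 1;   // OSR entry: no predecessors
        Block preheader;   preheader.id   = 2;

        // The preheader is reachable both from normal entry and from OSR;
        // the OSR entry block itself has no predecessors and is always id 1.
        preheader.predecessors.push_back(&scriptEntry);
        preheader.predecessors.push_back(&osrEntry);

        assert(osrEntry.predecessors.empty());
        assert(preheader.predecessors.size() == 2);
        return 0;
    }
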
@@ -5829,17 +5829,17 @@ IonBuilder::newOsrPreheader(MBasicBlock 
     // Create an MStart to hold the first valid MResumePoint.
     MStart *start = MStart::New(MStart::StartType_Osr);
     osrBlock->add(start);
     graph().setOsrStart(start);
 
     // MOsrValue instructions are infallible, so the first MResumePoint must
     // occur after they execute, at the point of the MStart.
     if (!resumeAt(start, loopEntry))
-        return NULL;
+        return nullptr;
 
     // Link the same MResumePoint from the MStart to each MOsrValue.
     // This causes logic in ShouldSpecializeInput() to not replace Uses with
     // Unboxes in the MResumePoint, so that the MStart always sees Values.
     osrBlock->linkOsrValues(start);
 
     // Clone types of the other predecessor of the pre-header to the OSR block,
     // so that pre-header phis won't discard the specialized type of the
@@ -5879,17 +5879,17 @@ IonBuilder::newOsrPreheader(MBasicBlock 
 }
 
 MBasicBlock *
 IonBuilder::newPendingLoopHeader(MBasicBlock *predecessor, jsbytecode *pc, bool osr)
 {
     loopDepth_++;
     MBasicBlock *block = MBasicBlock::NewPendingLoopHeader(graph(), info(), predecessor, pc);
     if (!addBlock(block, loopDepth_))
-        return NULL;
+        return nullptr;
 
     if (osr) {
         // Incorporate type information from the OSR frame into the loop
         // header. The OSR frame may have unexpected types due to type changes
         // within the loop body or due to incomplete profiling information,
         // in which case this may avoid restarts of loop analysis or bailouts
         // during the OSR itself.
 
@@ -5926,17 +5926,17 @@ IonBuilder::newPendingLoopHeader(MBasicB
             // Extract typeset from value.
             MIRType type = existingValue.isDouble()
                          ? MIRType_Double
                          : MIRTypeFromValueType(existingValue.extractNonDoubleType());
             types::Type ntype = types::GetValueType(existingValue);
             types::TemporaryTypeSet *typeSet =
                 GetIonContext()->temp->lifoAlloc()->new_<types::TemporaryTypeSet>(ntype);
             if (!typeSet)
-                return NULL;
+                return nullptr;
             phi->addBackedgeType(type, typeSet);
         }
     }
 
     return block;
 }
 
 // A resume point is a mapping of stack slots to MDefinitions. It is used to
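
A resume point maps stack slots to MDefinitions. A self-contained toy of that mapping, with ToyResumePoint and the opaque MDefinition used only as illustrative stand-ins for the real MIR classes (the real MResumePoint carries considerably more state):

    #include <cassert>
    #include <vector>
    #include <stdint.h>

    struct MDefinition;   // opaque stand-in for a MIR definition

    // Toy resume point: for a given bytecode offset, remember which
    // definition occupies each stack slot so the interpreter frame can be
    // reconstructed on bailout.
    struct ToyResumePoint {
        uint32_t pcOffset;
        std::vector<MDefinition*> slots;   // slots[i] holds stack slot i
    };

    int main()
    {
        ToyResumePoint rp;
        rp.pcOffset = 12;
        rp.slots.assign(3, static_cast<MDefinition*>(nullptr));   // three empty slots
        assert(rp.slots.size() == 3);
        return 0;
    }
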
@@ -6101,17 +6101,17 @@ TestSingletonPropertyTypes(JSContext *cx
         return true;
 
     if (types && types->unknownObject())
         return true;
 
     if (id != types::IdToTypeId(id))
         return true;
 
-    JSObject *objectSingleton = types ? types->getSingleton() : NULL;
+    JSObject *objectSingleton = types ? types->getSingleton() : nullptr;
     if (objectSingleton)
         return TestSingletonProperty(cx, objectSingleton, singleton, id, isKnownConstant);
 
     if (!globalObj)
         return true;
 
     JSProtoKey key;
     switch (obj->type()) {
@@ -6182,17 +6182,17 @@ TestSingletonPropertyTypes(JSContext *cx
         *isKnownConstant = true;
         return true;
       }
       default:
         return true;
     }
 
     RootedObject proto(cx);
-    if (!js_GetClassPrototype(cx, key, &proto, NULL))
+    if (!js_GetClassPrototype(cx, key, &proto, nullptr))
         return false;
 
     return TestSingletonProperty(cx, proto, singleton, id, isKnownConstant);
 }
 
 // Given an observed type set, annotates the IR as much as possible:
 // (1) If no type information is provided, the value on the top of the stack is
 //     left in place.
@@ -6214,17 +6214,17 @@ IonBuilder::pushTypeBarrier(MInstruction
     // If the instruction has no side effects, we'll resume the entire operation.
     // The actual type barrier will occur in the interpreter. If the
     // instruction is effectful, even if it has a singleton type, there
     // must be a resume point capturing the original def, and resuming
     // to that point will explicitly monitor the new type.
 
     if (!needsBarrier) {
         JSValueType type = observed->getKnownTypeTag();
-        MInstruction *replace = NULL;
+        MInstruction *replace = nullptr;
         switch (type) {
           case JSVAL_TYPE_UNDEFINED:
             ins->setFoldedUnchecked();
             replace = MConstant::New(UndefinedValue());
             break;
           case JSVAL_TYPE_NULL:
             ins->setFoldedUnchecked();
             replace = MConstant::New(NullValue());
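
The switch above replaces a load whose observed type is known to be |undefined| or |null| with a constant, so no runtime barrier is needed. A reduced sketch of that folding decision, with every name invented for illustration (this is not the real JSValueType/MIR API):

    #include <cassert>

    enum KnownTag { Tag_Undefined, Tag_Null, Tag_Other };

    struct Folded { bool replaced; const char *constant; };

    // Only monomorphic |undefined|/|null| observations allow folding the
    // load into a constant; anything else keeps the original instruction.
    static Folded FoldKnownPrimitive(KnownTag tag)
    {
        Folded result = { false, 0 };
        switch (tag) {
          case Tag_Undefined: result.replaced = true; result.constant = "undefined"; break;
          case Tag_Null:      result.replaced = true; result.constant = "null";      break;
          default:            break;
        }
        return result;
    }

    int main()
    {
        assert(FoldKnownPrimitive(Tag_Undefined).replaced);
        assert(!FoldKnownPrimitive(Tag_Other).replaced);
        return 0;
    }
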
@@ -6293,17 +6293,17 @@ IonBuilder::getStaticName(JSObject *stat
     if (!shape || !shape->hasDefaultGetter() || !shape->hasSlot()) {
         *psucceeded = false;
         return true;
     }
 
     types::TypeObject *staticType = staticObject->getType(cx);
     if (!staticType)
         return false;
-    types::HeapTypeSet *propertyTypes = NULL;
+    types::HeapTypeSet *propertyTypes = nullptr;
     if (!staticType->unknownProperties()) {
         propertyTypes = staticType->getProperty(cx, id);
         if (!propertyTypes)
             return false;
     }
     if (propertyTypes && propertyTypes->isConfiguredProperty(cx, staticType)) {
         // The property has been reconfigured as non-configurable, non-enumerable
         // or non-writable.
@@ -6404,17 +6404,17 @@ IonBuilder::setStaticName(JSObject *stat
     // as a normal data property on exactly the global object.
     Shape *shape = staticObject->nativeLookup(cx, id);
     if (!shape || !shape->hasDefaultSetter() || !shape->writable() || !shape->hasSlot())
         return jsop_setprop(name);
 
     types::TypeObject *staticType = staticObject->getType(cx);
     if (!staticType)
         return false;
-    types::HeapTypeSet *propertyTypes = NULL;
+    types::HeapTypeSet *propertyTypes = nullptr;
     if (!staticType->unknownProperties()) {
         propertyTypes = staticType->getProperty(cx, id);
         if (!propertyTypes)
             return false;
     }
     if (!propertyTypes || propertyTypes->isConfiguredProperty(cx, staticType)) {
         // The property has been reconfigured as non-configurable, non-enumerable
         // or non-writable.
@@ -6831,17 +6831,17 @@ IonBuilder::getElemTryCache(bool *emitte
     bool nonNativeGetElement = inspector->hasSeenNonNativeGetElement(pc);
     if (index->mightBeType(MIRType_Int32) && nonNativeGetElement)
         return true;
 
     // Emit GetElementCache.
 
     types::StackTypeSet *baseTypes = types::TypeScript::BytecodeTypes(script(), pc);
     bool barrier;
-    if (!PropertyReadNeedsTypeBarrier(cx, obj, NULL, baseTypes, &barrier))
+    if (!PropertyReadNeedsTypeBarrier(cx, obj, nullptr, baseTypes, &barrier))
         return false;
     types::TemporaryTypeSet *types = cloneTypeSet(baseTypes);
 
     // Always add a barrier if the index might be a string, so that the cache
     // can attach stubs for particular properties.
     if (index->mightBeType(MIRType_String))
         barrier = true;
 
@@ -6877,22 +6877,22 @@ bool
 IonBuilder::jsop_getelem_dense(MDefinition *obj, MDefinition *index)
 {
     types::StackTypeSet *baseTypes = types::TypeScript::BytecodeTypes(script(), pc);
 
     if (JSOp(*pc) == JSOP_CALLELEM && !index->mightBeType(MIRType_String) && baseTypes->noConstraints()) {
         // Indexed call on an element of an array. Populate the observed types
         // with any objects that could be in the array, to avoid extraneous
         // type barriers.
-        if (!AddObjectsForPropertyRead(cx, obj, NULL, baseTypes))
+        if (!AddObjectsForPropertyRead(cx, obj, nullptr, baseTypes))
             return false;
     }
 
     bool barrier;
-    if (!PropertyReadNeedsTypeBarrier(cx, obj, NULL, baseTypes, &barrier))
+    if (!PropertyReadNeedsTypeBarrier(cx, obj, nullptr, baseTypes, &barrier))
         return false;
     types::TemporaryTypeSet *types = cloneTypeSet(baseTypes);
 
     bool needsHoleCheck = !ElementAccessIsPacked(cx, obj);
 
     // Reads which are on holes in the object do not have to bail out if
     // undefined values have been observed at this access site and the access
     // cannot hit another indexed property on the object or its prototypes.
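
The hole-check logic referred to above can be sketched on its own. In this toy read, HOLE, ReadDenseElement, and allowUndefined are invented names; it only shows when a hole may read as undefined rather than forcing a bailout:

    #include <cassert>
    #include <climits>
    #include <cstddef>
    #include <vector>

    static const int HOLE = INT_MIN;   // sentinel standing in for a missing element

    // |allowUndefined| models the case described above: |undefined| has
    // already been observed at this access site and no prototype can supply
    // an indexed property, so a hole may simply read as undefined.
    static bool ReadDenseElement(const std::vector<int> &elems, size_t index,
                                 bool allowUndefined, bool *isUndefined)
    {
        *isUndefined = false;
        if (index >= elems.size() || elems[index] == HOLE) {
            if (!allowUndefined)
                return false;            // must bail out to the generic path
            *isUndefined = true;         // the read produces undefined
        }
        return true;
    }

    int main()
    {
        std::vector<int> elems;
        elems.push_back(1);
        elems.push_back(HOLE);

        bool isUndefined;
        assert(ReadDenseElement(elems, 1, true, &isUndefined) && isUndefined);
        assert(!ReadDenseElement(elems, 1, false, &isUndefined));
        return 0;
    }
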
@@ -7012,29 +7012,29 @@ IonBuilder::convertShiftToMaskForStaticT
     if (id->isConstant() && id->toConstant()->value().isInt32()) {
         int32_t index = id->toConstant()->value().toInt32();
         MConstant *offset = MConstant::New(Int32Value(index << TypedArrayShift(viewType)));
         current->add(offset);
         return offset;
     }
 
     if (!id->isRsh() || id->isEffectful())
-        return NULL;
+        return nullptr;
     if (!id->getOperand(1)->isConstant())
-        return NULL;
+        return nullptr;
     const Value &value = id->getOperand(1)->toConstant()->value();
     if (!value.isInt32() || uint32_t(value.toInt32()) != TypedArrayShift(viewType))
-        return NULL;
+        return nullptr;
 
     // Instead of shifting, mask off the low bits of the index so that
     // a non-scaled access on the typed array can be performed.
     MConstant *mask = MConstant::New(Int32Value(~((1 << value.toInt32()) - 1)));
     MBitAnd *ptr = MBitAnd::New(id->getOperand(0), mask);
 
-    ptr->infer(NULL, NULL);
+    ptr->infer(nullptr, nullptr);
     JS_ASSERT(!ptr->isEffectful());
 
     current->add(mask);
     current->add(ptr);
 
     return ptr;
 }
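
The rewrite above leans on a small arithmetic identity: for a non-negative index, shifting right by |shift| and letting the access scale back up is the same as masking off the low bits, which is why the shift can be replaced by an MBitAnd on the unscaled index. A standalone check of that identity:

    #include <cassert>
    #include <stdint.h>

    // For non-negative i: (i >> shift) << shift == i & ~((1 << shift) - 1).
    static int32_t MaskLowBits(int32_t i, unsigned shift)
    {
        return i & ~((1 << shift) - 1);
    }

    int main()
    {
        for (int32_t i = 0; i < 1024; i++) {
            for (unsigned shift = 0; shift < 4; shift++)
                assert(((i >> shift) << shift) == MaskLowBits(i, shift));
        }
        return 0;
    }
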
 
@@ -7254,18 +7254,18 @@ bool
 IonBuilder::setElemTryDense(bool *emitted, MDefinition *object,
                             MDefinition *index, MDefinition *value)
 {
     JS_ASSERT(*emitted == false);
 
     if (!ElementAccessIsDenseNative(object, index))
         return true;
     bool needsBarrier;
-    if (!PropertyWriteNeedsTypeBarrier(cx, current, &object, NULL, &value, /* canModify = */ true,
-                                       &needsBarrier))
+    if (!PropertyWriteNeedsTypeBarrier(cx, current, &object, nullptr, &value,
+                                       /* canModify = */ true, &needsBarrier))
     {
         return false;
     }
     if (needsBarrier)
         return true;
     if (!object->resultTypeSet())
         return true;
 
@@ -7320,18 +7320,18 @@ IonBuilder::setElemTryCache(bool *emitte
     // TODO: Bug 876650: remove this check:
     // Temporarily disable the cache if the object is not a dense native,
     // until the cache supports more ICs.
     SetElemICInspector icInspect(inspector->setElemICInspector(pc));
     if (!icInspect.sawDenseWrite() && !icInspect.sawTypedArrayWrite())
         return true;
 
     bool needsBarrier;
-    if (!PropertyWriteNeedsTypeBarrier(cx, current, &object, NULL, &value, /* canModify = */ true,
-                                       &needsBarrier))
+    if (!PropertyWriteNeedsTypeBarrier(cx, current, &object, nullptr, &value,
+                                       /* canModify = */ true, &needsBarrier))
     {
         return false;
     }
 
     if (needsBarrier)
         return true;
 
     // Emit SetElementCache.
@@ -7608,17 +7608,17 @@ IonBuilder::jsop_arguments_length()
 
     // We are inlining and know the number of arguments the callee pushed
     return pushConstant(Int32Value(inlineCallInfo_->argv().length()));
 }
 
 static JSObject *
 CreateRestArgumentsTemplateObject(JSContext *cx, unsigned length)
 {
-    JSObject *templateObject = NewDenseUnallocatedArray(cx, length, NULL, TenuredObject);
+    JSObject *templateObject = NewDenseUnallocatedArray(cx, length, nullptr, TenuredObject);
     if (templateObject)
         types::FixRestArgumentsType(cx, templateObject);
     return templateObject;
 }
 
 bool
 IonBuilder::jsop_rest()
 {
@@ -7676,32 +7676,32 @@ IonBuilder::jsop_rest()
 
     return true;
 }
 
 inline types::HeapTypeSet *
 GetDefiniteSlot(JSContext *cx, types::TemporaryTypeSet *types, JSAtom *atom)
 {
     if (!types || types->unknownObject() || types->getObjectCount() != 1)
-        return NULL;
+        return nullptr;
 
     types::TypeObject *type = types->getTypeObject(0);
     if (!type || type->unknownProperties())
-        return NULL;
+        return nullptr;
 
     jsid id = AtomToId(atom);
     if (id != types::IdToTypeId(id))
-        return NULL;
+        return nullptr;
 
     types::HeapTypeSet *propertyTypes = type->getProperty(cx, id);
     if (!propertyTypes ||
         !propertyTypes->definiteProperty() ||
         propertyTypes->isConfiguredProperty(cx, type))
     {
-        return NULL;
+        return nullptr;
     }
 
     return propertyTypes;
 }
 
 bool
 IonBuilder::jsop_runonce()
 {
@@ -7889,32 +7889,32 @@ SearchCommonPropFunc(JSContext *cx, type
 }
 
 inline bool
 FreezePropTypeSets(JSContext *cx, types::TemporaryTypeSet *types, JSObject *foundProto, jsid id)
 {
     types::TypeObject *curType;
     for (unsigned i = 0; i < types->getObjectCount(); i++) {
         curType = types->getTypeObject(i);
-        JSObject *obj = NULL;
+        JSObject *obj = nullptr;
         if (!curType) {
             obj = types->getSingleObject(i);
             if (!obj)
                 continue;
 
             curType = obj->getType(cx);
             if (!curType)
                 return false;
         }
 
         // If we found a Singleton object's own-property, there's nothing to
         // freeze.
         if (obj != foundProto) {
             // Walk the prototype chain. Everyone has to have the property, since we
-            // just checked, so propSet cannot be NULL.
+            // just checked, so propSet cannot be nullptr.
             jsid typeId = types::IdToTypeId(id);
             while (true) {
                 types::HeapTypeSet *propSet = curType->getProperty(cx, typeId);
                 // This assertion now holds, since we faulted the property
                 // sets in above.
                 JS_ASSERT(propSet && propSet->empty());
                 propSet->addFreeze(cx);
                 // Don't mark the proto. It will be held down by the shape
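
The walk above freezes the property set at every level of the prototype chain up to, but not including, the object that owns the property. A toy version of that walk; ToyObject, propSetFrozen, and FreezeUpTo are invented for the sketch (the real code freezes types::HeapTypeSets):

    #include <cassert>

    // Each toy object knows its proto and whether its (empty) property set
    // for the id in question has been frozen.
    struct ToyObject {
        ToyObject *proto;
        bool propSetFrozen;
    };

    static void FreezeUpTo(ToyObject *obj, ToyObject *foundProto)
    {
        for (ToyObject *cur = obj; cur != foundProto; cur = cur->proto)
            cur->propSetFrozen = true;   // foundProto itself is left alone
    }

    int main()
    {
        ToyObject protoObj = { nullptr, false };
        ToyObject mid      = { &protoObj, false };
        ToyObject leaf     = { &mid, false };

        FreezeUpTo(&leaf, &protoObj);
        assert(leaf.propSetFrozen && mid.propSetFrozen && !protoObj.propSetFrozen);
        return 0;
    }
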
@@ -7931,20 +7931,20 @@ FreezePropTypeSets(JSContext *cx, types:
     return true;
 }
 
 inline bool
 IonBuilder::testCommonPropFunc(JSContext *cx, types::TemporaryTypeSet *types, jsid id,
                                JSFunction **funcp, bool isGetter, bool *isDOM,
                                MDefinition **guardOut)
 {
-    JSObject *found = NULL;
-    JSObject *foundProto = NULL;
-
-    *funcp = NULL;
+    JSObject *found = nullptr;
+    JSObject *foundProto = nullptr;
+
+    *funcp = nullptr;
     *isDOM = false;
 
     // No sense looking if we don't know what's going on.
     if (!types || types->unknownObject())
         return true;
 
     // Iterate down all the types to see if they all have the same getter or
     // setter.
@@ -7993,17 +7993,17 @@ IonBuilder::annotateGetPropertyCache(JSC
     if (id != types::IdToTypeId(id))
         return true;
 
     // Ensure every pushed value is a singleton.
     if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
         return true;
 
     for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
-        if (pushedTypes->getTypeObject(i) != NULL)
+        if (pushedTypes->getTypeObject(i) != nullptr)
             return true;
     }
 
     // The object's typeset should contain only proper objects
     if (!objTypes || objTypes->baseFlags() || objTypes->unknownObject())
         return true;
 
     unsigned int objCount = objTypes->getObjectCount();
@@ -8024,32 +8024,32 @@ IonBuilder::annotateGetPropertyCache(JSC
         types::HeapTypeSet *ownTypes = typeObj->getProperty(cx, id);
         if (!ownTypes)
             continue;
 
         if (!ownTypes->empty())
             continue;
         ownTypes->addFreeze(cx);
 
-        JSObject *singleton = NULL;
+        JSObject *singleton = nullptr;
         JSObject *proto = typeObj->proto;
         while (true) {
             types::TypeObject *protoType = proto->getType(cx);
             if (!protoType)
                 return false;
             if (!protoType->unknownProperties()) {
                 types::HeapTypeSet *protoTypes = protoType->getProperty(cx, id);
                 if (!protoTypes)
                     return false;
 
                 singleton = protoTypes->getSingleton(cx);
                 if (singleton) {
                     if (singleton->is<JSFunction>())
                         break;
-                    singleton = NULL;
+                    singleton = nullptr;
                 }
             }
             TaggedProto taggedProto = proto->getTaggedProto();
             if (!taggedProto.isObject())
                 break;
             proto = taggedProto.toObject();
         }
         if (!singleton)
@@ -8245,17 +8245,17 @@ IonBuilder::getPropTryArgumentsLength(bo
     *emitted = true;
     return jsop_arguments_length();
 }
 
 bool
 IonBuilder::getPropTryConstant(bool *emitted, jsid id, types::TemporaryTypeSet *types)
 {
     JS_ASSERT(*emitted == false);
-    JSObject *singleton = types ? types->getSingleton() : NULL;
+    JSObject *singleton = types ? types->getSingleton() : nullptr;
     if (!singleton)
         return true;
 
     JSObject *global = &script()->global();
 
     bool isConstant, testObject, testString;
     if (!TestSingletonPropertyTypes(cx, current->peek(-1), singleton, global, id,
                                     &isConstant, &testObject, &testString))
@@ -8719,17 +8719,17 @@ IonBuilder::setPropTryCommonSetter(bool 
                                    MDefinition *value)
 {
     JS_ASSERT(*emitted == false);
 
     JSFunction *commonSetter;
     bool isDOM;
 
     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
-    if (!testCommonPropFunc(cx, objTypes, id, &commonSetter, false, &isDOM, NULL))
+    if (!testCommonPropFunc(cx, objTypes, id, &commonSetter, false, &isDOM, nullptr))
         return false;
 
     if (!commonSetter)
         return true;
 
     // Emit common setter.
 
     // Setters can be called even if the property write needs a type
@@ -9330,17 +9330,17 @@ IonBuilder::hasStaticScopeObject(ScopeCo
     }
 
     return true;
 }
 
 bool
 IonBuilder::jsop_getaliasedvar(ScopeCoordinate sc)
 {
-    JSObject *call = NULL;
+    JSObject *call = nullptr;
     if (hasStaticScopeObject(sc, &call) && call) {
         PropertyName *name = ScopeCoordinateName(cx, script(), pc);
         bool succeeded;
         if (!getStaticName(call, name, &succeeded))
             return false;
         if (succeeded)
             return true;
     }
@@ -9364,17 +9364,17 @@ IonBuilder::jsop_getaliasedvar(ScopeCoor
 
     types::TemporaryTypeSet *types = bytecodeTypes(pc);
     return pushTypeBarrier(load, types, true);
 }
 
 bool
 IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
 {
-    JSObject *call = NULL;
+    JSObject *call = nullptr;
     if (hasStaticScopeObject(sc, &call)) {
         uint32_t depth = current->stackDepth() + 1;
         if (depth > current->nslots()) {
             if (!current->increaseSlots(depth - current->nslots()))
                 return false;
         }
         MDefinition *value = current->pop();
         PropertyName *name = ScopeCoordinateName(cx, script(), pc);
@@ -9472,27 +9472,27 @@ IonBuilder::jsop_instanceof()
 {
     MDefinition *rhs = current->pop();
     MDefinition *obj = current->pop();
 
     // If this is an 'x instanceof function' operation and we can determine the
     // exact function and prototype object being tested for, use a typed path.
     do {
         types::TemporaryTypeSet *rhsTypes = rhs->resultTypeSet();
-        JSObject *rhsObject = rhsTypes ? rhsTypes->getSingleton() : NULL;
+        JSObject *rhsObject = rhsTypes ? rhsTypes->getSingleton() : nullptr;
         if (!rhsObject || !rhsObject->is<JSFunction>() || rhsObject->isBoundFunction())
             break;
 
         types::TypeObject *rhsType = rhsObject->getType(cx);
         if (!rhsType || rhsType->unknownProperties())
             break;
 
         types::HeapTypeSet *protoTypes =
             rhsType->getProperty(cx, NameToId(cx->names().classPrototype));
-        JSObject *protoObject = protoTypes ? protoTypes->getSingleton(cx) : NULL;
+        JSObject *protoObject = protoTypes ? protoTypes->getSingleton(cx) : nullptr;
         if (!protoObject)
             break;
 
         rhs->setFoldedUnchecked();
 
         MInstanceOf *ins = new MInstanceOf(obj, protoObject);
 
         current->add(ins);
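
With the exact .prototype object known at compile time, the typed path can treat instanceof as a prototype-chain membership test, which is essentially what MInstanceOf emits. A self-contained sketch of that reduction, using toy objects rather than real JSObjects:

    #include <cassert>

    // Toy objects modelling only the proto link.
    struct ToyObject {
        ToyObject *proto;
    };

    // Walk obj's prototype chain looking for the known |prototype| object.
    static bool HasPrototypeOnChain(ToyObject *obj, ToyObject *prototype)
    {
        for (ToyObject *cur = obj->proto; cur; cur = cur->proto) {
            if (cur == prototype)
                return true;
        }
        return false;
    }

    int main()
    {
        ToyObject proto    = { nullptr };
        ToyObject instance = { &proto };
        ToyObject other    = { nullptr };

        assert(HasPrototypeOnChain(&instance, &proto));
        assert(!HasPrototypeOnChain(&other, &proto));
        return 0;
    }
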
@@ -9562,17 +9562,17 @@ IonBuilder::cloneTypeSet(types::StackTyp
 TypeRepresentationSetHash *
 IonBuilder::getOrCreateReprSetHash()
 {
     if (!reprSetHash_) {
         TypeRepresentationSetHash* hash =
             cx->new_<TypeRepresentationSetHash>();
         if (!hash || !hash->init()) {
             js_delete(hash);
-            return NULL;
+            return nullptr;
         }
 
         reprSetHash_ = hash;
     }
     return reprSetHash_.get();
 }
 
 bool