Bug 1144366 followup - Stop declaring multiple pointers on a single line. r=jorendorff
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 02 Apr 2015 11:22:21 +0200
changeset 267123 fb6ceba6f57e8e1458894d6ebae0bd6a30ac47a3
parent 267122 efe9fa6820b478cca2857041b2859ca9fa44d973
child 267124 9ce01d06146e9a16f8eba50c067a805d247921c4
push id 4830
push user jlund@mozilla.com
push date Mon, 29 Jun 2015 20:18:48 +0000
treeherder mozilla-beta@4c2175bb0420
reviewers jorendorff
bugs 1144366
milestone 40.0a1
Bug 1144366 followup - Stop declaring multiple pointers on a single line. r=jorendorff
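
The patch applies a simple style rule throughout SpiderMonkey: because `*` binds to each declarator rather than to the type, a line like `Entry* cur, *end;` obscures that both names are pointers, so each pointer now gets its own declaration line. A minimal before/after sketch of the pattern (the `Entry` struct and `walkLive` helper are hypothetical stand-ins, not code from the patch):

    // Illustrative sketch only; Entry and walkLive() are hypothetical
    // stand-ins for the SpiderMonkey types touched by this patch.
    #include <cstddef>

    struct Entry {
        bool live = false;
    };

    // Old style (removed by the patch): the '*' binds to each declarator,
    // so "Entry* cur, *end;" declares two pointers on one line.
    //
    //     Entry* cur, *end;
    //
    // New style (introduced by the patch): one pointer declaration per line.
    static size_t walkLive(Entry* table, size_t capacity)
    {
        Entry* cur = table;
        Entry* end = table + capacity;
        size_t liveCount = 0;
        for (; cur < end; ++cur) {
            if (cur->live)
                ++liveCount;
        }
        return liveCount;
    }
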
js/ipc/JavaScriptLogging.h
js/public/HashTable.h
js/public/RootingAPI.h
js/src/asmjs/AsmJSValidate.cpp
js/src/builtin/MapObject.cpp
js/src/ds/InlineMap.h
js/src/ds/SplayTree.h
js/src/frontend/BytecodeEmitter.cpp
js/src/frontend/FoldConstants.cpp
js/src/frontend/ParseMaps.cpp
js/src/frontend/ParseMaps.h
js/src/frontend/ParseNode.cpp
js/src/frontend/Parser.cpp
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/gc/Zone.h
js/src/irregexp/RegExpParser.h
js/src/jit/BacktrackingAllocator.cpp
js/src/jit/BaselineDebugModeOSR.h
js/src/jit/BaselineInspector.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonBuilder.h
js/src/jit/LoopUnroller.cpp
js/src/jit/MIR.cpp
js/src/jit/MIRGraph.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jscntxt.cpp
js/src/jsdtoa.cpp
js/src/jsgc.cpp
js/src/jsnum.cpp
js/src/jsopcode.cpp
js/src/jsopcodeinlines.h
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jsstr.cpp
js/src/jstypes.h
js/src/jsweakmap.cpp
js/src/proxy/CrossCompartmentWrapper.cpp
js/src/shell/js.cpp
js/src/shell/jsoptparse.cpp
js/src/vm/ArgumentsObject.cpp
js/src/vm/Debugger.cpp
js/src/vm/Interpreter-inl.h
js/src/vm/Interpreter.cpp
js/src/vm/Interpreter.h
js/src/vm/NativeObject.cpp
js/src/vm/NativeObject.h
js/src/vm/Runtime.cpp
js/src/vm/Shape.cpp
js/xpconnect/src/Sandbox.cpp
js/xpconnect/src/XPCJSRuntime.cpp
js/xpconnect/src/XPCShellImpl.cpp
--- a/js/ipc/JavaScriptLogging.h
+++ b/js/ipc/JavaScriptLogging.h
@@ -93,17 +93,18 @@ class Logging
         print(nsPrintfCString(fmt, tmp1.get(), tmp2.get(), tmp3.get()));
     }
 
     void format(const nsString& str, nsCString& out) {
         out = NS_ConvertUTF16toUTF8(str);
     }
 
     void formatObject(bool incoming, bool local, ObjectId id, nsCString& out) {
-        const char* side, *objDesc;
+        const char* side;
+        const char* objDesc;
         void* ptr;
 
         if (local == incoming) {
             JS::RootedObject obj(cx);
             obj = shared->objects_.find(id);
             if (obj) {
                 JSAutoCompartment ac(cx, obj);
                 objDesc = js::ObjectClassName(cx, obj);
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -865,17 +865,18 @@ class HashTable : private AllocPolicy
           , generation(tableArg.generation())
           , validEntry(true)
 #endif
         {
             while (cur < end && !cur->isLive())
                 ++cur;
         }
 
-        Entry* cur, *end;
+        Entry* cur;
+        Entry* end;
 #ifdef JS_DEBUG
         const HashTable* table_;
         uint64_t mutationCount;
         uint32_t generation;
         bool validEntry;
 #endif
 
       public:
@@ -1085,17 +1086,18 @@ class HashTable : private AllocPolicy
     {
         static_assert(sFreeKey == 0,
                       "newly-calloc'd tables have to be considered empty");
         return alloc.template pod_calloc<Entry>(capacity);
     }
 
     static void destroyTable(AllocPolicy& alloc, Entry* oldTable, uint32_t capacity)
     {
-        for (Entry* e = oldTable, *end = e + capacity; e < end; ++e)
+        Entry* end = oldTable + capacity;
+        for (Entry* e = oldTable; e < end; ++e)
             e->destroyIfLive();
         alloc.free_(oldTable);
     }
 
   public:
     explicit HashTable(AllocPolicy ap)
       : AllocPolicy(ap)
       , table(nullptr)
@@ -1341,17 +1343,18 @@ class HashTable : private AllocPolicy
 
         // We can't fail from here on, so update table parameters.
         setTableSizeLog2(newLog2);
         removedCount = 0;
         gen++;
         table = newTable;
 
         // Copy only live entries, leaving removed ones behind.
-        for (Entry* src = oldTable, *end = src + oldCap; src < end; ++src) {
+        Entry* end = oldTable + oldCap;
+        for (Entry* src = oldTable; src < end; ++src) {
             if (src->isLive()) {
                 HashNumber hn = src->getKeyHash();
                 findFreeEntry(hn).setLive(
                     hn, mozilla::Move(const_cast<typename Entry::NonConstT&>(src->get())));
                 src->destroy();
             }
         }
 
@@ -1475,17 +1478,18 @@ class HashTable : private AllocPolicy
 
   public:
     void clear()
     {
         if (mozilla::IsPod<Entry>::value) {
             memset(table, 0, sizeof(*table) * capacity());
         } else {
             uint32_t tableCapacity = capacity();
-            for (Entry* e = table, *end = table + tableCapacity; e < end; ++e)
+            Entry* end = table + tableCapacity;
+            for (Entry* e = table; e < end; ++e)
                 e->clear();
         }
         removedCount = 0;
         entryCount = 0;
 #ifdef JS_DEBUG
         mutationCount++;
 #endif
     }
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -790,17 +790,18 @@ class MOZ_STACK_CLASS Rooted : public js
     DECLARE_NONPOINTER_MUTABLE_ACCESSOR_METHODS(ptr);
 
   private:
     /*
      * These need to be templated on void* to avoid aliasing issues between, for
      * example, Rooted<JSObject> and Rooted<JSFunction>, which use the same
      * stack head pointer for different classes.
      */
-    Rooted<void*>** stack, *prev;
+    Rooted<void*>** stack;
+    Rooted<void*>* prev;
 
     /*
      * |ptr| must be the last field in Rooted because the analysis treats all
      * Rooted as Rooted<void*> during the analysis. See bug 829372.
      */
     T ptr;
 
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -6358,17 +6358,18 @@ CheckConditional(FunctionCompiler& f, Pa
     MDefinition* condDef;
     Type condType;
     if (!CheckExpr(f, cond, &condDef, &condType))
         return false;
 
     if (!condType.isInt())
         return f.failf(cond, "%s is not a subtype of int", condType.toChars());
 
-    MBasicBlock* thenBlock = nullptr, *elseBlock = nullptr;
+    MBasicBlock* thenBlock = nullptr;
+    MBasicBlock* elseBlock = nullptr;
     if (!f.branchAndStartThen(condDef, &thenBlock, &elseBlock, thenExpr, elseExpr))
         return false;
 
     MDefinition* thenDef;
     Type thenType;
     if (!CheckExpr(f, thenExpr, &thenDef, &thenType))
         return false;
 
@@ -6477,17 +6478,18 @@ CheckAddOrSub(FunctionCompiler& f, Parse
               unsigned* numAddOrSubOut = nullptr)
 {
     JS_CHECK_RECURSION_DONT_REPORT(f.cx(), return f.m().failOverRecursed());
 
     MOZ_ASSERT(expr->isKind(PNK_ADD) || expr->isKind(PNK_SUB));
     ParseNode* lhs = AddSubLeft(expr);
     ParseNode* rhs = AddSubRight(expr);
 
-    MDefinition* lhsDef, *rhsDef;
+    MDefinition* lhsDef;
+    MDefinition* rhsDef;
     Type lhsType, rhsType;
     unsigned lhsNumAddOrSub, rhsNumAddOrSub;
 
     if (lhs->isKind(PNK_ADD) || lhs->isKind(PNK_SUB)) {
         if (!CheckAddOrSub(f, lhs, &lhsDef, &lhsType, &lhsNumAddOrSub))
             return false;
         if (lhsType == Type::Intish)
             lhsType = Type::Int;
@@ -6540,17 +6542,18 @@ CheckAddOrSub(FunctionCompiler& f, Parse
 static bool
 CheckDivOrMod(FunctionCompiler& f, ParseNode* expr, MDefinition** def, Type* type)
 {
     MOZ_ASSERT(expr->isKind(PNK_DIV) || expr->isKind(PNK_MOD));
 
     ParseNode* lhs = DivOrModLeft(expr);
     ParseNode* rhs = DivOrModRight(expr);
 
-    MDefinition* lhsDef, *rhsDef;
+    MDefinition* lhsDef;
+    MDefinition* rhsDef;
     Type lhsType, rhsType;
     if (!CheckExpr(f, lhs, &lhsDef, &lhsType))
         return false;
     if (!CheckExpr(f, rhs, &rhsDef, &rhsType))
         return false;
 
     if (lhsType.isMaybeDouble() && rhsType.isMaybeDouble()) {
         *def = expr->isKind(PNK_DIV)
@@ -6595,17 +6598,18 @@ static bool
 CheckComparison(FunctionCompiler& f, ParseNode* comp, MDefinition** def, Type* type)
 {
     MOZ_ASSERT(comp->isKind(PNK_LT) || comp->isKind(PNK_LE) || comp->isKind(PNK_GT) ||
                comp->isKind(PNK_GE) || comp->isKind(PNK_EQ) || comp->isKind(PNK_NE));
 
     ParseNode* lhs = ComparisonLeft(comp);
     ParseNode* rhs = ComparisonRight(comp);
 
-    MDefinition* lhsDef, *rhsDef;
+    MDefinition* lhsDef;
+    MDefinition* rhsDef;
     Type lhsType, rhsType;
     if (!CheckExpr(f, lhs, &lhsDef, &lhsType))
         return false;
     if (!CheckExpr(f, rhs, &rhsDef, &rhsType))
         return false;
 
     if ((lhsType.isSigned() && rhsType.isSigned()) || (lhsType.isUnsigned() && rhsType.isUnsigned())) {
         MCompare::CompareType compareType = (lhsType.isUnsigned() && rhsType.isUnsigned())
@@ -6942,19 +6946,22 @@ CheckIfConditional(FunctionCompiler& f, 
 
     // a ? b : c <=> (a && b) || (!a && c)
     // b is always referred to the AND condition, as we need A and B to reach this test,
     // c is always referred as the OR condition, as we reach it if we don't have A.
     ParseNode* cond = TernaryKid1(conditional);
     ParseNode* lhs = TernaryKid2(conditional);
     ParseNode* rhs = TernaryKid3(conditional);
 
-    MBasicBlock* maybeAndTest = nullptr, *maybeOrTest = nullptr;
-    MBasicBlock** ifTrueBlock = &maybeAndTest, **ifFalseBlock = &maybeOrTest;
-    ParseNode* ifTrueBlockNode = lhs, *ifFalseBlockNode = rhs;
+    MBasicBlock* maybeAndTest = nullptr;
+    MBasicBlock* maybeOrTest = nullptr;
+    MBasicBlock** ifTrueBlock = &maybeAndTest;
+    MBasicBlock** ifFalseBlock = &maybeOrTest;
+    ParseNode* ifTrueBlockNode = lhs;
+    ParseNode* ifFalseBlockNode = rhs;
 
     // Try to spot opportunities for short-circuiting in the AND subpart
     uint32_t andTestLiteral = 0;
     bool skipAndTest = false;
 
     if (IsLiteralInt(f.m(), lhs, &andTestLiteral)) {
         skipAndTest = true;
         if (andTestLiteral == 0) {
@@ -7066,17 +7073,18 @@ CheckIf(FunctionCompiler& f, ParseNode* 
 
     ParseNode* nextStmt = NextNode(ifStmt);
   recurse:
     MOZ_ASSERT(ifStmt->isKind(PNK_IF));
     ParseNode* cond = TernaryKid1(ifStmt);
     ParseNode* thenStmt = TernaryKid2(ifStmt);
     ParseNode* elseStmt = TernaryKid3(ifStmt);
 
-    MBasicBlock* thenBlock = nullptr, *elseBlock = nullptr;
+    MBasicBlock* thenBlock = nullptr;
+    MBasicBlock* elseBlock = nullptr;
     ParseNode* elseOrJoinStmt = elseStmt ? elseStmt : nextStmt;
 
     if (!CheckIfCondition(f, cond, thenStmt, elseOrJoinStmt, &thenBlock, &elseBlock))
         return false;
 
     if (!CheckStatement(f, thenStmt))
         return false;
 
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -132,19 +132,20 @@ class OrderedHashTable
         dataCapacity = capacity;
         liveCount = 0;
         hashShift = HashNumberSizeBits - initialBucketsLog2();
         MOZ_ASSERT(hashBuckets() == buckets);
         return true;
     }
 
     ~OrderedHashTable() {
-        for (Range* r = ranges, *next; r; r = next) {
-            next = r->next;
+        for (Range* r = ranges; r; ) {
+            Range* next = r->next;
             r->onTableDestroyed();
+            r = next;
         }
         alloc.free_(hashTable);
         freeData(data, dataLength);
     }
 
     /* Return the number of elements in the table. */
     uint32_t count() const { return liveCount; }
 
@@ -588,17 +589,18 @@ class OrderedHashTable
         for (Range* r = ranges; r; r = r->next)
             r->onCompact();
     }
 
     /* Compact the entries in |data| and rehash them. */
     void rehashInPlace() {
         for (uint32_t i = 0, N = hashBuckets(); i < N; i++)
             hashTable[i] = nullptr;
-        Data* wp = data, *end = data + dataLength;
+        Data* wp = data;
+        Data* end = data + dataLength;
         for (Data* rp = data; rp != end; rp++) {
             if (!Ops::isEmpty(Ops::getKey(rp->element))) {
                 HashNumber h = prepareHash(Ops::getKey(rp->element)) >> hashShift;
                 if (rp != wp)
                     wp->element = Move(rp->element);
                 wp->chain = hashTable[h];
                 hashTable[h] = wp;
                 wp++;
@@ -637,17 +639,18 @@ class OrderedHashTable
         uint32_t newCapacity = uint32_t(newHashBuckets * fillFactor());
         Data* newData = alloc.template pod_malloc<Data>(newCapacity);
         if (!newData) {
             alloc.free_(newHashTable);
             return false;
         }
 
         Data* wp = newData;
-        for (Data* p = data, *end = data + dataLength; p != end; p++) {
+        Data* end = data + dataLength;
+        for (Data* p = data; p != end; p++) {
             if (!Ops::isEmpty(Ops::getKey(p->element))) {
                 HashNumber h = prepareHash(Ops::getKey(p->element)) >> newHashShift;
                 new (wp) Data(Move(p->element), newHashTable[h]);
                 newHashTable[h] = wp;
                 wp++;
             }
         }
         MOZ_ASSERT(wp == newData + liveCount);
--- a/js/src/ds/InlineMap.h
+++ b/js/src/ds/InlineMap.h
@@ -55,17 +55,18 @@ class InlineMap
         if (map.initialized()) {
             map.clear();
         } else {
             if (!map.init(count()))
                 return false;
             MOZ_ASSERT(map.initialized());
         }
 
-        for (InlineElem* it = inl, *end = inl + inlNext; it != end; ++it) {
+        InlineElem* end = inl + inlNext;
+        for (InlineElem* it = inl; it != end; ++it) {
             if (it->key && !map.putNew(it->key, it->value))
                 return false;
         }
 
         inlNext = InlineElems + 1;
         MOZ_ASSERT(map.count() == inlCount);
         MOZ_ASSERT(usingMap());
         return true;
@@ -206,30 +207,32 @@ class InlineMap
         return inl + inlNext;
     }
 
     MOZ_ALWAYS_INLINE
     Ptr lookup(const K& key) {
         if (usingMap())
             return Ptr(map.lookup(key));
 
-        for (InlineElem* it = inl, *end = inl + inlNext; it != end; ++it) {
+        InlineElem* end = inl + inlNext;
+        for (InlineElem* it = inl; it != end; ++it) {
             if (it->key == key)
                 return Ptr(it);
         }
 
         return Ptr(nullptr);
     }
 
     MOZ_ALWAYS_INLINE
     AddPtr lookupForAdd(const K& key) {
         if (usingMap())
             return AddPtr(map.lookupForAdd(key));
 
-        for (InlineElem* it = inl, *end = inl + inlNext; it != end; ++it) {
+        InlineElem* end = inl + inlNext;
+        for (InlineElem* it = inl; it != end; ++it) {
             if (it->key == key)
                 return AddPtr(it, true);
         }
 
         /*
          * The add pointer that's returned here may indicate the limit entry of
          * the linear space, in which case the |add| operation will initialize
          * the map if necessary and add the entry there.
--- a/js/src/ds/SplayTree.h
+++ b/js/src/ds/SplayTree.h
@@ -20,25 +20,28 @@ namespace js {
  * compare(const T&, const T&) method ordering the elements. As for LifoAlloc
  * objects, T objects stored in the tree will not be explicitly destroyed.
  */
 template <class T, class C>
 class SplayTree
 {
     struct Node {
         T item;
-        Node* left, *right, *parent;
+        Node* left;
+        Node* right;
+        Node* parent;
 
         explicit Node(const T& item)
           : item(item), left(nullptr), right(nullptr), parent(nullptr)
         {}
     };
 
     LifoAlloc* alloc;
-    Node* root, *freeList;
+    Node* root;
+    Node* freeList;
 
 #ifdef DEBUG
     bool enableCheckCoherency;
 #endif
 
     SplayTree(const SplayTree&) = delete;
     SplayTree& operator=(const SplayTree&) = delete;
 
@@ -119,17 +122,18 @@ class SplayTree
         MOZ_ASSERT(last && C::compare(v, last->item) == 0);
 
         splay(last);
         MOZ_ASSERT(last == root);
 
         // Find another node which can be swapped in for the root: either the
         // rightmost child of the root's left, or the leftmost child of the
         // root's right.
-        Node* swap, *swapChild;
+        Node* swap;
+        Node* swapChild;
         if (root->left) {
             swap = root->left;
             while (swap->right)
                 swap = swap->right;
             swapChild = swap->left;
         } else if (root->right) {
             swap = root->right;
             while (swap->left)
@@ -162,17 +166,18 @@ class SplayTree
         forEachInner<Op>(op, root);
     }
 
   private:
 
     Node* lookup(const T& v)
     {
         MOZ_ASSERT(root);
-        Node* node = root, *parent;
+        Node* node = root;
+        Node* parent;
         do {
             parent = node;
             int c = C::compare(v, node->item);
             if (c == 0)
                 return node;
             else if (c < 0)
                 node = node->left;
             else
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -2275,17 +2275,18 @@ BytecodeEmitter::emitPropLHS(ParseNode* 
 
     /*
      * If the object operand is also a dotted property reference, reverse the
      * list linked via pn_expr temporarily so we can iterate over it from the
      * bottom up (reversing again as we go), to avoid excessive recursion.
      */
     if (pn2->isKind(PNK_DOT)) {
         ParseNode* pndot = pn2;
-        ParseNode* pnup = nullptr, *pndown;
+        ParseNode* pnup = nullptr;
+        ParseNode* pndown;
         ptrdiff_t top = offset();
         for (;;) {
             /* Reverse pndot->pn_expr to point up, not down. */
             pndot->pn_offset = top;
             MOZ_ASSERT(!pndot->isUsed());
             pndown = pndot->pn_expr;
             pndot->pn_expr = pnup;
             if (!pndown->isKind(PNK_DOT))
@@ -6668,18 +6669,18 @@ BytecodeEmitter::emitUnary(ParseNode* pn
     return emit1(op);
 }
 
 bool
 BytecodeEmitter::emitDefaults(ParseNode* pn)
 {
     MOZ_ASSERT(pn->isKind(PNK_ARGSBODY));
 
-    ParseNode* arg, *pnlast = pn->last();
-    for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
+    ParseNode* pnlast = pn->last();
+    for (ParseNode* arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
         if (!(arg->pn_dflags & PND_DEFAULT))
             continue;
         if (!bindNameToSlot(arg))
             return false;
         if (!emitVarOp(arg, JSOP_GETARG))
             return false;
         if (!emit1(JSOP_UNDEFINED))
             return false;
--- a/js/src/frontend/FoldConstants.cpp
+++ b/js/src/frontend/FoldConstants.cpp
@@ -612,17 +612,19 @@ condIf(const ParseNode* pn, ParseNodeKin
 }
 
 static bool
 Fold(ExclusiveContext* cx, ParseNode** pnp,
      FullParseHandler& handler, const ReadOnlyCompileOptions& options,
      bool inGenexpLambda, SyntacticContext sc)
 {
     ParseNode* pn = *pnp;
-    ParseNode* pn1 = nullptr, *pn2 = nullptr, *pn3 = nullptr;
+    ParseNode* pn1 = nullptr;
+    ParseNode* pn2 = nullptr;
+    ParseNode* pn3 = nullptr;
 
     JS_CHECK_RECURSION(cx, return false);
 
     // First, recursively fold constants on the children of this node.
     switch (pn->getArity()) {
       case PN_CODE:
         if (pn->isKind(PNK_FUNCTION) && pn->pn_funbox->useAsmOrInsideUseAsm())
             return true;
--- a/js/src/frontend/ParseMaps.cpp
+++ b/js/src/frontend/ParseMaps.cpp
@@ -30,17 +30,18 @@ ParseMapPool::checkInvariants()
     JS_STATIC_ASSERT(mozilla::IsPod<AtomIndexMap::WordMap::Entry>::value);
     JS_STATIC_ASSERT(mozilla::IsPod<AtomDefnListMap::WordMap::Entry>::value);
     JS_STATIC_ASSERT(mozilla::IsPod<AtomDefnMap::WordMap::Entry>::value);
 }
 
 void
 ParseMapPool::purgeAll()
 {
-    for (void** it = all.begin(), **end = all.end(); it != end; ++it)
+    void** end = all.end();
+    for (void** it = all.begin(); it != end; ++it)
         js_delete<AtomMapT>(asAtomMap(*it));
 
     all.clearAndFree();
     recyclable.clearAndFree();
 }
 
 void*
 ParseMapPool::allocateFresh()
@@ -116,18 +117,18 @@ frontend::InitAtomMap(frontend::AtomInde
         const WordMap& wm = indices->asMap();
         for (WordMap::Range r = wm.all(); !r.empty(); r.popFront()) {
             JSAtom* atom = r.front().key();
             jsatomid index = r.front().value();
             MOZ_ASSERT(index < indices->count());
             atoms[index].init(atom);
         }
     } else {
-        for (const AtomIndexMap::InlineElem* it = indices->asInline(), *end = indices->inlineEnd();
-             it != end; ++it) {
+        const AtomIndexMap::InlineElem* end = indices->inlineEnd();
+        for (const AtomIndexMap::InlineElem* it = indices->asInline(); it != end; ++it) {
             JSAtom* atom = it->key;
             if (!atom)
                 continue;
             MOZ_ASSERT(it->value < indices->count());
             atoms[it->value].init(atom);
         }
     }
 }
--- a/js/src/frontend/ParseMaps.h
+++ b/js/src/frontend/ParseMaps.h
@@ -55,24 +55,24 @@ class ParseMapPool
 
     void checkInvariants();
 
     void recycle(void* map) {
         MOZ_ASSERT(map);
 #ifdef DEBUG
         bool ok = false;
         /* Make sure the map is in |all| but not already in |recyclable|. */
-        for (void** it = all.begin(), **end = all.end(); it != end; ++it) {
+        for (void** it = all.begin(); it != all.end(); ++it) {
             if (*it == map) {
                 ok = true;
                 break;
             }
         }
         MOZ_ASSERT(ok);
-        for (void** it = recyclable.begin(), **end = recyclable.end(); it != end; ++it)
+        for (void** it = recyclable.begin(); it != recyclable.end(); ++it)
             MOZ_ASSERT(*it != map);
 #endif
         MOZ_ASSERT(recyclable.length() < all.length());
         recyclable.infallibleAppend(map); /* Reserved in allocateFresh. */
     }
 
     void* allocateFresh();
     void* allocate() {
--- a/js/src/frontend/ParseNode.cpp
+++ b/js/src/frontend/ParseNode.cpp
@@ -18,19 +18,24 @@ using mozilla::IsFinite;
 #ifdef DEBUG
 void
 ParseNode::checkListConsistency()
 {
     MOZ_ASSERT(isArity(PN_LIST));
     ParseNode** tail;
     uint32_t count = 0;
     if (pn_head) {
-        ParseNode* pn, *last;
-        for (pn = last = pn_head; pn; last = pn, pn = pn->pn_next, count++)
-            ;
+        ParseNode* last = pn_head;
+        ParseNode* pn = last;
+        while (pn) {
+            last = pn;
+            pn = pn->pn_next;
+            count++;
+        }
+
         tail = &last->pn_next;
     } else {
         tail = &pn_head;
     }
     MOZ_ASSERT(pn_tail == tail);
     MOZ_ASSERT(pn_count == count);
 }
 #endif
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -4740,17 +4740,18 @@ Parser<FullParseHandler>::forStatement()
 
     /*
      * We can be sure that it's a for/in loop if there's still an 'in'
      * keyword here, even if JavaScript recognizes 'in' as an operator,
      * as we've excluded 'in' from being parsed in RelExpr by setting
      * pc->parsingForInit.
      */
     StmtInfoPC letStmt(context); /* used if blockObj != nullptr. */
-    ParseNode* pn2, *pn3;      /* forHead->pn_kid2 and pn_kid3. */
+    ParseNode* pn2;      /* forHead->pn_kid2 */
+    ParseNode* pn3;      /* forHead->pn_kid3 */
     ParseNodeKind headKind = PNK_FORHEAD;
     if (pn1) {
         bool isForIn, isForOf;
         if (!matchInOrOf(&isForIn, &isForOf))
             return null();
         if (isForIn)
             headKind = PNK_FORIN;
         else if (isForOf)
@@ -6898,17 +6899,19 @@ Parser<FullParseHandler>::legacyComprehe
      */
     if (handler.syntaxParser) {
         handler.disableSyntaxParser();
         abortedSyntaxParse = true;
         return nullptr;
     }
 
     unsigned adjust;
-    ParseNode* pn, *pn3, **pnp;
+    ParseNode* pn;
+    ParseNode* pn3;
+    ParseNode** pnp;
     StmtInfoPC stmtInfo(context);
     BindData<FullParseHandler> data(context);
     TokenKind tt;
 
     MOZ_ASSERT(tokenStream.isCurrentTokenType(TOK_FOR));
 
     bool isGenexp = comprehensionKind != NotGenerator;
 
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1738,17 +1738,18 @@ GCMarker::restoreValueArray(NativeObject
 void
 GCMarker::processMarkStackOther(uintptr_t tag, uintptr_t addr)
 {
     if (tag == GroupTag) {
         ScanObjectGroup(this, reinterpret_cast<ObjectGroup*>(addr));
     } else if (tag == SavedValueArrayTag) {
         MOZ_ASSERT(!(addr & CellMask));
         NativeObject* obj = reinterpret_cast<NativeObject*>(addr);
-        HeapValue* vp, *end;
+        HeapValue* vp;
+        HeapValue* end;
         if (restoreValueArray(obj, (void**)&vp, (void**)&end))
             pushValueArray(obj, vp, end);
         else
             repush(obj);
     } else if (tag == JitCodeTag) {
         reinterpret_cast<jit::JitCode*>(addr)->trace(this);
     }
 }
@@ -1778,17 +1779,18 @@ GCMarker::markAndScanSymbol(JSObject* so
 inline void
 GCMarker::processMarkStackTop(SliceBudget& budget)
 {
     /*
      * The function uses explicit goto and implements the scanning of the
      * object directly. It allows to eliminate the tail recursion and
      * significantly improve the marking performance, see bug 641025.
      */
-    HeapSlot* vp, *end;
+    HeapSlot* vp;
+    HeapSlot* end;
     JSObject* obj;
 
     const int32_t* unboxedTraceList;
     uint8_t* unboxedMemory;
 
     uintptr_t addr = stack.pop();
     uintptr_t tag = addr & StackTagMask;
     addr &= ~StackTagMask;
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -606,17 +606,20 @@ js::Nursery::traceObject(MinorCollection
         return;
     NativeObject* nobj = &obj->as<NativeObject>();
 
     // Note: the contents of copy on write elements pointers are filled in
     // during parsing and cannot contain nursery pointers.
     if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite())
         markSlots(trc, nobj->getDenseElements(), nobj->getDenseInitializedLength());
 
-    HeapSlot* fixedStart, *fixedEnd, *dynStart, *dynEnd;
+    HeapSlot* fixedStart;
+    HeapSlot* fixedEnd;
+    HeapSlot* dynStart;
+    HeapSlot* dynEnd;
     nobj->getSlotRange(0, nobj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
     markSlots(trc, fixedStart, fixedEnd);
     markSlots(trc, dynStart, dynEnd);
 }
 
 MOZ_ALWAYS_INLINE void
 js::Nursery::markSlots(MinorCollectionTracer* trc, HeapSlot* vp, uint32_t nslots)
 {
@@ -685,17 +688,18 @@ js::Nursery::moveObjectToTenured(MinorCo
      * because moveElementsToTenured() accounts for all Array elements,
      * even if they are inlined.
      */
     if (src->is<ArrayObject>())
         tenuredSize = srcSize = sizeof(NativeObject);
 
     js_memcpy(dst, src, srcSize);
     if (src->isNative()) {
-        NativeObject* ndst = &dst->as<NativeObject>(), *nsrc = &src->as<NativeObject>();
+        NativeObject* ndst = &dst->as<NativeObject>();
+        NativeObject* nsrc = &src->as<NativeObject>();
         tenuredSize += moveSlotsToTenured(ndst, nsrc, dstKind);
         tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
 
         // The shape's list head may point into the old object. This can only
         // happen for dictionaries, which are native objects.
         if (&nsrc->shape_ == ndst->shape_->listp)
             ndst->shape_->listp = &ndst->shape_;
     }
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -324,17 +324,18 @@ namespace js {
 enum ZoneSelector {
     WithAtoms,
     SkipAtoms
 };
 
 class ZonesIter
 {
     gc::AutoEnterIteration iterMarker;
-    JS::Zone** it, **end;
+    JS::Zone** it;
+    JS::Zone** end;
 
   public:
     ZonesIter(JSRuntime* rt, ZoneSelector selector) : iterMarker(&rt->gc) {
         it = rt->gc.zones.begin();
         end = rt->gc.zones.end();
 
         if (selector == SkipAtoms) {
             MOZ_ASSERT(atAtomsZone(rt));
--- a/js/src/irregexp/RegExpParser.h
+++ b/js/src/irregexp/RegExpParser.h
@@ -277,17 +277,18 @@ class RegExpParser
             return *next_pos_;
         return kEndMarker;
     }
     void ScanForCaptures();
 
     frontend::TokenStream& ts;
     LifoAlloc* alloc;
     RegExpCaptureVector* captures_;
-    const CharT* next_pos_, *end_;
+    const CharT* next_pos_;
+    const CharT* end_;
     widechar current_;
     // The capture count is only valid after we have scanned for captures.
     int capture_count_;
     bool has_more_;
     bool multiline_;
     bool simple_;
     bool contains_anchor_;
     bool is_scanned_for_captures_;
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -136,17 +136,18 @@ BacktrackingAllocator::go()
 
 static bool
 LifetimesOverlap(BacktrackingVirtualRegister* reg0, BacktrackingVirtualRegister* reg1)
 {
     // Registers may have been eagerly split in two, see tryGroupReusedRegister.
     // In such cases, only consider the first interval.
     MOZ_ASSERT(reg0->numIntervals() <= 2 && reg1->numIntervals() <= 2);
 
-    LiveInterval* interval0 = reg0->getInterval(0), *interval1 = reg1->getInterval(0);
+    LiveInterval* interval0 = reg0->getInterval(0);
+    LiveInterval* interval1 = reg1->getInterval(0);
 
     // Interval ranges are sorted in reverse order. The lifetimes overlap if
     // any of their ranges overlap.
     size_t index0 = 0, index1 = 0;
     while (index0 < interval0->numRanges() && index1 < interval1->numRanges()) {
         const LiveInterval::Range
             *range0 = interval0->getRange(index0),
             *range1 = interval1->getRange(index1);
@@ -185,17 +186,18 @@ IsThisSlotDefinition(LDefinition* def)
 }
 
 bool
 BacktrackingAllocator::tryGroupRegisters(uint32_t vreg0, uint32_t vreg1)
 {
     // See if reg0 and reg1 can be placed in the same group, following the
     // restrictions imposed by VirtualRegisterGroup and any other registers
     // already grouped with reg0 or reg1.
-    BacktrackingVirtualRegister* reg0 = &vregs[vreg0], *reg1 = &vregs[vreg1];
+    BacktrackingVirtualRegister* reg0 = &vregs[vreg0];
+    BacktrackingVirtualRegister* reg1 = &vregs[vreg1];
 
     if (!reg0->isCompatibleVReg(*reg1))
         return true;
 
     // Registers which might spill to the frame's |this| slot can only be
     // grouped with other such registers. The frame's |this| slot must always
     // hold the |this| value, as required by JitFrame tracing and by the Ion
     // constructor calling convention.
@@ -210,17 +212,18 @@ BacktrackingAllocator::tryGroupRegisters
     if (IsArgumentSlotDefinition(reg0->def()) || IsArgumentSlotDefinition(reg1->def())) {
         JSScript* script = graph.mir().entryBlock()->info().script();
         if (script && script->argumentsHasVarBinding()) {
             if (*reg0->def()->output() != *reg1->def()->output())
                 return true;
         }
     }
 
-    VirtualRegisterGroup* group0 = reg0->group(), *group1 = reg1->group();
+    VirtualRegisterGroup* group0 = reg0->group();
+    VirtualRegisterGroup* group1 = reg1->group();
 
     if (!group0 && group1)
         return tryGroupRegisters(vreg1, vreg0);
 
     if (group0) {
         if (group1) {
             if (group0 == group1) {
                 // The registers are already grouped together.
@@ -257,17 +260,18 @@ BacktrackingAllocator::tryGroupRegisters
     reg0->setGroup(group);
     reg1->setGroup(group);
     return true;
 }
 
 bool
 BacktrackingAllocator::tryGroupReusedRegister(uint32_t def, uint32_t use)
 {
-    BacktrackingVirtualRegister& reg = vregs[def], &usedReg = vregs[use];
+    BacktrackingVirtualRegister& reg = vregs[def];
+    BacktrackingVirtualRegister& usedReg = vregs[use];
 
     // reg is a vreg which reuses its input usedReg for its output physical
     // register. Try to group reg with usedReg if at all possible, as avoiding
     // copies before reg's instruction is crucial for the quality of the
     // generated code (MUST_REUSE_INPUT is used by all arithmetic instructions
     // on x86/x64).
 
     if (reg.intervalFor(inputOf(reg.ins()))) {
@@ -1909,17 +1913,18 @@ BacktrackingAllocator::trySplitAcrossHot
         SplitPositionVector splitPositions;
         if (!splitPositions.append(hotRange->from) || !splitPositions.append(hotRange->to))
             return false;
         *success = true;
         return splitAt(interval, splitPositions);
     }
 
     LiveInterval* hotInterval = LiveInterval::New(alloc(), interval->vreg(), 0);
-    LiveInterval* preInterval = nullptr, *postInterval = nullptr;
+    LiveInterval* preInterval = nullptr;
+    LiveInterval* postInterval = nullptr;
 
     // Accumulate the ranges of hot and cold code in the interval. Note that
     // we are only comparing with the single hot range found, so the cold code
     // may contain separate hot ranges.
     Vector<LiveInterval::Range, 1, SystemAllocPolicy> hotList, coldList;
     for (size_t i = 0; i < interval->numRanges(); i++) {
         LiveInterval::Range hot, coldPre, coldPost;
         interval->getRange(i)->intersect(hotRange, &coldPre, &hot, &coldPost);
--- a/js/src/jit/BaselineDebugModeOSR.h
+++ b/js/src/jit/BaselineDebugModeOSR.h
@@ -70,17 +70,18 @@ class DebugModeOSRVolatileStub
 };
 
 //
 // A JitFrameIterator that updates itself in case of recompilation of an
 // on-stack baseline script.
 //
 class DebugModeOSRVolatileJitFrameIterator : public JitFrameIterator
 {
-    DebugModeOSRVolatileJitFrameIterator** stack, *prev;
+    DebugModeOSRVolatileJitFrameIterator** stack;
+    DebugModeOSRVolatileJitFrameIterator* prev;
 
   public:
     explicit DebugModeOSRVolatileJitFrameIterator(JSContext* cx)
       : JitFrameIterator(cx)
     {
         stack = &cx->liveVolatileJitFrameIterators_;
         prev = *stack;
         *stack = this;
--- a/js/src/jit/BaselineInspector.cpp
+++ b/js/src/jit/BaselineInspector.cpp
@@ -251,17 +251,18 @@ static bool
 CanUseInt32Compare(ICStub::Kind kind)
 {
     return kind == ICStub::Compare_Int32 || kind == ICStub::Compare_Int32WithBoolean;
 }
 
 MCompare::CompareType
 BaselineInspector::expectedCompareType(jsbytecode* pc)
 {
-    ICStub* first = monomorphicStub(pc), *second = nullptr;
+    ICStub* first = monomorphicStub(pc);
+    ICStub* second = nullptr;
     if (!first && !dimorphicStub(pc, &first, &second))
         return MCompare::Compare_Unknown;
 
     if (ICStub* fallback = second ? second->next() : first->next()) {
         MOZ_ASSERT(fallback->isFallback());
         if (fallback->toCompare_Fallback()->hadUnoptimizableAccess())
             return MCompare::Compare_Unknown;
     }
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -181,17 +181,18 @@ CodeGenerator::visitValueToInt32(LValueT
         input = lir->mirTruncate()->input();
 
     Label fails;
     if (lir->mode() == LValueToInt32::TRUNCATE) {
         OutOfLineCode* oolDouble = oolTruncateDouble(temp, output, lir->mir());
 
         // We can only handle strings in truncation contexts, like bitwise
         // operations.
-        Label* stringEntry, *stringRejoin;
+        Label* stringEntry;
+        Label* stringRejoin;
         Register stringReg;
         if (input->mightBeType(MIRType_String)) {
             stringReg = ToRegister(lir->temp());
             OutOfLineCode* oolString = oolCallVM(StringToNumberInfo, lir, (ArgList(), stringReg),
                                                  StoreFloatRegisterTo(temp));
             stringEntry = oolString->entry();
             stringRejoin = oolString->rejoin();
         } else {
@@ -8953,17 +8954,18 @@ CodeGenerator::visitClampDToUint8(LClamp
 void
 CodeGenerator::visitClampVToUint8(LClampVToUint8* lir)
 {
     ValueOperand operand = ToValue(lir, LClampVToUint8::Input);
     FloatRegister tempFloat = ToFloatRegister(lir->tempFloat());
     Register output = ToRegister(lir->output());
     MDefinition* input = lir->mir()->input();
 
-    Label* stringEntry, *stringRejoin;
+    Label* stringEntry;
+    Label* stringRejoin;
     if (input->mightBeType(MIRType_String)) {
         OutOfLineCode* oolString = oolCallVM(StringToNumberInfo, lir, (ArgList(), output),
                                              StoreFloatRegisterTo(tempFloat));
         stringEntry = oolString->entry();
         stringRejoin = oolString->rejoin();
     } else {
         stringEntry = nullptr;
         stringRejoin = nullptr;
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1036,20 +1036,18 @@ IonScript::getSafepointIndex(uint32_t di
     }
 
     MOZ_CRASH("displacement not found.");
 }
 
 const OsiIndex*
 IonScript::getOsiIndex(uint32_t disp) const
 {
-    for (const OsiIndex* it = osiIndices(), *end = osiIndices() + osiIndexEntries_;
-         it != end;
-         ++it)
-    {
+    const OsiIndex* end = osiIndices() + osiIndexEntries_;
+    for (const OsiIndex* it = osiIndices(); it != end; ++it) {
         if (it->returnPointDisplacement() == disp)
             return it;
     }
 
     MOZ_CRASH("Failed to find OSI point return address");
 }
 
 const OsiIndex*
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -590,17 +590,18 @@ IonBuilder::analyzeNewLoopTypes(MBasicBl
             // new types flow to the phis and the loop is processed at least
             // three times.
             loopHeaders_[i].header = entry;
             return true;
         }
     }
     loopHeaders_.append(LoopHeader(start, entry));
 
-    jsbytecode* last = nullptr, *earlier = nullptr;
+    jsbytecode* last = nullptr;
+    jsbytecode* earlier = nullptr;
     for (jsbytecode* pc = start; pc != end; earlier = last, last = pc, pc += GetBytecodeLength(pc)) {
         uint32_t slot;
         if (*pc == JSOP_SETLOCAL)
             slot = info().localSlot(GET_LOCALNO(pc));
         else if (*pc == JSOP_SETARG)
             slot = info().argSlotUnchecked(GET_ARGNO(pc));
         else
             continue;
@@ -2572,17 +2573,18 @@ IonBuilder::processForUpdateEnd(CFGState
 
     current->end(MGoto::New(alloc(), state.loop.entry));
     return finishLoop(state, state.loop.successor);
 }
 
 IonBuilder::DeferredEdge*
 IonBuilder::filterDeadDeferredEdges(DeferredEdge* edge)
 {
-    DeferredEdge* head = edge, *prev = nullptr;
+    DeferredEdge* head = edge;
+    DeferredEdge* prev = nullptr;
 
     while (edge) {
         if (edge->block->isDead()) {
             if (prev)
                 prev->next = edge->next;
             else
                 head = edge->next;
         } else {
@@ -7926,17 +7928,18 @@ bool
 IonBuilder::pushScalarLoadFromTypedObject(MDefinition* obj,
                                           const LinearSum& byteOffset,
                                           ScalarTypeDescr::Type elemType)
 {
     int32_t size = ScalarTypeDescr::size(elemType);
     MOZ_ASSERT(size == ScalarTypeDescr::alignment(elemType));
 
     // Find location within the owner object.
-    MDefinition* elements, *scaledOffset;
+    MDefinition* elements;
+    MDefinition* scaledOffset;
     int32_t adjustment;
     loadTypedObjectElements(obj, byteOffset, size, &elements, &scaledOffset, &adjustment);
 
     // Load the element.
     MLoadUnboxedScalar* load = MLoadUnboxedScalar::New(alloc(), elements, scaledOffset,
                                                        elemType,
                                                        DoesNotRequireMemoryBarrier,
                                                        adjustment);
@@ -7965,17 +7968,18 @@ IonBuilder::pushScalarLoadFromTypedObjec
 
 bool
 IonBuilder::pushReferenceLoadFromTypedObject(MDefinition* typedObj,
                                              const LinearSum& byteOffset,
                                              ReferenceTypeDescr::Type type,
                                              PropertyName* name)
 {
     // Find location within the owner object.
-    MDefinition* elements, *scaledOffset;
+    MDefinition* elements;
+    MDefinition* scaledOffset;
     int32_t adjustment;
     size_t alignment = ReferenceTypeDescr::alignment(type);
     loadTypedObjectElements(typedObj, byteOffset, alignment, &elements, &scaledOffset, &adjustment);
 
     TemporaryTypeSet* observedTypes = bytecodeTypes(pc);
 
     MInstruction* load = nullptr;  // initialize to silence GCC warning
     BarrierKind barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(),
@@ -12743,17 +12747,18 @@ IonBuilder::typeObjectForFieldFromStruct
 
 bool
 IonBuilder::storeScalarTypedObjectValue(MDefinition* typedObj,
                                         const LinearSum& byteOffset,
                                         ScalarTypeDescr::Type type,
                                         MDefinition* value)
 {
     // Find location within the owner object.
-    MDefinition* elements, *scaledOffset;
+    MDefinition* elements;
+    MDefinition* scaledOffset;
     int32_t adjustment;
     size_t alignment = ScalarTypeDescr::alignment(type);
     loadTypedObjectElements(typedObj, byteOffset, alignment, &elements, &scaledOffset, &adjustment);
 
     // Clamp value to [0, 255] when type is Uint8Clamped
     MDefinition* toWrite = value;
     if (type == Scalar::Uint8Clamped) {
         toWrite = MClampToUint8::New(alloc(), value);
@@ -12787,17 +12792,18 @@ IonBuilder::storeReferenceTypedObjectVal
                                           /* canModify = */ true, implicitType))
         {
             trackOptimizationOutcome(TrackedOutcome::NeedsTypeBarrier);
             return false;
         }
     }
 
     // Find location within the owner object.
-    MDefinition* elements, *scaledOffset;
+    MDefinition* elements;
+    MDefinition* scaledOffset;
     int32_t adjustment;
     size_t alignment = ReferenceTypeDescr::alignment(type);
     loadTypedObjectElements(typedObj, byteOffset, alignment, &elements, &scaledOffset, &adjustment);
 
     MInstruction* store = nullptr;  // initialize to silence GCC warning
     switch (type) {
       case ReferenceTypeDescr::TYPE_ANY:
         if (NeedsPostBarrier(info(), value))
--- a/js/src/jit/IonBuilder.h
+++ b/js/src/jit/IonBuilder.h
@@ -1025,17 +1025,19 @@ class IonBuilder
     CompilerConstraintList* constraints_;
 
     // Basic analysis information about the script.
     BytecodeAnalysis analysis_;
     BytecodeAnalysis& analysis() {
         return analysis_;
     }
 
-    TemporaryTypeSet* thisTypes, *argTypes, *typeArray;
+    TemporaryTypeSet* thisTypes;
+    TemporaryTypeSet* argTypes;
+    TemporaryTypeSet* typeArray;
     uint32_t typeArrayHint;
     uint32_t* bytecodeTypeMap;
 
     GSNCache gsn;
     ScopeCoordinateNameCache scopeCoordinateNameCache;
 
     jsbytecode* pc;
     MBasicBlock* current;
--- a/js/src/jit/LoopUnroller.cpp
+++ b/js/src/jit/LoopUnroller.cpp
@@ -23,25 +23,28 @@ struct LoopUnroller
     explicit LoopUnroller(MIRGraph& graph)
       : graph(graph), alloc(graph.alloc())
     {}
 
     MIRGraph& graph;
     TempAllocator& alloc;
 
     // Header and body of the original loop.
-    MBasicBlock* header, *backedge;
+    MBasicBlock* header;
+    MBasicBlock* backedge;
 
     // Header and body of the unrolled loop.
-    MBasicBlock* unrolledHeader, *unrolledBackedge;
+    MBasicBlock* unrolledHeader;
+    MBasicBlock* unrolledBackedge;
 
     // Old and new preheaders. The old preheader starts out associated with the
     // original loop, but becomes the preheader of the new loop. The new
     // preheader will be given to the original loop.
-    MBasicBlock* oldPreheader, *newPreheader;
+    MBasicBlock* oldPreheader;
+    MBasicBlock* newPreheader;
 
     // Map terms in the original loop to terms in the current unrolled iteration.
     DefinitionMap unrolledDefinitions;
 
     MDefinition* getReplacementDefinition(MDefinition* def);
     MResumePoint* makeReplacementResumePoint(MBasicBlock* block, MResumePoint* rp);
     void makeReplacementInstruction(MInstruction* ins);
 
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -1548,18 +1548,18 @@ MPhi::removeOperand(size_t index)
 
     // truncate the inputs_ list:
     inputs_.popBack();
 }
 
 void
 MPhi::removeAllOperands()
 {
-    for (MUse* p = inputs_.begin(), *e = inputs_.end(); p < e; ++p)
-        p->producer()->removeUse(p);
+    for (MUse& p : inputs_)
+        p.producer()->removeUse(&p);
     inputs_.clear();
 }
 
 MDefinition*
 MPhi::foldsTernary()
 {
     /* Look if this MPhi is a ternary construct.
      * This is a very loose term as it actually only checks for
@@ -2774,17 +2774,18 @@ MustBeUInt32(MDefinition* def, MDefiniti
     }
 
     return false;
 }
 
 bool
 MBinaryInstruction::tryUseUnsignedOperands()
 {
-    MDefinition* newlhs, *newrhs;
+    MDefinition* newlhs;
+    MDefinition* newrhs;
     if (MustBeUInt32(getOperand(0), &newlhs) && MustBeUInt32(getOperand(1), &newrhs)) {
         if (newlhs->type() != MIRType_Int32 || newrhs->type() != MIRType_Int32)
             return false;
         if (newlhs != getOperand(0)) {
             getOperand(0)->setImplicitlyUsedUnchecked();
             replaceOperand(0, newlhs);
         }
         if (newrhs != getOperand(1)) {
--- a/js/src/jit/MIRGraph.cpp
+++ b/js/src/jit/MIRGraph.cpp
@@ -985,18 +985,18 @@ MBasicBlock::discardPhi(MPhi* phi)
     MOZ_ASSERT(!phis_.empty());
 
     phi->removeAllOperands();
     phi->setDiscarded();
 
     phis_.remove(phi);
 
     if (phis_.empty()) {
-        for (MBasicBlock** pred = predecessors_.begin(), **end = predecessors_.end(); pred < end; ++pred)
-            (*pred)->clearSuccessorWithPhis();
+        for (MBasicBlock* pred : predecessors_)
+            pred->clearSuccessorWithPhis();
     }
 }
 
 void
 MBasicBlock::flagOperandsOfPrunedBranches(MInstruction* ins)
 {
     // Find the previous resume point which would be used for bailing out.
     MResumePoint* rp = nullptr;
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -597,17 +597,18 @@ GetDynamicName(JSContext* cx, JSObject* 
     }
 
     if (!frontend::IsIdentifier(atom) || frontend::IsKeyword(atom)) {
         vp->setUndefined();
         return;
     }
 
     Shape* shape = nullptr;
-    JSObject* scope = nullptr, *pobj = nullptr;
+    JSObject* scope = nullptr;
+    JSObject* pobj = nullptr;
     if (LookupNameNoGC(cx, atom->asPropertyName(), scopeChain, &scope, &pobj, &shape)) {
         if (FetchNameNoGC(pobj, shape, MutableHandleValue::fromMarkedLocation(vp)))
             return;
     }
 
     vp->setUndefined();
 }
 
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -337,17 +337,18 @@ CodeGeneratorShared::encodeAllocation(LS
     RValueAllocation alloc;
 
     switch (type) {
       case MIRType_None:
       {
         MOZ_ASSERT(mir->isRecoveredOnBailout());
         uint32_t index = 0;
         LRecoverInfo* recoverInfo = snapshot->recoverInfo();
-        MNode** it = recoverInfo->begin(), **end = recoverInfo->end();
+        MNode** it = recoverInfo->begin();
+        MNode** end = recoverInfo->end();
         while (it != end && mir != *it) {
             ++it;
             ++index;
         }
 
         // This MDefinition is recovered, thus it should be listed in the
         // LRecoverInfo.
         MOZ_ASSERT(it != end && mir == *it);
@@ -482,18 +483,18 @@ CodeGeneratorShared::encode(LRecoverInfo
             (void*)recover, recover->mir()->frameCount(), numInstructions);
 
     MResumePoint::Mode mode = recover->mir()->mode();
     MOZ_ASSERT(mode != MResumePoint::Outer);
     bool resumeAfter = (mode == MResumePoint::ResumeAfter);
 
     RecoverOffset offset = recovers_.startRecover(numInstructions, resumeAfter);
 
-    for (MNode** it = recover->begin(), **end = recover->end(); it != end; ++it)
-        recovers_.writeInstruction(*it);
+    for (MNode* insn : *recover)
+        recovers_.writeInstruction(insn);
 
     recovers_.endRecover();
     recover->setRecoverOffset(offset);
     masm.propagateOOM(!recovers_.oom());
 }
 
 void
 CodeGeneratorShared::encode(LSnapshot* snapshot)
@@ -567,28 +568,25 @@ CodeGeneratorShared::assignBailoutId(LSn
     snapshot->setBailoutId(bailoutId);
     JitSpew(JitSpew_IonSnapshots, "Assigned snapshot bailout id %u", bailoutId);
     return bailouts_.append(snapshot->snapshotOffset());
 }
 
 void
 CodeGeneratorShared::encodeSafepoints()
 {
-    for (SafepointIndex* it = safepointIndices_.begin(), *end = safepointIndices_.end();
-         it != end;
-         ++it)
-    {
-        LSafepoint* safepoint = it->safepoint();
+    for (SafepointIndex& index : safepointIndices_) {
+        LSafepoint* safepoint = index.safepoint();
 
         if (!safepoint->encoded()) {
             safepoint->fixupOffset(&masm);
             safepoints_.encode(safepoint);
         }
 
-        it->resolve();
+        index.resolve();
     }
 }
 
 bool
 CodeGeneratorShared::createNativeToBytecodeScriptList(JSContext* cx)
 {
     js::Vector<JSScript*, 0, SystemAllocPolicy> scriptList;
     InlineScriptTree* tree = gen->info().inlineScriptTree();
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -615,17 +615,19 @@ js::ExpandErrorArgumentsVA(ExclusiveCont
             }
         }
         /*
          * Parse the error format, substituting the argument X
          * for {X} in the format.
          */
         if (argCount > 0) {
             if (efs->format) {
-                char16_t* buffer, *fmt, *out;
+                char16_t* buffer;
+                char16_t* fmt;
+                char16_t* out;
                 int expandedArgs = 0;
                 size_t expandedLength;
                 size_t len = strlen(efs->format);
 
                 buffer = fmt = InflateString(cx, efs->format, &len);
                 if (!buffer)
                     goto error;
                 expandedLength = len
--- a/js/src/jsdtoa.cpp
+++ b/js/src/jsdtoa.cpp
@@ -243,17 +243,18 @@ divrem(Bigint* b, uint32_t divisor)
     return remainder;
 }
 
 /* Return floor(b/2^k) and set b to be the remainder.  The returned quotient must be less than 2^32. */
 static uint32_t quorem2(Bigint* b, int32_t k)
 {
     ULong mask;
     ULong result;
-    ULong* bx, *bxe;
+    ULong* bx;
+    ULong* bxe;
     int32_t w;
     int32_t n = k >> 5;
     k &= 0x1F;
     mask = (1<<k) - 1;
 
     w = b->wds - n;
     if (w <= 0)
         return 0;
@@ -360,19 +361,20 @@ js_dtobasestr(DtoaState* state, int base
         *q-- = ch;
     }
 
     dval(df) = dval(d) - dval(di);
     if (dval(df) != 0.0) {
         /* We have a fraction. */
         int e, bbits;
         int32_t s2, done;
-        Bigint* b, *s, *mlo, *mhi;
-
-        b = s = mlo = mhi = nullptr;
+        Bigint* b = nullptr;
+        Bigint* s = nullptr;
+        Bigint* mlo = nullptr;
+        Bigint* mhi = nullptr;
 
         *p++ = '.';
         b = d2b(PASS_STATE df, &e, &bbits);
         if (!b) {
           nomem2:
             Bfree(PASS_STATE b);
             Bfree(PASS_STATE s);
             if (mlo != mhi)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2594,26 +2594,27 @@ GCRuntime::updatePointersToRelocatedCell
     // Call callbacks to get the rest of the system to fixup other untraced pointers.
     callWeakPointerCallbacks();
 }
 
 #ifdef DEBUG
 void
 GCRuntime::protectRelocatedArenas()
 {
-    for (ArenaHeader* arena = relocatedArenasToRelease, *next; arena; arena = next) {
-        next = arena->next;
+    for (ArenaHeader* arena = relocatedArenasToRelease; arena; ) {
+        ArenaHeader* next = arena->next;
 #if defined(XP_WIN)
         DWORD oldProtect;
         if (!VirtualProtect(arena, ArenaSize, PAGE_NOACCESS, &oldProtect))
             MOZ_CRASH();
 #else  // assume Unix
         if (mprotect(arena, ArenaSize, PROT_NONE))
             MOZ_CRASH();
 #endif
+        arena = next;
     }
 }
 
 void
 GCRuntime::unprotectRelocatedArenas()
 {
     for (ArenaHeader* arena = relocatedArenasToRelease; arena; arena = arena->next) {
 #if defined(XP_WIN)
--- a/js/src/jsnum.cpp
+++ b/js/src/jsnum.cpp
@@ -1053,17 +1053,19 @@ js::InitRuntimeNumberState(JSRuntime* rt
 {
     FIX_FPU();
 
     // XXX If EXPOSE_INTL_API becomes true all the time at some point,
     //     js::InitRuntimeNumberState is no longer fallible, and we should
     //     change its return type.
 #if !EXPOSE_INTL_API
     /* Copy locale-specific separators into the runtime strings. */
-    const char* thousandsSeparator, *decimalPoint, *grouping;
+    const char* thousandsSeparator;
+    const char* decimalPoint;
+    const char* grouping;
 #ifdef HAVE_LOCALECONV
     struct lconv* locale = localeconv();
     thousandsSeparator = locale->thousands_sep;
     decimalPoint = locale->decimal_point;
     grouping = locale->grouping;
 #else
     thousandsSeparator = getenv("LOCALE_THOUSANDS_SEP");
     decimalPoint = getenv("LOCALE_DECIMAL_POINT");
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -721,19 +721,16 @@ js::ReconstructStackDepth(JSContext* cx,
  */
 static bool
 DisassembleAtPC(JSContext* cx, JSScript* scriptArg, bool lines,
                 jsbytecode* pc, bool showAll, Sprinter* sp)
 {
     RootedScript script(cx, scriptArg);
     BytecodeParser parser(cx, script);
 
-    jsbytecode* next, *end;
-    unsigned len;
-
     if (showAll && !parser.parse())
         return false;
 
     if (showAll)
         Sprint(sp, "%s:%" PRIuSIZE "\n", script->filename(), script->lineno());
 
     if (pc != nullptr)
         sp->put("    ");
@@ -748,18 +745,18 @@ DisassembleAtPC(JSContext* cx, JSScript*
         sp->put("    ");
     if (showAll)
         sp->put("-- ----- ");
     sp->put("----- ");
     if (lines)
         sp->put("----");
     sp->put("  --\n");
 
-    next = script->code();
-    end = script->codeEnd();
+    jsbytecode* next = script->code();
+    jsbytecode* end = script->codeEnd();
     while (next < end) {
         if (next == script->main())
             sp->put("main:\n");
         if (pc != nullptr) {
             if (pc == next)
                 sp->put("--> ");
             else
                 sp->put("    ");
@@ -778,17 +775,17 @@ DisassembleAtPC(JSContext* cx, JSScript*
             }
             else
                 sp->put("   ");
             if (parser.isReachable(next))
                 Sprint(sp, "%05u ", parser.stackDepthAtPC(next));
             else
                 Sprint(sp, "      ");
         }
-        len = Disassemble1(cx, script, next, script->pcToOffset(next), lines, sp);
+        unsigned len = Disassemble1(cx, script, next, script->pcToOffset(next), lines, sp);
         if (!len)
             return false;
         next += len;
     }
     return true;
 }
 
 bool
--- a/js/src/jsopcodeinlines.h
+++ b/js/src/jsopcodeinlines.h
@@ -106,14 +106,15 @@ class BytecodeRange {
     bool empty() const { return pc == end; }
     jsbytecode* frontPC() const { return pc; }
     JSOp frontOpcode() const { return JSOp(*pc); }
     size_t frontOffset() const { return script->pcToOffset(pc); }
     void popFront() { pc += GetBytecodeLength(pc); }
 
   private:
     RootedScript script;
-    jsbytecode* pc, *end;
+    jsbytecode* pc;
+    jsbytecode* end;
 };
 
 }
 
 #endif /* jsopcodeinlines_h */
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -315,18 +315,18 @@ Bindings::trace(JSTracer* trc)
     /*
      * As the comment in Bindings explains, bindingsArray may point into freed
      * storage when bindingArrayUsingTemporaryStorage so we don't mark it.
      * Note: during compilation, atoms are already kept alive by gcKeepAtoms.
      */
     if (bindingArrayUsingTemporaryStorage())
         return;
 
-    for (Binding* b = bindingArray(), *end = b + count(); b != end; b++) {
-        PropertyName* name = b->name();
+    for (const Binding& b : *this) {
+        PropertyName* name = b.name();
         MarkStringUnbarriered(trc, &name, "bindingArray");
     }
 }
 
 template<XDRMode mode>
 bool
 js::XDRScriptConst(XDRState<mode>* xdr, MutableHandleValue vp)
 {
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -309,16 +309,19 @@ class Bindings
     /* Return whether this scope has any aliased bindings. */
     bool hasAnyAliasedBindings() const {
         if (!callObjShape_)
             return false;
 
         return !callObjShape_->isEmptyShape();
     }
 
+    Binding* begin() const { return bindingArray(); }
+    Binding* end() const { return bindingArray() + count(); }
+
     static js::ThingRootKind rootKind() { return js::THING_ROOT_BINDINGS; }
     void trace(JSTracer* trc);
 };
 
 template <>
 struct GCMethods<Bindings> {
     static Bindings initial();
 };
@@ -1776,17 +1779,19 @@ class BindingIter
 
 /*
  * Iterator over the aliased formal bindings in ascending index order. This can
  * be viewed as a filtering of BindingIter with predicate
  *   bi->aliased() && bi->kind() == Binding::ARGUMENT
  */
 class AliasedFormalIter
 {
-    const Binding* begin_, *p_, *end_;
+    const Binding* begin_;
+    const Binding* p_;
+    const Binding* end_;
     unsigned slot_;
 
     void settle() {
         while (p_ != end_ && !p_->aliased())
             p_++;
     }
 
   public:
@@ -1804,17 +1809,17 @@ class AliasedFormalIter
 
 // Information about a script which may be (or has been) lazily compiled to
 // bytecode from its source.
 class LazyScript : public gc::TenuredCell
 {
   public:
     class FreeVariable
     {
-        // Free variable names are possible tagged JSAtom* s.
+        // Variable name is stored as a tagged JSAtom pointer.
         uintptr_t bits_;
 
         static const uintptr_t HOISTED_USE_BIT = 0x1;
         static const uintptr_t MASK = ~HOISTED_USE_BIT;
 
       public:
         explicit FreeVariable()
           : bits_(0)
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -1234,17 +1234,18 @@ StringMatch(const TextChar* text, uint32
 
 #if defined(__i386__) || defined(_M_IX86) || defined(__i386)
     /*
      * Given enough registers, the unrolled loop below is faster than the
      * following loop. 32-bit x86 does not have enough registers.
      */
     if (patLen == 1) {
         const PatChar p0 = *pat;
-        for (const TextChar* c = text, *end = text + textLen; c != end; ++c) {
+        const TextChar* end = text + textLen;
+        for (const TextChar* c = text; c != end; ++c) {
             if (*c == p0)
                 return c - text;
         }
         return -1;
     }
 #endif
 
     /*
--- a/js/src/jstypes.h
+++ b/js/src/jstypes.h
@@ -164,20 +164,20 @@
 
 /***********************************************************************
 ** MACROS:      JS_ARRAY_LENGTH
 **              JS_ARRAY_END
 ** DESCRIPTION:
 **      Macros to get the number of elements and the pointer to one past the
 **      last element of a C array. Use them like this:
 **
-**      char16_t buf[10], *s;
+**      char16_t buf[10];
 **      JSString* str;
 **      ...
-**      for (s = buf; s != JS_ARRAY_END(buf); ++s) *s = ...;
+**      for (char16_t* s = buf; s != JS_ARRAY_END(buf); ++s) *s = ...;
 **      ...
 **      str = JS_NewStringCopyN(cx, buf, JS_ARRAY_LENGTH(buf));
 **      ...
 **
 ***********************************************************************/
 
 #define JS_ARRAY_LENGTH(array) (sizeof (array) / sizeof (array)[0])
 #define JS_ARRAY_END(array)    ((array) + JS_ARRAY_LENGTH(array))
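For reference, the corrected usage from the comment above works as ordinary scoped-pointer code; a minimal self-contained sketch using only the two macros defined here (the JSString parts of the original example are omitted):

    #define JS_ARRAY_LENGTH(array) (sizeof (array) / sizeof (array)[0])
    #define JS_ARRAY_END(array)    ((array) + JS_ARRAY_LENGTH(array))

    int main() {
        char16_t buf[10];
        // One declaration per pointer; s lives only inside the loop.
        for (char16_t* s = buf; s != JS_ARRAY_END(buf); ++s)
            *s = u'a';
        return buf[0] == u'a' ? 0 : 1;
    }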
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -105,27 +105,28 @@ WeakMapBase::findZoneEdgesForCompartment
     }
     return true;
 }
 
 void
 WeakMapBase::sweepCompartment(JSCompartment* c)
 {
     WeakMapBase** tailPtr = &c->gcWeakMapList;
-    for (WeakMapBase* m = c->gcWeakMapList, *next; m; m = next) {
-        next = m->next;
+    for (WeakMapBase* m = c->gcWeakMapList; m; ) {
+        WeakMapBase* next = m->next;
         if (m->marked) {
             m->sweep();
             *tailPtr = m;
             tailPtr = &m->next;
         } else {
             /* Destroy the hash map now to catch any use after this point. */
             m->finish();
             m->next = WeakMapNotInList;
         }
+        m = next;
     }
     *tailPtr = nullptr;
 
 #ifdef DEBUG
     for (WeakMapBase* m = c->gcWeakMapList; m; m = m->next)
         MOZ_ASSERT(m->isInList() && m->marked);
 #endif
 }
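The rewrite above preserves the filter-in-place traversal while keeping one pointer declaration per line: the successor is captured before the current map may be unlinked, and a tail pointer rebuilds the list of survivors. A minimal sketch of the same pattern on a generic singly linked list (Node, marked, and SweepList are illustrative names, not SpiderMonkey API):

    struct Node {
        Node* next;
        bool marked;
    };

    static void SweepList(Node** head) {
        Node** tailPtr = head;
        for (Node* n = *head; n; ) {
            Node* next = n->next;      // capture before n is possibly unlinked
            if (n->marked) {
                *tailPtr = n;          // keep: append to the surviving list
                tailPtr = &n->next;
            } else {
                n->next = nullptr;     // drop: detach from the list
            }
            n = next;
        }
        *tailPtr = nullptr;            // terminate the surviving list
    }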
--- a/js/src/proxy/CrossCompartmentWrapper.cpp
+++ b/js/src/proxy/CrossCompartmentWrapper.cpp
@@ -575,20 +575,18 @@ js::RemapAllWrappersForObject(JSContext*
 
     for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         if (WrapperMap::Ptr wp = c->lookupWrapper(origv)) {
             // We found a wrapper. Remember and root it.
             toTransplant.infallibleAppend(WrapperValue(wp));
         }
     }
 
-    for (WrapperValue* begin = toTransplant.begin(), *end = toTransplant.end();
-         begin != end; ++begin)
-    {
-        if (!RemapWrapper(cx, &begin->toObject(), newTarget))
+    for (const WrapperValue& v : toTransplant) {
+        if (!RemapWrapper(cx, &v.toObject(), newTarget))
             MOZ_CRASH();
     }
 
     return true;
 }
 
 JS_FRIEND_API(bool)
 js::RecomputeWrappers(JSContext* cx, const CompartmentFilter& sourceFilter,
@@ -614,18 +612,17 @@ js::RecomputeWrappers(JSContext* cx, con
 
             // Add it to the list.
             if (!toRecompute.append(WrapperValue(e)))
                 return false;
         }
     }
 
     // Recompute all the wrappers in the list.
-    for (WrapperValue* begin = toRecompute.begin(), *end = toRecompute.end(); begin != end; ++begin)
-    {
-        JSObject* wrapper = &begin->toObject();
+    for (const WrapperValue& v : toRecompute) {
+        JSObject* wrapper = &v.toObject();
         JSObject* wrapped = Wrapper::wrappedObject(wrapper);
         if (!RemapWrapper(cx, wrapper, wrapped))
             MOZ_CRASH();
     }
 
     return true;
 }
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -2184,23 +2184,21 @@ JS_STATIC_ASSERT(JSTRY_CATCH == 0);
 JS_STATIC_ASSERT(JSTRY_FINALLY == 1);
 JS_STATIC_ASSERT(JSTRY_FOR_IN == 2);
 
 static const char* const TryNoteNames[] = { "catch", "finally", "for-in", "for-of", "loop" };
 
 static bool
 TryNotes(JSContext* cx, HandleScript script, Sprinter* sp)
 {
-    JSTryNote* tn, *tnlimit;
-
     if (!script->hasTrynotes())
         return true;
 
-    tn = script->trynotes()->vector;
-    tnlimit = tn + script->trynotes()->length;
+    JSTryNote* tn = script->trynotes()->vector;
+    JSTryNote* tnlimit = tn + script->trynotes()->length;
     Sprint(sp, "\nException table:\nkind      stack    start      end\n");
     do {
         MOZ_ASSERT(tn->kind < ArrayLength(TryNoteNames));
         Sprint(sp, " %-7s %6u %8u %8u\n",
                TryNoteNames[tn->kind], tn->stackDepth,
                tn->start, tn->start + tn->length);
     } while (++tn != tnlimit);
     return true;
@@ -2402,17 +2400,16 @@ static bool
 DisassWithSrc(JSContext* cx, unsigned argc, jsval* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
 #define LINE_BUF_LEN 512
     unsigned len, line1, line2, bupline;
     FILE* file;
     char linebuf[LINE_BUF_LEN];
-    jsbytecode* pc, *end;
     static const char sep[] = ";-------------------------";
 
     bool ok = true;
     RootedScript script(cx);
     for (unsigned i = 0; ok && i < args.length(); i++) {
         script = ValueToScript(cx, args[i]);
         if (!script)
            return false;
@@ -2426,18 +2423,18 @@ DisassWithSrc(JSContext* cx, unsigned ar
         file = fopen(script->filename(), "r");
         if (!file) {
             JS_ReportErrorNumber(cx, my_GetErrorMessage, nullptr,
                                  JSSMSG_CANT_OPEN, script->filename(),
                                  strerror(errno));
             return false;
         }
 
-        pc = script->code();
-        end = script->codeEnd();
+        jsbytecode* pc = script->code();
+        jsbytecode* end = script->codeEnd();
 
         Sprinter sprinter(cx);
         if (!sprinter.init()) {
             ok = false;
             goto bail;
         }
 
         /* burn the leading lines */
--- a/js/src/shell/jsoptparse.cpp
+++ b/js/src/shell/jsoptparse.cpp
@@ -189,51 +189,48 @@ OptionParser::printHelp(const char* prog
         printf("\nVersion: %s\n\n", version);
 
     if (!arguments.empty()) {
         printf("Arguments:\n");
 
         static const char fmt[] = "  %s ";
         size_t fmtChars = sizeof(fmt) - 2;
         size_t lhsLen = 0;
-        for (Option** it = arguments.begin(), **end = arguments.end(); it != end; ++it)
-            lhsLen = Max(lhsLen, strlen((*it)->longflag) + fmtChars);
+        for (Option* arg : arguments)
+            lhsLen = Max(lhsLen, strlen(arg->longflag) + fmtChars);
 
-        for (Option** it = arguments.begin(), **end = arguments.end(); it != end; ++it) {
-            Option* arg = *it;
+        for (Option* arg : arguments) {
             size_t chars = printf(fmt, arg->longflag);
             for (; chars < lhsLen; ++chars)
                 putchar(' ');
             PrintParagraph(arg->help, lhsLen, helpWidth, false);
             putchar('\n');
         }
         putchar('\n');
     }
 
     if (!options.empty()) {
         printf("Options:\n");
 
         /* Calculate sizes for column alignment. */
         size_t lhsLen = 0;
-        for (Option** it = options.begin(), **end = options.end(); it != end; ++it) {
-            Option* opt = *it;
+        for (Option* opt : options) {
             size_t longflagLen = strlen(opt->longflag);
 
             size_t fmtLen;
             OptionFlagsToFormatInfo(opt->shortflag, opt->isValued(), &fmtLen);
 
             size_t len = fmtLen + longflagLen;
             if (opt->isValued())
                 len += strlen(opt->asValued()->metavar);
             lhsLen = Max(lhsLen, len);
         }
 
         /* Print option help text. */
-        for (Option** it = options.begin(), **end = options.end(); it != end; ++it) {
-            Option* opt = *it;
+        for (Option* opt : options) {
             size_t fmtLen;
             const char* fmt = OptionFlagsToFormatInfo(opt->shortflag, opt->isValued(), &fmtLen);
             size_t chars;
             if (opt->isValued()) {
                 if (opt->shortflag)
                     chars = printf(fmt, opt->shortflag, opt->longflag, opt->asValued()->metavar);
                 else
                     chars = printf(fmt, opt->longflag, opt->asValued()->metavar);
@@ -464,28 +461,28 @@ MultiStringRange
 OptionParser::getMultiStringOption(const char* longflag) const
 {
     const MultiStringOption* mso = findOption(longflag)->asMultiStringOption();
     return MultiStringRange(mso->strings.begin(), mso->strings.end());
 }
 
 OptionParser::~OptionParser()
 {
-    for (Option** it = options.begin(), **end = options.end(); it != end; ++it)
-        js_delete<Option>(*it);
-    for (Option** it = arguments.begin(), **end = arguments.end(); it != end; ++it)
-        js_delete<Option>(*it);
+    for (Option* opt : options)
+        js_delete<Option>(opt);
+    for (Option* arg : arguments)
+        js_delete<Option>(arg);
 }
 
 Option*
 OptionParser::findOption(char shortflag)
 {
-    for (Option** it = options.begin(), **end = options.end(); it != end; ++it) {
-        if ((*it)->shortflag == shortflag)
-            return *it;
+    for (Option* opt : options) {
+        if (opt->shortflag == shortflag)
+            return opt;
     }
 
     if (versionOption.shortflag == shortflag)
         return &versionOption;
 
     return helpOption.shortflag == shortflag ? &helpOption : nullptr;
 }
 
@@ -493,30 +490,30 @@ const Option*
 OptionParser::findOption(char shortflag) const
 {
     return const_cast<OptionParser*>(this)->findOption(shortflag);
 }
 
 Option*
 OptionParser::findOption(const char* longflag)
 {
-    for (Option** it = options.begin(), **end = options.end(); it != end; ++it) {
-        const char* target = (*it)->longflag;
-        if ((*it)->isValued()) {
+    for (Option* opt : options) {
+        const char* target = opt->longflag;
+        if (opt->isValued()) {
             size_t targetLen = strlen(target);
             /* Permit a trailing equals sign on the longflag argument. */
             for (size_t i = 0; i < targetLen; ++i) {
                 if (longflag[i] == '\0' || longflag[i] != target[i])
                     goto no_match;
             }
             if (longflag[targetLen] == '\0' || longflag[targetLen] == '=')
-                return *it;
+                return opt;
         } else {
             if (strcmp(target, longflag) == 0)
-                return *it;
+                return opt;
         }
   no_match:;
     }
 
     if (strcmp(versionOption.longflag, longflag) == 0)
         return &versionOption;
 
     return strcmp(helpOption.longflag, longflag) ? nullptr : &helpOption;
--- a/js/src/vm/ArgumentsObject.cpp
+++ b/js/src/vm/ArgumentsObject.cpp
@@ -132,17 +132,18 @@ struct CopyScriptFrameIterArgs
         /* Define formals which are not part of the actuals. */
         unsigned numActuals = iter_.numActualArgs();
         unsigned numFormals = iter_.calleeTemplate()->nargs();
         MOZ_ASSERT(numActuals <= totalArgs);
         MOZ_ASSERT(numFormals <= totalArgs);
         MOZ_ASSERT(Max(numActuals, numFormals) == totalArgs);
 
         if (numActuals < numFormals) {
-            HeapValue* dst = dstBase + numActuals, *dstEnd = dstBase + totalArgs;
+            HeapValue* dst = dstBase + numActuals;
+            HeapValue* dstEnd = dstBase + totalArgs;
             while (dst != dstEnd)
                 (dst++)->init(UndefinedValue());
         }
     }
 
     /*
     * Ion frames copy every argument onto the stack; other locations are
     * invalid.
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -4159,17 +4159,19 @@ Debugger::drainTraceLoggerScriptCalls(JS
         if (textId != TraceLogger_Stop && !logger->textIdIsScriptEvent(textId))
             continue;
 
         const char* type = (textId == TraceLogger_Stop) ? "Stop" : "Script";
         if (!DefineProperty(cx, item, logTypeId, type, strlen(type)))
             return false;
 
         if (textId != TraceLogger_Stop) {
-            const char* filename, *lineno, *colno;
+            const char* filename;
+            const char* lineno;
+            const char* colno;
             size_t filename_len, lineno_len, colno_len;
             logger->extractScriptDetails(textId, &filename, &filename_len, &lineno, &lineno_len,
                                          &colno, &colno_len);
 
             if (!DefineProperty(cx, item, fileNameId, filename, filename_len))
                 return false;
             if (!DefineProperty(cx, item, lineNumberId, lineno, lineno_len))
                 return false;
--- a/js/src/vm/Interpreter-inl.h
+++ b/js/src/vm/Interpreter-inl.h
@@ -682,17 +682,18 @@ ProcessCallSiteObjOperation(JSContext* c
         if (lhs.isInt32() && rhs.isInt32()) {                                 \
             *res = lhs.toInt32() OP rhs.toInt32();                            \
         } else {                                                              \
             if (!ToPrimitive(cx, JSTYPE_NUMBER, lhs))                         \
                 return false;                                                 \
             if (!ToPrimitive(cx, JSTYPE_NUMBER, rhs))                         \
                 return false;                                                 \
             if (lhs.isString() && rhs.isString()) {                           \
-                JSString* l = lhs.toString(), *r = rhs.toString();            \
+                JSString* l = lhs.toString();                                 \
+                JSString* r = rhs.toString();                                 \
                 int32_t result;                                               \
                 if (!CompareStrings(cx, l, r, &result))                       \
                     return false;                                             \
                 *res = result OP 0;                                           \
             } else {                                                          \
                 double l, r;                                                  \
                 if (!ToNumber(cx, lhs, &l) || !ToNumber(cx, rhs, &r))         \
                     return false;                                             \
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -295,17 +295,18 @@ GetNameOperation(JSContext* cx, Interpre
      * accessing the global object, and the inferred behavior should match
      * the actual behavior even if the id could be found on the scope chain
      * before the global object.
      */
     if (IsGlobalOp(JSOp(*pc)) && !fp->script()->hasPollutedGlobalScope())
         obj = &obj->global();
 
     Shape* shape = nullptr;
-    JSObject* scope = nullptr, *pobj = nullptr;
+    JSObject* scope = nullptr;
+    JSObject* pobj = nullptr;
     if (LookupNameNoGC(cx, name, obj, &scope, &pobj, &shape)) {
         if (FetchNameNoGC(pobj, shape, vp))
             return CheckUninitializedLexical(cx, name, vp);
     }
 
     RootedObject objRoot(cx, obj), scopeRoot(cx), pobjRoot(cx);
     RootedPropertyName nameRoot(cx, name);
     RootedShape shapeRoot(cx);
@@ -1499,24 +1500,26 @@ AddOperation(JSContext* cx, MutableHandl
 
     if (!ToPrimitive(cx, lhs))
         return false;
     if (!ToPrimitive(cx, rhs))
         return false;
 
     bool lIsString, rIsString;
     if ((lIsString = lhs.isString()) | (rIsString = rhs.isString())) {
-        JSString* lstr, *rstr;
+        JSString* lstr;
         if (lIsString) {
             lstr = lhs.toString();
         } else {
             lstr = ToString<CanGC>(cx, lhs);
             if (!lstr)
                 return false;
         }
+
+        JSString* rstr;
         if (rIsString) {
             rstr = rhs.toString();
         } else {
             // Save/restore lstr in case of GC activity under ToString.
             lhs.setString(lstr);
             rstr = ToString<CanGC>(cx, rhs);
             if (!rstr)
                 return false;
@@ -2443,53 +2446,57 @@ CASE(JSOP_ADD)
     if (!AddOperation(cx, lval, rval, res))
         goto error;
     REGS.sp--;
 }
 END_CASE(JSOP_ADD)
 
 CASE(JSOP_SUB)
 {
-    RootedValue& lval = rootValue0, &rval = rootValue1;
+    RootedValue& lval = rootValue0;
+    RootedValue& rval = rootValue1;
     lval = REGS.sp[-2];
     rval = REGS.sp[-1];
     MutableHandleValue res = REGS.stackHandleAt(-2);
     if (!SubOperation(cx, lval, rval, res))
         goto error;
     REGS.sp--;
 }
 END_CASE(JSOP_SUB)
 
 CASE(JSOP_MUL)
 {
-    RootedValue& lval = rootValue0, &rval = rootValue1;
+    RootedValue& lval = rootValue0;
+    RootedValue& rval = rootValue1;
     lval = REGS.sp[-2];
     rval = REGS.sp[-1];
     MutableHandleValue res = REGS.stackHandleAt(-2);
     if (!MulOperation(cx, lval, rval, res))
         goto error;
     REGS.sp--;
 }
 END_CASE(JSOP_MUL)
 
 CASE(JSOP_DIV)
 {
-    RootedValue& lval = rootValue0, &rval = rootValue1;
+    RootedValue& lval = rootValue0;
+    RootedValue& rval = rootValue1;
     lval = REGS.sp[-2];
     rval = REGS.sp[-1];
     MutableHandleValue res = REGS.stackHandleAt(-2);
     if (!DivOperation(cx, lval, rval, res))
         goto error;
     REGS.sp--;
 }
 END_CASE(JSOP_DIV)
 
 CASE(JSOP_MOD)
 {
-    RootedValue& lval = rootValue0, &rval = rootValue1;
+    RootedValue& lval = rootValue0;
+    RootedValue& rval = rootValue1;
     lval = REGS.sp[-2];
     rval = REGS.sp[-1];
     MutableHandleValue res = REGS.stackHandleAt(-2);
     if (!ModOperation(cx, lval, rval, res))
         goto error;
     REGS.sp--;
 }
 END_CASE(JSOP_MOD)
@@ -2596,17 +2603,18 @@ END_CASE(JSOP_DELELEM)
 
 CASE(JSOP_TOID)
 {
     /*
     * Increment or decrement requires us to look up the same property twice,
      * but we need to avoid the observable stringification the second time.
      * There must be an object value below the id, which will not be popped.
      */
-    RootedValue& objval = rootValue0, &idval = rootValue1;
+    RootedValue& objval = rootValue0;
+    RootedValue& idval = rootValue1;
     objval = REGS.sp[-2];
     idval = REGS.sp[-1];
 
     MutableHandleValue res = REGS.stackHandleAt(-1);
     if (!ToIdOperation(cx, script, REGS.pc, objval, idval, res))
         goto error;
 }
 END_CASE(JSOP_TOID)
--- a/js/src/vm/Interpreter.h
+++ b/js/src/vm/Interpreter.h
@@ -265,17 +265,18 @@ UnwindForUncatchableException(JSContext*
 extern bool
 OnUnknownMethod(JSContext* cx, HandleObject obj, Value idval, MutableHandleValue vp);
 
 class TryNoteIter
 {
     const InterpreterRegs& regs;
    RootedScript script; /* TryNoteIter is always stack allocated. */
     uint32_t pcOffset;
-    JSTryNote* tn, *tnEnd;
+    JSTryNote* tn;
+    JSTryNote* tnEnd;
 
     void settle();
 
   public:
     explicit TryNoteIter(JSContext* cx, const InterpreterRegs& regs);
     bool done() const;
     void operator++();
     JSTryNote* operator*() const { return tn; }
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -158,42 +158,51 @@ js::NativeObject::checkShapeConsistency(
 
 void
 js::NativeObject::initializeSlotRange(uint32_t start, uint32_t length)
 {
     /*
      * No bounds check, as this is used when the object's shape does not
      * reflect its allocated slots (updateSlotsForSpan).
      */
-    HeapSlot* fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
+    HeapSlot* fixedStart;
+    HeapSlot* fixedEnd;
+    HeapSlot* slotsStart;
+    HeapSlot* slotsEnd;
     getSlotRangeUnchecked(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
 
     uint32_t offset = start;
     for (HeapSlot* sp = fixedStart; sp < fixedEnd; sp++)
         sp->init(this, HeapSlot::Slot, offset++, UndefinedValue());
     for (HeapSlot* sp = slotsStart; sp < slotsEnd; sp++)
         sp->init(this, HeapSlot::Slot, offset++, UndefinedValue());
 }
 
 void
 js::NativeObject::initSlotRange(uint32_t start, const Value* vector, uint32_t length)
 {
-    HeapSlot* fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
+    HeapSlot* fixedStart;
+    HeapSlot* fixedEnd;
+    HeapSlot* slotsStart;
+    HeapSlot* slotsEnd;
     getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
     for (HeapSlot* sp = fixedStart; sp < fixedEnd; sp++)
         sp->init(this, HeapSlot::Slot, start++, *vector++);
     for (HeapSlot* sp = slotsStart; sp < slotsEnd; sp++)
         sp->init(this, HeapSlot::Slot, start++, *vector++);
 }
 
 void
 js::NativeObject::copySlotRange(uint32_t start, const Value* vector, uint32_t length)
 {
     JS::Zone* zone = this->zone();
-    HeapSlot* fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
+    HeapSlot* fixedStart;
+    HeapSlot* fixedEnd;
+    HeapSlot* slotsStart;
+    HeapSlot* slotsEnd;
     getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
     for (HeapSlot* sp = fixedStart; sp < fixedEnd; sp++)
         sp->set(zone, this, HeapSlot::Slot, start++, *vector++);
     for (HeapSlot* sp = slotsStart; sp < slotsEnd; sp++)
         sp->set(zone, this, HeapSlot::Slot, start++, *vector++);
 }
 
 #ifdef DEBUG
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -501,17 +501,20 @@ class NativeObject : public JSObject
 
   protected:
     friend class GCMarker;
     friend class Shape;
     friend class NewObjectCache;
 
     void invalidateSlotRange(uint32_t start, uint32_t length) {
 #ifdef DEBUG
-        HeapSlot* fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
+        HeapSlot* fixedStart;
+        HeapSlot* fixedEnd;
+        HeapSlot* slotsStart;
+        HeapSlot* slotsEnd;
         getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
         Debug_SetSlotRangeToCrashOnTouch(fixedStart, fixedEnd);
         Debug_SetSlotRangeToCrashOnTouch(slotsStart, slotsEnd);
 #endif /* DEBUG */
     }
 
     void initializeSlotRange(uint32_t start, uint32_t count);
 
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -671,17 +671,19 @@ JSRuntime::resetDefaultLocale()
 }
 
 const char*
 JSRuntime::getDefaultLocale()
 {
     if (defaultLocale)
         return defaultLocale;
 
-    char* locale, *lang, *p;
+    char* locale;
+    char* lang;
+    char* p;
 #ifdef HAVE_SETLOCALE
     locale = setlocale(LC_ALL, nullptr);
 #else
     locale = getenv("LANG");
 #endif
     // convert to a well-formed BCP 47 language tag
     if (!locale || !strcmp(locale, "C"))
         locale = const_cast<char*>("und");
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -785,17 +785,20 @@ NativeObject::putProperty(ExclusiveConte
         bool updateLast = (shape == obj->lastProperty());
         bool accessorShape = getter || setter || (attrs & (JSPROP_GETTER | JSPROP_SETTER));
         shape = obj->replaceWithNewEquivalentShape(cx, shape, nullptr, accessorShape);
         if (!shape)
             return nullptr;
         if (!updateLast && !obj->generateOwnShape(cx))
             return nullptr;
 
-        /* FIXME bug 593129 -- slot allocation and JSObject* this must move out of here! */
+        /*
+         * FIXME bug 593129 -- slot allocation and NativeObject *this must move
+         * out of here!
+         */
         if (slot == SHAPE_INVALID_SLOT && !(attrs & JSPROP_SHARED)) {
             if (!allocSlot(cx, obj, &slot))
                 return nullptr;
         }
 
         if (updateLast)
             shape->base()->adoptUnowned(nbase);
         else
--- a/js/xpconnect/src/Sandbox.cpp
+++ b/js/xpconnect/src/Sandbox.cpp
@@ -104,17 +104,18 @@ SandboxDump(JSContext* cx, unsigned argc
 
     JSAutoByteString utf8str;
     char* cstr = utf8str.encodeUtf8(cx, str);
     if (!cstr)
         return false;
 
 #if defined(XP_MACOSX)
     // Be nice and convert all \r to \n.
-    char* c = cstr, *cEnd = cstr + strlen(cstr);
+    char* c = cstr;
+    char* cEnd = cstr + strlen(cstr);
     while (c < cEnd) {
         if (*c == '\r')
             *c = '\n';
         c++;
     }
 #endif
 #ifdef ANDROID
     __android_log_write(ANDROID_LOG_INFO, "GeckoDump", cstr);
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -3226,17 +3226,18 @@ ReadSourceFromFilename(JSContext* cx, co
     if (rawLen > UINT32_MAX)
         return NS_ERROR_FILE_TOO_BIG;
 
     // Allocate an internal buf the size of the file.
     nsAutoArrayPtr<unsigned char> buf(new unsigned char[rawLen]);
     if (!buf)
         return NS_ERROR_OUT_OF_MEMORY;
 
-    unsigned char* ptr = buf, *end = ptr + rawLen;
+    unsigned char* ptr = buf;
+    unsigned char* end = ptr + rawLen;
     while (ptr < end) {
         uint32_t bytesRead;
         rv = scriptStream->Read(reinterpret_cast<char*>(ptr), end - ptr, &bytesRead);
         if (NS_FAILED(rv))
             return rv;
         MOZ_ASSERT(bytesRead > 0, "stream promised more bytes before EOF");
         ptr += bytesRead;
     }
--- a/js/xpconnect/src/XPCShellImpl.cpp
+++ b/js/xpconnect/src/XPCShellImpl.cpp
@@ -707,17 +707,19 @@ env_setProperty(JSContext* cx, HandleObj
 #endif /* !defined SOLARIS */
     return result.succeed();
 }
 
 static bool
 env_enumerate(JSContext* cx, HandleObject obj)
 {
     static bool reflected;
-    char** evp, *name, *value;
+    char** evp;
+    char* name;
+    char* value;
     RootedString valstr(cx);
     bool ok;
 
     if (reflected)
         return true;
 
     for (evp = (char**)JS_GetPrivate(obj); (name = *evp) != nullptr; evp++) {
         value = strchr(name, '=');