Bug 1488698 - Always use braces for if/for/while statements in js/src/gc, part 4. r=sfink
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 06 Sep 2018 10:38:19 +0200
changeset 491458 5150b70229209c5d020eef82819f5b8d5aebda0e
parent 491457 68e5c74b6e90baf1212846920435dbe0ae17f346
child 491459 bc503aa87b272bdcef98a35c9759e25a15031241
push id 9984
push user ffxbld-merge
push date Mon, 15 Oct 2018 21:07:35 +0000
treeherder mozilla-beta@183d27ea8570
reviewers sfink
bugs 1488698
milestone 64.0a1
Bug 1488698 - Always use braces for if/for/while statements in js/src/gc, part 4. r=sfink
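
For illustration (a minimal sketch, not code from this patch), the rule this series enforces turns brace-less single-statement bodies such as

    // Before: single-statement body without braces.
    if (cond)
        doThing();

into

    // After: braces are required even for one-line bodies.
    if (cond) {
        doThing();
    }

and applies equally to for and while loops, as every hunk below shows.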
js/src/gc/RootMarking.cpp
js/src/gc/Scheduling.h
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/gc/StoreBuffer-inl.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Tracer.cpp
js/src/gc/Tracer.h
js/src/gc/Verifier.cpp
js/src/gc/WeakMap-inl.h
js/src/gc/WeakMap.cpp
js/src/gc/WeakMap.h
js/src/gc/WeakMapPtr.cpp
js/src/gc/Zone-inl.h
js/src/gc/Zone.cpp
js/src/gc/Zone.h
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -98,18 +98,19 @@ TraceExactStackRoots(JSContext* cx, JSTr
     cx->traceStackRoots(trc);
 }
 
 template <typename T>
 static inline void
 TracePersistentRootedList(JSTracer* trc, mozilla::LinkedList<PersistentRooted<void*>>& list,
                          const char* name)
 {
-    for (PersistentRooted<void*>* r : list)
+    for (PersistentRooted<void*>* r : list) {
         TraceStackOrPersistentRoot(trc, reinterpret_cast<PersistentRooted<T>*>(r)->address(), name);
+    }
 }
 
 void
 JSRuntime::tracePersistentRoots(JSTracer* trc)
 {
 #define TRACE_ROOTS(name, type, _) \
     TracePersistentRootedList<type*>(trc, heapRoots.ref()[JS::RootKind::name], "persistent-" #name);
 JS_FOR_EACH_TRACEKIND(TRACE_ROOTS)
@@ -130,18 +131,19 @@ TracePersistentRooted(JSRuntime* rt, JST
     rt->tracePersistentRoots(trc);
 }
 
 template <typename T>
 static void
 FinishPersistentRootedChain(mozilla::LinkedList<PersistentRooted<void*>>& listArg)
 {
     auto& list = reinterpret_cast<mozilla::LinkedList<PersistentRooted<T>>&>(listArg);
-    while (!list.isEmpty())
+    while (!list.isEmpty()) {
         list.getFirst()->reset();
+    }
 }
 
 void
 JSRuntime::finishPersistentRoots()
 {
 #define FINISH_ROOT_LIST(name, type, _)                                 \
     FinishPersistentRootedChain<type*>(heapRoots.ref()[JS::RootKind::name]);
 JS_FOR_EACH_TRACEKIND(FINISH_ROOT_LIST)
@@ -191,72 +193,80 @@ AutoGCRooter::trace(JSTracer* trc)
 
       case Tag::WrapperVector: {
         auto vector = static_cast<AutoWrapperVector*>(this);
         /*
          * We need to use TraceManuallyBarrieredEdge here because we trace
          * wrapper roots in every slice. This is because of some rule-breaking
          * in RemapAllWrappersForObject; see comment there.
          */
-        for (WrapperValue* p = vector->begin(); p < vector->end(); p++)
+        for (WrapperValue* p = vector->begin(); p < vector->end(); p++) {
             TraceManuallyBarrieredEdge(trc, &p->get(), "js::AutoWrapperVector.vector");
+        }
         return;
       }
 
       case Tag::Custom:
         static_cast<JS::CustomAutoRooter*>(this)->trace(trc);
         return;
 
       case Tag::Array: {
         auto array = static_cast<AutoArrayRooter*>(this);
-        if (Value* vp = array->begin())
+        if (Value* vp = array->begin()) {
             TraceRootRange(trc, array->length(), vp, "js::AutoArrayRooter");
+        }
         return;
       }
     }
 
     MOZ_CRASH("Bad AutoGCRooter::Tag");
 }
 
 /* static */ void
 AutoGCRooter::traceAll(JSContext* cx, JSTracer* trc)
 {
-    for (AutoGCRooter* gcr = cx->autoGCRooters_; gcr; gcr = gcr->down)
+    for (AutoGCRooter* gcr = cx->autoGCRooters_; gcr; gcr = gcr->down) {
         gcr->trace(trc);
+    }
 }
 
 /* static */ void
 AutoGCRooter::traceAllWrappers(JSContext* cx, JSTracer* trc)
 {
     for (AutoGCRooter* gcr = cx->autoGCRooters_; gcr; gcr = gcr->down) {
-        if (gcr->tag_ == Tag::WrapperVector || gcr->tag_ == Tag::Wrapper)
+        if (gcr->tag_ == Tag::WrapperVector || gcr->tag_ == Tag::Wrapper) {
             gcr->trace(trc);
+        }
     }
 }
 
 void
 StackShape::trace(JSTracer* trc)
 {
-    if (base)
+    if (base) {
         TraceRoot(trc, &base, "StackShape base");
+    }
 
     TraceRoot(trc, (jsid*) &propid, "StackShape id");
 
-    if ((attrs & JSPROP_GETTER) && rawGetter)
+    if ((attrs & JSPROP_GETTER) && rawGetter) {
         TraceRoot(trc, (JSObject**)&rawGetter, "StackShape getter");
+    }
 
-    if ((attrs & JSPROP_SETTER) && rawSetter)
+    if ((attrs & JSPROP_SETTER) && rawSetter) {
         TraceRoot(trc, (JSObject**)&rawSetter, "StackShape setter");
+    }
 }
 
 void
 PropertyDescriptor::trace(JSTracer* trc)
 {
-    if (obj)
+    if (obj) {
         TraceRoot(trc, &obj, "Descriptor::obj");
+    }
     TraceRoot(trc, &value, "Descriptor::value");
     if ((attrs & JSPROP_GETTER) && getter) {
         JSObject* tmp = JS_FUNC_TO_DATA_PTR(JSObject*, getter);
         TraceRoot(trc, &tmp, "Descriptor::get");
         getter = JS_DATA_TO_FUNC_PTR(JSGetterOp, tmp);
     }
     if ((attrs & JSPROP_SETTER) && setter) {
         JSObject* tmp = JS_FUNC_TO_DATA_PTR(JSObject*, setter);
@@ -267,22 +277,24 @@ PropertyDescriptor::trace(JSTracer* trc)
 
 void
 js::gc::GCRuntime::traceRuntimeForMajorGC(JSTracer* trc, AutoGCSession& session)
 {
     MOZ_ASSERT(!TlsContext.get()->suppressGC);
 
     // FinishRoots will have asserted that every root that we do not expect
     // is gone, so we can simply skip traceRuntime here.
-    if (rt->isBeingDestroyed())
+    if (rt->isBeingDestroyed()) {
         return;
+    }
 
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
-    if (atomsZone->isCollecting())
+    if (atomsZone->isCollecting()) {
         traceRuntimeAtoms(trc, session.checkAtomsAccess());
+    }
     traceKeptAtoms(trc);
     Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(trc);
     traceRuntimeCommon(trc, MarkRuntime);
 }
 
 void
 js::gc::GCRuntime::traceRuntimeForMinorGC(JSTracer* trc, AutoGCSession& session)
 {
@@ -337,18 +349,19 @@ js::gc::GCRuntime::traceRuntimeAtoms(JST
 
 void
 js::gc::GCRuntime::traceKeptAtoms(JSTracer* trc)
 {
     // We don't have exact rooting information for atoms while parsing. When
     // this is happening we set a flag on the zone and trace all atoms in the
     // zone's cache.
     for (GCZonesIter zone(trc->runtime()); !zone.done(); zone.next()) {
-        if (zone->hasKeptAtoms())
+        if (zone->hasKeptAtoms()) {
             zone->traceAtomCache(trc);
+        }
     }
 }
 
 void
 js::gc::GCRuntime::traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrMark)
 {
     {
         gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_STACK);
@@ -380,18 +393,19 @@ js::gc::GCRuntime::traceRuntimeCommon(JS
     // Trace the shared Intl data.
     rt->traceSharedIntlData(trc);
 
     // Trace the JSContext.
     rt->mainContextFromOwnThread()->trace(trc);
 
     // Trace all realm roots, but not the realm itself; it is traced via the
     // parent pointer if traceRoots actually traces anything.
-    for (RealmsIter r(rt); !r.done(); r.next())
+    for (RealmsIter r(rt); !r.done(); r.next()) {
         r->traceRoots(trc, traceOrMark);
+    }
 
     // Trace helper thread roots.
     HelperThreadState().trace(trc);
 
     // Trace the embedding's black and gray roots.
     if (!JS::RuntimeHeapIsMinorCollecting()) {
         gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_EMBEDDING);
 
@@ -407,18 +421,19 @@ js::gc::GCRuntime::traceRuntimeCommon(JS
          */
         for (size_t i = 0; i < blackRootTracers.ref().length(); i++) {
             const Callback<JSTraceDataOp>& e = blackRootTracers.ref()[i];
             (*e.op)(trc, e.data);
         }
 
         /* During GC, we don't trace gray roots at this stage. */
         if (JSTraceDataOp op = grayRootTracer.op) {
-            if (traceOrMark == TraceRuntime)
+            if (traceOrMark == TraceRuntime) {
                 (*op)(trc, grayRootTracer.data);
+            }
         }
     }
 }
 
 #ifdef DEBUG
 class AssertNoRootsTracer : public JS::CallbackTracer
 {
     void onChild(const JS::GCCellPtr& thing) override {
@@ -440,18 +455,19 @@ js::gc::GCRuntime::finishRoots()
     rt->finishAtoms();
 
     rootsHash.ref().clear();
 
     rt->finishPersistentRoots();
 
     rt->finishSelfHosting();
 
-    for (RealmsIter r(rt); !r.done(); r.next())
+    for (RealmsIter r(rt); !r.done(); r.next()) {
         r->finishRoots();
+    }
 
 #ifdef DEBUG
     // The nsWrapperCache may not be empty before our shutdown GC, so we have
     // to skip that table when verifying that we are fully unrooted.
     auto prior = grayRootTracer;
     grayRootTracer = Callback<JSTraceDataOp>(nullptr, nullptr);
 
     AssertNoRootsTracer trc(rt, TraceWeakMapKeysValues);
@@ -511,22 +527,24 @@ js::IsBufferGrayRootsTracer(JSTracer* tr
 #endif
 
 void
 js::gc::GCRuntime::bufferGrayRoots()
 {
     // Precondition: the state has been reset to "unused" after the last GC
     //               and the zone's buffers have been cleared.
     MOZ_ASSERT(grayBufferState == GrayBufferState::Unused);
-    for (GCZonesIter zone(rt); !zone.done(); zone.next())
+    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
         MOZ_ASSERT(zone->gcGrayRoots().empty());
+    }
 
     BufferGrayRootsTracer grayBufferer(rt);
-    if (JSTraceDataOp op = grayRootTracer.op)
+    if (JSTraceDataOp op = grayRootTracer.op) {
         (*op)(&grayBufferer, grayRootTracer.data);
+    }
 
     // Propagate the failure flag from the marker to the runtime.
     if (grayBufferer.failed()) {
       grayBufferState = GrayBufferState::Failed;
       resetBufferedGrayRoots();
     } else {
       grayBufferState = GrayBufferState::Okay;
     }
@@ -548,30 +566,32 @@ BufferGrayRootsTracer::bufferRoot(T* thi
     Zone* zone = tenured->zoneFromAnyThread();
     if (zone->isCollectingFromAnyThread()) {
         // See the comment on SetMaybeAliveFlag to see why we only do this for
         // objects and scripts. We rely on gray root buffering for this to work,
         // but we only need to worry about uncollected dead compartments during
         // incremental GCs (when we do gray root buffering).
         SetMaybeAliveFlag(thing);
 
-        if (!zone->gcGrayRoots().append(tenured))
+        if (!zone->gcGrayRoots().append(tenured)) {
             bufferingGrayRootsFailed = true;
+        }
     }
 }
 
 void
 GCRuntime::markBufferedGrayRoots(JS::Zone* zone)
 {
     MOZ_ASSERT(grayBufferState == GrayBufferState::Okay);
     MOZ_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting());
 
     auto& roots = zone->gcGrayRoots();
-    if (roots.empty())
+    if (roots.empty()) {
         return;
+    }
 
     for (size_t i = 0; i < roots.length(); i++) {
         Cell* cell = roots[i];
 
         // Bug 1203273: Check for bad pointers on OSX and output diagnostics.
 #if defined(XP_DARWIN) && defined(MOZ_DIAGNOSTIC_ASSERT_ENABLED)
         auto addr = uintptr_t(cell);
         if (addr < ChunkSize || addr % CellAlignBytes != 0) {
@@ -587,18 +607,19 @@ GCRuntime::markBufferedGrayRoots(JS::Zon
     }
 }
 
 void
 GCRuntime::resetBufferedGrayRoots() const
 {
     MOZ_ASSERT(grayBufferState != GrayBufferState::Okay,
                "Do not clear the gray buffers unless we are Failed or becoming Unused");
-    for (GCZonesIter zone(rt); !zone.done(); zone.next())
+    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
         zone->gcGrayRoots().clearAndFree();
+    }
 }
 
 JS_PUBLIC_API(void)
 JS::AddPersistentRoot(JS::RootingContext* cx, RootKind kind, PersistentRooted<void*>* root)
 {
     static_cast<JSContext*>(cx)->runtime()->heapRoots.ref()[kind].insertBack(root);
 }
 
--- a/js/src/gc/Scheduling.h
+++ b/js/src/gc/Scheduling.h
@@ -531,21 +531,23 @@ class MemoryCounter
 
     void update(size_t bytes) {
         bytes_ += bytes;
     }
 
     void adopt(MemoryCounter& other);
 
     TriggerKind shouldTriggerGC(const GCSchedulingTunables& tunables) const {
-        if (MOZ_LIKELY(bytes_ < maxBytes_ * tunables.allocThresholdFactor()))
+        if (MOZ_LIKELY(bytes_ < maxBytes_ * tunables.allocThresholdFactor())) {
             return NoTrigger;
+        }
 
-        if (bytes_ < maxBytes_)
+        if (bytes_ < maxBytes_) {
             return IncrementalTrigger;
+        }
 
         return NonIncrementalTrigger;
     }
 
     bool shouldResetIncrementalGC(const GCSchedulingTunables& tunables) const {
         return bytes_ > maxBytes_ * tunables.allocThresholdFactorAvoidInterrupt();
     }
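
A worked example of MemoryCounter::shouldTriggerGC above, with hypothetical numbers: if maxBytes_ is 100 MB and allocThresholdFactor() returns 0.9, then counts below 90 MB yield NoTrigger, counts from 90 MB up to (but not including) 100 MB yield IncrementalTrigger, and 100 MB or more yields NonIncrementalTrigger.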
 
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -56,20 +56,21 @@ MajorGCPhaseKinds()
     return mozilla::MakeEnumeratedRange(PhaseKind::GC_BEGIN,
                                         PhaseKind(size_t(PhaseKind::GC_END) + 1));
 }
 
 const char*
 js::gcstats::ExplainInvocationKind(JSGCInvocationKind gckind)
 {
     MOZ_ASSERT(gckind == GC_NORMAL || gckind == GC_SHRINK);
-    if (gckind == GC_NORMAL)
+    if (gckind == GC_NORMAL) {
          return "Normal";
-    else
+    } else {
          return "Shrinking";
+    }
 }
 
 JS_PUBLIC_API(const char*)
 JS::gcreason::ExplainReason(JS::gcreason::Reason reason)
 {
     switch (reason) {
 #define SWITCH_REASON(name)                         \
         case JS::gcreason::name:                    \
@@ -98,29 +99,31 @@ js::gcstats::ExplainAbortReason(gc::Abor
 }
 
 static FILE*
 MaybeOpenFileFromEnv(const char* env)
 {
     FILE *file;
     const char* value = getenv(env);
 
-    if (!value)
+    if (!value) {
         return nullptr;
+    }
 
     if (strcmp(value, "none") == 0) {
         file = nullptr;
     } else if (strcmp(value, "stdout") == 0) {
         file = stdout;
     } else if (strcmp(value, "stderr") == 0) {
         file = stderr;
     } else {
         file = fopen(value, "a");
-        if (!file)
+        if (!file) {
             MOZ_CRASH("Failed to open log file.");
+        }
     }
 
     return file;
 }
 
 struct PhaseKindInfo
 {
     Phase firstPhase;
@@ -163,41 +166,45 @@ Statistics::currentPhase() const
 PhaseKind
 Statistics::currentPhaseKind() const
 {
     // Public API to get the current phase kind, suppressing the synthetic
     // PhaseKind::MUTATOR phase.
 
     Phase phase = currentPhase();
     MOZ_ASSERT_IF(phase == Phase::MUTATOR, phaseStack.length() == 1);
-    if (phase == Phase::NONE || phase == Phase::MUTATOR)
+    if (phase == Phase::NONE || phase == Phase::MUTATOR) {
         return PhaseKind::NONE;
+    }
 
     return phases[phase].phaseKind;
 }
 
 Phase
 Statistics::lookupChildPhase(PhaseKind phaseKind) const
 {
-    if (phaseKind == PhaseKind::IMPLICIT_SUSPENSION)
+    if (phaseKind == PhaseKind::IMPLICIT_SUSPENSION) {
         return Phase::IMPLICIT_SUSPENSION;
-    if (phaseKind == PhaseKind::EXPLICIT_SUSPENSION)
+    }
+    if (phaseKind == PhaseKind::EXPLICIT_SUSPENSION) {
         return Phase::EXPLICIT_SUSPENSION;
+    }
 
     MOZ_ASSERT(phaseKind < PhaseKind::LIMIT);
 
     // Search all expanded phases that correspond to the required
     // phase to find the one whose parent is the current expanded phase.
     Phase phase;
     for (phase = phaseKinds[phaseKind].firstPhase;
          phase != Phase::NONE;
          phase = phases[phase].nextWithPhaseKind)
     {
-        if (phases[phase].parent == currentPhase())
+        if (phases[phase].parent == currentPhase()) {
             break;
+        }
     }
 
     MOZ_RELEASE_ASSERT(phase != Phase::NONE,
                        "Requested child phase not found under current phase");
 
     return phase;
 }
 
@@ -208,21 +215,23 @@ AllPhases()
 }
 
 void
 Statistics::gcDuration(TimeDuration* total, TimeDuration* maxPause) const
 {
     *total = *maxPause = 0;
     for (auto& slice : slices_) {
         *total += slice.duration();
-        if (slice.duration() > *maxPause)
+        if (slice.duration() > *maxPause) {
             *maxPause = slice.duration();
+        }
     }
-    if (*maxPause > maxPauseInInterval)
+    if (*maxPause > maxPauseInInterval) {
         maxPauseInInterval = *maxPause;
+    }
 }
 
 void
 Statistics::sccDurations(TimeDuration* total, TimeDuration* maxPause) const
 {
     *total = *maxPause = 0;
     for (size_t i = 0; i < sccTimes.length(); i++) {
         *total += sccTimes[i];
@@ -234,33 +243,37 @@ typedef Vector<UniqueChars, 8, SystemAll
 
 static UniqueChars
 Join(const FragmentVector& fragments, const char* separator = "")
 {
     const size_t separatorLength = strlen(separator);
     size_t length = 0;
     for (size_t i = 0; i < fragments.length(); ++i) {
         length += fragments[i] ? strlen(fragments[i].get()) : 0;
-        if (i < (fragments.length() - 1))
+        if (i < (fragments.length() - 1)) {
             length += separatorLength;
+        }
     }
 
     char* joined = js_pod_malloc<char>(length + 1);
-    if (!joined)
+    if (!joined) {
         return UniqueChars();
+    }
 
     joined[length] = '\0';
     char* cursor = joined;
     for (size_t i = 0; i < fragments.length(); ++i) {
-        if (fragments[i])
+        if (fragments[i]) {
             strcpy(cursor, fragments[i].get());
+        }
         cursor += fragments[i] ? strlen(fragments[i].get()) : 0;
         if (i < (fragments.length() - 1)) {
-            if (separatorLength)
+            if (separatorLength) {
                 strcpy(cursor, separator);
+            }
             cursor += separatorLength;
         }
     }
 
     return UniqueChars(joined);
 }
 
 static TimeDuration
@@ -275,18 +288,19 @@ SumChildTimes(Phase phase, const Statist
     }
     return total;
 }
 
 UniqueChars
 Statistics::formatCompactSliceMessage() const
 {
     // Skip if we OOM'ed.
-    if (slices_.length() == 0)
+    if (slices_.length() == 0) {
         return UniqueChars(nullptr);
+    }
 
     const size_t index = slices_.length() - 1;
     const SliceData& slice = slices_.back();
 
     char budgetDescription[200];
     slice.budget.describe(budgetDescription, sizeof(budgetDescription) - 1);
 
     const char* format =
@@ -308,57 +322,61 @@ Statistics::formatCompactSliceMessage() 
 }
 
 UniqueChars
 Statistics::formatCompactSummaryMessage() const
 {
     const double bytesPerMiB = 1024 * 1024;
 
     FragmentVector fragments;
-    if (!fragments.append(DuplicateString("Summary - ")))
+    if (!fragments.append(DuplicateString("Summary - "))) {
         return UniqueChars(nullptr);
+    }
 
     TimeDuration total, longest;
     gcDuration(&total, &longest);
 
     const double mmu20 = computeMMU(TimeDuration::FromMilliseconds(20));
     const double mmu50 = computeMMU(TimeDuration::FromMilliseconds(50));
 
     char buffer[1024];
     if (!nonincremental()) {
         SprintfLiteral(buffer,
                        "Max Pause: %.3fms; MMU 20ms: %.1f%%; MMU 50ms: %.1f%%; Total: %.3fms; ",
                        t(longest), mmu20 * 100., mmu50 * 100., t(total));
     } else {
         SprintfLiteral(buffer, "Non-Incremental: %.3fms (%s); ",
                        t(total), ExplainAbortReason(nonincrementalReason_));
     }
-    if (!fragments.append(DuplicateString(buffer)))
+    if (!fragments.append(DuplicateString(buffer))) {
         return UniqueChars(nullptr);
+    }
 
     SprintfLiteral(buffer,
                    "Zones: %d of %d (-%d); Compartments: %d of %d (-%d); HeapSize: %.3f MiB; " \
                    "HeapChange (abs): %+d (%d); ",
                    zoneStats.collectedZoneCount, zoneStats.zoneCount, zoneStats.sweptZoneCount,
                    zoneStats.collectedCompartmentCount, zoneStats.compartmentCount,
                    zoneStats.sweptCompartmentCount,
                    double(preBytes) / bytesPerMiB,
                    counts[STAT_NEW_CHUNK] - counts[STAT_DESTROY_CHUNK],
                    counts[STAT_NEW_CHUNK] + counts[STAT_DESTROY_CHUNK]);
-    if (!fragments.append(DuplicateString(buffer)))
+    if (!fragments.append(DuplicateString(buffer))) {
         return UniqueChars(nullptr);
+    }
 
     MOZ_ASSERT_IF(counts[STAT_ARENA_RELOCATED], gckind == GC_SHRINK);
     if (gckind == GC_SHRINK) {
         SprintfLiteral(buffer,
                        "Kind: %s; Relocated: %.3f MiB; ",
                        ExplainInvocationKind(gckind),
                        double(ArenaSize * counts[STAT_ARENA_RELOCATED]) / bytesPerMiB);
-        if (!fragments.append(DuplicateString(buffer)))
+        if (!fragments.append(DuplicateString(buffer))) {
             return UniqueChars(nullptr);
+        }
     }
 
     return Join(fragments);
 }
 
 UniqueChars
 Statistics::formatCompactSlicePhaseTimes(const PhaseTimeTable& phaseTimes) const
 {
@@ -369,50 +387,57 @@ Statistics::formatCompactSlicePhaseTimes
     for (auto phase : AllPhases()) {
         DebugOnly<uint8_t> level = phases[phase].depth;
         MOZ_ASSERT(level < 4);
 
         TimeDuration ownTime = phaseTimes[phase];
         TimeDuration childTime = SumChildTimes(phase, phaseTimes);
         if (ownTime > MaxUnaccountedTime) {
             SprintfLiteral(buffer, "%s: %.3fms", phases[phase].name, t(ownTime));
-            if (!fragments.append(DuplicateString(buffer)))
+            if (!fragments.append(DuplicateString(buffer))) {
                 return UniqueChars(nullptr);
+            }
 
             if (childTime && (ownTime - childTime) > MaxUnaccountedTime) {
                 MOZ_ASSERT(level < 3);
                 SprintfLiteral(buffer, "%s: %.3fms", "Other", t(ownTime - childTime));
-                if (!fragments.append(DuplicateString(buffer)))
+                if (!fragments.append(DuplicateString(buffer))) {
                     return UniqueChars(nullptr);
+                }
             }
         }
     }
     return Join(fragments, ", ");
 }
 
 UniqueChars
 Statistics::formatDetailedMessage() const
 {
     FragmentVector fragments;
 
-    if (!fragments.append(formatDetailedDescription()))
+    if (!fragments.append(formatDetailedDescription())) {
         return UniqueChars(nullptr);
+    }
 
     if (!slices_.empty()) {
         for (unsigned i = 0; i < slices_.length(); i++) {
-            if (!fragments.append(formatDetailedSliceDescription(i, slices_[i])))
+            if (!fragments.append(formatDetailedSliceDescription(i, slices_[i]))) {
                 return UniqueChars(nullptr);
-            if (!fragments.append(formatDetailedPhaseTimes(slices_[i].phaseTimes)))
+            }
+            if (!fragments.append(formatDetailedPhaseTimes(slices_[i].phaseTimes))) {
                 return UniqueChars(nullptr);
+            }
         }
     }
-    if (!fragments.append(formatDetailedTotals()))
+    if (!fragments.append(formatDetailedTotals())) {
         return UniqueChars(nullptr);
-    if (!fragments.append(formatDetailedPhaseTimes(phaseTimes)))
+    }
+    if (!fragments.append(formatDetailedPhaseTimes(phaseTimes))) {
         return UniqueChars(nullptr);
+    }
 
     return Join(fragments);
 }
 
 UniqueChars
 Statistics::formatDetailedDescription() const
 {
     const double bytesPerMiB = 1024 * 1024;
@@ -511,24 +536,26 @@ Statistics::formatDetailedPhaseTimes(con
     char buffer[128];
     for (auto phase : AllPhases()) {
         uint8_t level = phases[phase].depth;
         TimeDuration ownTime = phaseTimes[phase];
         TimeDuration childTime = SumChildTimes(phase, phaseTimes);
         if (IncludePhase(ownTime)) {
             SprintfLiteral(buffer, "      %*s%s: %.3fms\n",
                            level * 2, "", phases[phase].name, t(ownTime));
-            if (!fragments.append(DuplicateString(buffer)))
+            if (!fragments.append(DuplicateString(buffer))) {
                 return UniqueChars(nullptr);
+            }
 
             if (childTime && (ownTime - childTime) > MaxUnaccountedChildTime) {
                 SprintfLiteral(buffer, "      %*s%s: %.3fms\n",
                                (level + 1) * 2, "", "Other", t(ownTime - childTime));
-                if (!fragments.append(DuplicateString(buffer)))
+                if (!fragments.append(DuplicateString(buffer))) {
                     return UniqueChars(nullptr);
+                }
             }
         }
     }
     return Join(fragments);
 }
 
 UniqueChars
 Statistics::formatDetailedTotals() const
@@ -563,30 +590,32 @@ Statistics::formatJsonSlice(size_t slice
 
     json.endObject();
 }
 
 UniqueChars
 Statistics::renderJsonSlice(size_t sliceNum) const
 {
     Sprinter printer(nullptr, false);
-    if (!printer.init())
+    if (!printer.init()) {
         return UniqueChars(nullptr);
+    }
     JSONPrinter json(printer);
 
     formatJsonSlice(sliceNum, json);
     return printer.release();
 }
 
 UniqueChars
 Statistics::renderNurseryJson(JSRuntime* rt) const
 {
     Sprinter printer(nullptr, false);
-    if (!printer.init())
+    if (!printer.init()) {
         return UniqueChars(nullptr);
+    }
     JSONPrinter json(printer);
     rt->gc.nursery().renderProfileJSON(json);
     return printer.release();
 }
 
 #ifdef DEBUG
 void
 Statistics::writeLogMessage(const char* fmt, ...)
@@ -611,32 +640,35 @@ Statistics::renderJsonMessage(uint64_t t
     /*
      * The format of the JSON message is specified by the GCMajorMarkerPayload
      * type in perf.html
      * https://github.com/devtools-html/perf.html/blob/master/src/types/markers.js#L62
      *
      * All the properties listed here are created within the timings property
      * of the GCMajor marker.
      */
-    if (aborted)
+    if (aborted) {
         return DuplicateString("{status:\"aborted\"}"); // May return nullptr
+    }
 
     Sprinter printer(nullptr, false);
-    if (!printer.init())
+    if (!printer.init()) {
         return UniqueChars(nullptr);
+    }
     JSONPrinter json(printer);
 
     json.beginObject();
     json.property("status", "completed"); // JSON Key #1
     formatJsonDescription(timestamp, json); // #2-22
 
     if (includeSlices) {
         json.beginListProperty("slices_list"); // #23
-        for (unsigned i = 0; i < slices_.length(); i++)
+        for (unsigned i = 0; i < slices_.length(); i++) {
             formatJsonSlice(i, json);
+        }
         json.endList();
     }
 
     json.beginObjectProperty("totals"); // #24
     formatJsonPhaseTimes(phaseTimes, json);
     json.endObject();
 
     json.endObject();
@@ -669,39 +701,43 @@ Statistics::formatJsonDescription(uint64
     // We might be able to omit reason if perf.html was able to retrieve it
     // from the first slice.  But it doesn't do this yet.
     json.property("reason", ExplainReason(slices_[0].reason)); // #5
     json.property("zones_collected", zoneStats.collectedZoneCount); // #6
     json.property("total_zones", zoneStats.zoneCount); // #7
     json.property("total_compartments", zoneStats.compartmentCount); // #8
     json.property("minor_gcs", getCount(STAT_MINOR_GC)); // #9
     uint32_t storebufferOverflows = getCount(STAT_STOREBUFFER_OVERFLOW);
-    if (storebufferOverflows)
+    if (storebufferOverflows) {
         json.property("store_buffer_overflows", storebufferOverflows); // #10
+    }
     json.property("slices", slices_.length()); // #11
 
     const double mmu20 = computeMMU(TimeDuration::FromMilliseconds(20));
     const double mmu50 = computeMMU(TimeDuration::FromMilliseconds(50));
     json.property("mmu_20ms", int(mmu20 * 100)); // #12
     json.property("mmu_50ms", int(mmu50 * 100)); // #13
 
     TimeDuration sccTotal, sccLongest;
     sccDurations(&sccTotal, &sccLongest);
     json.property("scc_sweep_total", sccTotal, JSONPrinter::MILLISECONDS); // #14
     json.property("scc_sweep_max_pause", sccLongest, JSONPrinter::MILLISECONDS); // #15
 
-    if (nonincrementalReason_ != AbortReason::None)
+    if (nonincrementalReason_ != AbortReason::None) {
         json.property("nonincremental_reason", ExplainAbortReason(nonincrementalReason_)); // #16
+    }
     json.property("allocated_bytes", preBytes); // #17
     uint32_t addedChunks = getCount(STAT_NEW_CHUNK);
-    if (addedChunks)
+    if (addedChunks) {
         json.property("added_chunks", addedChunks); // #18
+    }
     uint32_t removedChunks = getCount(STAT_DESTROY_CHUNK);
-    if (removedChunks)
+    if (removedChunks) {
         json.property("removed_chunks", removedChunks); // #19
+    }
     json.property("major_gc_number", startingMajorGCNumber); // #20
     json.property("minor_gc_number", startingMinorGCNumber); // #21
     json.property("slice_number", startingSliceNumber); // #22
 }
 
 void
 Statistics::formatJsonSliceDescription(unsigned i, const SliceData& slice, JSONPrinter& json) const
 {
@@ -727,28 +763,30 @@ Statistics::formatJsonSliceDescription(u
     json.property("final_state", gc::StateName(slice.finalState)); // #5
     json.property("budget", budgetDescription); // #6
     json.property("major_gc_number", startingMajorGCNumber); // #7
     if (thresholdTriggered) {
         json.floatProperty("trigger_amount", triggerAmount, 0); // #8
         json.floatProperty("trigger_threshold", triggerThreshold, 0); // #9
     }
     int64_t numFaults = slice.endFaults - slice.startFaults;
-    if (numFaults != 0)
+    if (numFaults != 0) {
         json.property("page_faults", numFaults); // #10
+    }
     json.property("start_timestamp", slice.start - originTime, JSONPrinter::SECONDS); // #11
 }
 
 void
 Statistics::formatJsonPhaseTimes(const PhaseTimeTable& phaseTimes, JSONPrinter& json) const
 {
     for (auto phase : AllPhases()) {
         TimeDuration ownTime = phaseTimes[phase];
-        if (!ownTime.IsZero())
+        if (!ownTime.IsZero()) {
             json.property(phases[phase].path, ownTime, JSONPrinter::MILLISECONDS);
+        }
     }
 }
 
 Statistics::Statistics(JSRuntime* rt)
   : runtime(rt),
     gcTimerFile(nullptr),
     gcDebugFile(nullptr),
     nonincrementalReason_(gc::AbortReason::None),
@@ -762,18 +800,19 @@ Statistics::Statistics(JSRuntime* rt)
     startingSliceNumber(0),
     maxPauseInInterval(0),
     sliceCallback(nullptr),
     nurseryCollectionCallback(nullptr),
     aborted(false),
     enableProfiling_(false),
     sliceCount_(0)
 {
-    for (auto& count : counts)
+    for (auto& count : counts) {
         count = 0;
+    }
 
 #ifdef DEBUG
     for (const auto& duration : totalTimes_) {
 #if defined(XP_WIN) || defined(XP_MACOSX) || (defined(XP_UNIX) && !defined(__clang__))
         // build-linux64-asan/debug and static-analysis-linux64-st-an/debug
         // currently use an STL that lacks std::is_trivially_constructible.
         // This #ifdef probably isn't as precise as it could be, but given
         // |totalTimes_| contains |TimeDuration| defined platform-independently
@@ -805,20 +844,22 @@ Statistics::Statistics(JSRuntime* rt)
         }
         enableProfiling_ = true;
         profileThreshold_ = TimeDuration::FromMilliseconds(atoi(env));
     }
 }
 
 Statistics::~Statistics()
 {
-    if (gcTimerFile && gcTimerFile != stdout && gcTimerFile != stderr)
+    if (gcTimerFile && gcTimerFile != stdout && gcTimerFile != stderr) {
         fclose(gcTimerFile);
-    if (gcDebugFile && gcDebugFile != stdout && gcDebugFile != stderr)
+    }
+    if (gcDebugFile && gcDebugFile != stdout && gcDebugFile != stderr) {
         fclose(gcDebugFile);
+    }
 }
 
 /* static */ bool
 Statistics::initialize()
 {
 #ifdef DEBUG
     // Sanity check generated tables.
     for (auto i : AllPhases()) {
@@ -933,27 +974,29 @@ LongestPhaseSelfTimeInMajorGC(const Stat
     for (auto i : AllPhases()) {
         Phase parent = phases[i].parent;
         if (parent != Phase::NONE) {
             bool ok = CheckSelfTime(parent, i, times, selfTimes, times[i]);
 
             // This happens very occasionally in release builds. Skip collecting
             // longest phase telemetry if it does.
             MOZ_ASSERT(ok, "Inconsistent time data; see bug 1400153");
-            if (!ok)
+            if (!ok) {
                 return PhaseKind::NONE;
+            }
 
             selfTimes[parent] -= times[i];
         }
     }
 
     // Sum expanded phases corresponding to the same phase.
     EnumeratedArray<PhaseKind, PhaseKind::LIMIT, TimeDuration> phaseTimes;
-    for (auto i : AllPhaseKinds())
+    for (auto i : AllPhaseKinds()) {
         phaseTimes[i] = SumPhase(i, selfTimes);
+    }
 
     // Loop over this table to find the longest phase.
     TimeDuration longestTime = 0;
     PhaseKind longestPhase = PhaseKind::NONE;
     for (auto i : MajorGCPhaseKinds()) {
         if (phaseTimes[i] > longestTime) {
             longestTime = phaseTimes[i];
             longestPhase = i;
@@ -1005,18 +1048,19 @@ Statistics::endGC()
     runtime->addTelemetry(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[Phase::SWEEP]));
     if (runtime->gc.isCompactingGc()) {
         runtime->addTelemetry(JS_TELEMETRY_GC_COMPACT_MS,
                               t(phaseTimes[Phase::COMPACT]));
     }
     runtime->addTelemetry(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(markRootsTotal));
     runtime->addTelemetry(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[Phase::SWEEP_MARK_GRAY]));
     runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL, nonincremental());
-    if (nonincremental())
+    if (nonincremental()) {
         runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL_REASON, uint32_t(nonincrementalReason_));
+    }
     runtime->addTelemetry(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCAllowed());
     runtime->addTelemetry(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
     runtime->addTelemetry(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
 
     TimeDuration total, longest;
     gcDuration(&total, &longest);
 
     runtime->addTelemetry(JS_TELEMETRY_GC_MS, t(total));
@@ -1056,18 +1100,19 @@ Statistics::beginSlice(const ZoneGCStats
                        SliceBudget budget, JS::gcreason::Reason reason)
 {
     MOZ_ASSERT(phaseStack.empty() ||
                (phaseStack.length() == 1 && phaseStack[0] == Phase::MUTATOR));
 
     this->zoneStats = zoneStats;
 
     bool first = !runtime->gc.isIncrementalGCInProgress();
-    if (first)
+    if (first) {
         beginGC(gckind);
+    }
 
     if (!slices_.emplaceBack(budget,
                              reason,
                              ReallyNow(),
                              GetPageFaultCount(),
                              runtime->gc.state()))
     {
         // If we are OOM, set a flag to indicate we have missing slice data.
@@ -1077,18 +1122,19 @@ Statistics::beginSlice(const ZoneGCStats
 
     runtime->addTelemetry(JS_TELEMETRY_GC_REASON, reason);
 
     // Slice callbacks should only fire for the outermost level.
     bool wasFullGC = zoneStats.isFullCollection();
     if (sliceCallback) {
         JSContext* cx = runtime->mainContextFromOwnThread();
         JS::GCDescription desc(!wasFullGC, false, gckind, reason);
-        if (first)
+        if (first) {
             (*sliceCallback)(cx, JS::GC_CYCLE_BEGIN, desc);
+        }
         (*sliceCallback)(cx, JS::GC_SLICE_BEGIN, desc);
     }
 
     writeLogMessage("begin slice");
 }
 
 void
 Statistics::endSlice()
@@ -1101,24 +1147,26 @@ Statistics::endSlice()
         slice.end = ReallyNow();
         slice.endFaults = GetPageFaultCount();
         slice.finalState = runtime->gc.state();
 
         writeLogMessage("end slice");
         TimeDuration sliceTime = slice.end - slice.start;
         runtime->addTelemetry(JS_TELEMETRY_GC_SLICE_MS, t(sliceTime));
         runtime->addTelemetry(JS_TELEMETRY_GC_RESET, slice.wasReset());
-        if (slice.wasReset())
+        if (slice.wasReset()) {
             runtime->addTelemetry(JS_TELEMETRY_GC_RESET_REASON, uint32_t(slice.resetReason));
+        }
 
         if (slice.budget.isTimeBudget()) {
             int64_t budget_ms = slice.budget.timeBudget.budget;
             runtime->addTelemetry(JS_TELEMETRY_GC_BUDGET_MS, budget_ms);
-            if (budget_ms == runtime->gc.defaultSliceBudget())
+            if (budget_ms == runtime->gc.defaultSliceBudget()) {
                 runtime->addTelemetry(JS_TELEMETRY_GC_ANIMATION_MS, t(sliceTime));
+            }
 
             // Record any phase that goes 1.5 times or 5ms over its budget.
             double longSliceThreshold = std::min(1.5 * budget_ms, budget_ms + 5.0);
             if (sliceTime.ToMilliseconds() > longSliceThreshold) {
                 PhaseKind longest = LongestPhaseSelfTimeInMajorGC(slice.phaseTimes);
                 reportLongestPhaseInMajorGC(longest, JS_TELEMETRY_GC_SLOW_PHASE);
 
                 // If the longest phase was waiting for parallel tasks then
@@ -1126,61 +1174,69 @@ Statistics::endSlice()
                 if (longest == PhaseKind::JOIN_PARALLEL_TASKS) {
                     PhaseKind longestParallel = LongestPhaseSelfTimeInMajorGC(slice.parallelTimes);
                     reportLongestPhaseInMajorGC(longestParallel, JS_TELEMETRY_GC_SLOW_TASK);
                 }
             }
 
             // Record how long we went over budget.
             int64_t overrun = sliceTime.ToMicroseconds() - (1000 * budget_ms);
-            if (overrun > 0)
+            if (overrun > 0) {
                 runtime->addTelemetry(JS_TELEMETRY_GC_BUDGET_OVERRUN, uint32_t(overrun));
+            }
         }
 
         sliceCount_++;
     }
 
     bool last = !runtime->gc.isIncrementalGCInProgress();
     if (last) {
-        if (gcTimerFile)
+        if (gcTimerFile) {
             printStats();
+        }
 
-        if (!aborted)
+        if (!aborted) {
             endGC();
+        }
     }
 
-    if (enableProfiling_ && !aborted && slices_.back().duration() >= profileThreshold_)
+    if (enableProfiling_ && !aborted && slices_.back().duration() >= profileThreshold_) {
         printSliceProfile();
+    }
 
     // Slice callbacks should only fire for the outermost level.
     if (!aborted) {
         bool wasFullGC = zoneStats.isFullCollection();
         if (sliceCallback) {
             JSContext* cx = runtime->mainContextFromOwnThread();
             JS::GCDescription desc(!wasFullGC, last, gckind, slices_.back().reason);
             (*sliceCallback)(cx, JS::GC_SLICE_END, desc);
-            if (last)
+            if (last) {
                 (*sliceCallback)(cx, JS::GC_CYCLE_END, desc);
+            }
         }
     }
 
     // Do this after the slice callback since it uses these values.
     if (last) {
-        for (auto& count : counts)
+        for (auto& count : counts) {
             count = 0;
+        }
 
         // Clear the timers at the end of a GC, preserving the data for PhaseKind::MUTATOR.
         auto mutatorStartTime = phaseStartTimes[Phase::MUTATOR];
         auto mutatorTime = phaseTimes[Phase::MUTATOR];
 
-        for (mozilla::TimeStamp& t : phaseStartTimes)
+        for (mozilla::TimeStamp& t : phaseStartTimes) {
             t = TimeStamp();
+        }
 #ifdef DEBUG
-        for (mozilla::TimeStamp& t : phaseEndTimes)
+        for (mozilla::TimeStamp& t : phaseEndTimes) {
             t = TimeStamp();
+        }
 #endif
 
         for (TimeDuration& duration : phaseTimes) {
             duration = TimeDuration();
             MOZ_ASSERT(duration.IsZero());
         }
 
         phaseStartTimes[Phase::MUTATOR] = mutatorStartTime;
@@ -1219,18 +1275,19 @@ Statistics::startTimingMutator()
     beginPhase(PhaseKind::MUTATOR);
     return true;
 }
 
 bool
 Statistics::stopTimingMutator(double& mutator_ms, double& gc_ms)
 {
     // This should only be called from outside of GC, while timing the mutator.
-    if (phaseStack.length() != 1 || phaseStack[0] != Phase::MUTATOR)
+    if (phaseStack.length() != 1 || phaseStack[0] != Phase::MUTATOR) {
         return false;
+    }
 
     endPhase(PhaseKind::MUTATOR);
     mutator_ms = t(phaseTimes[Phase::MUTATOR]);
     gc_ms = t(timedGCTime);
 
     return true;
 }
 
@@ -1255,31 +1312,33 @@ Statistics::resumePhases()
                suspendedPhases.back() == Phase::IMPLICIT_SUSPENSION);
     suspendedPhases.popBack();
 
     while (!suspendedPhases.empty() &&
            suspendedPhases.back() != Phase::EXPLICIT_SUSPENSION &&
            suspendedPhases.back() != Phase::IMPLICIT_SUSPENSION)
     {
         Phase resumePhase = suspendedPhases.popCopy();
-        if (resumePhase == Phase::MUTATOR)
+        if (resumePhase == Phase::MUTATOR) {
             timedGCTime += ReallyNow() - timedGCStart;
+        }
         recordPhaseBegin(resumePhase);
     }
 }
 
 void
 Statistics::beginPhase(PhaseKind phaseKind)
 {
     // No longer timing these phases. We should never see these.
     MOZ_ASSERT(phaseKind != PhaseKind::GC_BEGIN && phaseKind != PhaseKind::GC_END);
 
     // PhaseKind::MUTATOR is suspended while performing GC.
-    if (currentPhase() == Phase::MUTATOR)
+    if (currentPhase() == Phase::MUTATOR) {
         suspendPhases(PhaseKind::IMPLICIT_SUSPENSION);
+    }
 
     recordPhaseBegin(lookupChildPhase(phaseKind));
 }
 
 void
 Statistics::recordPhaseBegin(Phase phase)
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
@@ -1319,41 +1378,45 @@ Statistics::recordPhaseEnd(Phase phase)
     // Make sure this phase ends after it starts.
     MOZ_ASSERT(now >= phaseStartTimes[phase], "Inconsistent time data; see bug 1400153");
 
 #ifdef DEBUG
     // Make sure this phase ends after all of its children. Note that some
     // children might not have run in this instance, in which case they will
     // have run in a previous instance of this parent or not at all.
     for (Phase kid = phases[phase].firstChild; kid != Phase::NONE; kid = phases[kid].nextSibling) {
-        if (phaseEndTimes[kid].IsNull())
+        if (phaseEndTimes[kid].IsNull()) {
             continue;
-        if (phaseEndTimes[kid] > now)
+        }
+        if (phaseEndTimes[kid] > now) {
             fprintf(stderr, "Parent %s ended at %.3fms, before child %s ended at %.3fms?\n",
                     phases[phase].name,
                     t(now - TimeStamp::ProcessCreation()),
                     phases[kid].name,
                     t(phaseEndTimes[kid] - TimeStamp::ProcessCreation()));
+        }
         MOZ_ASSERT(phaseEndTimes[kid] <= now, "Inconsistent time data; see bug 1400153");
     }
 #endif
 
     if (now < phaseStartTimes[phase]) {
         now = phaseStartTimes[phase];
         aborted = true;
     }
 
-    if (phase == Phase::MUTATOR)
+    if (phase == Phase::MUTATOR) {
         timedGCStart = now;
+    }
 
     phaseStack.popBack();
 
     TimeDuration t = now - phaseStartTimes[phase];
-    if (!slices_.empty())
+    if (!slices_.empty()) {
         slices_.back().phaseTimes[phase] += t;
+    }
     phaseTimes[phase] += t;
     phaseStartTimes[phase] = TimeStamp();
 
 #ifdef DEBUG
     phaseEndTimes[phase] = now;
     writeLogMessage("end: %s", phases[phase].path);
 #endif
 }
@@ -1383,34 +1446,36 @@ Statistics::recordParallelPhase(PhaseKin
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
 
     Phase phase = lookupChildPhase(phaseKind);
 
     // Record the duration for all phases in the tree up to the root. This is
     // not strictly necessary but makes the invariant that parent phase times
     // include their children apply to both phaseTimes and parallelTimes.
     while (phase != Phase::NONE) {
-        if (!slices_.empty())
+        if (!slices_.empty()) {
             slices_.back().parallelTimes[phase] += duration;
+        }
         parallelTimes[phase] += duration;
         phase = phases[phase].parent;
     }
 }
 
 TimeStamp
 Statistics::beginSCC()
 {
     return ReallyNow();
 }
 
 void
 Statistics::endSCC(unsigned scc, TimeStamp start)
 {
-    if (scc >= sccTimes.length() && !sccTimes.resize(scc + 1))
+    if (scc >= sccTimes.length() && !sccTimes.resize(scc + 1)) {
         return;
+    }
 
     sccTimes[scc] += ReallyNow() - start;
 }
 
 /*
  * MMU (minimum mutator utilization) is a measure of how much garbage collection
  * is affecting the responsiveness of the system. MMU measurements are given
  * with respect to a certain window size. If we report MMU(50ms) = 80%, then
@@ -1422,72 +1487,78 @@ Statistics::endSCC(unsigned scc, TimeSta
 double
 Statistics::computeMMU(TimeDuration window) const
 {
     MOZ_ASSERT(!slices_.empty());
 
     TimeDuration gc = slices_[0].end - slices_[0].start;
     TimeDuration gcMax = gc;
 
-    if (gc >= window)
+    if (gc >= window) {
         return 0.0;
+    }
 
     int startIndex = 0;
     for (size_t endIndex = 1; endIndex < slices_.length(); endIndex++) {
         auto* startSlice = &slices_[startIndex];
         auto& endSlice = slices_[endIndex];
         gc += endSlice.end - endSlice.start;
 
         while (endSlice.end - startSlice->end >= window) {
             gc -= startSlice->end - startSlice->start;
             startSlice = &slices_[++startIndex];
         }
 
         TimeDuration cur = gc;
-        if (endSlice.end - startSlice->start > window)
+        if (endSlice.end - startSlice->start > window) {
             cur -= (endSlice.end - startSlice->start - window);
-        if (cur > gcMax)
+        }
+        if (cur > gcMax) {
             gcMax = cur;
+        }
     }
 
     return double((window - gcMax) / window);
 }
 
 void
 Statistics::maybePrintProfileHeaders()
 {
     static int printedHeader = 0;
     if ((printedHeader++ % 200) == 0) {
         printProfileHeader();
-        if (runtime->gc.nursery().enableProfiling())
+        if (runtime->gc.nursery().enableProfiling()) {
             Nursery::printProfileHeader();
+        }
     }
 }
 
 void
 Statistics::printProfileHeader()
 {
-    if (!enableProfiling_)
+    if (!enableProfiling_) {
         return;
+    }
 
     fprintf(stderr, "MajorGC:               Reason States FSNR ");
     fprintf(stderr, " %6s", "budget");
     fprintf(stderr, " %6s", "total");
 #define PRINT_PROFILE_HEADER(name, text, phase)                               \
     fprintf(stderr, " %6s", text);
 FOR_EACH_GC_PROFILE_TIME(PRINT_PROFILE_HEADER)
 #undef PRINT_PROFILE_HEADER
     fprintf(stderr, "\n");
 }
 
 /* static */ void
 Statistics::printProfileTimes(const ProfileDurations& times)
 {
-    for (auto time : times)
+    for (auto time : times) {
         fprintf(stderr, " %6" PRIi64, static_cast<int64_t>(time.ToMilliseconds()));
+    }
     fprintf(stderr, "\n");
 }
 
 void
 Statistics::printSliceProfile()
 {
     const SliceData& slice = slices_.back();
 
@@ -1501,20 +1572,21 @@ Statistics::printSliceProfile()
     fprintf(stderr, "MajorGC: %20s %1d -> %1d %1s%1s%1s%1s ",
             ExplainReason(slice.reason),
             int(slice.initialState), int(slice.finalState),
             full ? "F": "",
             shrinking ? "S" : "",
             nonIncremental ? "N" : "",
             reset ? "R" : "");
 
-    if (!nonIncremental && !slice.budget.isUnlimited() && slice.budget.isTimeBudget())
+    if (!nonIncremental && !slice.budget.isUnlimited() && slice.budget.isTimeBudget()) {
         fprintf(stderr, " %6" PRIi64, static_cast<int64_t>(slice.budget.timeBudget.budget));
-    else
+    } else {
         fprintf(stderr, "       ");
+    }
 
     ProfileDurations times;
     times[ProfileKey::Total] = slice.duration();
     totalTimes_[ProfileKey::Total] += times[ProfileKey::Total];
 
 #define GET_PROFILE_TIME(name, text, phase)                                   \
     times[ProfileKey::name] = SumPhase(phase, slice.phaseTimes);              \
     totalTimes_[ProfileKey::name] += times[ProfileKey::name];
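
A note on the MMU comment and computeMMU in the hunks above: the function returns (window - gcMax) / window, where gcMax is the largest amount of GC time found in any interval of the requested length. A worked example with hypothetical numbers: if the busiest 50 ms window contains 10 ms of GC, then MMU(50ms) = (50 - 10) / 50 = 80%, meaning the mutator ran for at least 80% of every 50 ms window.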
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -154,18 +154,19 @@ struct Statistics
     MOZ_MUST_USE bool stopTimingMutator(double& mutator_ms, double& gc_ms);
 
     // Note when we sweep a zone or compartment.
     void sweptZone() { ++zoneStats.sweptZoneCount; }
     void sweptCompartment() { ++zoneStats.sweptCompartmentCount; }
 
     void reset(gc::AbortReason reason) {
         MOZ_ASSERT(reason != gc::AbortReason::None);
-        if (!aborted)
+        if (!aborted) {
             slices_.back().resetReason = reason;
+        }
     }
 
     void nonincremental(gc::AbortReason reason) {
         MOZ_ASSERT(reason != gc::AbortReason::None);
         nonincrementalReason_ = reason;
         writeLogMessage("Non-incremental reason: %s",
             nonincrementalReason());
     }
@@ -457,23 +458,25 @@ struct MOZ_RAII AutoPhase
       : stats(stats), phaseKind(phaseKind), enabled(true)
     {
         stats.beginPhase(phaseKind);
     }
 
     AutoPhase(Statistics& stats, bool condition, PhaseKind phaseKind)
       : stats(stats), phaseKind(phaseKind), enabled(condition)
     {
-        if (enabled)
+        if (enabled) {
             stats.beginPhase(phaseKind);
+        }
     }
 
     ~AutoPhase() {
-        if (enabled)
+        if (enabled) {
             stats.endPhase(phaseKind);
+        }
     }
 
     Statistics& stats;
     PhaseKind phaseKind;
     bool enabled;
 };
 
 struct MOZ_RAII AutoSCC
--- a/js/src/gc/StoreBuffer-inl.h
+++ b/js/src/gc/StoreBuffer-inl.h
@@ -68,18 +68,19 @@ inline void
 StoreBuffer::WholeCellBuffer::put(const Cell* cell)
 {
     MOZ_ASSERT(cell->isTenured());
 
     Arena* arena = cell->asTenured().arena();
     ArenaCellSet* cells = arena->bufferedCells();
     if (cells->isEmpty()) {
         cells = allocateCellSet(arena);
-        if (!cells)
+        if (!cells) {
             return;
+        }
     }
 
     cells->putCell(&cell->asTenured());
     cells->check();
 }
 
 inline void
 StoreBuffer::putWholeCell(Cell* cell)
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -17,18 +17,19 @@
 using namespace js;
 using namespace js::gc;
 
 void
 StoreBuffer::GenericBuffer::trace(StoreBuffer* owner, JSTracer* trc)
 {
     mozilla::ReentrancyGuard g(*owner);
     MOZ_ASSERT(owner->isEnabled());
-    if (!storage_)
+    if (!storage_) {
         return;
+    }
 
     for (LifoAlloc::Enum e(*storage_); !e.empty();) {
         unsigned size = *e.read<unsigned>();
         BufferableRef* edge = e.read<BufferableRef>(size);
         edge->trace(trc);
     }
 }
 
@@ -40,18 +41,19 @@ StoreBuffer::checkEmpty() const
     MOZ_ASSERT(bufferSlot.isEmpty());
     MOZ_ASSERT(bufferWholeCell.isEmpty());
     MOZ_ASSERT(bufferGeneric.isEmpty());
 }
 
 bool
 StoreBuffer::enable()
 {
-    if (enabled_)
+    if (enabled_) {
         return true;
+    }
 
     checkEmpty();
 
     if (!bufferWholeCell.init() ||
         !bufferGeneric.init())
     {
         return false;
     }
@@ -60,29 +62,31 @@ StoreBuffer::enable()
     return true;
 }
 
 void
 StoreBuffer::disable()
 {
     checkEmpty();
 
-    if (!enabled_)
+    if (!enabled_) {
         return;
+    }
 
     aboutToOverflow_ = false;
 
     enabled_ = false;
 }
 
 void
 StoreBuffer::clear()
 {
-    if (!enabled_)
+    if (!enabled_) {
         return;
+    }
 
     aboutToOverflow_ = false;
     cancelIonCompilations_ = false;
 
     bufferVal.clear();
     bufferCell.clear();
     bufferSlot.clear();
     bufferWholeCell.clear();
@@ -123,39 +127,44 @@ ArenaCellSet::ArenaCellSet(Arena* arena,
     MOZ_ASSERT(bits.isAllClear());
 }
 
 ArenaCellSet*
 StoreBuffer::WholeCellBuffer::allocateCellSet(Arena* arena)
 {
     Zone* zone = arena->zone;
     JSRuntime* rt = zone->runtimeFromMainThread();
-    if (!rt->gc.nursery().isEnabled())
+    if (!rt->gc.nursery().isEnabled()) {
         return nullptr;
+    }
 
     AutoEnterOOMUnsafeRegion oomUnsafe;
     auto cells = storage_->new_<ArenaCellSet>(arena, head_);
-    if (!cells)
+    if (!cells) {
         oomUnsafe.crash("Failed to allocate ArenaCellSet");
+    }
 
     arena->bufferedCells() = cells;
     head_ = cells;
 
-    if (isAboutToOverflow())
+    if (isAboutToOverflow()) {
         rt->gc.storeBuffer().setAboutToOverflow(JS::gcreason::FULL_WHOLE_CELL_BUFFER);
+    }
 
     return cells;
 }
 
 void
 StoreBuffer::WholeCellBuffer::clear()
 {
-    for (ArenaCellSet* set = head_; set; set = set->next)
+    for (ArenaCellSet* set = head_; set; set = set->next) {
         set->arena->bufferedCells() = &ArenaCellSet::Empty;
+    }
     head_ = nullptr;
 
-    if (storage_)
+    if (storage_) {
         storage_->used() ? storage_->releaseAll() : storage_->freeAll();
+    }
 }
 
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -104,23 +104,25 @@ class StoreBuffer
             }
             stores_.remove(v);
         }
 
         /* Move any buffered stores to the canonical store set. */
         void sinkStore(StoreBuffer* owner) {
             if (last_) {
                 AutoEnterOOMUnsafeRegion oomUnsafe;
-                if (!stores_.put(last_))
+                if (!stores_.put(last_)) {
                     oomUnsafe.crash("Failed to allocate for MonoTypeBuffer::put.");
+                }
             }
             last_ = T();
 
-            if (MOZ_UNLIKELY(stores_.count() > MaxEntries))
+            if (MOZ_UNLIKELY(stores_.count() > MaxEntries)) {
                 owner->setAboutToOverflow(T::FullBufferReason);
+            }
         }
 
         bool has(StoreBuffer* owner, const T& v) {
             sinkStore(owner);
             return stores_.has(v);
         }
 
         /* Trace the source of all edges in the store buffer. */
@@ -144,18 +146,19 @@ class StoreBuffer
         LifoAlloc* storage_;
         ArenaCellSet* head_;
 
         WholeCellBuffer() : storage_(nullptr), head_(nullptr) {}
         ~WholeCellBuffer() { js_delete(storage_); }
 
         MOZ_MUST_USE bool init() {
             MOZ_ASSERT(!head_);
-            if (!storage_)
+            if (!storage_) {
                 storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
+            }
             clear();
             return bool(storage_);
         }
 
         void clear();
 
         bool isAboutToOverflow() const {
             return !storage_->isEmpty() && storage_->used() > WholeCellBufferOverflowThresholdBytes;
@@ -185,25 +188,27 @@ class StoreBuffer
     {
 
         LifoAlloc* storage_;
 
         explicit GenericBuffer() : storage_(nullptr) {}
         ~GenericBuffer() { js_delete(storage_); }
 
         MOZ_MUST_USE bool init() {
-            if (!storage_)
+            if (!storage_) {
                 storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
+            }
             clear();
             return bool(storage_);
         }
 
         void clear() {
-            if (!storage_)
+            if (!storage_) {
                 return;
+            }
 
             storage_->used() ? storage_->releaseAll() : storage_->freeAll();
         }
 
         bool isAboutToOverflow() const {
             return !storage_->isEmpty() &&
                    storage_->availableInCurrentChunk() < GenericBufferLowAvailableThreshold;
         }
@@ -216,26 +221,29 @@ class StoreBuffer
             MOZ_ASSERT(storage_);
 
             /* Ensure T is derived from BufferableRef. */
             (void)static_cast<const BufferableRef*>(&t);
 
             AutoEnterOOMUnsafeRegion oomUnsafe;
             unsigned size = sizeof(T);
             unsigned* sizep = storage_->pod_malloc<unsigned>();
-            if (!sizep)
+            if (!sizep) {
                 oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
+            }
             *sizep = size;
 
             T* tp = storage_->new_<T>(t);
-            if (!tp)
+            if (!tp) {
                 oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
+            }
 
-            if (isAboutToOverflow())
+            if (isAboutToOverflow()) {
                 owner->setAboutToOverflow(JS::gcreason::FULL_GENERIC_BUFFER);
+            }
         }
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
             return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
         }
 
         bool isEmpty() const {
             return !storage_ || storage_->isEmpty();
@@ -341,18 +349,19 @@ class StoreBuffer
 
         bool operator!=(const SlotsEdge& other) const {
             return !(*this == other);
         }
 
         // True if this SlotsEdge range overlaps with the other SlotsEdge range,
         // false if they do not overlap.
         bool overlaps(const SlotsEdge& other) const {
-            if (objectAndKind_ != other.objectAndKind_)
+            if (objectAndKind_ != other.objectAndKind_) {
                 return false;
+            }
 
             // Widen our range by one on each side so that we consider
             // adjacent-but-not-actually-overlapping ranges as overlapping. This
             // is particularly useful for coalescing a series of increasing or
             // decreasing single index writes 0, 1, 2, ..., N into a SlotsEdge
             // range of elements [0, N].
             uint32_t end = start_ + count_ + 1;
             uint32_t start = start_ > 0 ? start_ - 1 : 0;
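
A worked example of the widening described above: consecutive single-slot writes produce ranges that touch but never overlap, so without the one-slot widening the merge path in putSlot (further down) would never be taken. A self-contained model using bare integer ranges in place of the real SlotsEdge:

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    struct Range { uint32_t start, count; };  // models [start, start + count)

    bool overlaps(const Range& a, const Range& b) {
        // Widen a by one slot on each side, as in SlotsEdge::overlaps().
        uint32_t end = a.start + a.count + 1;
        uint32_t start = a.start > 0 ? a.start - 1 : 0;
        return start < b.start + b.count && b.start < end;
    }

    void merge(Range& a, const Range& b) {
        uint32_t end = std::max(a.start + a.count, b.start + b.count);
        a.start = std::min(a.start, b.start);
        a.count = end - a.start;
    }

    int main() {
        Range last{0, 1};                     // a write to slot 0
        for (uint32_t i = 1; i <= 10; i++) {  // then slots 1..10, one at a time
            Range next{i, 1};
            assert(overlaps(last, next));     // adjacent, so they coalesce
            merge(last, next);
        }
        assert(last.start == 0 && last.count == 11);  // one edge: [0, 11)
    }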
@@ -392,31 +401,34 @@ class StoreBuffer
 
         static const auto FullBufferReason = JS::gcreason::FULL_SLOT_BUFFER;
     };
 
     template <typename Buffer, typename Edge>
     void unput(Buffer& buffer, const Edge& edge) {
         MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
-        if (!isEnabled())
+        if (!isEnabled()) {
             return;
+        }
         mozilla::ReentrancyGuard g(*this);
         buffer.unput(this, edge);
     }
 
     template <typename Buffer, typename Edge>
     void put(Buffer& buffer, const Edge& edge) {
         MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
         MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
-        if (!isEnabled())
+        if (!isEnabled()) {
             return;
+        }
         mozilla::ReentrancyGuard g(*this);
-        if (edge.maybeInRememberedSet(nursery_))
+        if (edge.maybeInRememberedSet(nursery_)) {
             buffer.put(this, edge);
+        }
     }
 
     MonoTypeBuffer<ValueEdge> bufferVal;
     MonoTypeBuffer<CellPtrEdge> bufferCell;
     MonoTypeBuffer<SlotsEdge> bufferSlot;
     WholeCellBuffer bufferWholeCell;
     GenericBuffer bufferGeneric;
     bool cancelIonCompilations_;
@@ -457,20 +469,21 @@ class StoreBuffer
 
     /* Insert a single edge into the buffer/remembered set. */
     void putValue(JS::Value* vp) { put(bufferVal, ValueEdge(vp)); }
     void unputValue(JS::Value* vp) { unput(bufferVal, ValueEdge(vp)); }
     void putCell(Cell** cellp) { put(bufferCell, CellPtrEdge(cellp)); }
     void unputCell(Cell** cellp) { unput(bufferCell, CellPtrEdge(cellp)); }
     void putSlot(NativeObject* obj, int kind, uint32_t start, uint32_t count) {
         SlotsEdge edge(obj, kind, start, count);
-        if (bufferSlot.last_.overlaps(edge))
+        if (bufferSlot.last_.overlaps(edge)) {
             bufferSlot.last_.merge(edge);
-        else
+        } else {
             put(bufferSlot, edge);
+        }
     }
 
     inline void putWholeCell(Cell* cell);
 
     /* Insert an entry into the generic buffer. */
     template <typename T>
     void putGeneric(const T& t) { put(bufferGeneric, t); }
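
The put()/unput() templates earlier in this class implement a generational post-write barrier: put() records an edge only when it might point into the nursery, and unput() withdraws a previously recorded edge. A toy model of that discipline, using none of SpiderMonkey's actual types:

    #include <unordered_set>

    struct Cell { bool inNursery = false; };  // stand-in for a GC thing

    struct ToyStoreBuffer {
        std::unordered_set<Cell**> slots;     // remembered tenured->nursery edges
        bool enabled = true;

        void put(Cell** slot) {
            if (!enabled) {
                return;
            }
            if (*slot && (*slot)->inNursery) {  // cf. maybeInRememberedSet()
                slots.insert(slot);
            }
        }

        void unput(Cell** slot) {
            if (!enabled) {
                return;
            }
            slots.erase(slot);
        }
    };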
 
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -66,38 +66,41 @@ struct DoCallbackFunctor : public Identi
 template <>
 Value
 DoCallback<Value>(JS::CallbackTracer* trc, Value* vp, const char* name)
 {
     // Only update *vp if the value changed, to avoid TSan false positives for
     // template objects when using DumpHeapTracer or UbiNode tracers while Ion
     // compiling off-thread.
     Value v = DispatchTyped(DoCallbackFunctor<Value>(), *vp, trc, name);
-    if (*vp != v)
+    if (*vp != v) {
         *vp = v;
+    }
     return v;
 }
 
 template <>
 jsid
 DoCallback<jsid>(JS::CallbackTracer* trc, jsid* idp, const char* name)
 {
     jsid id = DispatchTyped(DoCallbackFunctor<jsid>(), *idp, trc, name);
-    if (*idp != id)
+    if (*idp != id) {
         *idp = id;
+    }
     return id;
 }
 
 template <>
 TaggedProto
 DoCallback<TaggedProto>(JS::CallbackTracer* trc, TaggedProto* protop, const char* name)
 {
     TaggedProto proto = DispatchTyped(DoCallbackFunctor<TaggedProto>(), *protop, trc, name);
-    if (*protop != proto)
+    if (*protop != proto) {
         *protop = proto;
+    }
     return proto;
 }
 
 void
 JS::CallbackTracer::getTracingEdgeName(char* buffer, size_t bufferSize)
 {
     MOZ_ASSERT(bufferSize > 0);
     if (contextFunctor_) {
@@ -144,33 +147,35 @@ namespace {
 struct TraceIncomingFunctor {
     JSTracer* trc_;
     const JS::CompartmentSet& compartments_;
     TraceIncomingFunctor(JSTracer* trc, const JS::CompartmentSet& compartments)
       : trc_(trc), compartments_(compartments)
     {}
     template <typename T>
     void operator()(T tp) {
-        if (!compartments_.has((*tp)->compartment()))
+        if (!compartments_.has((*tp)->compartment())) {
             return;
+        }
         TraceManuallyBarrieredEdge(trc_, tp, "cross-compartment wrapper");
     }
     // StringWrappers are just used to avoid copying strings
     // across zones multiple times, and don't hold a strong
     // reference.
     void operator()(JSString** tp) {}
 };
 } // namespace (anonymous)
 
 JS_PUBLIC_API(void)
 JS::TraceIncomingCCWs(JSTracer* trc, const JS::CompartmentSet& compartments)
 {
     for (js::CompartmentsIter comp(trc->runtime()); !comp.done(); comp.next()) {
-        if (compartments.has(comp))
+        if (compartments.has(comp)) {
             continue;
+        }
 
         for (Compartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             mozilla::DebugOnly<const CrossCompartmentKey> prior = e.front().key();
             e.front().mutableKey().applyToWrapped(TraceIncomingFunctor(trc, compartments));
             MOZ_ASSERT(e.front().key() == prior);
         }
     }
 }
@@ -251,18 +256,19 @@ ObjectGroupCycleCollectorTracer::onChild
 
     if (thing.is<ObjectGroup>()) {
         // If this group is required to be in an ObjectGroup chain, trace it
         // via the provided worklist rather than continuing to recurse.
         ObjectGroup& group = thing.as<ObjectGroup>();
         AutoSweepObjectGroup sweep(&group);
         if (group.maybeUnboxedLayout(sweep)) {
             for (size_t i = 0; i < seen.length(); i++) {
-                if (seen[i] == &group)
+                if (seen[i] == &group) {
                     return;
+                }
             }
             if (seen.append(&group) && worklist.append(&group)) {
                 return;
             } else {
                 // If append fails, keep tracing normally. The worst that will
                 // happen is we end up overrecursing.
             }
         }
@@ -273,18 +279,19 @@ ObjectGroupCycleCollectorTracer::onChild
 
 void
 gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, ObjectGroup* group)
 {
     MOZ_ASSERT(trc->isCallbackTracer());
 
     // Early return if this group is not required to be in an ObjectGroup chain.
     AutoSweepObjectGroup sweep(group);
-    if (!group->maybeUnboxedLayout(sweep))
+    if (!group->maybeUnboxedLayout(sweep)) {
         return group->traceChildren(trc);
+    }
 
     ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
     group->traceChildren(&groupTracer);
 
     while (!groupTracer.worklist.empty()) {
         ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
         innerGroup->traceChildren(&groupTracer);
     }
@@ -306,50 +313,57 @@ CountDecimalDigits(size_t num)
 }
 
 static const char*
 StringKindHeader(JSString* str)
 {
     MOZ_ASSERT(str->isLinear());
 
     if (str->isAtom()) {
-        if (str->isPermanentAtom())
+        if (str->isPermanentAtom()) {
             return "permanent atom: ";
+        }
         return "atom: ";
     }
 
     if (str->isFlat()) {
-        if (str->isExtensible())
+        if (str->isExtensible()) {
             return "extensible: ";
-        if (str->isUndepended())
+        }
+        if (str->isUndepended()) {
             return "undepended: ";
+        }
         if (str->isInline()) {
-            if (str->isFatInline())
+            if (str->isFatInline()) {
                 return "fat inline: ";
+            }
             return "inline: ";
         }
         return "flat: ";
     }
 
-    if (str->isDependent())
+    if (str->isDependent()) {
         return "dependent: ";
-    if (str->isExternal())
+    }
+    if (str->isExternal()) {
         return "external: ";
+    }
     return "linear: ";
 }
 
 JS_PUBLIC_API(void)
 JS_GetTraceThingInfo(char* buf, size_t bufsize, JSTracer* trc, void* thing,
                      JS::TraceKind kind, bool details)
 {
     const char* name = nullptr; /* silence uninitialized warning */
     size_t n;
 
-    if (bufsize == 0)
+    if (bufsize == 0) {
         return;
+    }
 
     switch (kind) {
       case JS::TraceKind::BaseShape:
         name = "base_shape";
         break;
 
       case JS::TraceKind::JitCode:
         name = "jitcode";
@@ -406,18 +420,19 @@ JS_GetTraceThingInfo(char* buf, size_t b
 #endif
 
       default:
         name = "INVALID";
         break;
     }
 
     n = strlen(name);
-    if (n > bufsize - 1)
+    if (n > bufsize - 1) {
         n = bufsize - 1;
+    }
     js_memcpy(buf, name, n + 1);
     buf += n;
     bufsize -= n;
     *buf = '\0';
 
     if (details && bufsize > 2) {
         switch (kind) {
           case JS::TraceKind::Object:
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -118,26 +118,28 @@ TraceEdge(JSTracer* trc, ReadBarriered<T
 
 // Trace through a possibly-null edge in the live object graph on behalf of
 // tracing.
 
 template <typename T>
 inline void
 TraceNullableEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
 {
-    if (InternalBarrierMethods<T>::isMarkable(thingp->get()))
+    if (InternalBarrierMethods<T>::isMarkable(thingp->get())) {
         TraceEdge(trc, thingp, name);
+    }
 }
 
 template <typename T>
 inline void
 TraceNullableEdge(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
 {
-    if (InternalBarrierMethods<T>::isMarkable(thingp->unbarrieredGet()))
+    if (InternalBarrierMethods<T>::isMarkable(thingp->unbarrieredGet())) {
         TraceEdge(trc, thingp, name);
+    }
 }
 
 // Trace through a "root" edge. These edges are the initial edges in the object
 // graph traversal. Root edges are asserted to only be traversed in the initial
 // phase of a GC.
 
 template <typename T>
 inline void
@@ -157,18 +159,19 @@ TraceRoot(JSTracer* trc, ReadBarriered<T
 // Identical to TraceRoot, except that this variant will not crash if |*thingp|
 // is null.
 
 template <typename T>
 inline void
 TraceNullableRoot(JSTracer* trc, T* thingp, const char* name)
 {
     gc::AssertRootMarkingPhase(trc);
-    if (InternalBarrierMethods<T>::isMarkable(*thingp))
+    if (InternalBarrierMethods<T>::isMarkable(*thingp)) {
         gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp), name);
+    }
 }
 
 template <typename T>
 inline void
 TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
 {
     TraceNullableRoot(trc, thingp->unsafeGet(), name);
 }
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -115,18 +115,19 @@ class js::VerifyPreTracer final : public
  * node.
  */
 void
 VerifyPreTracer::onChild(const JS::GCCellPtr& thing)
 {
     MOZ_ASSERT(!IsInsideNursery(thing.asCell()));
 
     // Skip things in other runtimes.
-    if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime())
+    if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
         return;
+    }
 
     edgeptr += sizeof(EdgeValue);
     if (edgeptr >= term) {
         edgeptr = term;
         return;
     }
 
     VerifyNode* node = curnode;
@@ -161,86 +162,94 @@ MakeNode(VerifyPreTracer* trc, void* thi
         return node;
     }
     return nullptr;
 }
 
 static VerifyNode*
 NextNode(VerifyNode* node)
 {
-    if (node->count == 0)
+    if (node->count == 0) {
         return (VerifyNode*)((char*)node + sizeof(VerifyNode) - sizeof(EdgeValue));
-    else
+    } else {
         return (VerifyNode*)((char*)node + sizeof(VerifyNode) +
                              sizeof(EdgeValue)*(node->count - 1));
+    }
 }
 
 void
 gc::GCRuntime::startVerifyPreBarriers()
 {
-    if (verifyPreData || isIncrementalGCInProgress())
+    if (verifyPreData || isIncrementalGCInProgress()) {
         return;
+    }
 
     JSContext* cx = rt->mainContextFromOwnThread();
-    if (temporaryAbortIfWasmGc(cx))
+    if (temporaryAbortIfWasmGc(cx)) {
         return;
+    }
 
     if (IsIncrementalGCUnsafe(rt) != AbortReason::None ||
         rt->hasHelperThreadZones())
     {
         return;
     }
 
     number++;
 
     VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
-    if (!trc)
+    if (!trc) {
         return;
+    }
 
     AutoPrepareForTracing prep(cx);
 
     {
         AutoLockGC lock(cx->runtime());
-        for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next())
+        for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
             chunk->bitmap.clear();
+        }
     }
 
     gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::TRACE_HEAP);
 
     const size_t size = 64 * 1024 * 1024;
     trc->root = (VerifyNode*)js_malloc(size);
-    if (!trc->root)
+    if (!trc->root) {
         goto oom;
+    }
     trc->edgeptr = (char*)trc->root;
     trc->term = trc->edgeptr + size;
 
     /* Create the root node. */
     trc->curnode = MakeNode(trc, nullptr, JS::TraceKind(0));
 
     incrementalState = State::MarkRoots;
 
     /* Make all the roots be edges emanating from the root node. */
     traceRuntime(trc, prep);
 
     VerifyNode* node;
     node = trc->curnode;
-    if (trc->edgeptr == trc->term)
+    if (trc->edgeptr == trc->term) {
         goto oom;
+    }
 
     /* For each edge, make a node for it if one doesn't already exist. */
     while ((char*)node < trc->edgeptr) {
         for (uint32_t i = 0; i < node->count; i++) {
             EdgeValue& e = node->edges[i];
             VerifyNode* child = MakeNode(trc, e.thing, e.kind);
             if (child) {
                 trc->curnode = child;
                 js::TraceChildren(trc, e.thing, e.kind);
             }
-            if (trc->edgeptr == trc->term)
+            if (trc->edgeptr == trc->term) {
                 goto oom;
+            }
         }
 
         node = NextNode(node);
     }
 
     verifyPreData = trc;
     incrementalState = State::Mark;
     marker.start();
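
The pointer arithmetic in NextNode() above only makes sense against the node layout: a fixed header that ends in a one-element inline edge array, so a node with count edges occupies sizeof(VerifyNode) + (count - 1) * sizeof(EdgeValue) bytes, and a zero-edge node gives the inline slot back. A hypothetical mirror of that layout (the real structs are defined earlier in Verifier.cpp and may differ in detail):

    #include <cstddef>
    #include <cstdint>

    struct EdgeValue {
        void* thing;
        uint32_t kind;
    };

    struct VerifyNode {
        void* thing;
        uint32_t kind;
        uint32_t count;
        EdgeValue edges[1];  // first element of a trailing inline array
    };

    // Byte size of a node with n edges, matching NextNode's stride.
    constexpr size_t nodeSize(uint32_t n) {
        return n == 0 ? sizeof(VerifyNode) - sizeof(EdgeValue)
                      : sizeof(VerifyNode) + sizeof(EdgeValue) * (n - 1);
    }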
@@ -279,22 +288,24 @@ static const uint32_t MAX_VERIFIER_EDGES
  * it with nullptr. EndVerifyBarriers later asserts that the remaining
  * non-nullptr edges (i.e., the ones from the original snapshot that must have
  * been modified) point to marked objects.
  */
 void
 CheckEdgeTracer::onChild(const JS::GCCellPtr& thing)
 {
     // Skip things in other runtimes.
-    if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime())
+    if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
         return;
+    }
 
     /* Avoid n^2 behavior. */
-    if (node->count > MAX_VERIFIER_EDGES)
+    if (node->count > MAX_VERIFIER_EDGES) {
         return;
+    }
 
     for (uint32_t i = 0; i < node->count; i++) {
         if (node->edges[i].thing == thing.asCell()) {
             MOZ_ASSERT(node->edges[i].kind == thing.kind());
             node->edges[i].thing = nullptr;
             return;
         }
     }
@@ -305,46 +316,51 @@ js::gc::AssertSafeToSkipBarrier(TenuredC
 {
     mozilla::DebugOnly<Zone*> zone = thing->zoneFromAnyThread();
     MOZ_ASSERT(!zone->needsIncrementalBarrier() || zone->isAtomsZone());
 }
 
 static bool
 IsMarkedOrAllocated(const EdgeValue& edge)
 {
-    if (!edge.thing || IsMarkedOrAllocated(TenuredCell::fromPointer(edge.thing)))
+    if (!edge.thing || IsMarkedOrAllocated(TenuredCell::fromPointer(edge.thing))) {
         return true;
+    }
 
     // Permanent atoms and well-known symbols aren't marked during graph traversal.
-    if (edge.kind == JS::TraceKind::String && static_cast<JSString*>(edge.thing)->isPermanentAtom())
+    if (edge.kind == JS::TraceKind::String && static_cast<JSString*>(edge.thing)->isPermanentAtom()) {
         return true;
-    if (edge.kind == JS::TraceKind::Symbol && static_cast<JS::Symbol*>(edge.thing)->isWellKnownSymbol())
+    }
+    if (edge.kind == JS::TraceKind::Symbol && static_cast<JS::Symbol*>(edge.thing)->isWellKnownSymbol()) {
         return true;
+    }
 
     return false;
 }
 
 void
 gc::GCRuntime::endVerifyPreBarriers()
 {
     VerifyPreTracer* trc = verifyPreData;
 
-    if (!trc)
+    if (!trc) {
         return;
+    }
 
     MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));
 
     AutoPrepareForTracing prep(rt->mainContextFromOwnThread());
 
     bool compartmentCreated = false;
 
     /* We need to disable barriers before tracing, which may invoke barriers. */
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        if (!zone->needsIncrementalBarrier())
+        if (!zone->needsIncrementalBarrier()) {
             compartmentCreated = true;
+        }
 
         zone->setNeedsIncrementalBarrier(false);
     }
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
@@ -392,43 +408,49 @@ gc::GCRuntime::endVerifyPreBarriers()
     js_delete(trc);
 }
 
 /*** Barrier Verifier Scheduling ***/
 
 void
 gc::GCRuntime::verifyPreBarriers()
 {
-    if (verifyPreData)
+    if (verifyPreData) {
         endVerifyPreBarriers();
-    else
+    } else {
         startVerifyPreBarriers();
+    }
 }
 
 void
 gc::VerifyBarriers(JSRuntime* rt, VerifierType type)
 {
-    if (GCRuntime::temporaryAbortIfWasmGc(rt->mainContextFromOwnThread()))
+    if (GCRuntime::temporaryAbortIfWasmGc(rt->mainContextFromOwnThread())) {
         return;
-    if (type == PreBarrierVerifier)
+    }
+    if (type == PreBarrierVerifier) {
         rt->gc.verifyPreBarriers();
+    }
 }
 
 void
 gc::GCRuntime::maybeVerifyPreBarriers(bool always)
 {
-    if (!hasZealMode(ZealMode::VerifierPre))
+    if (!hasZealMode(ZealMode::VerifierPre)) {
         return;
+    }
 
-    if (rt->mainContextFromOwnThread()->suppressGC)
+    if (rt->mainContextFromOwnThread()->suppressGC) {
         return;
+    }
 
     if (verifyPreData) {
-        if (++verifyPreData->count < zealFrequency && !always)
+        if (++verifyPreData->count < zealFrequency && !always) {
             return;
+        }
 
         endVerifyPreBarriers();
     }
 
     startVerifyPreBarriers();
 }
 
 void
@@ -502,52 +524,58 @@ HeapCheckTracerBase::HeapCheckTracerBase
 }
 
 void
 HeapCheckTracerBase::onChild(const JS::GCCellPtr& thing)
 {
     Cell* cell = thing.asCell();
     checkCell(cell);
 
-    if (visited.lookup(cell))
+    if (visited.lookup(cell)) {
         return;
+    }
 
     if (!visited.put(cell)) {
         oom = true;
         return;
     }
 
     // Don't trace into GC things owned by another runtime.
-    if (cell->runtimeFromAnyThread() != rt)
+    if (cell->runtimeFromAnyThread() != rt) {
         return;
+    }
 
     // Don't trace into GC in zones being used by helper threads.
     Zone* zone;
-    if (thing.is<JSObject>())
+    if (thing.is<JSObject>()) {
         zone = thing.as<JSObject>().zone();
-    else if (thing.is<JSString>())
+    } else if (thing.is<JSString>()) {
         zone = thing.as<JSString>().zone();
-    else
+    } else {
         zone = cell->asTenured().zone();
+    }
 
-    if (zone->usedByHelperThread())
+    if (zone->usedByHelperThread()) {
         return;
+    }
 
     WorkItem item(thing, contextName(), parentIndex);
-    if (!stack.append(item))
+    if (!stack.append(item)) {
         oom = true;
+    }
 }
 
 bool
 HeapCheckTracerBase::traceHeap(AutoTraceSession& session)
 {
     // The analysis thinks that traceRuntime might GC by calling a GC callback.
     JS::AutoSuppressGCAnalysis nogc;
-    if (!rt->isBeingDestroyed())
+    if (!rt->isBeingDestroyed()) {
         rt->gc.traceRuntime(this, session);
+    }
 
     while (!stack.empty() && !oom) {
         WorkItem item = stack.back();
         if (item.processed) {
             stack.popBack();
         } else {
             parentIndex = stack.length() - 1;
             stack.back().processed = true;
@@ -556,35 +584,39 @@ HeapCheckTracerBase::traceHeap(AutoTrace
     }
 
     return !oom;
 }
 
 static const char*
 GetCellColorName(Cell* cell)
 {
-    if (cell->isMarkedBlack())
+    if (cell->isMarkedBlack()) {
         return "black";
-    if (cell->isMarkedGray())
+    }
+    if (cell->isMarkedGray()) {
         return "gray";
+    }
     return "white";
 }
 
 void
 HeapCheckTracerBase::dumpCellInfo(Cell* cell)
 {
     auto kind = cell->getTraceKind();
     JSObject* obj = kind == JS::TraceKind::Object ? static_cast<JSObject*>(cell) : nullptr;
 
     fprintf(stderr, "%s %s", GetCellColorName(cell), GCTraceKindToAscii(kind));
-    if (obj)
+    if (obj) {
         fprintf(stderr, " %s", obj->getClass()->name);
+    }
     fprintf(stderr, " %p", cell);
-    if (obj)
+    if (obj) {
         fprintf(stderr, " (compartment %p)", obj->compartment());
+    }
 }
 
 void
 HeapCheckTracerBase::dumpCellPath()
 {
     const char* name = contextName();
     for (int index = parentIndex; index != -1; index = stack[index].parentIndex) {
         const WorkItem& parent = stack[index];
@@ -639,34 +671,37 @@ CheckHeapTracer::checkCell(Cell* cell)
         fprintf(stderr, "Bad pointer %p\n", cell);
         dumpCellPath();
     }
 }
 
 void
 CheckHeapTracer::check(AutoTraceSession& session)
 {
-    if (!traceHeap(session))
+    if (!traceHeap(session)) {
         return;
+    }
 
-    if (failures)
+    if (failures) {
         fprintf(stderr, "Heap check: %zu failure(s)\n", failures);
+    }
     MOZ_RELEASE_ASSERT(failures == 0);
 }
 
 void
 js::gc::CheckHeapAfterGC(JSRuntime* rt)
 {
     AutoTraceSession session(rt);
     CheckHeapTracer::GCType gcType;
 
-    if (rt->gc.nursery().isEmpty())
+    if (rt->gc.nursery().isEmpty()) {
         gcType = CheckHeapTracer::GCType::Moving;
-    else
+    } else {
         gcType = CheckHeapTracer::GCType::NonMoving;
+    }
 
     CheckHeapTracer tracer(rt, gcType);
     tracer.check(session);
 }
 
 #endif /* JSGC_HASH_TABLE_CHECKS */
 
 #if defined(JS_GC_ZEAL) || defined(DEBUG)
@@ -687,18 +722,19 @@ CheckGrayMarkingTracer::CheckGrayMarking
     // Weak gray->black edges are allowed.
     setTraceWeakEdges(false);
 }
 
 void
 CheckGrayMarkingTracer::checkCell(Cell* cell)
 {
     Cell* parent = parentCell();
-    if (!parent)
+    if (!parent) {
         return;
+    }
 
     if (parent->isMarkedBlack() && cell->isMarkedGray()) {
         failures++;
 
         fprintf(stderr, "Found black to gray edge to ");
         dumpCellInfo(cell);
         fprintf(stderr, "\n");
         dumpCellPath();
@@ -710,29 +746,31 @@ CheckGrayMarkingTracer::checkCell(Cell* 
         }
 #endif
     }
 }
 
 bool
 CheckGrayMarkingTracer::check(AutoTraceSession& session)
 {
-    if (!traceHeap(session))
+    if (!traceHeap(session)) {
         return true; // Ignore failure.
+    }
 
     return failures == 0;
 }
 
 JS_FRIEND_API(bool)
 js::CheckGrayMarkingState(JSRuntime* rt)
 {
     MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
     MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
-    if (!rt->gc.areGrayBitsValid())
+    if (!rt->gc.areGrayBitsValid()) {
         return true;
+    }
 
     gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
     AutoTraceSession session(rt);
     CheckGrayMarkingTracer tracer(rt);
 
     return tracer.check(session);
 }
 
--- a/js/src/gc/WeakMap-inl.h
+++ b/js/src/gc/WeakMap-inl.h
@@ -73,48 +73,53 @@ WeakMap<K, V>::trace(JSTracer* trc)
 
     if (trc->isMarkingTracer()) {
         MOZ_ASSERT(trc->weakMapAction() == ExpandWeakMaps);
         marked = true;
         (void) markIteratively(GCMarker::fromTracer(trc));
         return;
     }
 
-    if (trc->weakMapAction() == DoNotTraceWeakMaps)
+    if (trc->weakMapAction() == DoNotTraceWeakMaps) {
         return;
+    }
 
     // Trace keys only if weakMapAction() says to.
     if (trc->weakMapAction() == TraceWeakMapKeysValues) {
-        for (Enum e(*this); !e.empty(); e.popFront())
+        for (Enum e(*this); !e.empty(); e.popFront()) {
             TraceEdge(trc, &e.front().mutableKey(), "WeakMap entry key");
+        }
     }
 
     // Always trace all values (unless weakMapAction() is
     // DoNotTraceWeakMaps).
-    for (Range r = Base::all(); !r.empty(); r.popFront())
+    for (Range r = Base::all(); !r.empty(); r.popFront()) {
         TraceEdge(trc, &r.front().value(), "WeakMap entry value");
+    }
 }
 
 template <class K, class V>
 /* static */ void
 WeakMap<K, V>::addWeakEntry(GCMarker* marker, JS::GCCellPtr key,
                                 const gc::WeakMarkable& markable)
 {
     Zone* zone = key.asCell()->asTenured().zone();
 
     auto p = zone->gcWeakKeys().get(key);
     if (p) {
         gc::WeakEntryVector& weakEntries = p->value;
-        if (!weakEntries.append(markable))
+        if (!weakEntries.append(markable)) {
             marker->abortLinearWeakMarking();
+        }
     } else {
         gc::WeakEntryVector weakEntries;
         MOZ_ALWAYS_TRUE(weakEntries.append(markable));
-        if (!zone->gcWeakKeys().put(JS::GCCellPtr(key), std::move(weakEntries)))
+        if (!zone->gcWeakKeys().put(JS::GCCellPtr(key), std::move(weakEntries))) {
             marker->abortLinearWeakMarking();
+        }
     }
 }
 
 template <class K, class V>
 bool
 WeakMap<K, V>::markIteratively(GCMarker* marker)
 {
     MOZ_ASSERT(marked);
@@ -138,37 +143,40 @@ WeakMap<K, V>::markIteratively(GCMarker*
         } else if (marker->isWeakMarkingTracer()) {
             // Entry is not yet known to be live. Record this weakmap and
             // the lookup key in the list of weak keys. Also record the
             // delegate, if any, because marking the delegate also marks
             // the entry.
             JS::GCCellPtr weakKey(extractUnbarriered(e.front().key()));
             gc::WeakMarkable markable(this, weakKey);
             addWeakEntry(marker, weakKey, markable);
-            if (JSObject* delegate = getDelegate(e.front().key()))
+            if (JSObject* delegate = getDelegate(e.front().key())) {
                 addWeakEntry(marker, JS::GCCellPtr(delegate), markable);
+            }
         }
     }
 
     return markedAny;
 }
 
 template <class K, class V>
 inline JSObject*
 WeakMap<K, V>::getDelegate(JSObject* key) const
 {
     JS::AutoSuppressGCAnalysis nogc;
 
     JSWeakmapKeyDelegateOp op = key->getClass()->extWeakmapKeyDelegateOp();
-    if (!op)
+    if (!op) {
         return nullptr;
+    }
 
     JSObject* obj = op(key);
-    if (!obj)
+    if (!obj) {
         return nullptr;
+    }
 
     MOZ_ASSERT(obj->runtimeFromMainThread() == zone()->runtimeFromMainThread());
     return obj;
 }
 
 template <class K, class V>
 inline JSObject*
 WeakMap<K, V>::getDelegate(JSScript* script) const
@@ -211,18 +219,19 @@ WeakMap<K, V>::keyNeedsMark(LazyScript* 
 
 
 template <class K, class V>
 void
 WeakMap<K, V>::sweep()
 {
     /* Remove all entries whose keys remain unmarked. */
     for (Enum e(*this); !e.empty(); e.popFront()) {
-        if (gc::IsAboutToBeFinalized(&e.front().mutableKey()))
+        if (gc::IsAboutToBeFinalized(&e.front().mutableKey())) {
             e.removeFront();
+        }
     }
 
 #ifdef DEBUG
     // Once we've swept, all remaining edges should stay within the known-live
     // part of the graph.
     assertEntriesNotAboutToBeFinalized();
 #endif
 }
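
markIteratively() and addWeakEntry() above together implement ephemeron marking: an entry's value is marked once its key (or the key's delegate) is known to be live, and not-yet-live keys are parked in the zone's gcWeakKeys table so that marking the key later can revive the entry directly. A much-simplified model of the underlying fixpoint, with plain containers and no delegates; the repeated rescan here is exactly the quadratic behavior the gcWeakKeys table lets the real code avoid:

    #include <unordered_map>
    #include <unordered_set>
    #include <vector>

    struct Cell {};  // stand-in for a GC thing

    using Marked = std::unordered_set<const Cell*>;
    using WeakTable = std::unordered_map<const Cell*, const Cell*>;  // key -> value

    void markEphemerons(const std::vector<WeakTable*>& maps, Marked& marked) {
        // Marking a value can make another table's key live, so iterate
        // until a full pass marks nothing new.
        bool markedAny = true;
        while (markedAny) {
            markedAny = false;
            for (WeakTable* map : maps) {
                for (const auto& entry : *map) {
                    if (marked.count(entry.first) && !marked.count(entry.second)) {
                        marked.insert(entry.second);  // live key => live value
                        markedAny = true;
                    }
                }
            }
        }
    }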
--- a/js/src/gc/WeakMap.cpp
+++ b/js/src/gc/WeakMap.cpp
@@ -33,18 +33,19 @@ WeakMapBase::WeakMapBase(JSObject* memOf
 WeakMapBase::~WeakMapBase()
 {
     MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadCanAccessZone(zone_));
 }
 
 void
 WeakMapBase::unmarkZone(JS::Zone* zone)
 {
-    for (WeakMapBase* m : zone->gcWeakMapList())
+    for (WeakMapBase* m : zone->gcWeakMapList()) {
         m->marked = false;
+    }
 }
 
 void
 WeakMapBase::traceZone(JS::Zone* zone, JSTracer* tracer)
 {
     MOZ_ASSERT(tracer->weakMapAction() != DoNotTraceWeakMaps);
     for (WeakMapBase* m : zone->gcWeakMapList()) {
         m->trace(tracer);
@@ -52,28 +53,30 @@ WeakMapBase::traceZone(JS::Zone* zone, J
     }
 }
 
 bool
 WeakMapBase::markZoneIteratively(JS::Zone* zone, GCMarker* marker)
 {
     bool markedAny = false;
     for (WeakMapBase* m : zone->gcWeakMapList()) {
-        if (m->marked && m->markIteratively(marker))
+        if (m->marked && m->markIteratively(marker)) {
             markedAny = true;
+        }
     }
     return markedAny;
 }
 
 bool
 WeakMapBase::findInterZoneEdges(JS::Zone* zone)
 {
     for (WeakMapBase* m : zone->gcWeakMapList()) {
-        if (!m->findZoneEdges())
+        if (!m->findZoneEdges()) {
             return false;
+        }
     }
     return true;
 }
 
 void
 WeakMapBase::sweepZone(JS::Zone* zone)
 {
     for (WeakMapBase* m = zone->gcWeakMapList().getFirst(); m; ) {
@@ -83,18 +86,19 @@ WeakMapBase::sweepZone(JS::Zone* zone)
         } else {
             m->clearAndCompact();
             m->removeFrom(zone->gcWeakMapList());
         }
         m = next;
     }
 
 #ifdef DEBUG
-    for (WeakMapBase* m : zone->gcWeakMapList())
+    for (WeakMapBase* m : zone->gcWeakMapList()) {
         MOZ_ASSERT(m->isInList() && m->marked);
+    }
 #endif
 }
 
 void
 WeakMapBase::traceAllMappings(WeakMapTracer* tracer)
 {
     JSRuntime* rt = tracer->runtime;
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
@@ -105,18 +109,19 @@ WeakMapBase::traceAllMappings(WeakMapTra
         }
     }
 }
 
 bool
 WeakMapBase::saveZoneMarkedWeakMaps(JS::Zone* zone, WeakMapSet& markedWeakMaps)
 {
     for (WeakMapBase* m : zone->gcWeakMapList()) {
-        if (m->marked && !markedWeakMaps.put(m))
+        if (m->marked && !markedWeakMaps.put(m)) {
             return false;
+        }
     }
     return true;
 }
 
 void
 WeakMapBase::restoreMarkedWeakMaps(WeakMapSet& markedWeakMaps)
 {
     for (WeakMapSet::Range r = markedWeakMaps.all(); !r.empty(); r.popFront()) {
@@ -133,39 +138,44 @@ ObjectValueMap::findZoneEdges()
     /*
      * For unmarked weakmap keys with delegates in a different zone, add a zone
      * edge to ensure that the delegate zone finishes marking before the key
      * zone.
      */
     JS::AutoSuppressGCAnalysis nogc;
     for (Range r = all(); !r.empty(); r.popFront()) {
         JSObject* key = r.front().key();
-        if (key->asTenured().isMarkedBlack())
+        if (key->asTenured().isMarkedBlack()) {
             continue;
+        }
         JSObject* delegate = getDelegate(key);
-        if (!delegate)
+        if (!delegate) {
             continue;
+        }
         Zone* delegateZone = delegate->zone();
-        if (delegateZone == zone() || !delegateZone->isGCMarking())
+        if (delegateZone == zone() || !delegateZone->isGCMarking()) {
             continue;
-        if (!delegateZone->gcSweepGroupEdges().put(key->zone()))
+        }
+        if (!delegateZone->gcSweepGroupEdges().put(key->zone())) {
             return false;
+        }
     }
     return true;
 }
 
 ObjectWeakMap::ObjectWeakMap(JSContext* cx)
   : map(cx, nullptr)
 {}
 
 JSObject*
 ObjectWeakMap::lookup(const JSObject* obj)
 {
-    if (ObjectValueMap::Ptr p = map.lookup(const_cast<JSObject*>(obj)))
+    if (ObjectValueMap::Ptr p = map.lookup(const_cast<JSObject*>(obj))) {
         return &p->value().toObject();
+    }
     return nullptr;
 }
 
 bool
 ObjectWeakMap::add(JSContext* cx, JSObject* obj, JSObject* target)
 {
     MOZ_ASSERT(obj && target);
 
--- a/js/src/gc/WeakMap.h
+++ b/js/src/gc/WeakMap.h
@@ -127,25 +127,27 @@ class WeakMap : public HashMap<Key, Valu
 
     explicit WeakMap(JSContext* cx, JSObject* memOf = nullptr);
 
     // Overridden to add a read barrier to prevent an incorrectly gray value
     // from escaping the weak map. See the UnmarkGrayTracer::onChild comment in
     // gc/Marking.cpp.
     Ptr lookup(const Lookup& l) const {
         Ptr p = Base::lookup(l);
-        if (p)
+        if (p) {
             exposeGCThingToActiveJS(p->value());
+        }
         return p;
     }
 
     AddPtr lookupForAdd(const Lookup& l) {
         AddPtr p = Base::lookupForAdd(l);
-        if (p)
+        if (p) {
             exposeGCThingToActiveJS(p->value());
+        }
         return p;
     }
 
     // Resolve ambiguity with LinkedListElement<>::remove.
     using Base::remove;
 
     void markEntry(GCMarker* marker, gc::Cell* markedCell, JS::GCCellPtr origKey) override;
 
--- a/js/src/gc/WeakMapPtr.cpp
+++ b/js/src/gc/WeakMapPtr.cpp
@@ -60,18 +60,19 @@ JS::WeakMapPtr<K, V>::destroy()
 
 template <typename K, typename V>
 bool
 JS::WeakMapPtr<K, V>::init(JSContext* cx)
 {
     MOZ_ASSERT(!initialized());
     typename WeakMapDetails::Utils<K, V>::PtrType map =
         cx->new_<typename WeakMapDetails::Utils<K,V>::Type>(cx);
-    if (!map)
+    if (!map) {
         return false;
+    }
     ptr = map;
     return true;
 }
 
 template <typename K, typename V>
 void
 JS::WeakMapPtr<K, V>::trace(JSTracer* trc)
 {
@@ -81,18 +82,19 @@ JS::WeakMapPtr<K, V>::trace(JSTracer* tr
 
 template <typename K, typename V>
 V
 JS::WeakMapPtr<K, V>::lookup(const K& key)
 {
     MOZ_ASSERT(initialized());
     typename WeakMapDetails::Utils<K, V>::Type::Ptr result =
         WeakMapDetails::Utils<K, V>::cast(ptr)->lookup(key);
-    if (!result)
+    if (!result) {
         return WeakMapDetails::DataType<V>::NullValue();
+    }
     return result->value();
 }
 
 template <typename K, typename V>
 bool
 JS::WeakMapPtr<K, V>::put(JSContext* cx, const K& key, const V& value)
 {
     MOZ_ASSERT(initialized());
--- a/js/src/gc/Zone-inl.h
+++ b/js/src/gc/Zone-inl.h
@@ -47,32 +47,34 @@ JS::Zone::UniqueIdToHash(uint64_t uid)
 {
     return mozilla::HashGeneric(uid);
 }
 
 inline bool
 JS::Zone::getHashCode(js::gc::Cell* cell, js::HashNumber* hashp)
 {
     uint64_t uid;
-    if (!getOrCreateUniqueId(cell, &uid))
+    if (!getOrCreateUniqueId(cell, &uid)) {
         return false;
+    }
     *hashp = UniqueIdToHash(uid);
     return true;
 }
 
 inline bool
 JS::Zone::maybeGetUniqueId(js::gc::Cell* cell, uint64_t* uidp)
 {
     MOZ_ASSERT(uidp);
     MOZ_ASSERT(js::CurrentThreadCanAccessZone(this));
 
     // Get an existing uid, if one has been set.
     auto p = uniqueIds().lookup(cell);
-    if (p)
+    if (p) {
         *uidp = p->value();
+    }
 
     return p.found();
 }
 
 inline bool
 JS::Zone::getOrCreateUniqueId(js::gc::Cell* cell, uint64_t* uidp)
 {
     MOZ_ASSERT(uidp);
@@ -84,18 +86,19 @@ JS::Zone::getOrCreateUniqueId(js::gc::Ce
         *uidp = p->value();
         return true;
     }
 
     MOZ_ASSERT(js::CurrentThreadCanAccessZone(this));
 
     // Set a new uid on the cell.
     *uidp = js::gc::NextCellUniqueId(runtimeFromAnyThread());
-    if (!uniqueIds().add(p, cell, *uidp))
+    if (!uniqueIds().add(p, cell, *uidp)) {
         return false;
+    }
 
     // If the cell was in the nursery (hopefully unlikely), then we need to
     // tell the nursery about it so that it can sweep the uid if the thing
     // does not get tenured.
     if (IsInsideNursery(cell) &&
         !runtimeFromMainThread()->gc.nursery().addedUniqueIdToCell(cell))
     {
         uniqueIds().remove(cell);
@@ -111,18 +114,19 @@ JS::Zone::getHashCodeInfallible(js::gc::
     return UniqueIdToHash(getUniqueIdInfallible(cell));
 }
 
 inline uint64_t
 JS::Zone::getUniqueIdInfallible(js::gc::Cell* cell)
 {
     uint64_t uid;
     js::AutoEnterOOMUnsafeRegion oomUnsafe;
-    if (!getOrCreateUniqueId(cell, &uid))
+    if (!getOrCreateUniqueId(cell, &uid)) {
         oomUnsafe.crash("failed to allocate uid");
+    }
     return uid;
 }
 
 inline bool
 JS::Zone::hasUniqueId(js::gc::Cell* cell)
 {
     MOZ_ASSERT(js::CurrentThreadCanAccessZone(this) || js::CurrentThreadIsPerformingGC());
     return uniqueIds().has(cell);
@@ -147,15 +151,16 @@ JS::Zone::removeUniqueId(js::gc::Cell* c
 }
 
 inline void
 JS::Zone::adoptUniqueIds(JS::Zone* source)
 {
     js::AutoEnterOOMUnsafeRegion oomUnsafe;
     for (js::gc::UniqueIdMap::Enum e(source->uniqueIds()); !e.empty(); e.popFront()) {
         MOZ_ASSERT(!uniqueIds().has(e.front().key()));
-        if (!uniqueIds().put(e.front().key(), e.front().value()))
+        if (!uniqueIds().put(e.front().key(), e.front().value())) {
             oomUnsafe.crash("failed to transfer unique ids from off-thread");
+        }
     }
     source->uniqueIds().clear();
 }
 
 #endif // gc_Zone_inl_h
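
The unique-id methods above share one pattern: ids are created lazily on first request and live in a side table keyed by the cell's address, which is why the GC has to maintain the table when cells move (the nursery registration above, adoptUniqueIds(), and the moving-GC check in Zone.cpp). A toy model with hypothetical types, using an infallible hash map where the real table's add() can fail on OOM:

    #include <cstdint>
    #include <unordered_map>

    struct Cell {};  // stand-in for js::gc::Cell

    struct ToyUniqueIds {
        std::unordered_map<const Cell*, uint64_t> table;
        uint64_t nextId = 1;

        uint64_t getOrCreate(const Cell* cell) {
            auto p = table.find(cell);
            if (p != table.end()) {
                return p->second;  // an existing id always wins
            }
            uint64_t uid = nextId++;
            table.emplace(cell, uid);
            return uid;
        }

        // When the GC moves a cell, the table must be rekeyed so the id
        // follows the cell to its new address.
        void rekey(const Cell* from, const Cell* to) {
            auto p = table.find(from);
            if (p != table.end()) {
                table.emplace(to, p->second);
                table.erase(p);
            }
        }
    };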
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -80,18 +80,19 @@ JS::Zone::Zone(JSRuntime* rt)
     jitCodeCounter.setMax(jit::MaxCodeBytesPerProcess * 0.8, lock);
 }
 
 Zone::~Zone()
 {
     MOZ_ASSERT(helperThreadUse_ == HelperThreadUse::None);
 
     JSRuntime* rt = runtimeFromAnyThread();
-    if (this == rt->gc.systemZone)
+    if (this == rt->gc.systemZone) {
         rt->gc.systemZone = nullptr;
+    }
 
     js_delete(debuggers.ref());
     js_delete(jitZone_.ref());
 
 #ifdef DEBUG
     // Avoid assertion failures warning that not everything has been destroyed
     // if the embedding leaked GC things.
     if (!rt->gc.shutdownCollectedEverything()) {
@@ -120,48 +121,53 @@ void
 Zone::beginSweepTypes(bool releaseTypes)
 {
     types.beginSweep(releaseTypes);
 }
 
 Zone::DebuggerVector*
 Zone::getOrCreateDebuggers(JSContext* cx)
 {
-    if (debuggers)
+    if (debuggers) {
         return debuggers;
+    }
 
     debuggers = js_new<DebuggerVector>();
-    if (!debuggers)
+    if (!debuggers) {
         ReportOutOfMemory(cx);
+    }
     return debuggers;
 }
 
 void
 Zone::sweepBreakpoints(FreeOp* fop)
 {
-    if (fop->runtime()->debuggerList().isEmpty())
+    if (fop->runtime()->debuggerList().isEmpty()) {
         return;
+    }
 
     /*
      * Sweep all compartments in a zone at the same time, since there is no way
      * to iterate over the scripts belonging to a single compartment in a zone.
      */
 
     MOZ_ASSERT(isGCSweepingOrCompacting());
     for (auto iter = cellIter<JSScript>(); !iter.done(); iter.next()) {
         JSScript* script = iter;
-        if (!script->hasAnyBreakpointsOrStepMode())
+        if (!script->hasAnyBreakpointsOrStepMode()) {
             continue;
+        }
 
         bool scriptGone = IsAboutToBeFinalizedUnbarriered(&script);
         MOZ_ASSERT(script == iter);
         for (unsigned i = 0; i < script->length(); i++) {
             BreakpointSite* site = script->getBreakpointSite(script->offsetToPC(i));
-            if (!site)
+            if (!site) {
                 continue;
+            }
 
             Breakpoint* nextbp;
             for (Breakpoint* bp = site->firstBreakpoint(); bp; bp = nextbp) {
                 nextbp = bp->nextInSite();
                 GCPtrNativeObject& dbgobj = bp->debugger->toJSObjectRef();
 
                 // If we are sweeping, then we expect the script and the
                 // debugger object to be swept in the same sweep group, except
@@ -169,76 +175,82 @@ Zone::sweepBreakpoints(FreeOp* fop)
                 // groups. In this case both script and debugger object must be
                 // live.
                 MOZ_ASSERT_IF(isGCSweeping() && dbgobj->zone()->isCollecting(),
                               dbgobj->zone()->isGCSweeping() ||
                               (!scriptGone && dbgobj->asTenured().isMarkedAny()));
 
                 bool dying = scriptGone || IsAboutToBeFinalized(&dbgobj);
                 MOZ_ASSERT_IF(!dying, !IsAboutToBeFinalized(&bp->getHandlerRef()));
-                if (dying)
+                if (dying) {
                     bp->destroy(fop);
+                }
             }
         }
     }
 }
 
 void
 Zone::sweepWeakMaps()
 {
     /* Finalize unreachable (key,value) pairs in all weak maps. */
     WeakMapBase::sweepZone(this);
 }
 
 void
 Zone::discardJitCode(FreeOp* fop, bool discardBaselineCode)
 {
-    if (!jitZone())
+    if (!jitZone()) {
         return;
+    }
 
-    if (isPreservingCode())
+    if (isPreservingCode()) {
         return;
+    }
 
     if (discardBaselineCode) {
 #ifdef DEBUG
         /* Assert no baseline scripts are marked as active. */
-        for (auto script = cellIter<JSScript>(); !script.done(); script.next())
+        for (auto script = cellIter<JSScript>(); !script.done(); script.next()) {
             MOZ_ASSERT_IF(script->hasBaselineScript(), !script->baselineScript()->active());
+        }
 #endif
 
         /* Mark baseline scripts on the stack as active. */
         jit::MarkActiveBaselineScripts(this);
     }
 
     /* Only mark OSI points if code is being discarded. */
     jit::InvalidateAll(fop, this);
 
     for (auto script = cellIter<JSScript>(); !script.done(); script.next()) {
         jit::FinishInvalidation(fop, script);
 
         /*
          * Discard baseline script if it's not marked as active. Note that
          * this also resets the active flag.
          */
-        if (discardBaselineCode)
+        if (discardBaselineCode) {
             jit::FinishDiscardBaselineScript(fop, script);
+        }
 
         /*
          * Warm-up counters for scripts are reset on GC. After discarding code we
          * need to let them warm back up to get information such as which
          * opcodes are setting array holes or accessing getter properties.
          */
         script->resetWarmUpCounter();
 
         /*
          * Make it impossible to use the control flow graphs cached on the
          * BaselineScript. They get deleted.
          */
-        if (script->hasBaselineScript())
+        if (script->hasBaselineScript()) {
             script->baselineScript()->setControlFlowGraph(nullptr);
+        }
     }
 
     /*
      * When scripts contain pointers to nursery things, the store buffer
      * can contain entries that point into the optimized stub space. Since
      * this method can be called outside the context of a GC, this situation
      * could result in us trying to mark invalid store buffer entries.
      *
@@ -256,82 +268,89 @@ Zone::discardJitCode(FreeOp* fop, bool d
      */
     jitZone()->cfgSpace()->lifoAlloc().freeAll();
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
 JS::Zone::checkUniqueIdTableAfterMovingGC()
 {
-    for (auto r = uniqueIds().all(); !r.empty(); r.popFront())
+    for (auto r = uniqueIds().all(); !r.empty(); r.popFront()) {
         js::gc::CheckGCThingAfterMovingGC(r.front().key());
+    }
 }
 #endif
 
 uint64_t
 Zone::gcNumber()
 {
     // Zones in use by exclusive threads are not collected, and threads using
     // them cannot access the main runtime's gcNumber without racing.
     return usedByHelperThread() ? 0 : runtimeFromMainThread()->gc.gcNumber();
 }
 
 js::jit::JitZone*
 Zone::createJitZone(JSContext* cx)
 {
     MOZ_ASSERT(!jitZone_);
 
-    if (!cx->runtime()->getJitRuntime(cx))
+    if (!cx->runtime()->getJitRuntime(cx)) {
         return nullptr;
+    }
 
     UniquePtr<jit::JitZone> jitZone(cx->new_<js::jit::JitZone>());
-    if (!jitZone)
+    if (!jitZone) {
         return nullptr;
+    }
 
     jitZone_ = jitZone.release();
     return jitZone_;
 }
 
 bool
 Zone::hasMarkedRealms()
 {
     for (RealmsInZoneIter realm(this); !realm.done(); realm.next()) {
-        if (realm->marked())
+        if (realm->marked()) {
             return true;
+        }
     }
     return false;
 }
 
 bool
 Zone::canCollect()
 {
     // The atoms zone cannot be collected while off-thread parsing is taking
     // place.
-    if (isAtomsZone())
+    if (isAtomsZone()) {
         return !runtimeFromAnyThread()->hasHelperThreadZones();
+    }
 
     // Zones that will be or are currently used by other threads cannot be
     // collected.
     return !createdForHelperThread();
 }
 
 void
 Zone::notifyObservingDebuggers()
 {
     JSRuntime* rt = runtimeFromMainThread();
     JSContext* cx = rt->mainContextFromOwnThread();
 
     for (RealmsInZoneIter realms(this); !realms.done(); realms.next()) {
         RootedGlobalObject global(cx, realms->unsafeUnbarrieredMaybeGlobal());
-        if (!global)
+        if (!global) {
             continue;
+        }
 
         GlobalObject::DebuggerVector* dbgs = global->getDebuggers();
-        if (!dbgs)
+        if (!dbgs) {
             continue;
+        }
 
         for (GlobalObject::DebuggerVector::Range r = dbgs->all(); !r.empty(); r.popFront()) {
             if (!r.front()->debuggeeIsBeingCollected(rt->gc.majorGCCount())) {
 #ifdef DEBUG
                 fprintf(stderr,
                         "OOM while notifying observing Debuggers of a GC: The onGarbageCollection\n"
                         "hook will not be fired for this GC for some Debuggers!\n");
 #endif
@@ -445,56 +464,60 @@ Zone::purgeAtomCache()
 {
     MOZ_ASSERT(!hasKeptAtoms());
     MOZ_ASSERT(!purgeAtomsDeferred);
 
     atomCache().clearAndCompact();
 
     // Also purge the dtoa caches so that subsequent lookups populate the
     // atom cache too.
-    for (RealmsInZoneIter r(this); !r.done(); r.next())
+    for (RealmsInZoneIter r(this); !r.done(); r.next()) {
         r->dtoaCache.purge();
+    }
 }
 
 void
 Zone::traceAtomCache(JSTracer* trc)
 {
     MOZ_ASSERT(hasKeptAtoms());
     for (auto r = atomCache().all(); !r.empty(); r.popFront()) {
         JSAtom* atom = r.front().asPtrUnbarriered();
         TraceRoot(trc, &atom, "kept atom");
         MOZ_ASSERT(r.front().asPtrUnbarriered() == atom);
     }
 }
 
 void*
 Zone::onOutOfMemory(js::AllocFunction allocFunc, size_t nbytes, void* reallocPtr)
 {
-    if (!js::CurrentThreadCanAccessRuntime(runtime_))
+    if (!js::CurrentThreadCanAccessRuntime(runtime_)) {
         return nullptr;
+    }
     return runtimeFromMainThread()->onOutOfMemory(allocFunc, nbytes, reallocPtr);
 }
 
 void
 Zone::reportAllocationOverflow()
 {
     js::ReportAllocationOverflow(nullptr);
 }
 
 void
 JS::Zone::maybeTriggerGCForTooMuchMalloc(js::gc::MemoryCounter& counter, TriggerKind trigger)
 {
     JSRuntime* rt = runtimeFromAnyThread();
 
-    if (!js::CurrentThreadCanAccessRuntime(rt))
+    if (!js::CurrentThreadCanAccessRuntime(rt)) {
         return;
+    }
 
     bool wouldInterruptGC = rt->gc.isIncrementalGCInProgress() && !isCollecting();
-    if (wouldInterruptGC && !counter.shouldResetIncrementalGC(rt->gc.tunables))
+    if (wouldInterruptGC && !counter.shouldResetIncrementalGC(rt->gc.tunables)) {
         return;
+    }
 
     if (!rt->gc.triggerZoneGC(this, JS::gcreason::TOO_MUCH_MALLOC,
                               counter.bytes(), counter.maxBytes()))
     {
         return;
     }
 
     counter.recordTrigger(trigger);
@@ -516,18 +539,19 @@ ZoneList::~ZoneList()
     MOZ_ASSERT(isEmpty());
 }
 
 void
 ZoneList::check() const
 {
 #ifdef DEBUG
     MOZ_ASSERT((head == nullptr) == (tail == nullptr));
-    if (!head)
+    if (!head) {
         return;
+    }
 
     Zone* zone = head;
     for (;;) {
         MOZ_ASSERT(zone && zone->isOnList());
-        if  (zone == tail)
+        if (zone == tail) {
             break;
+        }
         zone = zone->listNext_;
     }
@@ -558,46 +582,49 @@ ZoneList::append(Zone* zone)
 
 void
 ZoneList::transferFrom(ZoneList& other)
 {
     check();
     other.check();
     MOZ_ASSERT(tail != other.tail);
 
-    if (tail)
+    if (tail) {
         tail->listNext_ = other.head;
-    else
+    } else {
         head = other.head;
+    }
     tail = other.tail;
 
     other.head = nullptr;
     other.tail = nullptr;
 }
 
 Zone*
 ZoneList::removeFront()
 {
     MOZ_ASSERT(!isEmpty());
     check();
 
     Zone* front = head;
     head = head->listNext_;
-    if (!head)
+    if (!head) {
         tail = nullptr;
+    }
 
     front->listNext_ = Zone::NotOnList;
 
     return front;
 }
 
 void
 ZoneList::clear()
 {
-    while (!isEmpty())
+    while (!isEmpty()) {
         removeFront();
+    }
 }
 
 JS_PUBLIC_API(void)
 JS::shadow::RegisterWeakCache(JS::Zone* zone, detail::WeakCacheBase* cachep)
 {
     zone->registerWeakCache(cachep);
 }
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -243,20 +243,21 @@ class Zone : public JS::shadow::Zone,
     }
 
     bool isCollecting() const {
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromMainThread()));
         return isCollectingFromAnyThread();
     }
 
     bool isCollectingFromAnyThread() const {
-        if (RuntimeHeapIsCollecting())
+        if (RuntimeHeapIsCollecting()) {
             return gcState_ != NoGC;
-        else
+        } else {
             return needsIncrementalBarrier();
+        }
     }
 
     bool shouldMarkInZone() const {
         return needsIncrementalBarrier() || isGCMarking();
     }
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
@@ -421,18 +422,19 @@ class Zone : public JS::shadow::Zone,
     // since wasm can generate code that outlives a zone.
     js::gc::MemoryCounter jitCodeCounter;
 
     void updateMemoryCounter(js::gc::MemoryCounter& counter, size_t nbytes) {
         JSRuntime* rt = runtimeFromAnyThread();
 
         counter.update(nbytes);
         auto trigger = counter.shouldTriggerGC(rt->gc.tunables);
-        if (MOZ_LIKELY(trigger == js::gc::NoTrigger) || trigger <= counter.triggered())
+        if (MOZ_LIKELY(trigger == js::gc::NoTrigger) || trigger <= counter.triggered()) {
             return;
+        }
 
         maybeTriggerGCForTooMuchMalloc(counter, trigger);
     }
 
     void maybeTriggerGCForTooMuchMalloc(js::gc::MemoryCounter& counter,
                                         js::gc::TriggerKind trigger);
 
     js::MainThreadData<js::UniquePtr<js::RegExpZone>> regExps_;