Bug 1488698 - Always use braces for if/for/while statements in js/public. r=jandem
author: Jan de Mooij <jdemooij@mozilla.com>
Thu, 06 Sep 2018 12:11:07 +0200
changeset 491489 f2bedf1fe932f1bb3277a4db1e11fbb02b3242d8
parent 491488 a6baf63a4fd58b89dd1aad32a840aeb1288cb5f1
child 491490 65906ffca0b55b15c64db6cddf03ca1f99482551
push id: 9984
push user: ffxbld-merge
push date: Mon, 15 Oct 2018 21:07:35 +0000
treeherder: mozilla-beta@183d27ea8570 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jandem
bugs: 1488698
milestone: 64.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1488698 - Always use braces for if/for/while statements in js/public. r=jandem
js/public/AllocPolicy.h
js/public/CallArgs.h
js/public/CallNonGenericMethod.h
js/public/Class.h
js/public/CompileOptions.h
js/public/Conversions.h
js/public/Date.h
js/public/GCHashTable.h
js/public/GCPolicyAPI.h
js/public/GCVariant.h
js/public/GCVector.h
js/public/HeapAPI.h
js/public/Id.h
js/public/MemoryMetrics.h
js/public/ProfilingStack.h
js/public/Proxy.h
js/public/Realm.h
js/public/RootingAPI.h
js/public/SliceBudget.h
js/public/SourceBufferHolder.h
js/public/StableStringChars.h
js/public/StructuredClone.h
js/public/TracingAPI.h
js/public/UbiNodeBreadthFirst.h
js/public/UbiNodeDominatorTree.h
js/public/UbiNodePostOrder.h
js/public/UbiNodeShortestPaths.h
js/public/Utility.h
js/public/Value.h
--- a/js/public/AllocPolicy.h
+++ b/js/public/AllocPolicy.h
@@ -74,45 +74,49 @@ class TempAllocPolicy : public AllocPoli
      * code bloat.
      */
     JS_FRIEND_API(void*) onOutOfMemory(AllocFunction allocFunc, size_t nbytes,
                                        void* reallocPtr = nullptr);
 
     template <typename T>
     T* onOutOfMemoryTyped(AllocFunction allocFunc, size_t numElems, void* reallocPtr = nullptr) {
         size_t bytes;
-        if (MOZ_UNLIKELY(!CalculateAllocSize<T>(numElems, &bytes)))
+        if (MOZ_UNLIKELY(!CalculateAllocSize<T>(numElems, &bytes))) {
             return nullptr;
+        }
         return static_cast<T*>(onOutOfMemory(allocFunc, bytes, reallocPtr));
     }
 
   public:
     MOZ_IMPLICIT TempAllocPolicy(JSContext* cx) : cx_(cx) {}
 
     template <typename T>
     T* pod_malloc(size_t numElems) {
         T* p = this->maybe_pod_malloc<T>(numElems);
-        if (MOZ_UNLIKELY(!p))
+        if (MOZ_UNLIKELY(!p)) {
             p = onOutOfMemoryTyped<T>(AllocFunction::Malloc, numElems);
+        }
         return p;
     }
 
     template <typename T>
     T* pod_calloc(size_t numElems) {
         T* p = this->maybe_pod_calloc<T>(numElems);
-        if (MOZ_UNLIKELY(!p))
+        if (MOZ_UNLIKELY(!p)) {
             p = onOutOfMemoryTyped<T>(AllocFunction::Calloc, numElems);
+        }
         return p;
     }
 
     template <typename T>
     T* pod_realloc(T* prior, size_t oldSize, size_t newSize) {
         T* p2 = this->maybe_pod_realloc<T>(prior, oldSize, newSize);
-        if (MOZ_UNLIKELY(!p2))
+        if (MOZ_UNLIKELY(!p2)) {
             p2 = onOutOfMemoryTyped<T>(AllocFunction::Realloc, newSize, prior);
+        }
         return p2;
     }
 
     template <typename T>
     void free_(T* p, size_t numElems = 0) {
         js_free(p);
     }
 
--- a/js/public/CallArgs.h
+++ b/js/public/CallArgs.h
@@ -160,22 +160,24 @@ class MOZ_STACK_CLASS CallArgsBase
      */
     JSObject& callee() const {
         return calleev().toObject();
     }
 
     // CALLING/CONSTRUCTING-DIFFERENTIATIONS
 
     bool isConstructing() const {
-        if (!argv_[-1].isMagic())
+        if (!argv_[-1].isMagic()) {
             return false;
+        }
 
 #ifdef JS_DEBUG
-        if (!this->usedRval())
+        if (!this->usedRval()) {
             CheckIsValidConstructible(calleev());
+        }
 #endif
 
         return true;
     }
 
     bool ignoresReturnValue() const {
         return ignoresReturnValue_;
     }
@@ -307,18 +309,19 @@ class MOZ_STACK_CLASS CallArgs : public 
         args.clearUsedRval();
         args.argv_ = argv;
         args.argc_ = argc;
         args.constructing_ = constructing;
         args.ignoresReturnValue_ = ignoresReturnValue;
 #ifdef DEBUG
         MOZ_ASSERT(ValueIsNotGray(args.thisv()));
         MOZ_ASSERT(ValueIsNotGray(args.calleev()));
-        for (unsigned i = 0; i < argc; ++i)
+        for (unsigned i = 0; i < argc; ++i) {
             MOZ_ASSERT(ValueIsNotGray(argv[i]));
+        }
 #endif
         return args;
     }
 
   public:
     /*
      * Returns true if there are at least |required| arguments passed in. If
      * false, it reports an error message on the context.
--- a/js/public/CallNonGenericMethod.h
+++ b/js/public/CallNonGenericMethod.h
@@ -91,27 +91,29 @@ CallMethodIfWrapped(JSContext* cx, IsAcc
 // Note: JS::CallNonGenericMethod will only work correctly if it's called in
 //       tail position in a JSNative.  Do not call it from any other place.
 //
 template<IsAcceptableThis Test, NativeImpl Impl>
 MOZ_ALWAYS_INLINE bool
 CallNonGenericMethod(JSContext* cx, const CallArgs& args)
 {
     HandleValue thisv = args.thisv();
-    if (Test(thisv))
+    if (Test(thisv)) {
         return Impl(cx, args);
+    }
 
     return detail::CallMethodIfWrapped(cx, Test, Impl, args);
 }
 
 MOZ_ALWAYS_INLINE bool
 CallNonGenericMethod(JSContext* cx, IsAcceptableThis Test, NativeImpl Impl, const CallArgs& args)
 {
     HandleValue thisv = args.thisv();
-    if (Test(thisv))
+    if (Test(thisv)) {
         return Impl(cx, args);
+    }
 
     return detail::CallMethodIfWrapped(cx, Test, Impl, args);
 }
 
 } // namespace JS
 
 #endif /* js_CallNonGenericMethod_h */
--- a/js/public/Class.h
+++ b/js/public/Class.h
@@ -219,18 +219,19 @@ class ObjectOpResult
      * -   If ok(), then we succeeded. Do nothing and return true.
      * -   Otherwise, if |strict| is true, or if cx has both extraWarnings and
      *     werrorOption enabled, throw a TypeError and return false.
      * -   Otherwise, if cx has extraWarnings enabled, emit a warning and
      *     return true.
      * -   Otherwise, do nothing and return true.
      */
     bool checkStrictErrorOrWarning(JSContext* cx, HandleObject obj, HandleId id, bool strict) {
-        if (ok())
+        if (ok()) {
             return true;
+        }
         return reportStrictErrorOrWarning(cx, obj, id, strict);
     }
 
     /*
      * The same as checkStrictErrorOrWarning(cx, id, strict), except the
      * operation is not associated with a particular property id. This is
      * used for [[PreventExtensions]] and [[SetPrototypeOf]]. failureCode()
      * must not be an error that has "{0}" in the error message.
@@ -682,18 +683,19 @@ struct MOZ_STATIC_CLASS ClassSpec
     bool defined() const { return !!createConstructor; }
 
     // The ProtoKey this class inherits from.
     JSProtoKey inheritanceProtoKey() const {
         MOZ_ASSERT(defined());
         static_assert(JSProto_Null == 0, "zeroed key must be null");
 
         // Default: Inherit from Object.
-        if (!(flags & ProtoKeyMask))
+        if (!(flags & ProtoKeyMask)) {
             return JSProto_Object;
+        }
 
         return JSProtoKey(flags & ProtoKeyMask);
     }
 
     bool shouldDefineConstructor() const {
         MOZ_ASSERT(defined());
         return !(flags & DontDefineConstructor);
     }
--- a/js/public/CompileOptions.h
+++ b/js/public/CompileOptions.h
@@ -335,18 +335,19 @@ class JS_PUBLIC_API(OwningCompileOptions
         introductionType = t;
         return *this;
     }
 
     bool setIntroductionInfo(JSContext* cx, const char* introducerFn,
                              const char* intro, unsigned line,
                              JSScript* script, uint32_t offset)
     {
-        if (!setIntroducerFilename(cx, introducerFn))
+        if (!setIntroducerFilename(cx, introducerFn)) {
             return false;
+        }
 
         introductionType = intro;
         introductionLineno = line;
         introductionScriptRoot = script;
         introductionOffset = offset;
         hasIntroductionInfo = true;
         return true;
     }
--- a/js/public/Conversions.h
+++ b/js/public/Conversions.h
@@ -104,28 +104,32 @@ inline void AssertArgumentsAreSane(JSCon
  */
 extern JS_PUBLIC_API(bool)
 OrdinaryToPrimitive(JSContext* cx, HandleObject obj, JSType type, MutableHandleValue vp);
 
 /* ES6 draft 20141224, 7.1.2. */
 MOZ_ALWAYS_INLINE bool
 ToBoolean(HandleValue v)
 {
-    if (v.isBoolean())
+    if (v.isBoolean()) {
         return v.toBoolean();
-    if (v.isInt32())
+    }
+    if (v.isInt32()) {
         return v.toInt32() != 0;
-    if (v.isNullOrUndefined())
+    }
+    if (v.isNullOrUndefined()) {
         return false;
+    }
     if (v.isDouble()) {
         double d = v.toDouble();
         return !mozilla::IsNaN(d) && d != 0;
     }
-    if (v.isSymbol())
+    if (v.isSymbol()) {
         return true;
+    }
 
     /* The slow path handles strings, BigInts and objects. */
     return js::ToBooleanSlow(v);
 }
 
 /* ES6 draft 20141224, 7.1.3. */
 MOZ_ALWAYS_INLINE bool
 ToNumber(JSContext* cx, HandleValue v, double* out)
@@ -138,22 +142,24 @@ ToNumber(JSContext* cx, HandleValue v, d
     }
     return js::ToNumberSlow(cx, v, out);
 }
 
 /* ES6 draft 20141224, ToInteger (specialized for doubles). */
 inline double
 ToInteger(double d)
 {
-    if (d == 0)
+    if (d == 0) {
         return d;
+    }
 
     if (!mozilla::IsFinite(d)) {
-        if (mozilla::IsNaN(d))
+        if (mozilla::IsNaN(d)) {
             return 0;
+        }
         return d;
     }
 
     return d < 0 ? ceil(d) : floor(d);
 }
 
 /* ES6 draft 20141224, 7.1.5. */
 MOZ_ALWAYS_INLINE bool
@@ -266,29 +272,31 @@ ToUint64(JSContext* cx, HandleValue v, u
 }
 
 /* ES6 draft 20141224, 7.1.12. */
 MOZ_ALWAYS_INLINE JSString*
 ToString(JSContext* cx, HandleValue v)
 {
     detail::AssertArgumentsAreSane(cx, v);
 
-    if (v.isString())
+    if (v.isString()) {
         return v.toString();
+    }
     return js::ToStringSlow(cx, v);
 }
 
 /* ES6 draft 20141224, 7.1.13. */
 inline JSObject*
 ToObject(JSContext* cx, HandleValue v)
 {
     detail::AssertArgumentsAreSane(cx, v);
 
-    if (v.isObject())
+    if (v.isObject()) {
         return &v.toObject();
+    }
     return js::ToObjectSlow(cx, v, false);
 }
 
 /**
  * Convert a double value to UnsignedInteger (an unsigned integral type) using
  * ECMAScript-style semantics (that is, in like manner to how ECMAScript's
  * ToInt32 converts to int32_t).
  *
@@ -313,30 +321,32 @@ ToUnsignedInteger(double d)
     // Extract the exponent component.  (Be careful here!  It's not technically
     // the exponent in NaN, infinities, and subnormals.)
     int_fast16_t exp =
         int_fast16_t((bits & mozilla::FloatingPoint<double>::kExponentBits) >> DoubleExponentShift) -
         int_fast16_t(mozilla::FloatingPoint<double>::kExponentBias);
 
     // If the exponent's less than zero, abs(d) < 1, so the result is 0.  (This
     // also handles subnormals.)
-    if (exp < 0)
+    if (exp < 0) {
         return 0;
+    }
 
     uint_fast16_t exponent = mozilla::AssertedCast<uint_fast16_t>(exp);
 
     // If the exponent is greater than or equal to the bits of precision of a
     // double plus UnsignedInteger's width, the number is either infinite, NaN,
     // or too large to have lower-order bits in the congruent value.  (Example:
     // 2**84 is exactly representable as a double.  The next exact double is
     // 2**84 + 2**32.  Thus if UnsignedInteger is uint32_t, an exponent >= 84
     // implies floor(abs(d)) == 0 mod 2**32.)  Return 0 in all these cases.
     constexpr size_t ResultWidth = CHAR_BIT * sizeof(UnsignedInteger);
-    if (exponent >= DoubleExponentShift + ResultWidth)
+    if (exponent >= DoubleExponentShift + ResultWidth) {
         return 0;
+    }
 
     // The significand contains the bits that will determine the final result.
     // Shift those bits left or right, according to the exponent, to their
     // locations in the unsigned binary representation of floor(abs(d)).
     static_assert(sizeof(UnsignedInteger) <= sizeof(uint64_t),
                   "left-shifting below would lose upper bits");
     UnsignedInteger result = (exponent > DoubleExponentShift)
                              ? UnsignedInteger(bits << (exponent - DoubleExponentShift))
--- a/js/public/Date.h
+++ b/js/public/Date.h
@@ -98,18 +98,19 @@ class ClippedTime
 //
 // Clip a double to JavaScript's date range (or to an invalid date) using the
 // ECMAScript TimeClip algorithm.
 inline ClippedTime
 TimeClip(double time)
 {
     // Steps 1-2.
     const double MaxTimeMagnitude = 8.64e15;
-    if (!mozilla::IsFinite(time) || mozilla::Abs(time) > MaxTimeMagnitude)
+    if (!mozilla::IsFinite(time) || mozilla::Abs(time) > MaxTimeMagnitude) {
         return ClippedTime(mozilla::UnspecifiedNaN<double>());
+    }
 
     // Step 3.
     return ClippedTime(ToInteger(time) + (+0.0));
 }
 
 // Produce a double Value from the given time.  Because times may be NaN,
 // prefer using this to manual canonicalization.
 inline Value
--- a/js/public/GCHashTable.h
+++ b/js/public/GCHashTable.h
@@ -72,18 +72,19 @@ class GCHashMap : public js::HashMap<Key
     }
 
     bool needsSweep() const {
         return !this->empty();
     }
 
     void sweep() {
         for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
-            if (MapSweepPolicy::needsSweep(&e.front().mutableKey(), &e.front().value()))
+            if (MapSweepPolicy::needsSweep(&e.front().mutableKey(), &e.front().value())) {
                 e.removeFront();
+            }
         }
     }
 
     // GCHashMap is movable
     GCHashMap(GCHashMap&& rhs) : Base(std::move(rhs)) {}
     void operator=(GCHashMap&& rhs) {
         MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
         Base::operator=(std::move(rhs));
@@ -116,20 +117,21 @@ class GCRekeyableHashMap : public JS::GC
   public:
     explicit GCRekeyableHashMap(AllocPolicy a = AllocPolicy()) : Base(a) {}
     explicit GCRekeyableHashMap(size_t length) : Base(length) {}
     GCRekeyableHashMap(AllocPolicy a, size_t length) : Base(a, length) {}
 
     void sweep() {
         for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
             Key key(e.front().key());
-            if (MapSweepPolicy::needsSweep(&key, &e.front().value()))
+            if (MapSweepPolicy::needsSweep(&key, &e.front().value())) {
                 e.removeFront();
-            else if (!HashPolicy::match(key, e.front().key()))
+            } else if (!HashPolicy::match(key, e.front().key())) {
                 e.rekeyFront(key);
+            }
         }
     }
 
     // GCRekeyableHashMap is movable
     GCRekeyableHashMap(GCRekeyableHashMap&& rhs) : Base(std::move(rhs)) {}
     void operator=(GCRekeyableHashMap&& rhs) {
         MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
         Base::operator=(std::move(rhs));
@@ -237,28 +239,30 @@ class GCHashSet : public js::HashSet<T, 
 
   public:
     explicit GCHashSet(AllocPolicy a = AllocPolicy()) : Base(a)  {}
     explicit GCHashSet(size_t length) : Base(length)  {}
     GCHashSet(AllocPolicy a, size_t length) : Base(a, length)  {}
 
     static void trace(GCHashSet* set, JSTracer* trc) { set->trace(trc); }
     void trace(JSTracer* trc) {
-        for (typename Base::Enum e(*this); !e.empty(); e.popFront())
+        for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
             GCPolicy<T>::trace(trc, &e.mutableFront(), "hashset element");
+        }
     }
 
     bool needsSweep() const {
         return !this->empty();
     }
 
     void sweep() {
         for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
-            if (GCPolicy<T>::needsSweep(&e.mutableFront()))
+            if (GCPolicy<T>::needsSweep(&e.mutableFront())) {
                 e.removeFront();
+            }
         }
     }
 
     // GCHashSet is movable
     GCHashSet(GCHashSet&& rhs) : Base(std::move(rhs)) {}
     void operator=(GCHashSet&& rhs) {
         MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
         Base::operator=(std::move(rhs));
@@ -434,18 +438,19 @@ class WeakCache<GCHashMap<Key, Value, Ha
             range.popFront();
             settle();
         }
 
       private:
         typename Map::Range range;
 
         void settle() {
-            while (!empty() && entryNeedsSweep(front()))
+            while (!empty() && entryNeedsSweep(front())) {
                 popFront();
+            }
         }
     };
 
     struct Enum : public Map::Enum
     {
         explicit Enum(Self& cache)
           : Map::Enum(cache.map)
         {
@@ -526,18 +531,19 @@ class WeakCache<GCHashMap<Key, Value, Ha
         // This currently supports removing entries during incremental
         // sweeping. If we allow these tables to be swept incrementally this may
         // no longer be possible.
         map.remove(p);
     }
 
     void remove(const Lookup& l) {
         Ptr p = lookup(l);
-        if (p)
+        if (p) {
             remove(p);
+        }
     }
 
     template<typename KeyInput, typename ValueInput>
     bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
         return map.add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
     }
 
     template<typename KeyInput, typename ValueInput>
@@ -629,18 +635,19 @@ class WeakCache<GCHashSet<T, HashPolicy,
             range.popFront();
             settle();
         }
 
       private:
         typename Set::Range range;
 
         void settle() {
-            while (!empty() && entryNeedsSweep(front()))
+            while (!empty() && entryNeedsSweep(front())) {
                 popFront();
+            }
         }
     };
 
     struct Enum : public Set::Enum
     {
         explicit Enum(Self& cache)
           : Set::Enum(cache.set)
         {
@@ -721,18 +728,19 @@ class WeakCache<GCHashSet<T, HashPolicy,
         // This currently supports removing entries during incremental
         // sweeping. If we allow these tables to be swept incrementally this may
         // no longer be possible.
         set.remove(p);
     }
 
     void remove(const Lookup& l) {
         Ptr p = lookup(l);
-        if (p)
+        if (p) {
             remove(p);
+        }
     }
 
     template<typename TInput>
     bool add(AddPtr& p, TInput&& t) {
         return set.add(p, std::forward<TInput>(t));
     }
 
     template<typename TInput>
--- a/js/public/GCPolicyAPI.h
+++ b/js/public/GCPolicyAPI.h
@@ -107,44 +107,48 @@ template <> struct GCPolicy<uint64_t> : 
 
 template <typename T>
 struct GCPointerPolicy
 {
     static_assert(mozilla::IsPointer<T>::value,
                   "Non-pointer type not allowed for GCPointerPolicy");
 
     static void trace(JSTracer* trc, T* vp, const char* name) {
-        if (*vp)
+        if (*vp) {
             js::UnsafeTraceManuallyBarrieredEdge(trc, vp, name);
+        }
     }
     static bool needsSweep(T* vp) {
-        if (*vp)
+        if (*vp) {
             return js::gc::IsAboutToBeFinalizedUnbarriered(vp);
+        }
         return false;
     }
     static bool isValid(T v) {
         return js::gc::IsCellPointerValidOrNull(v);
     }
 };
 #define EXPAND_SPECIALIZE_GCPOLICY(Type) \
     template <> struct GCPolicy<Type> : public GCPointerPolicy<Type> {}; \
     template <> struct GCPolicy<Type const> : public GCPointerPolicy<Type const> {};
 FOR_EACH_PUBLIC_GC_POINTER_TYPE(EXPAND_SPECIALIZE_GCPOLICY)
 #undef EXPAND_SPECIALIZE_GCPOLICY
 
 template <typename T>
 struct NonGCPointerPolicy
 {
     static void trace(JSTracer* trc, T* vp, const char* name) {
-        if (*vp)
+        if (*vp) {
             (*vp)->trace(trc);
+        }
     }
     static bool needsSweep(T* vp) {
-        if (*vp)
+        if (*vp) {
             return (*vp)->needsSweep();
+        }
         return false;
     }
     static bool isValid(T v) {
         return true;
     }
 };
 
 template <typename T>
@@ -158,48 +162,54 @@ struct GCPolicy<JS::Heap<T>>
     }
 };
 
 // GCPolicy<UniquePtr<T>> forwards the contained pointer to GCPolicy<T>.
 template <typename T, typename D>
 struct GCPolicy<mozilla::UniquePtr<T, D>>
 {
     static void trace(JSTracer* trc, mozilla::UniquePtr<T,D>* tp, const char* name) {
-        if (tp->get())
+        if (tp->get()) {
             GCPolicy<T>::trace(trc, tp->get(), name);
+        }
     }
     static bool needsSweep(mozilla::UniquePtr<T,D>* tp) {
-        if (tp->get())
+        if (tp->get()) {
             return GCPolicy<T>::needsSweep(tp->get());
+        }
         return false;
     }
     static bool isValid(const mozilla::UniquePtr<T,D>& t) {
-        if (t.get())
+        if (t.get()) {
             return GCPolicy<T>::isValid(*t.get());
+        }
         return true;
     }
 };
 
 // GCPolicy<Maybe<T>> forwards tracing/sweeping to GCPolicy<T*> if
 // when the Maybe<T> is full.
 template <typename T>
 struct GCPolicy<mozilla::Maybe<T>>
 {
     static void trace(JSTracer* trc, mozilla::Maybe<T>* tp, const char* name) {
-        if (tp->isSome())
+        if (tp->isSome()) {
             GCPolicy<T>::trace(trc, tp->ptr(), name);
+        }
     }
     static bool needsSweep(mozilla::Maybe<T>* tp) {
-        if (tp->isSome())
+        if (tp->isSome()) {
             return GCPolicy<T>::needsSweep(tp->ptr());
+        }
         return false;
     }
     static bool isValid(const mozilla::Maybe<T>& t) {
-        if (t.isSome())
+        if (t.isSome()) {
             return GCPolicy<T>::isValid(t.ref());
+        }
         return true;
     }
 };
 
 template <> struct GCPolicy<JS::Realm*>;  // see Realm.h
 
 } // namespace JS
 
--- a/js/public/GCVariant.h
+++ b/js/public/GCVariant.h
@@ -43,18 +43,19 @@ struct GCVariantImplementation;
 
 // The base case.
 template <typename T>
 struct GCVariantImplementation<T>
 {
     template <typename ConcreteVariant>
     static void trace(JSTracer* trc, ConcreteVariant* v, const char* name) {
         T& thing = v->template as<T>();
-        if (!mozilla::IsPointer<T>::value || thing)
+        if (!mozilla::IsPointer<T>::value || thing) {
             GCPolicy<T>::trace(trc, &thing, name);
+        }
     }
 
     template <typename Matcher, typename ConcreteVariant>
     static typename Matcher::ReturnType
     match(Matcher& matcher, Handle<ConcreteVariant> v) {
         const T& thing = v.get().template as<T>();
         return matcher.match(Handle<T>::fromMarkedLocation(&thing));
     }
@@ -72,18 +73,19 @@ template <typename T, typename... Ts>
 struct GCVariantImplementation<T, Ts...>
 {
     using Next = GCVariantImplementation<Ts...>;
 
     template <typename ConcreteVariant>
     static void trace(JSTracer* trc, ConcreteVariant* v, const char* name) {
         if (v->template is<T>()) {
             T& thing = v->template as<T>();
-            if (!mozilla::IsPointer<T>::value || thing)
+            if (!mozilla::IsPointer<T>::value || thing) {
                 GCPolicy<T>::trace(trc, &thing, name);
+            }
         } else {
             Next::trace(trc, v, name);
         }
     }
 
     template <typename Matcher, typename ConcreteVariant>
     static typename Matcher::ReturnType
     match(Matcher& matcher, Handle<ConcreteVariant> v) {
--- a/js/public/GCVector.h
+++ b/js/public/GCVector.h
@@ -126,36 +126,39 @@ class GCVector
 
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
         return vector.sizeOfIncludingThis(mallocSizeOf);
     }
 
     static void trace(GCVector* vec, JSTracer* trc) { vec->trace(trc); }
 
     void trace(JSTracer* trc) {
-        for (auto& elem : vector)
+        for (auto& elem : vector) {
             GCPolicy<T>::trace(trc, &elem, "vector element");
+        }
     }
 
     bool needsSweep() const {
         return !this->empty();
     }
 
     void sweep() {
         uint32_t src, dst = 0;
         for (src = 0; src < length(); src++) {
             if (!GCPolicy<T>::needsSweep(&vector[src])) {
-                if (dst != src)
+                if (dst != src) {
                     vector[dst] = vector[src].unbarrieredGet();
+                }
                 dst++;
             }
         }
 
-        if (dst != length())
+        if (dst != length()) {
             vector.shrinkTo(dst);
+        }
     }
 };
 
 } // namespace JS
 
 namespace js {
 
 template <typename Wrapper, typename T, size_t Capacity, typename AllocPolicy>
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -270,18 +270,19 @@ class JS_FRIEND_API(GCCellPtr)
     template <typename T>
     explicit GCCellPtr(T* p) : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) { }
     explicit GCCellPtr(JSFunction* p) : ptr(checkedCast(p, JS::TraceKind::Object)) { }
     explicit GCCellPtr(JSFlatString* str) : ptr(checkedCast(str, JS::TraceKind::String)) { }
     explicit GCCellPtr(const Value& v);
 
     JS::TraceKind kind() const {
         JS::TraceKind traceKind = JS::TraceKind(ptr & OutOfLineTraceKindMask);
-        if (uintptr_t(traceKind) != OutOfLineTraceKindMask)
+        if (uintptr_t(traceKind) != OutOfLineTraceKindMask) {
             return traceKind;
+        }
         return outOfLineKind();
     }
 
     // Allow GCCellPtr to be used in a boolean context.
     explicit operator bool() const {
         MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
         return asCell();
     }
@@ -314,20 +315,22 @@ class JS_FRIEND_API(GCCellPtr)
     // Inline mark bitmap access requires direct pointer arithmetic.
     uintptr_t unsafeAsUIntPtr() const {
         MOZ_ASSERT(asCell());
         MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
         return reinterpret_cast<uintptr_t>(asCell());
     }
 
     MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
-        if (!is<JSString>() && !is<JS::Symbol>())
+        if (!is<JSString>() && !is<JS::Symbol>()) {
             return false;
-        if (is<JSString>())
+        }
+        if (is<JSString>()) {
             return JS::shadow::String::isPermanentAtom(asCell());
+        }
         MOZ_ASSERT(is<JS::Symbol>());
         return JS::shadow::Symbol::isWellKnownSymbol(asCell());
     }
 
   private:
     static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
         js::gc::Cell* cell = static_cast<js::gc::Cell*>(p);
         MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
@@ -424,31 +427,33 @@ TenuredCellIsMarkedGray(const Cell* cell
 {
     // Return true if GrayOrBlackBit is set and BlackBit is not set.
     MOZ_ASSERT(cell);
     MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
 
     uintptr_t* grayWord, grayMask;
     js::gc::detail::GetGCThingMarkWordAndMask(uintptr_t(cell), js::gc::ColorBit::GrayOrBlackBit,
                                               &grayWord, &grayMask);
-    if (!(*grayWord & grayMask))
+    if (!(*grayWord & grayMask)) {
         return false;
+    }
 
     uintptr_t* blackWord, blackMask;
     js::gc::detail::GetGCThingMarkWordAndMask(uintptr_t(cell), js::gc::ColorBit::BlackBit,
                                               &blackWord, &blackMask);
     return !(*blackWord & blackMask);
 }
 
 static MOZ_ALWAYS_INLINE bool
 CellIsMarkedGray(const Cell* cell)
 {
     MOZ_ASSERT(cell);
-    if (js::gc::IsInsideNursery(cell))
+    if (js::gc::IsInsideNursery(cell)) {
         return false;
+    }
     return TenuredCellIsMarkedGray(cell);
 }
 
 extern JS_PUBLIC_API(bool)
 CellIsMarkedGrayIfKnown(const Cell* cell);
 
 #ifdef DEBUG
 extern JS_PUBLIC_API(bool)
@@ -476,42 +481,47 @@ NurseryCellHasStoreBuffer(const void* ce
     return *reinterpret_cast<void**>(addr) != nullptr;
 }
 
 } /* namespace detail */
 
 MOZ_ALWAYS_INLINE bool
 IsInsideNursery(const js::gc::Cell* cell)
 {
-    if (!cell)
+    if (!cell) {
         return false;
+    }
     auto location = detail::GetCellLocation(cell);
     MOZ_ASSERT(location == ChunkLocation::Nursery || location == ChunkLocation::TenuredHeap);
     return location == ChunkLocation::Nursery;
 }
 
 MOZ_ALWAYS_INLINE bool
 IsCellPointerValid(const void* cell)
 {
     auto addr = uintptr_t(cell);
-    if (addr < ChunkSize || addr % CellAlignBytes != 0)
+    if (addr < ChunkSize || addr % CellAlignBytes != 0) {
         return false;
+    }
     auto location = detail::GetCellLocation(cell);
-    if (location == ChunkLocation::TenuredHeap)
+    if (location == ChunkLocation::TenuredHeap) {
         return !!detail::GetGCThingZone(addr);
-    if (location == ChunkLocation::Nursery)
+    }
+    if (location == ChunkLocation::Nursery) {
         return detail::NurseryCellHasStoreBuffer(cell);
+    }
     return false;
 }
 
 MOZ_ALWAYS_INLINE bool
 IsCellPointerValidOrNull(const void* cell)
 {
-    if (!cell)
+    if (!cell) {
         return true;
+    }
     return IsCellPointerValid(cell);
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
 
@@ -523,29 +533,31 @@ GetTenuredGCThingZone(GCCellPtr thing)
 }
 
 extern JS_PUBLIC_API(Zone*)
 GetNurseryStringZone(JSString* str);
 
 static MOZ_ALWAYS_INLINE Zone*
 GetStringZone(JSString* str)
 {
-    if (!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(str)))
+    if (!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(str))) {
         return js::gc::detail::GetGCThingZone(reinterpret_cast<uintptr_t>(str));
+    }
     return GetNurseryStringZone(str);
 }
 
 extern JS_PUBLIC_API(Zone*)
 GetObjectZone(JSObject* obj);
 
 static MOZ_ALWAYS_INLINE bool
 GCThingIsMarkedGray(GCCellPtr thing)
 {
-    if (thing.mayBeOwnedByOtherRuntime())
+    if (thing.mayBeOwnedByOtherRuntime()) {
         return false;
+    }
     return js::gc::detail::CellIsMarkedGrayIfKnown(thing.asCell());
 }
 
 extern JS_PUBLIC_API(JS::TraceKind)
 GCThingTraceKind(void* thing);
 
 extern JS_PUBLIC_API(void)
 EnableNurseryStrings(JSContext* cx);
@@ -605,49 +617,54 @@ IsIncrementalBarrierNeededOnTenuredGCThi
 }
 
 static MOZ_ALWAYS_INLINE void
 ExposeGCThingToActiveJS(JS::GCCellPtr thing)
 {
     // GC things residing in the nursery cannot be gray: they have no mark bits.
     // All live objects in the nursery are moved to tenured at the beginning of
     // each GC slice, so the gray marker never sees nursery things.
-    if (IsInsideNursery(thing.asCell()))
+    if (IsInsideNursery(thing.asCell())) {
         return;
+    }
 
     // There's nothing to do for permanent GC things that might be owned by
     // another runtime.
-    if (thing.mayBeOwnedByOtherRuntime())
+    if (thing.mayBeOwnedByOtherRuntime()) {
         return;
+    }
 
-    if (IsIncrementalBarrierNeededOnTenuredGCThing(thing))
+    if (IsIncrementalBarrierNeededOnTenuredGCThing(thing)) {
         JS::IncrementalReadBarrier(thing);
-    else if (js::gc::detail::TenuredCellIsMarkedGray(thing.asCell()))
+    } else if (js::gc::detail::TenuredCellIsMarkedGray(thing.asCell())) {
         JS::UnmarkGrayGCThingRecursively(thing);
+    }
 
     MOZ_ASSERT(!js::gc::detail::TenuredCellIsMarkedGray(thing.asCell()));
 }
 
 template <typename T>
 extern JS_PUBLIC_API(bool)
 EdgeNeedsSweepUnbarrieredSlow(T* thingp);
 
 static MOZ_ALWAYS_INLINE bool
 EdgeNeedsSweepUnbarriered(JSObject** objp)
 {
     // This function does not handle updating nursery pointers. Raw JSObject
     // pointers should be updated separately or replaced with
     // JS::Heap<JSObject*> which handles this automatically.
     MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
-    if (IsInsideNursery(reinterpret_cast<Cell*>(*objp)))
+    if (IsInsideNursery(reinterpret_cast<Cell*>(*objp))) {
         return false;
+    }
 
     auto zone = JS::shadow::Zone::asShadowZone(detail::GetGCThingZone(uintptr_t(*objp)));
-    if (!zone->isGCSweepingOrCompacting())
+    if (!zone->isGCSweepingOrCompacting()) {
         return false;
+    }
 
     return EdgeNeedsSweepUnbarrieredSlow(objp);
 }
 
 } // namespace gc
 } // namesapce js
 
 namespace JS {
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -146,18 +146,19 @@ JSID_IS_GCTHING(jsid id)
 {
     return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id);
 }
 
 static MOZ_ALWAYS_INLINE JS::GCCellPtr
 JSID_TO_GCTHING(jsid id)
 {
     void* thing = (void*)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK);
-    if (JSID_IS_STRING(id))
+    if (JSID_IS_STRING(id)) {
         return JS::GCCellPtr(thing, JS::TraceKind::String);
+    }
     MOZ_ASSERT(JSID_IS_SYMBOL(id));
     return JS::GCCellPtr(thing, JS::TraceKind::Symbol);
 }
 
 static MOZ_ALWAYS_INLINE bool
 JSID_IS_VOID(const jsid id)
 {
     MOZ_ASSERT_IF((JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_VOID,
@@ -191,55 +192,61 @@ struct GCPolicy<jsid>
         return !JSID_IS_GCTHING(id) || js::gc::IsCellPointerValid(JSID_TO_GCTHING(id).asCell());
     }
 };
 
 #ifdef DEBUG
 MOZ_ALWAYS_INLINE bool
 IdIsNotGray(jsid id)
 {
-    if (!JSID_IS_GCTHING(id))
+    if (!JSID_IS_GCTHING(id)) {
         return true;
+    }
 
     return CellIsNotGray(JSID_TO_GCTHING(id).asCell());
 }
 #endif
 
 } // namespace JS
 
 namespace js {
 
 template <>
 struct BarrierMethods<jsid>
 {
     static gc::Cell* asGCThingOrNull(jsid id) {
-        if (JSID_IS_STRING(id))
+        if (JSID_IS_STRING(id)) {
             return reinterpret_cast<gc::Cell*>(JSID_TO_STRING(id));
-        if (JSID_IS_SYMBOL(id))
+        }
+        if (JSID_IS_SYMBOL(id)) {
             return reinterpret_cast<gc::Cell*>(JSID_TO_SYMBOL(id));
+        }
         return nullptr;
     }
     static void postBarrier(jsid* idp, jsid prev, jsid next) {}
     static void exposeToJS(jsid id) {
-        if (JSID_IS_GCTHING(id))
+        if (JSID_IS_GCTHING(id)) {
             js::gc::ExposeGCThingToActiveJS(JSID_TO_GCTHING(id));
+        }
     }
 };
 
 // If the jsid is a GC pointer type, convert to that type and call |f| with
 // the pointer. If the jsid is not a GC type, calls F::defaultValue.
 template <typename F, typename... Args>
 auto
 DispatchTyped(F f, const jsid& id, Args&&... args)
   -> decltype(f(static_cast<JSString*>(nullptr), std::forward<Args>(args)...))
 {
-    if (JSID_IS_STRING(id))
+    if (JSID_IS_STRING(id)) {
         return f(JSID_TO_STRING(id), std::forward<Args>(args)...);
-    if (JSID_IS_SYMBOL(id))
+    }
+    if (JSID_IS_SYMBOL(id)) {
         return f(JSID_TO_SYMBOL(id), std::forward<Args>(args)...);
+    }
     MOZ_ASSERT(!JSID_IS_GCTHING(id));
     return F::defaultValue(id);
 }
 
 #undef id
 
 } // namespace js
 
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -564,18 +564,19 @@ struct RuntimeSizes
     RuntimeSizes()
       : FOR_EACH_SIZE(ZERO_SIZE)
         scriptSourceInfo(),
         code(),
         gc(),
         notableScriptSources()
     {
         allScriptSources = js_new<ScriptSourcesHashMap>();
-        if (!allScriptSources)
+        if (!allScriptSources) {
             MOZ_CRASH("oom");
+        }
     }
 
     ~RuntimeSizes() {
         // |allScriptSources| is usually deleted and set to nullptr before this
         // destructor runs. But there are failure cases due to OOMs that may
         // prevent that, so it doesn't hurt to try again here.
         js_delete(allScriptSources);
     }
--- a/js/public/ProfilingStack.h
+++ b/js/public/ProfilingStack.h
@@ -361,46 +361,49 @@ class ProfilingStack final
     {}
 
     ~ProfilingStack();
 
     void pushLabelFrame(const char* label, const char* dynamicString, void* sp,
                         uint32_t line, js::ProfilingStackFrame::Category category) {
         uint32_t oldStackPointer = stackPointer;
 
-        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow()))
+        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow())) {
             frames[oldStackPointer].initLabelFrame(label, dynamicString, sp, line, category);
+        }
 
         // This must happen at the end! The compiler will not reorder this
         // update because stackPointer is Atomic<..., ReleaseAcquire>, so any
         // the writes above will not be reordered below the stackPointer store.
         // Do the read and the write as two separate statements, in order to
         // make it clear that we don't need an atomic increment, which would be
         // more expensive on x86 than the separate operations done here.
         // This thread is the only one that ever changes the value of
         // stackPointer.
         stackPointer = oldStackPointer + 1;
     }
 
     void pushSpMarkerFrame(void* sp) {
         uint32_t oldStackPointer = stackPointer;
 
-        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow()))
+        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow())) {
             frames[oldStackPointer].initSpMarkerFrame(sp);
+        }
 
         // This must happen at the end, see the comment in pushLabelFrame.
         stackPointer = oldStackPointer + 1;
     }
 
     void pushJsFrame(const char* label, const char* dynamicString, JSScript* script,
                      jsbytecode* pc) {
         uint32_t oldStackPointer = stackPointer;
 
-        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow()))
+        if (MOZ_LIKELY(capacity > oldStackPointer) || MOZ_LIKELY(ensureCapacitySlow())) {
             frames[oldStackPointer].initJsFrame(label, dynamicString, script, pc);
+        }
 
         // This must happen at the end, see the comment in pushLabelFrame.
         stackPointer = oldStackPointer + 1;
     }
 
     void pop() {
         MOZ_ASSERT(stackPointer > 0);
         // Do the read and the write as two separate statements, in order to
--- a/js/public/Proxy.h
+++ b/js/public/Proxy.h
@@ -387,18 +387,19 @@ struct ProxyReservedSlots
 
     static inline int offsetOfPrivateSlot();
 
     static inline int offsetOfSlot(size_t slot) {
         return offsetof(ProxyReservedSlots, slots[0]) + slot * sizeof(Value);
     }
 
     void init(size_t nreserved) {
-        for (size_t i = 0; i < nreserved; i++)
+        for (size_t i = 0; i < nreserved; i++) {
             slots[i] = JS::UndefinedValue();
+        }
     }
 
     ProxyReservedSlots(const ProxyReservedSlots&) = delete;
     void operator=(const ProxyReservedSlots&) = delete;
 };
 
 struct ProxyValueArray
 {
@@ -472,20 +473,21 @@ SetValueInProxy(Value* slot, const Value
 inline void
 SetProxyReservedSlotUnchecked(JSObject* obj, size_t n, const Value& extra)
 {
     MOZ_ASSERT(n < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
 
     Value* vp = &GetProxyDataLayout(obj)->reservedSlots->slots[n];
 
     // Trigger a barrier before writing the slot.
-    if (vp->isGCThing() || extra.isGCThing())
+    if (vp->isGCThing() || extra.isGCThing()) {
         SetValueInProxy(vp, extra);
-    else
+    } else {
         *vp = extra;
+    }
 }
 
 } // namespace detail
 
 inline const BaseProxyHandler*
 GetProxyHandler(const JSObject* obj)
 {
     return detail::GetProxyDataLayout(obj)->handler;
@@ -526,20 +528,21 @@ SetProxyReservedSlot(JSObject* obj, size
 inline void
 SetProxyPrivate(JSObject* obj, const Value& value)
 {
     MOZ_ASSERT_IF(gc::detail::ObjectIsMarkedBlack(obj), JS::ValueIsNotGray(value));
 
     Value* vp = &detail::GetProxyDataLayout(obj)->values()->privateSlot;
 
     // Trigger a barrier before writing the slot.
-    if (vp->isGCThing() || value.isGCThing())
+    if (vp->isGCThing() || value.isGCThing()) {
         detail::SetValueInProxy(vp, value);
-    else
+    } else {
         *vp = value;
+    }
 }
 
 inline bool
 IsScriptedProxy(const JSObject* obj)
 {
     return IsProxy(obj) && GetProxyHandler(obj)->isScripted();
 }
 
@@ -604,18 +607,19 @@ class JS_FRIEND_API(AutoEnterPolicy)
         allow = handler->hasSecurityPolicy() ? handler->enter(cx, wrapper, id, act, mayThrow, &rv)
                                              : true;
         recordEnter(cx, wrapper, id, act);
         // We want to throw an exception if all of the following are true:
         // * The policy disallowed access.
         // * The policy set rv to false, indicating that we should throw.
         // * The caller did not instruct us to ignore exceptions.
         // * The policy did not throw itself.
-        if (!allow && !rv && mayThrow)
+        if (!allow && !rv && mayThrow) {
             reportErrorIfExceptionIsNotPending(cx, id);
+        }
     }
 
     virtual ~AutoEnterPolicy() { recordLeave(); }
     inline bool allowed() { return allow; }
     inline bool returnValue() { MOZ_ASSERT(!allowed()); return rv; }
 
   protected:
     // no-op constructor for subclass
--- a/js/public/Realm.h
+++ b/js/public/Realm.h
@@ -20,18 +20,19 @@ JS_PUBLIC_API(bool) RealmNeedsSweep(JS::
 
 namespace JS {
 
 // Each Realm holds a strong reference to its GlobalObject, and vice versa.
 template <>
 struct GCPolicy<Realm*> : public NonGCPointerPolicy<Realm*>
 {
     static void trace(JSTracer* trc, Realm** vp, const char* name) {
-        if (*vp)
+        if (*vp) {
             ::js::gc::TraceRealm(trc, *vp, name);
+        }
     }
     static bool needsSweep(Realm** vp) {
         return *vp && ::js::gc::RealmNeedsSweep(*vp);
     }
 };
 
 // Get the current realm, if any. The ECMAScript spec calls this "the current
 // Realm Record".
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -369,18 +369,19 @@ ObjectIsMarkedGray(const JS::Heap<JSObje
 }
 
 // The following *IsNotGray functions are for use in assertions and take account
 // of the eventual gray marking state at the end of any ongoing incremental GC.
 #ifdef DEBUG
 inline bool
 CellIsNotGray(js::gc::Cell* maybeCell)
 {
-    if (!maybeCell)
+    if (!maybeCell) {
         return true;
+    }
 
     return js::gc::detail::CellIsNotGray(maybeCell);
 }
 
 inline bool
 ObjectIsNotGray(JSObject* maybeObj)
 {
     return CellIsNotGray(reinterpret_cast<js::gc::Cell*>(maybeObj));
@@ -433,18 +434,19 @@ class TenuredHeap : public js::HeapBase<
                       "TenuredHeap<T> must be binary compatible with T.");
     }
     explicit TenuredHeap(T p) : bits(0) { setPtr(p); }
     explicit TenuredHeap(const TenuredHeap<T>& p) : bits(0) { setPtr(p.getPtr()); }
 
     void setPtr(T newPtr) {
         MOZ_ASSERT((reinterpret_cast<uintptr_t>(newPtr) & flagsMask) == 0);
         MOZ_ASSERT(js::gc::IsCellPointerValidOrNull(newPtr));
-        if (newPtr)
+        if (newPtr) {
             AssertGCThingMustBeTenured(newPtr);
+        }
         bits = (bits & flagsMask) | reinterpret_cast<uintptr_t>(newPtr);
     }
 
     void setFlags(uintptr_t flagsToSet) {
         MOZ_ASSERT((flagsToSet & ~flagsMask) == 0);
         bits |= flagsToSet;
     }
 
@@ -648,87 +650,96 @@ class MOZ_STACK_CLASS MutableHandle : pu
 
 namespace js {
 
 template <typename T>
 struct BarrierMethods<T*>
 {
     static T* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(T* v) {
-        if (!v)
+        if (!v) {
             return nullptr;
+        }
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
     static void postBarrier(T** vp, T* prev, T* next) {
-        if (next)
+        if (next) {
             JS::AssertGCThingIsNotNurseryAllocable(reinterpret_cast<js::gc::Cell*>(next));
+        }
     }
     static void exposeToJS(T* t) {
-        if (t)
+        if (t) {
             js::gc::ExposeGCThingToActiveJS(JS::GCCellPtr(t));
+        }
     }
 };
 
 template <>
 struct BarrierMethods<JSObject*>
 {
     static JSObject* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(JSObject* v) {
-        if (!v)
+        if (!v) {
             return nullptr;
+        }
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
     static void postBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
         JS::HeapObjectPostBarrier(vp, prev, next);
     }
     static void exposeToJS(JSObject* obj) {
-        if (obj)
+        if (obj) {
             JS::ExposeObjectToActiveJS(obj);
+        }
     }
 };
 
 template <>
 struct BarrierMethods<JSFunction*>
 {
     static JSFunction* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(JSFunction* v) {
-        if (!v)
+        if (!v) {
             return nullptr;
+        }
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
     static void postBarrier(JSFunction** vp, JSFunction* prev, JSFunction* next) {
         JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp),
                                   reinterpret_cast<JSObject*>(prev),
                                   reinterpret_cast<JSObject*>(next));
     }
     static void exposeToJS(JSFunction* fun) {
-        if (fun)
+        if (fun) {
             JS::ExposeObjectToActiveJS(reinterpret_cast<JSObject*>(fun));
+        }
     }
 };
 
 template <>
 struct BarrierMethods<JSString*>
 {
     static JSString* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(JSString* v) {
-        if (!v)
+        if (!v) {
             return nullptr;
+        }
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
     static void postBarrier(JSString** vp, JSString* prev, JSString* next) {
         JS::HeapStringPostBarrier(vp, prev, next);
     }
     static void exposeToJS(JSString* v) {
-        if (v)
+        if (v) {
             js::gc::ExposeGCThingToActiveJS(JS::GCCellPtr(v));
+        }
     }
 };
 
 // Provide hash codes for Cell kinds that may be relocated and, thus, not have
 // a stable address to use as the base for a hash code. Instead of the address,
 // this hasher uses Cell::getUniqueId to provide exact matches and as a base
 // for generating hash codes.
 //
@@ -1064,18 +1075,19 @@ inline JS::Realm*
 GetContextRealm(const JSContext* cx)
 {
     return JS::RootingContext::get(cx)->realm_;
 }
 
 inline JS::Compartment*
 GetContextCompartment(const JSContext* cx)
 {
-    if (JS::Realm* realm = GetContextRealm(cx))
+    if (JS::Realm* realm = GetContextRealm(cx)) {
         return GetCompartmentForRealm(realm);
+    }
     return nullptr;
 }
 
 inline JS::Zone*
 GetContextZone(const JSContext* cx)
 {
     return JS::RootingContext::get(cx)->zone_;
 }
--- a/js/public/SliceBudget.h
+++ b/js/public/SliceBudget.h
@@ -74,18 +74,19 @@ class JS_PUBLIC_API(SliceBudget)
         counter = unlimitedStartCounter;
     }
 
     void step(intptr_t amt = 1) {
         counter -= amt;
     }
 
     bool isOverBudget() {
-        if (counter > 0)
+        if (counter > 0) {
             return false;
+        }
         return checkOverBudget();
     }
 
     bool isWorkBudget() const { return deadline.IsNull(); }
     bool isTimeBudget() const { return !deadline.IsNull() && !isUnlimited(); }
     bool isUnlimited() const { return deadline == unlimitedDeadline; }
 
     int describe(char* buffer, size_t maxlen) const;
--- a/js/public/SourceBufferHolder.h
+++ b/js/public/SourceBufferHolder.h
@@ -83,18 +83,19 @@ class SourceBufferHolder final
         ownsChars_(other.ownsChars_)
     {
         other.data_ = nullptr;
         other.length_ = 0;
         other.ownsChars_ = false;
     }
 
     ~SourceBufferHolder() {
-        if (ownsChars_)
+        if (ownsChars_) {
             js_free(const_cast<char16_t*>(data_));
+        }
     }
 
     /** Access the underlying source buffer without affecting ownership. */
     const char16_t* get() const {
         return data_;
     }
 
     /** Length of the source buffer in char16_t code units (not bytes). */
--- a/js/public/StableStringChars.h
+++ b/js/public/StableStringChars.h
@@ -95,18 +95,19 @@ class MOZ_STACK_CLASS JS_FRIEND_API(Auto
         MOZ_ASSERT(state_ == TwoByte);
         return mozilla::Range<const char16_t>(twoByteChars_,
                                               GetStringLength(s_));
     }
 
     /* If we own the chars, transfer ownership to the caller. */
     bool maybeGiveOwnershipToCaller() {
         MOZ_ASSERT(state_ != Uninitialized);
-        if (!ownChars_.isSome() || !ownChars_->extractRawBuffer())
+        if (!ownChars_.isSome() || !ownChars_->extractRawBuffer()) {
             return false;
+        }
         state_ = Uninitialized;
         ownChars_.reset();
         return true;
     }
 
   private:
     AutoStableStringChars(const AutoStableStringChars& other) = delete;
     void operator=(const AutoStableStringChars& other) = delete;
--- a/js/public/StructuredClone.h
+++ b/js/public/StructuredClone.h
@@ -445,18 +445,19 @@ class MOZ_NON_MEMMOVABLE JS_PUBLIC_API(J
     }
 
     bool Init(size_t initialCapacity = 0) { return bufList_.Init(0, initialCapacity); }
 
     JS::StructuredCloneScope scope() const { return scope_; }
 
     void initScope(JS::StructuredCloneScope scope) {
         MOZ_ASSERT(Size() == 0, "initScope() of nonempty JSStructuredCloneData");
-        if (scope_ != JS::StructuredCloneScope::Unassigned)
+        if (scope_ != JS::StructuredCloneScope::Unassigned) {
             MOZ_ASSERT(scope_ == scope, "Cannot change scope after it has been initialized");
+        }
         scope_ = scope;
     }
 
     size_t Size() const { return bufList_.Size(); }
 
     const Iterator Start() const { return bufList_.Iter(); }
 
     bool Advance(Iterator& iter, size_t distance) const {
@@ -511,18 +512,19 @@ class MOZ_NON_MEMMOVABLE JS_PUBLIC_API(J
     // Iterate over all contained data, one BufferList segment's worth at a
     // time, and invoke the given FunctionToApply with the data pointer and
     // size. The function should return a bool value, and this loop will exit
     // with false if the function ever returns false.
     template <typename FunctionToApply>
     bool ForEachDataChunk(FunctionToApply&& function) const {
         Iterator iter = bufList_.Iter();
         while (!iter.Done()) {
-            if (!function(iter.Data(), iter.RemainingInSegment()))
+            if (!function(iter.Data(), iter.RemainingInSegment())) {
                 return false;
+            }
             iter.Advance(bufList_, iter.RemainingInSegment());
         }
         return true;
     }
 
     // Append the entire contents of other's bufList_ to our own.
     bool Append(const JSStructuredCloneData& other) {
         MOZ_ASSERT(scope_ == other.scope());
--- a/js/public/TracingAPI.h
+++ b/js/public/TracingAPI.h
@@ -380,18 +380,19 @@ namespace JS {
 // Note that while |edgep| must never be null, it is fine for |*edgep| to be
 // nullptr.
 
 template <typename T>
 inline void
 TraceEdge(JSTracer* trc, JS::Heap<T>* thingp, const char* name)
 {
     MOZ_ASSERT(thingp);
-    if (*thingp)
+    if (*thingp) {
         js::gc::TraceExternalEdge(trc, thingp->unsafeGet(), name);
+    }
 }
 
 template <typename T>
 inline void
 TraceEdge(JSTracer* trc, JS::TenuredHeap<T>* thingp, const char* name)
 {
     MOZ_ASSERT(thingp);
     if (T ptr = thingp->unbarrieredGetPtr()) {
--- a/js/public/UbiNodeBreadthFirst.h
+++ b/js/public/UbiNodeBreadthFirst.h
@@ -91,18 +91,19 @@ struct BreadthFirst {
     // Add |node| as a starting point for the traversal. You may add
     // as many starting points as you like. Return false on OOM.
     bool addStart(Node node) { return pending.append(node); }
 
     // Add |node| as a starting point for the traversal (see addStart) and also
     // add it to the |visited| set. Return false on OOM.
     bool addStartVisited(Node node) {
         typename NodeMap::AddPtr ptr = visited.lookupForAdd(node);
-        if (!ptr && !visited.add(ptr, node, typename Handler::NodeData()))
+        if (!ptr && !visited.add(ptr, node, typename Handler::NodeData())) {
             return false;
+        }
         return addStart(node);
     }
 
     // True if the handler wants us to compute edge names; doing so can be
     // expensive in time and memory. True by default.
     bool wantNames;
 
     // Traverse the graph in breadth-first order, starting at the given
@@ -119,51 +120,56 @@ struct BreadthFirst {
 
         // While there are pending nodes, visit them.
         while (!pending.empty()) {
             Node origin = pending.front();
             pending.popFront();
 
             // Get a range containing all origin's outgoing edges.
             auto range = origin.edges(cx, wantNames);
-            if (!range)
+            if (!range) {
                 return false;
+            }
 
             // Traverse each edge.
             for (; !range->empty(); range->popFront()) {
                 MOZ_ASSERT(!stopRequested);
 
                 Edge& edge = range->front();
                 typename NodeMap::AddPtr a = visited.lookupForAdd(edge.referent);
                 bool first = !a;
 
                 if (first) {
                     // This is the first time we've reached |edge.referent|.
                     // Mark it as visited.
-                    if (!visited.add(a, edge.referent, typename Handler::NodeData()))
+                    if (!visited.add(a, edge.referent, typename Handler::NodeData())) {
                         return false;
+                    }
                 }
 
                 MOZ_ASSERT(a);
 
                 // Report this edge to the visitor function.
-                if (!handler(*this, origin, edge, &a->value(), first))
+                if (!handler(*this, origin, edge, &a->value(), first)) {
                     return false;
+                }
 
-                if (stopRequested)
+                if (stopRequested) {
                     return true;
+                }
 
                 // Arrange to traverse this edge's referent's outgoing edges
                 // later --- unless |handler| asked us not to.
                 if (abandonRequested) {
                     // Skip the enqueue; reset flag for future iterations.
                     abandonRequested = false;
                 } else if (first) {
-                    if (!pending.append(edge.referent))
+                    if (!pending.append(edge.referent)) {
                         return false;
+                    }
                 }
             }
         }
 
         return true;
     }
 
     // Stop traversal, and return true from |traverse| without visiting any
--- a/js/public/UbiNodeDominatorTree.h
+++ b/js/public/UbiNodeDominatorTree.h
@@ -158,18 +158,19 @@ class JS_PUBLIC_API(DominatorTree)
          *     // Don't care about the first ten, for whatever reason.
          *     range->skip(10);
          *     for (const JS::ubi::Node& dominatedNode : *range) {
          *         // ...
          *     }
          */
         void skip(size_t n) {
             beginPtr += n;
-            if (beginPtr > endPtr)
+            if (beginPtr > endPtr) {
                 beginPtr = endPtr;
+            }
         }
     };
 
   private:
     /**
      * The set of all dominated sets in a dominator tree.
      *
      * Internally stores the sets in a contiguous array, with a side table of
@@ -231,23 +232,25 @@ class JS_PUBLIC_API(DominatorTree)
             // 3. Iterate over the full set of nodes again, filling in bucket
             //    entries from the end of the bucket's range to its
             //    beginning. This decrements each index as a bucket entry is
             //    filled in. After having filled in all of a bucket's entries,
             //    the index points to the start of the bucket.
 
             JS::ubi::Vector<uint32_t> dominated;
             JS::ubi::Vector<uint32_t> indices;
-            if (!dominated.growBy(length) || !indices.growBy(length))
+            if (!dominated.growBy(length) || !indices.growBy(length)) {
                 return mozilla::Nothing();
+            }
 
             // 1
             memset(indices.begin(), 0, length * sizeof(uint32_t));
-            for (uint32_t i = 0; i < length; i++)
+            for (uint32_t i = 0; i < length; i++) {
                 indices[doms[i]]++;
+            }
 
             // 2
             uint32_t sumOfSizes = 0;
             for (uint32_t i = 0; i < length; i++) {
                 sumOfSizes += indices[i];
                 MOZ_ASSERT(sumOfSizes <= length);
                 indices[i] = sumOfSizes;
             }
@@ -307,34 +310,36 @@ class JS_PUBLIC_API(DominatorTree)
         , nodeToPostOrderIndex(std::move(nodeToPostOrderIndex))
         , doms(std::move(doms))
         , dominatedSets(std::move(dominatedSets))
         , retainedSizes(mozilla::Nothing())
     { }
 
     static uint32_t intersect(JS::ubi::Vector<uint32_t>& doms, uint32_t finger1, uint32_t finger2) {
         while (finger1 != finger2) {
-            if (finger1 < finger2)
+            if (finger1 < finger2) {
                 finger1 = doms[finger1];
-            else if (finger2 < finger1)
+            } else if (finger2 < finger1) {
                 finger2 = doms[finger2];
+            }
         }
         return finger1;
     }
 
     // Do the post order traversal of the heap graph and populate our
     // predecessor sets.
     static MOZ_MUST_USE bool doTraversal(JSContext* cx, AutoCheckCannotGC& noGC, const Node& root,
                                          JS::ubi::Vector<Node>& postOrder,
                                          PredecessorSets& predecessorSets) {
         uint32_t nodeCount = 0;
         auto onNode = [&](const Node& node) {
             nodeCount++;
-            if (MOZ_UNLIKELY(nodeCount == UINT32_MAX))
+            if (MOZ_UNLIKELY(nodeCount == UINT32_MAX)) {
                 return false;
+            }
             return postOrder.append(node);
         };
 
         auto onEdge = [&](const Node& origin, const Edge& edge) {
             auto p = predecessorSets.lookupForAdd(edge.referent);
             if (!p) {
                 mozilla::UniquePtr<NodeSet, DeletePolicy<NodeSet>> set(js_new<NodeSet>());
                 if (!set ||
@@ -354,72 +359,78 @@ class JS_PUBLIC_API(DominatorTree)
 
     // Populates the given `map` with an entry for each node to its index in
     // `postOrder`.
     static MOZ_MUST_USE bool mapNodesToTheirIndices(JS::ubi::Vector<Node>& postOrder,
                                                     NodeToIndexMap& map) {
         MOZ_ASSERT(map.empty());
         MOZ_ASSERT(postOrder.length() < UINT32_MAX);
         uint32_t length = postOrder.length();
-        if (!map.reserve(length))
+        if (!map.reserve(length)) {
             return false;
-        for (uint32_t i = 0; i < length; i++)
+        }
+        for (uint32_t i = 0; i < length; i++) {
             map.putNewInfallible(postOrder[i], i);
+        }
         return true;
     }
 
     // Convert the Node -> NodeSet predecessorSets to a index -> Vector<index>
     // form.
     static MOZ_MUST_USE bool convertPredecessorSetsToVectors(
         const Node& root,
         JS::ubi::Vector<Node>& postOrder,
         PredecessorSets& predecessorSets,
         NodeToIndexMap& nodeToPostOrderIndex,
         JS::ubi::Vector<JS::ubi::Vector<uint32_t>>& predecessorVectors)
     {
         MOZ_ASSERT(postOrder.length() < UINT32_MAX);
         uint32_t length = postOrder.length();
 
         MOZ_ASSERT(predecessorVectors.length() == 0);
-        if (!predecessorVectors.growBy(length))
+        if (!predecessorVectors.growBy(length)) {
             return false;
+        }
 
         for (uint32_t i = 0; i < length - 1; i++) {
             auto& node = postOrder[i];
             MOZ_ASSERT(node != root,
                        "Only the last node should be root, since this was a post order traversal.");
 
             auto ptr = predecessorSets.lookup(node);
             MOZ_ASSERT(ptr,
                        "Because this isn't the root, it had better have predecessors, or else how "
                        "did we even find it.");
 
             auto& predecessors = ptr->value();
-            if (!predecessorVectors[i].reserve(predecessors->count()))
+            if (!predecessorVectors[i].reserve(predecessors->count())) {
                 return false;
+            }
             for (auto range = predecessors->all(); !range.empty(); range.popFront()) {
                 auto ptr = nodeToPostOrderIndex.lookup(range.front());
                 MOZ_ASSERT(ptr);
                 predecessorVectors[i].infallibleAppend(ptr->value());
             }
         }
         predecessorSets.clearAndCompact();
         return true;
     }
 
     // Initialize `doms` such that the immediate dominator of the `root` is the
     // `root` itself and all others are `UNDEFINED`.
     static MOZ_MUST_USE bool initializeDominators(JS::ubi::Vector<uint32_t>& doms,
                                                   uint32_t length) {
         MOZ_ASSERT(doms.length() == 0);
-        if (!doms.growByUninitialized(length))
+        if (!doms.growByUninitialized(length)) {
             return false;
+        }
         doms[length - 1] = length - 1;
-        for (uint32_t i = 0; i < length - 1; i++)
+        for (uint32_t i = 0; i < length - 1; i++) {
             doms[i] = UNDEFINED;
+        }
         return true;
     }
 
     void assertSanity() const {
         MOZ_ASSERT(postOrder.length() == doms.length());
         MOZ_ASSERT(postOrder.length() == nodeToPostOrderIndex.count());
         MOZ_ASSERT_IF(retainedSizes.isSome(), postOrder.length() == retainedSizes->length());
     }
@@ -508,41 +519,45 @@ class JS_PUBLIC_API(DominatorTree)
      *
      * Returns `mozilla::Nothing()` on OOM failure. It is the caller's
      * responsibility to handle and report the OOM.
      */
     static mozilla::Maybe<DominatorTree>
     Create(JSContext* cx, AutoCheckCannotGC& noGC, const Node& root) {
         JS::ubi::Vector<Node> postOrder;
         PredecessorSets predecessorSets;
-        if (!doTraversal(cx, noGC, root, postOrder, predecessorSets))
+        if (!doTraversal(cx, noGC, root, postOrder, predecessorSets)) {
             return mozilla::Nothing();
+        }
 
         MOZ_ASSERT(postOrder.length() < UINT32_MAX);
         uint32_t length = postOrder.length();
         MOZ_ASSERT(postOrder[length - 1] == root);
 
         // From here on out we wish to avoid hash table lookups, and we use
         // indices into `postOrder` instead of actual nodes wherever
         // possible. This greatly improves the performance of this
         // implementation, but we have to pay a little bit of upfront cost to
         // convert our data structures to play along first.
 
         NodeToIndexMap nodeToPostOrderIndex(postOrder.length());
-        if (!mapNodesToTheirIndices(postOrder, nodeToPostOrderIndex))
+        if (!mapNodesToTheirIndices(postOrder, nodeToPostOrderIndex)) {
             return mozilla::Nothing();
+        }
 
         JS::ubi::Vector<JS::ubi::Vector<uint32_t>> predecessorVectors;
-        if (!convertPredecessorSetsToVectors(root, postOrder, predecessorSets, nodeToPostOrderIndex,
-                                             predecessorVectors))
-            return mozilla::Nothing();
+        if (!convertPredecessorSetsToVectors(root, postOrder, predecessorSets, nodeToPostOrderIndex,
+                                             predecessorVectors)) {
+            return mozilla::Nothing();
+        }
 
         JS::ubi::Vector<uint32_t> doms;
-        if (!initializeDominators(doms, length))
+        if (!initializeDominators(doms, length)) {
             return mozilla::Nothing();
+        }
 
         bool changed = true;
         while (changed) {
             changed = false;
 
             // Iterate over the non-root nodes in reverse post order.
             for (uint32_t indexPlusOne = length - 1; indexPlusOne > 0; indexPlusOne--) {
                 MOZ_ASSERT(postOrder[indexPlusOne - 1] != root);
@@ -565,33 +579,35 @@ class JS_PUBLIC_API(DominatorTree)
 
                 MOZ_ASSERT(newIDomIdx != UNDEFINED,
                            "Because the root is initialized to dominate itself and is the first "
                            "node in every path, there must exist a predecessor to this node that "
                            "also has a dominator.");
 
                 for ( ; !range.empty(); range.popFront()) {
                     auto idx = range.front();
-                    if (doms[idx] != UNDEFINED)
+                    if (doms[idx] != UNDEFINED) {
                         newIDomIdx = intersect(doms, newIDomIdx, idx);
+                    }
                 }
 
                 // If the immediate dominator changed, we will have to do
                 // another pass of the outer while loop to continue the forward
                 // dataflow.
                 if (newIDomIdx != doms[indexPlusOne - 1]) {
                     doms[indexPlusOne - 1] = newIDomIdx;
                     changed = true;
                 }
             }
         }
 
         auto maybeDominatedSets = DominatedSets::Create(doms);
-        if (maybeDominatedSets.isNothing())
+        if (maybeDominatedSets.isNothing()) {
             return mozilla::Nothing();
+        }
 
         return mozilla::Some(DominatorTree(std::move(postOrder),
                                            std::move(nodeToPostOrderIndex),
                                            std::move(doms),
                                            std::move(*maybeDominatedSets)));
     }
 
     /**
@@ -604,18 +620,19 @@ class JS_PUBLIC_API(DominatorTree)
     /**
      * Return the immediate dominator of the given `node`. If `node` was not
      * reachable from the `root` that this dominator tree was constructed from,
      * then return the null `JS::ubi::Node`.
      */
     Node getImmediateDominator(const Node& node) const {
         assertSanity();
         auto ptr = nodeToPostOrderIndex.lookup(node);
-        if (!ptr)
+        if (!ptr) {
             return Node();
+        }
 
         auto idx = ptr->value();
         MOZ_ASSERT(idx < postOrder.length());
         return postOrder[doms[idx]];
     }
 
     /**
      * Get the set of nodes immediately dominated by the given `node`. If `node`
@@ -631,18 +648,19 @@ class JS_PUBLIC_API(DominatorTree)
      *
      *     for (const JS::ubi::Node& dominatedNode : *range) {
      *         // Do something with each immediately dominated node...
      *     }
      */
     mozilla::Maybe<DominatedSetRange> getDominatedSet(const Node& node) {
         assertSanity();
         auto ptr = nodeToPostOrderIndex.lookup(node);
-        if (!ptr)
+        if (!ptr) {
             return mozilla::Nothing();
+        }
 
         auto idx = ptr->value();
         MOZ_ASSERT(idx < postOrder.length());
         return mozilla::Some(dominatedSets.dominatedSet(postOrder, idx));
     }
 
     /**
      * Get the retained size of the given `node`. The size is placed in
@@ -653,18 +671,19 @@ class JS_PUBLIC_API(DominatorTree)
                                       Node::Size& outSize) {
         assertSanity();
         auto ptr = nodeToPostOrderIndex.lookup(node);
         if (!ptr) {
             outSize = 0;
             return true;
         }
 
-        if (retainedSizes.isNothing() && !computeRetainedSizes(mallocSizeOf))
+        if (retainedSizes.isNothing() && !computeRetainedSizes(mallocSizeOf)) {
             return false;
+        }
 
         auto idx = ptr->value();
         MOZ_ASSERT(idx < postOrder.length());
         outSize = retainedSizes.ref()[idx];
         return true;
     }
 };
 
--- a/js/public/UbiNodePostOrder.h
+++ b/js/public/UbiNodePostOrder.h
@@ -88,18 +88,19 @@ struct PostOrder {
 #ifdef DEBUG
     bool                     traversed;
 #endif
 
   private:
     MOZ_MUST_USE bool fillEdgesFromRange(EdgeVector& edges, js::UniquePtr<EdgeRange>& range) {
         MOZ_ASSERT(range);
         for ( ; !range->empty(); range->popFront()) {
-            if (!edges.append(std::move(range->front())))
+            if (!edges.append(std::move(range->front()))) {
                 return false;
+            }
         }
         return true;
     }
 
     MOZ_MUST_USE bool pushForTraversing(const Node& node) {
         EdgeVector edges;
         auto range = node.edges(cx, /* wantNames */ false);
         return range &&
@@ -121,18 +122,19 @@ struct PostOrder {
 #ifdef DEBUG
       , traversed(false)
 #endif
     { }
 
     // Add `node` as a starting point for the traversal. You may add
     // as many starting points as you like. Returns false on OOM.
     MOZ_MUST_USE bool addStart(const Node& node) {
-        if (!seen.put(node))
+        if (!seen.put(node)) {
             return false;
+        }
         return pushForTraversing(node);
     }
 
     // Traverse the graph in post-order, starting with the set of nodes passed
     // to `addStart` and applying `onNode::operator()` for each node in the
     // graph and `onEdge::operator()` for each edge in the graph, as described
     // above.
     //
@@ -147,32 +149,35 @@ struct PostOrder {
         traversed = true;
 #endif
 
         while (!stack.empty()) {
             auto& origin = stack.back().origin;
             auto& edges = stack.back().edges;
 
             if (edges.empty()) {
-                if (!onNode(origin))
+                if (!onNode(origin)) {
                     return false;
+                }
                 stack.popBack();
                 continue;
             }
 
             Edge edge = std::move(edges.back());
             edges.popBack();
 
-            if (!onEdge(origin, edge))
+            if (!onEdge(origin, edge)) {
                 return false;
+            }
 
             auto ptr = seen.lookupForAdd(edge.referent);
             // We've already seen this node, don't follow its edges.
-            if (ptr)
+            if (ptr) {
                 continue;
+            }
 
             // Mark the referent as seen and follow its edges.
             if (!seen.add(ptr, edge.referent) ||
                 !pushForTraversing(edge.referent))
             {
                 return false;
             }
         }
--- a/js/public/UbiNodeShortestPaths.h
+++ b/js/public/UbiNodeShortestPaths.h
@@ -111,59 +111,66 @@ struct JS_PUBLIC_API(ShortestPaths)
         bool
         operator()(Traversal& traversal, const JS::ubi::Node& origin, JS::ubi::Edge& edge,
                    BackEdge* back, bool first)
         {
             MOZ_ASSERT(back);
             MOZ_ASSERT(origin == shortestPaths.root_ || traversal.visited.has(origin));
             MOZ_ASSERT(totalPathsRecorded < totalMaxPathsToRecord);
 
-            if (first && !back->init(origin, edge))
+            if (first && !back->init(origin, edge)) {
                 return false;
+            }
 
-            if (!shortestPaths.targets_.has(edge.referent))
+            if (!shortestPaths.targets_.has(edge.referent)) {
                 return true;
+            }
 
             // If `first` is true, then we moved the edge's name into `back` in
             // the above call to `init`. So clone that back edge to get the
             // correct edge name. If `first` is not true, then our edge name is
             // still in `edge`. This accounts for the asymmetry between
             // `back->clone()` in the first branch, and the `init` call in the
             // second branch.
 
             if (first) {
                 BackEdgeVector paths;
-                if (!paths.reserve(shortestPaths.maxNumPaths_))
+                if (!paths.reserve(shortestPaths.maxNumPaths_)) {
                     return false;
+                }
                 auto cloned = back->clone();
-                if (!cloned)
+                if (!cloned) {
                     return false;
+                }
                 paths.infallibleAppend(std::move(cloned));
-                if (!shortestPaths.paths_.putNew(edge.referent, std::move(paths)))
+                if (!shortestPaths.paths_.putNew(edge.referent, std::move(paths))) {
                     return false;
+                }
                 totalPathsRecorded++;
             } else {
                 auto ptr = shortestPaths.paths_.lookup(edge.referent);
                 MOZ_ASSERT(ptr,
                            "This isn't the first time we have seen the target node `edge.referent`. "
                            "We should have inserted it into shortestPaths.paths_ the first time we "
                            "saw it.");
 
                 if (ptr->value().length() < shortestPaths.maxNumPaths_) {
                     auto thisBackEdge = js::MakeUnique<BackEdge>();
-                    if (!thisBackEdge || !thisBackEdge->init(origin, edge))
+                    if (!thisBackEdge || !thisBackEdge->init(origin, edge)) {
                         return false;
+                    }
                     ptr->value().infallibleAppend(std::move(thisBackEdge));
                     totalPathsRecorded++;
                 }
             }
 
             MOZ_ASSERT(totalPathsRecorded <= totalMaxPathsToRecord);
-            if (totalPathsRecorded == totalMaxPathsToRecord)
+            if (totalPathsRecorded == totalMaxPathsToRecord) {
                 traversal.stop();
+            }
 
             return true;
         }
 
     };
 
     // The maximum number of paths to record for each node.
     uint32_t maxNumPaths_;
@@ -242,18 +249,19 @@ struct JS_PUBLIC_API(ShortestPaths)
         MOZ_ASSERT(targets.count() > 0);
         MOZ_ASSERT(maxNumPaths > 0);
 
         ShortestPaths paths(maxNumPaths, root, std::move(targets));
 
         Handler handler(paths);
         Traversal traversal(cx, handler, noGC);
         traversal.wantNames = true;
-        if (!traversal.addStart(root) || !traversal.traverse())
+        if (!traversal.addStart(root) || !traversal.traverse()) {
             return mozilla::Nothing();
+        }
 
         // Take ownership of the back edges we created while traversing the
         // graph so that we can follow them from `paths_` and don't
         // use-after-free.
         paths.backEdges_ = std::move(traversal.visited);
 
         return mozilla::Some(std::move(paths));
     }
@@ -279,44 +287,48 @@ struct JS_PUBLIC_API(ShortestPaths)
      */
     template <class Func>
     MOZ_MUST_USE bool forEachPath(const Node& target, Func func) {
         MOZ_ASSERT(targets_.has(target));
 
         auto ptr = paths_.lookup(target);
 
         // We didn't find any paths to this target, so nothing to do here.
-        if (!ptr)
+        if (!ptr) {
             return true;
+        }
 
         MOZ_ASSERT(ptr->value().length() <= maxNumPaths_);
 
         Path path;
         for (const auto& backEdge : ptr->value()) {
             path.clear();
 
-            if (!path.append(backEdge.get()))
+            if (!path.append(backEdge.get())) {
                 return false;
+            }
 
             Node here = backEdge->predecessor();
             MOZ_ASSERT(here);
 
             while (here != root_) {
                 auto p = backEdges_.lookup(here);
                 MOZ_ASSERT(p);
-                if (!path.append(&p->value()))
+                if (!path.append(&p->value())) {
                     return false;
+                }
                 here = p->value().predecessor();
                 MOZ_ASSERT(here);
             }
 
             path.reverse();
 
-            if (!func(path))
+            if (!func(path)) {
                 return false;
+            }
         }
 
         return true;
     }
 };
 
 #ifdef DEBUG
 // A helper function to dump the first `maxNumPaths` shortest retaining paths to
--- a/js/public/Utility.h
+++ b/js/public/Utility.h
@@ -151,18 +151,19 @@ IsSimulatedOOMAllocation()
 {
     return IsThreadSimulatingOOM() &&
            (counter == maxAllocations || (counter > maxAllocations && failAlways));
 }
 
 inline bool
 ShouldFailWithOOM()
 {
-    if (!IsThreadSimulatingOOM())
+    if (!IsThreadSimulatingOOM()) {
         return false;
+    }
 
     counter++;
     if (IsSimulatedOOMAllocation()) {
         JS_OOM_CALL_BP_FUNC();
         return true;
     }
     return false;
 }
@@ -198,18 +199,19 @@ IsSimulatedStackOOMCheck()
 {
     return IsThreadSimulatingStackOOM() &&
            (stackCheckCounter == maxStackChecks || (stackCheckCounter > maxStackChecks && stackCheckFailAlways));
 }
 
 inline bool
 ShouldFailWithStackOOM()
 {
-    if (!IsThreadSimulatingStackOOM())
+    if (!IsThreadSimulatingStackOOM()) {
         return false;
+    }
 
     stackCheckCounter++;
     if (IsSimulatedStackOOMCheck()) {
         JS_OOM_CALL_BP_FUNC();
         return true;
     }
     return false;
 }
@@ -246,18 +248,19 @@ IsSimulatedInterruptCheck()
 {
     return IsThreadSimulatingInterrupt() &&
            (interruptCheckCounter == maxInterruptChecks || (interruptCheckCounter > maxInterruptChecks && interruptCheckFailAlways));
 }
 
 inline bool
 ShouldFailWithInterrupt()
 {
-    if (!IsThreadSimulatingInterrupt())
+    if (!IsThreadSimulatingInterrupt()) {
         return false;
+    }
 
     interruptCheckCounter++;
     if (IsSimulatedInterruptCheck()) {
         JS_OOM_CALL_BP_FUNC();
         return true;
     }
     return false;
 }
@@ -574,53 +577,56 @@ js_delete_poison(const T* p)
     }
 }
 
 template <class T>
 static MOZ_ALWAYS_INLINE T*
 js_pod_arena_malloc(arena_id_t arena, size_t numElems)
 {
   size_t bytes;
-  if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(numElems, &bytes)))
+  if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(numElems, &bytes))) {
     return nullptr;
+  }
   return static_cast<T*>(js_arena_malloc(arena, bytes));
 }
 
 template <class T>
 static MOZ_ALWAYS_INLINE T*
 js_pod_malloc(size_t numElems)
 {
     return js_pod_arena_malloc<T>(js::MallocArena, numElems);
 }
 
 template <class T>
 static MOZ_ALWAYS_INLINE T*
 js_pod_arena_calloc(arena_id_t arena, size_t numElems)
 {
     size_t bytes;
-    if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(numElems, &bytes)))
+    if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(numElems, &bytes))) {
         return nullptr;
+    }
     return static_cast<T*>(js_arena_calloc(arena, bytes, 1));
 }
 
 template <class T>
 static MOZ_ALWAYS_INLINE T*
 js_pod_calloc(size_t numElems)
 {
     return js_pod_arena_calloc<T>(js::MallocArena, numElems);
 }
 
 template <class T>
 static MOZ_ALWAYS_INLINE T*
 js_pod_realloc(T* prior, size_t oldSize, size_t newSize)
 {
     MOZ_ASSERT(!(oldSize & mozilla::tl::MulOverflowMask<sizeof(T)>::value));
     size_t bytes;
-    if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(newSize, &bytes)))
+    if (MOZ_UNLIKELY(!js::CalculateAllocSize<T>(newSize, &bytes))) {
         return nullptr;
+    }
     return static_cast<T*>(js_realloc(prior, bytes));
 }
 
 namespace JS {
 
 template<typename T>
 struct DeletePolicy
 {
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -260,18 +260,19 @@ GenericNaN()
 {
   return mozilla::SpecificNaN<double>(detail::CanonicalizedNaNSignBit,
                                       detail::CanonicalizedNaNSignificand);
 }
 
 static inline double
 CanonicalizeNaN(double d)
 {
-    if (MOZ_UNLIKELY(mozilla::IsNaN(d)))
+    if (MOZ_UNLIKELY(mozilla::IsNaN(d))) {
         return GenericNaN();
+    }
     return d;
 }
 
 /**
  * [SMDOC] JS::Value type
  *
  * JS::Value is the interface for a single JavaScript Engine value.  A few
  * general notes on JS::Value:
@@ -537,20 +538,21 @@ union MOZ_NON_PARAM alignas(8) Value
             return true;
         }
 
         setDouble(d);
         return false;
     }
 
     void setObjectOrNull(JSObject* arg) {
-        if (arg)
+        if (arg) {
             setObject(*arg);
-        else
+        } else {
             setNull();
+        }
     }
 
     void swap(Value& rhs) {
         uint64_t tmp = rhs.asBits_;
         rhs.asBits_ = asBits_;
         asBits_ = tmp;
     }
 
@@ -701,21 +703,23 @@ union MOZ_NON_PARAM alignas(8) Value
     JS::TraceKind traceKind() const {
         MOZ_ASSERT(isGCThing());
         static_assert((JSVAL_TAG_STRING & 0x03) == size_t(JS::TraceKind::String),
                       "Value type tags must correspond with JS::TraceKinds.");
         static_assert((JSVAL_TAG_SYMBOL & 0x03) == size_t(JS::TraceKind::Symbol),
                       "Value type tags must correspond with JS::TraceKinds.");
         static_assert((JSVAL_TAG_OBJECT & 0x03) == size_t(JS::TraceKind::Object),
                       "Value type tags must correspond with JS::TraceKinds.");
-        if (MOZ_UNLIKELY(isPrivateGCThing()))
+        if (MOZ_UNLIKELY(isPrivateGCThing())) {
             return JS::GCThingTraceKind(toGCThing());
+        }
 #ifdef ENABLE_BIGINT
-        if (MOZ_UNLIKELY(isBigInt()))
+        if (MOZ_UNLIKELY(isBigInt())) {
             return JS::TraceKind::BigInt;
+        }
 #endif
         return JS::TraceKind(toTag() & 0x03);
     }
 
     JSWhyMagic whyMagic() const {
         MOZ_ASSERT(isMagic());
         return s_.payload_.why_;
     }
@@ -943,18 +947,19 @@ IsOptimizedPlaceholderMagicValue(const V
 
 static MOZ_ALWAYS_INLINE void
 ExposeValueToActiveJS(const Value& v)
 {
 #ifdef DEBUG
     Value tmp = v;
     MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&tmp));
 #endif
-    if (v.isGCThing())
+    if (v.isGCThing()) {
         js::gc::ExposeGCThingToActiveJS(GCCellPtr(v));
+    }
 }
 
 /************************************************************************/
 
 static inline MOZ_MAY_CALL_AFTER_MUST_RETURN Value
 NullValue()
 {
     Value v;
@@ -988,18 +993,19 @@ CanonicalizedDoubleValue(double d)
     return MOZ_UNLIKELY(mozilla::IsNaN(d))
            ? Value::fromRawBits(detail::CanonicalizedNaNBits)
            : Value::fromDouble(d);
 }
 
 static inline bool
 IsCanonicalized(double d)
 {
-  if (mozilla::IsInfinite(d) || mozilla::IsFinite(d))
+  if (mozilla::IsInfinite(d) || mozilla::IsFinite(d)) {
       return true;
+  }
 
   uint64_t bits;
   mozilla::BitwiseCast<uint64_t>(d, &bits);
   return (bits & ~mozilla::DoubleTypeTraits::kSignBit) == detail::CanonicalizedNaNBits;
 }
 
 static inline Value
 DoubleNaNValue()
@@ -1150,36 +1156,38 @@ namespace detail {
 template <bool Signed>
 class MakeNumberValue
 {
   public:
     template<typename T>
     static inline Value create(const T t)
     {
         Value v;
-        if (JSVAL_INT_MIN <= t && t <= JSVAL_INT_MAX)
+        if (JSVAL_INT_MIN <= t && t <= JSVAL_INT_MAX) {
             v.setInt32(int32_t(t));
-        else
+        } else {
             v.setDouble(double(t));
+        }
         return v;
     }
 };
 
 template <>
 class MakeNumberValue<false>
 {
   public:
     template<typename T>
     static inline Value create(const T t)
     {
         Value v;
-        if (t <= JSVAL_INT_MAX)
+        if (t <= JSVAL_INT_MAX) {
             v.setInt32(int32_t(t));
-        else
+        } else {
             v.setDouble(double(t));
+        }
         return v;
     }
 };
 
 } // namespace detail
 
 template <typename T>
 static inline Value
@@ -1409,20 +1417,21 @@ class HeapBase<JS::Value, Wrapper> : pub
             return true;
         }
 
         setDouble(d);
         return false;
     }
 
     void setObjectOrNull(JSObject* arg) {
-        if (arg)
+        if (arg) {
             setObject(*arg);
-        else
+        } else {
             setNull();
+        }
     }
 };
 
 /*
  * If the Value is a GC pointer type, convert to that type and call |f| with
  * the pointer. If the Value is not a GC type, calls F::defaultValue.
  */
 template <typename F, typename... Args>
@@ -1475,18 +1484,19 @@ PoisonedObjectValue(uintptr_t poison)
 } // namespace js
 
 #ifdef DEBUG
 namespace JS {
 
 MOZ_ALWAYS_INLINE bool
 ValueIsNotGray(const Value& value)
 {
-    if (!value.isGCThing())
+    if (!value.isGCThing()) {
         return true;
+    }
 
     return CellIsNotGray(value.toGCThing());
 }
 
 MOZ_ALWAYS_INLINE bool
 ValueIsNotGray(const Heap<Value>& value)
 {
     return ValueIsNotGray(value.unbarrieredGet());