Bug 1488698 - Always use braces for if/for/while statements in js/src/vm, part 10. r=lth
author: Jan de Mooij <jdemooij@mozilla.com>
Thu, 06 Sep 2018 11:18:40 +0200
changeset 435735 6b3a95df3c123910eeab97cb7cd7cbf0884bd413
parent 435734 045f3d6c75024958390a18753f3698f9d177ec61
child 435736 219721735c51630107ce70c295c26b88bb87ab3a
push id: 34618
push user: btara@mozilla.com
push date: Tue, 11 Sep 2018 22:13:11 +0000
treeherder: mozilla-central@1169e8a4ca2b
reviewers: lth
bugs: 1488698
milestone: 64.0a1
js/src/vm/SavedFrame.h
js/src/vm/SavedStacks-inl.h
js/src/vm/SavedStacks.cpp
js/src/vm/Scope.cpp
js/src/vm/Scope.h
js/src/vm/SelfHosting.cpp
js/src/vm/Shape-inl.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/SharedArrayObject.cpp
js/src/vm/SharedImmutableStringsCache-inl.h
js/src/vm/SharedImmutableStringsCache.cpp
js/src/vm/SharedImmutableStringsCache.h
js/src/vm/SharedMem.h
js/src/vm/Stack-inl.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
js/src/vm/Stopwatch.cpp
js/src/vm/Stopwatch.h
js/src/vm/StringObject-inl.h
js/src/vm/StringType-inl.h
js/src/vm/StringType.cpp
js/src/vm/StringType.h
js/src/vm/StructuredClone.cpp
js/src/vm/SymbolType.cpp
js/src/vm/SymbolType.h
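Every hunk below applies the same mechanical transformation: a single-statement if/for/while (or else) body gains braces, with no behavioral change. A minimal illustrative before/after in the same notation (hypothetical code, not a hunk from this patch):

-    if (!ptr)
-        return nullptr;
+    if (!ptr) {
+        return nullptr;
+    }

The only other shape that appears is the if/else form, where both arms are braced and the else is folded onto the closing brace (`} else {`).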
--- a/js/src/vm/SavedFrame.h
+++ b/js/src/vm/SavedFrame.h
@@ -98,18 +98,19 @@ class SavedFrame : public NativeObject {
 
     static bool isSavedFrameAndNotProto(JSObject& obj) {
         return obj.is<SavedFrame>() &&
                !obj.as<SavedFrame>().getReservedSlot(JSSLOT_SOURCE).isNull();
     }
 
     static bool isSavedFrameOrWrapperAndNotProto(JSObject& obj) {
         auto unwrapped = CheckedUnwrap(&obj);
-        if (!unwrapped)
+        if (!unwrapped) {
             return false;
+        }
         return isSavedFrameAndNotProto(*unwrapped);
     }
 
     struct Lookup;
     struct HashPolicy;
 
     typedef JS::GCHashSet<ReadBarriered<SavedFrame*>,
                           HashPolicy,
@@ -266,18 +267,19 @@ class ConcreteStackFrame<SavedFrame> : p
         auto name = get().getFunctionDisplayName();
         return AtomOrTwoByteChars(name);
     }
 
     void trace(JSTracer* trc) override {
         JSObject* prev = &get();
         JSObject* next = prev;
         js::TraceRoot(trc, &next, "ConcreteStackFrame<SavedFrame>::ptr");
-        if (next != prev)
+        if (next != prev) {
             ptr = next;
+        }
     }
 
     bool isSelfHosted(JSContext* cx) const override {
         return get().isSelfHosted(cx);
     }
 
     bool isSystem() const override;
 
--- a/js/src/vm/SavedStacks-inl.h
+++ b/js/src/vm/SavedStacks-inl.h
@@ -17,13 +17,14 @@
 // SavedFrame objects and the SavedFrame accessors themselves handle wrappers
 // and use the original caller's compartment's principals to determine what
 // level of data to present. Unwrapping and entering the referent's compartment
 // would mess that up. See the module level documentation in
 // `js/src/vm/SavedStacks.h` as well as the comments in `js/src/jsapi.h`.
 inline void
 js::AssertObjectIsSavedFrameOrWrapper(JSContext* cx, HandleObject stack)
 {
-    if (stack)
+    if (stack) {
         MOZ_RELEASE_ASSERT(js::SavedFrame::isSavedFrameOrWrapperAndNotProto(*stack));
+    }
 }
 
 #endif // vm_SavedStacksInl_h
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -52,18 +52,19 @@ namespace js {
 /**
  * Maximum number of saved frames returned for an async stack.
  */
 const uint32_t ASYNC_STACK_MAX_FRAME_COUNT = 60;
 
 void
 LiveSavedFrameCache::trace(JSTracer* trc)
 {
-    if (!initialized())
+    if (!initialized()) {
         return;
+    }
 
     for (auto* entry = frames->begin(); entry < frames->end(); entry++) {
         TraceEdge(trc,
                   &entry->savedFrame,
                   "LiveSavedFrameCache::frames SavedFrame");
     }
 }
 
@@ -109,18 +110,19 @@ LiveSavedFrameCache::find(JSContext* cx,
     }
 
     // All our SavedFrames should be in the same realm. If the last
     // entry's SavedFrame's realm doesn't match cx's, flush the cache.
     if (frames->back().savedFrame->realm() != cx->realm()) {
 #ifdef DEBUG
         // Check that they are, indeed, all in the same realm.
         auto compartment = frames->back().savedFrame->realm();
-        for (const auto& f : (*frames))
+        for (const auto& f : (*frames)) {
             MOZ_ASSERT(compartment == f.savedFrame->realm());
+        }
 #endif
         frames->clear();
         frame.set(nullptr);
         return;
     }
 
     Key key(framePtr);
     while (key != frames->back().key) {
@@ -233,20 +235,22 @@ struct SavedFrame::Lookup {
     Activation*                          activation;
 
     void trace(JSTracer* trc) {
         TraceManuallyBarrieredEdge(trc, &source, "SavedFrame::Lookup::source");
         if (functionDisplayName) {
             TraceManuallyBarrieredEdge(trc, &functionDisplayName,
                                        "SavedFrame::Lookup::functionDisplayName");
         }
-        if (asyncCause)
+        if (asyncCause) {
             TraceManuallyBarrieredEdge(trc, &asyncCause, "SavedFrame::Lookup::asyncCause");
-        if (parent)
+        }
+        if (parent) {
             TraceManuallyBarrieredEdge(trc, &parent, "SavedFrame::Lookup::parent");
+        }
     }
 };
 
 class MOZ_STACK_CLASS SavedFrame::AutoLookupVector : public JS::CustomAutoRooter {
   public:
     explicit AutoLookupVector(JSContext* cx)
       : JS::CustomAutoRooter(cx),
         lookups(cx)
@@ -256,18 +260,19 @@ class MOZ_STACK_CLASS SavedFrame::AutoLo
     inline LookupVector* operator->() { return &lookups; }
     inline HandleLookup operator[](size_t i) { return HandleLookup(lookups[i]); }
     inline HandleLookup back() { return HandleLookup(lookups.back()); }
 
   private:
     LookupVector lookups;
 
     virtual void trace(JSTracer* trc) override {
-        for (size_t i = 0; i < lookups.length(); i++)
+        for (size_t i = 0; i < lookups.length(); i++) {
             lookups[i].trace(trc);
+        }
     }
 };
 
 /* static */ bool
 SavedFrame::HashPolicy::hasHash(const Lookup& l)
 {
     return SavedFramePtrHasher::hasHash(l.parent);
 }
@@ -294,39 +299,46 @@ SavedFrame::HashPolicy::hash(const Looku
                      JSPrincipalsPtrHasher::hash(lookup.principals));
 }
 
 /* static */ bool
 SavedFrame::HashPolicy::match(SavedFrame* existing, const Lookup& lookup)
 {
     MOZ_ASSERT(existing);
 
-    if (existing->getLine() != lookup.line)
+    if (existing->getLine() != lookup.line) {
         return false;
+    }
 
-    if (existing->getColumn() != lookup.column)
+    if (existing->getColumn() != lookup.column) {
         return false;
+    }
 
-    if (existing->getParent() != lookup.parent)
+    if (existing->getParent() != lookup.parent) {
         return false;
+    }
 
-    if (existing->getPrincipals() != lookup.principals)
+    if (existing->getPrincipals() != lookup.principals) {
         return false;
+    }
 
     JSAtom* source = existing->getSource();
-    if (source != lookup.source)
+    if (source != lookup.source) {
         return false;
+    }
 
     JSAtom* functionDisplayName = existing->getFunctionDisplayName();
-    if (functionDisplayName != lookup.functionDisplayName)
+    if (functionDisplayName != lookup.functionDisplayName) {
         return false;
+    }
 
     JSAtom* asyncCause = existing->getAsyncCause();
-    if (asyncCause != lookup.asyncCause)
+    if (asyncCause != lookup.asyncCause) {
         return false;
+    }
 
     return true;
 }
 
 /* static */ void
 SavedFrame::HashPolicy::rekey(Key& key, const Key& newKey)
 {
     key = newKey;
@@ -434,45 +446,48 @@ SavedFrame::getColumn()
     const Value& v = getReservedSlot(JSSLOT_COLUMN);
     return v.toPrivateUint32();
 }
 
 JSAtom*
 SavedFrame::getFunctionDisplayName()
 {
     const Value& v = getReservedSlot(JSSLOT_FUNCTIONDISPLAYNAME);
-    if (v.isNull())
+    if (v.isNull()) {
         return nullptr;
+    }
     JSString* s = v.toString();
     return &s->asAtom();
 }
 
 JSAtom*
 SavedFrame::getAsyncCause()
 {
     const Value& v = getReservedSlot(JSSLOT_ASYNCCAUSE);
-    if (v.isNull())
+    if (v.isNull()) {
         return nullptr;
+    }
     JSString* s = v.toString();
     return &s->asAtom();
 }
 
 SavedFrame*
 SavedFrame::getParent() const
 {
     const Value& v = getReservedSlot(JSSLOT_PARENT);
     return v.isObject() ? &v.toObject().as<SavedFrame>() : nullptr;
 }
 
 JSPrincipals*
 SavedFrame::getPrincipals()
 {
     const Value& v = getReservedSlot(JSSLOT_PRINCIPALS);
-    if (v.isUndefined())
+    if (v.isUndefined()) {
         return nullptr;
+    }
     return static_cast<JSPrincipals*>(v.toPrivate());
 }
 
 void
 SavedFrame::initSource(JSAtom* source)
 {
     MOZ_ASSERT(source);
     initReservedSlot(JSSLOT_SOURCE, StringValue(source));
@@ -491,18 +506,19 @@ SavedFrame::initColumn(uint32_t column)
     column = 0;
 #endif
     initReservedSlot(JSSLOT_COLUMN, PrivateUint32Value(column));
 }
 
 void
 SavedFrame::initPrincipals(JSPrincipals* principals)
 {
-    if (principals)
+    if (principals) {
         JS_HoldPrincipals(principals);
+    }
     initPrincipalsAlreadyHeld(principals);
 }
 
 void
 SavedFrame::initPrincipalsAlreadyHeld(JSPrincipals* principals)
 {
     MOZ_ASSERT_IF(principals, principals->refcount > 0);
     initReservedSlot(JSSLOT_PRINCIPALS, PrivateValue(principals));
@@ -529,22 +545,25 @@ SavedFrame::initParent(SavedFrame* maybe
 void
 SavedFrame::initFromLookup(JSContext* cx, SavedFrame::HandleLookup lookup)
 {
     // Make sure any atoms used in the lookup are marked in the current zone.
     // Normally we would try to keep these mark bits up to date around the
     // points where the context moves between compartments, but Lookups live on
     // the stack (where the atoms are kept alive regardless) and this is a
     // more convenient pinchpoint.
-    if (lookup->source)
+    if (lookup->source) {
         cx->markAtom(lookup->source);
-    if (lookup->functionDisplayName)
+    }
+    if (lookup->functionDisplayName) {
         cx->markAtom(lookup->functionDisplayName);
-    if (lookup->asyncCause)
+    }
+    if (lookup->asyncCause) {
         cx->markAtom(lookup->asyncCause);
+    }
 
     initSource(lookup->source);
     initLine(lookup->line);
     initColumn(lookup->column);
     initFunctionDisplayName(lookup->functionDisplayName);
     initAsyncCause(lookup->asyncCause);
     initParent(lookup->parent);
     initPrincipals(lookup->principals);
@@ -557,18 +576,19 @@ SavedFrame::create(JSContext* cx)
     cx->check(global);
 
     // Ensure that we don't try to capture the stack again in the
     // `SavedStacksMetadataBuilder` for this new SavedFrame object, and
     // accidentally cause O(n^2) behavior.
     SavedStacks::AutoReentrancyGuard guard(cx->realm()->savedStacks());
 
     RootedNativeObject proto(cx, GlobalObject::getOrCreateSavedFramePrototype(cx, global));
-    if (!proto)
+    if (!proto) {
         return nullptr;
+    }
     cx->check(proto);
 
     return NewObjectWithGivenProto<SavedFrame>(cx, proto, TenuredObject);
 }
 
 bool
 SavedFrame::isSelfHosted(JSContext* cx)
 {
@@ -606,29 +626,32 @@ SavedFrame::construct(JSContext* cx, uns
                               "SavedFrame");
     return false;
 }
 
 static bool
 SavedFrameSubsumedByPrincipals(JSContext* cx, JSPrincipals* principals, HandleSavedFrame frame)
 {
     auto subsumes = cx->runtime()->securityCallbacks->subsumes;
-    if (!subsumes)
+    if (!subsumes) {
         return true;
+    }
 
     MOZ_ASSERT(!ReconstructedSavedFramePrincipals::is(principals));
 
     auto framePrincipals = frame->getPrincipals();
 
     // Handle SavedFrames that have been reconstructed from stacks in a heap
     // snapshot.
-    if (framePrincipals == &ReconstructedSavedFramePrincipals::IsSystem)
+    if (framePrincipals == &ReconstructedSavedFramePrincipals::IsSystem) {
         return cx->runningWithTrustedPrincipals();
-    if (framePrincipals == &ReconstructedSavedFramePrincipals::IsNotSystem)
+    }
+    if (framePrincipals == &ReconstructedSavedFramePrincipals::IsNotSystem) {
         return true;
+    }
 
     return subsumes(principals, framePrincipals);
 }
 
 // Return the first SavedFrame in the chain that starts with |frame|
 // for which the given match function returns true. If there is no such frame,
 // return nullptr. |skippedAsync| is set to true if any of the skipped frames
 // had the |asyncCause| property set, otherwise it is explicitly set to false.
@@ -644,18 +667,19 @@ GetFirstMatchedFrame(JSContext* cx, JSPr
     while (rootedFrame) {
         if ((selfHosted == JS::SavedFrameSelfHosted::Include ||
              !rootedFrame->isSelfHosted(cx)) &&
             matches(cx, principals, rootedFrame))
         {
             return rootedFrame;
         }
 
-        if (rootedFrame->getAsyncCause())
+        if (rootedFrame->getAsyncCause()) {
             skippedAsync = true;
+        }
 
         rootedFrame = rootedFrame->getParent();
     }
 
     return nullptr;
 }
 
 // Return the first SavedFrame in the chain that starts with |frame| whose
@@ -671,22 +695,24 @@ GetFirstSubsumedFrame(JSContext* cx, JSP
                                 skippedAsync);
 }
 
 JS_FRIEND_API(JSObject*)
 GetFirstSubsumedSavedFrame(JSContext* cx, JSPrincipals* principals,
                            HandleObject savedFrame,
                            JS::SavedFrameSelfHosted selfHosted)
 {
-    if (!savedFrame)
+    if (!savedFrame) {
         return nullptr;
+    }
 
     auto subsumes = cx->runtime()->securityCallbacks->subsumes;
-    if (!subsumes)
+    if (!subsumes) {
         return nullptr;
+    }
 
     auto matcher =
     [subsumes](JSContext* cx, JSPrincipals* principals, HandleSavedFrame frame) -> bool {
         return subsumes(principals, frame->getPrincipals());
     };
 
     bool skippedAsync;
     RootedSavedFrame frame(cx, &savedFrame->as<SavedFrame>());
@@ -749,22 +775,24 @@ SavedFrame_checkThis(JSContext* cx, Call
 } /* namespace js */
 
 namespace JS {
 
 static inline js::SavedFrame*
 UnwrapSavedFrame(JSContext* cx, JSPrincipals* principals, HandleObject obj,
                  SavedFrameSelfHosted selfHosted, bool& skippedAsync)
 {
-    if (!obj)
+    if (!obj) {
         return nullptr;
+    }
 
     RootedObject savedFrameObj(cx, CheckedUnwrap(obj));
-    if (!savedFrameObj)
+    if (!savedFrameObj) {
         return nullptr;
+    }
 
     MOZ_RELEASE_ASSERT(js::SavedFrame::isSavedFrameAndNotProto(*savedFrameObj));
     js::RootedSavedFrame frame(cx, &savedFrameObj->as<js::SavedFrame>());
     return GetFirstSubsumedFrame(cx, principals, frame, selfHosted, skippedAsync);
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameSource(JSContext* cx, JSPrincipals* principals, HandleObject savedFrame,
@@ -780,18 +808,19 @@ GetSavedFrameSource(JSContext* cx, JSPri
         js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, principals, savedFrame, selfHosted,
                                                         skippedAsync));
         if (!frame) {
             sourcep.set(cx->runtime()->emptyString);
             return SavedFrameResult::AccessDenied;
         }
         sourcep.set(frame->getSource());
     }
-    if (sourcep->isAtom())
+    if (sourcep->isAtom()) {
         cx->markAtom(&sourcep->asAtom());
+    }
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameLine(JSContext* cx, JSPrincipals* principals, HandleObject savedFrame,
                   uint32_t* linep,
                   SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
@@ -846,18 +875,19 @@ GetSavedFrameFunctionDisplayName(JSConte
         js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, principals, savedFrame, selfHosted,
                                                         skippedAsync));
         if (!frame) {
             namep.set(nullptr);
             return SavedFrameResult::AccessDenied;
         }
         namep.set(frame->getFunctionDisplayName());
     }
-    if (namep && namep->isAtom())
+    if (namep && namep->isAtom()) {
         cx->markAtom(&namep->asAtom());
+    }
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameAsyncCause(JSContext* cx, JSPrincipals* principals, HandleObject savedFrame,
                         MutableHandleString asyncCausep,
                         SavedFrameSelfHosted unused_ /* = SavedFrameSelfHosted::Include */)
 {
@@ -875,21 +905,23 @@ GetSavedFrameAsyncCause(JSContext* cx, J
         js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, principals, savedFrame,
                                                         SavedFrameSelfHosted::Include,
                                                         skippedAsync));
         if (!frame) {
             asyncCausep.set(nullptr);
             return SavedFrameResult::AccessDenied;
         }
         asyncCausep.set(frame->getAsyncCause());
-        if (!asyncCausep && skippedAsync)
+        if (!asyncCausep && skippedAsync) {
             asyncCausep.set(cx->names().Async);
+        }
     }
-    if (asyncCausep && asyncCausep->isAtom())
+    if (asyncCausep && asyncCausep->isAtom()) {
         cx->markAtom(&asyncCausep->asAtom());
+    }
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameAsyncParent(JSContext* cx, JSPrincipals* principals, HandleObject savedFrame,
                          MutableHandleObject asyncParentp,
                          SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
@@ -910,20 +942,21 @@ GetSavedFrameAsyncParent(JSContext* cx, 
     // interested in whether we would cross any async parents to get from here
     // to the first subsumed parent frame instead.
     js::RootedSavedFrame subsumedParent(cx, GetFirstSubsumedFrame(cx, principals, parent,
                                                                   selfHosted, skippedAsync));
 
     // Even if |parent| is not subsumed, we still want to return a pointer to it
     // rather than |subsumedParent| so it can pick up any |asyncCause| from the
     // inaccessible part of the chain.
-    if (subsumedParent && (subsumedParent->getAsyncCause() || skippedAsync))
+    if (subsumedParent && (subsumedParent->getAsyncCause() || skippedAsync)) {
         asyncParentp.set(parent);
-    else
+    } else {
         asyncParentp.set(nullptr);
+    }
     return SavedFrameResult::Ok;
 }
 
 JS_PUBLIC_API(SavedFrameResult)
 GetSavedFrameParent(JSContext* cx, JSPrincipals* principals, HandleObject savedFrame,
                     MutableHandleObject parentp,
                     SavedFrameSelfHosted selfHosted /* = SavedFrameSelfHosted::Include */)
 {
@@ -944,20 +977,21 @@ GetSavedFrameParent(JSContext* cx, JSPri
     // interested in whether we would cross any async parents to get from here
     // to the first subsumed parent frame instead.
     js::RootedSavedFrame subsumedParent(cx, GetFirstSubsumedFrame(cx, principals, parent,
                                                                   selfHosted, skippedAsync));
 
     // Even if |parent| is not subsumed, we still want to return a pointer to it
     // rather than |subsumedParent| so it can pick up any |asyncCause| from the
     // inaccessible part of the chain.
-    if (subsumedParent && !(subsumedParent->getAsyncCause() || skippedAsync))
+    if (subsumedParent && !(subsumedParent->getAsyncCause() || skippedAsync)) {
         parentp.set(parent);
-    else
+    } else {
         parentp.set(nullptr);
+    }
     return SavedFrameResult::Ok;
 }
 
 static bool
 FormatStackFrameLine(JSContext* cx, js::StringBuffer& sb, js::HandleSavedFrame frame)
 {
     if (frame->isWasm()) {
         // See comment in WasmFrameIter::computeLine().
@@ -971,34 +1005,36 @@ FormatStackFrameLine(JSContext* cx, js::
 
 static bool
 FormatStackFrameColumn(JSContext* cx, js::StringBuffer& sb, js::HandleSavedFrame frame)
 {
     if (frame->isWasm()) {
         // See comment in WasmFrameIter::computeLine().
         js::ToCStringBuf cbuf;
         const char* cstr = NumberToCString(cx, &cbuf, frame->wasmBytecodeOffset(), 16);
-        if (!cstr)
+        if (!cstr) {
             return false;
+        }
 
         return sb.append("0x")
             && sb.append(cstr, strlen(cstr));
     }
 
     return NumberValueToStringBuffer(cx, NumberValue(frame->getColumn()), sb);
 }
 
 static bool
 FormatSpiderMonkeyStackFrame(JSContext* cx, js::StringBuffer& sb,
                              js::HandleSavedFrame frame, size_t indent,
                              bool skippedAsync)
 {
     RootedString asyncCause(cx, frame->getAsyncCause());
-    if (!asyncCause && skippedAsync)
+    if (!asyncCause && skippedAsync) {
         asyncCause.set(cx->names().Async);
+    }
 
     js::RootedAtom name(cx, frame->getFunctionDisplayName());
     return (!indent || sb.appendN(' ', indent))
         && (!asyncCause || (sb.append(asyncCause) && sb.append('*')))
         && (!name || sb.append(name))
         && sb.append('@')
         && sb.append(frame->getSource())
         && sb.append(':')
@@ -1034,18 +1070,19 @@ BuildStackString(JSContext* cx, JSPrinci
                  MutableHandleString stringp, size_t indent, js::StackFormat format)
 {
     js::AssertHeapIsIdle();
     CHECK_THREAD(cx);
     MOZ_RELEASE_ASSERT(cx->realm());
 
     js::StringBuffer sb(cx);
 
-    if (format == js::StackFormat::Default)
+    if (format == js::StackFormat::Default) {
         format = cx->runtime()->stackFormat();
+    }
     MOZ_ASSERT(format != js::StackFormat::Default);
 
     // Enter a new block to constrain the scope of possibly entering the stack's
     // realm. This ensures that when we finish the StringBuffer, we are back in
     // the cx's original compartment, and fulfill our contract with callers to
     // place the output string in the cx's current realm.
     {
         bool skippedAsync;
@@ -1064,36 +1101,39 @@ BuildStackString(JSContext* cx, JSPrinci
 
             parent = frame->getParent();
             bool skippedNextAsync;
             js::RootedSavedFrame nextFrame(cx, js::GetFirstSubsumedFrame(cx, principals, parent,
                                                                          SavedFrameSelfHosted::Exclude, skippedNextAsync));
 
             switch (format) {
                 case js::StackFormat::SpiderMonkey:
-                    if (!FormatSpiderMonkeyStackFrame(cx, sb, frame, indent, skippedAsync))
+                    if (!FormatSpiderMonkeyStackFrame(cx, sb, frame, indent, skippedAsync)) {
                         return false;
+                    }
                     break;
                 case js::StackFormat::V8:
-                    if (!FormatV8StackFrame(cx, sb, frame, indent, !nextFrame))
+                    if (!FormatV8StackFrame(cx, sb, frame, indent, !nextFrame)) {
                         return false;
+                    }
                     break;
                 case js::StackFormat::Default:
                     MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unexpected value");
                     break;
             }
 
             frame = nextFrame;
             skippedAsync = skippedNextAsync;
         } while (frame);
     }
 
     JSString* str = sb.finishString();
-    if (!str)
+    if (!str) {
         return false;
+    }
     cx->check(str);
     stringp.set(str);
     return true;
 }
 
 JS_PUBLIC_API(bool)
 IsMaybeWrappedSavedFrame(JSObject* obj)
 {
@@ -1114,120 +1154,128 @@ namespace js {
 
 /* static */ bool
 SavedFrame::sourceProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get source)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedString source(cx);
     if (JS::GetSavedFrameSource(cx, principals, frame, &source) == JS::SavedFrameResult::Ok) {
-        if (!cx->compartment()->wrap(cx, &source))
+        if (!cx->compartment()->wrap(cx, &source)) {
             return false;
+        }
         args.rval().setString(source);
     } else {
         args.rval().setNull();
     }
     return true;
 }
 
 /* static */ bool
 SavedFrame::lineProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get line)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     uint32_t line;
-    if (JS::GetSavedFrameLine(cx, principals, frame, &line) == JS::SavedFrameResult::Ok)
+    if (JS::GetSavedFrameLine(cx, principals, frame, &line) == JS::SavedFrameResult::Ok) {
         args.rval().setNumber(line);
-    else
+    } else {
         args.rval().setNull();
+    }
     return true;
 }
 
 /* static */ bool
 SavedFrame::columnProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get column)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     uint32_t column;
-    if (JS::GetSavedFrameColumn(cx, principals, frame, &column) == JS::SavedFrameResult::Ok)
+    if (JS::GetSavedFrameColumn(cx, principals, frame, &column) == JS::SavedFrameResult::Ok) {
         args.rval().setNumber(column);
-    else
+    } else {
         args.rval().setNull();
+    }
     return true;
 }
 
 /* static */ bool
 SavedFrame::functionDisplayNameProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get functionDisplayName)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedString name(cx);
     JS::SavedFrameResult result =
         JS::GetSavedFrameFunctionDisplayName(cx, principals, frame, &name);
     if (result == JS::SavedFrameResult::Ok && name) {
-        if (!cx->compartment()->wrap(cx, &name))
+        if (!cx->compartment()->wrap(cx, &name)) {
             return false;
+        }
         args.rval().setString(name);
     } else {
         args.rval().setNull();
     }
     return true;
 }
 
 /* static */ bool
 SavedFrame::asyncCauseProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get asyncCause)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedString asyncCause(cx);
     JS::SavedFrameResult result = JS::GetSavedFrameAsyncCause(cx, principals, frame, &asyncCause);
     if (result == JS::SavedFrameResult::Ok && asyncCause) {
-        if (!cx->compartment()->wrap(cx, &asyncCause))
+        if (!cx->compartment()->wrap(cx, &asyncCause)) {
             return false;
+        }
         args.rval().setString(asyncCause);
     } else {
         args.rval().setNull();
     }
     return true;
 }
 
 /* static */ bool
 SavedFrame::asyncParentProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get asyncParent)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedObject asyncParent(cx);
     (void) JS::GetSavedFrameAsyncParent(cx, principals, frame, &asyncParent);
-    if (!cx->compartment()->wrap(cx, &asyncParent))
+    if (!cx->compartment()->wrap(cx, &asyncParent)) {
         return false;
+    }
     args.rval().setObjectOrNull(asyncParent);
     return true;
 }
 
 /* static */ bool
 SavedFrame::parentProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "(get parent)", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedObject parent(cx);
     (void) JS::GetSavedFrameParent(cx, principals, frame, &parent);
-    if (!cx->compartment()->wrap(cx, &parent))
+    if (!cx->compartment()->wrap(cx, &parent)) {
         return false;
+    }
     args.rval().setObjectOrNull(parent);
     return true;
 }
 
 /* static */ bool
 SavedFrame::toStringMethod(JSContext* cx, unsigned argc, Value* vp)
 {
     THIS_SAVEDFRAME(cx, argc, vp, "toString", args, frame);
     JSPrincipals* principals = cx->realm()->principals();
     RootedString string(cx);
-    if (!JS::BuildStackString(cx, principals, frame, &string))
+    if (!JS::BuildStackString(cx, principals, frame, &string)) {
         return false;
+    }
     args.rval().setString(string);
     return true;
 }
 
 bool
 SavedStacks::saveCurrentStack(JSContext* cx, MutableHandleSavedFrame frame,
                               JS::StackCapture&& capture /* = JS::StackCapture(JS::AllFrames()) */)
 {
@@ -1251,26 +1299,28 @@ bool
 SavedStacks::copyAsyncStack(JSContext* cx, HandleObject asyncStack, HandleString asyncCause,
                             MutableHandleSavedFrame adoptedStack,
                             const Maybe<size_t>& maxFrameCount)
 {
     MOZ_RELEASE_ASSERT(cx->realm());
     MOZ_DIAGNOSTIC_ASSERT(&cx->realm()->savedStacks() == this);
 
     RootedAtom asyncCauseAtom(cx, AtomizeString(cx, asyncCause));
-    if (!asyncCauseAtom)
+    if (!asyncCauseAtom) {
         return false;
+    }
 
     RootedObject asyncStackObj(cx, CheckedUnwrap(asyncStack));
     MOZ_RELEASE_ASSERT(asyncStackObj);
     MOZ_RELEASE_ASSERT(js::SavedFrame::isSavedFrameAndNotProto(*asyncStackObj));
     adoptedStack.set(&asyncStackObj->as<js::SavedFrame>());
 
-    if (!adoptAsyncStack(cx, adoptedStack, asyncCauseAtom, maxFrameCount))
+    if (!adoptAsyncStack(cx, adoptedStack, asyncCauseAtom, maxFrameCount)) {
         return false;
+    }
 
     return true;
 }
 
 void
 SavedStacks::sweep()
 {
     frames.sweep();
@@ -1396,42 +1446,44 @@ SavedStacks::insertFrames(JSContext* cx,
         Activation& activation = *iter.activation();
         Maybe<LiveSavedFrameCache::FramePtr> framePtr = LiveSavedFrameCache::FramePtr::create(iter);
 
         if (framePtr) {
             MOZ_ASSERT_IF(seenCached, framePtr->hasCachedSavedFrame());
             seenCached |= framePtr->hasCachedSavedFrame();
         }
 
-        if (capture.is<JS::AllFrames>() && framePtr && framePtr->hasCachedSavedFrame())
-        {
+        if (capture.is<JS::AllFrames>() && framePtr && framePtr->hasCachedSavedFrame()) {
             auto* cache = activation.getLiveSavedFrameCache(cx);
-            if (!cache)
+            if (!cache) {
                 return false;
+            }
             cache->find(cx, *framePtr, iter.pc(), &parent);
 
             // Even though iter.hasCachedSavedFrame() was true, we can't
             // necessarily stop walking the stack here. We can get cache misses
             // for two reasons:
             // 1) This is the youngest valid frame in the cache, and it has run
             //    code and advanced to a new pc since it was cached.
             // 2) The cache was populated with SavedFrames captured for a
             //    different compartment, and got purged completely. We will
             //    repopulate it from scratch.
-            if (parent)
+            if (parent) {
                 break;
+            }
         }
 
         // We'll be pushing this frame onto stackChain. Gather the information
         // needed to construct the SavedFrame::Lookup.
         Rooted<LocationValue> location(cx);
         {
             AutoRealmUnchecked ar(cx, iter.realm());
-            if (!cx->realm()->savedStacks().getLocation(cx, iter, &location))
+            if (!cx->realm()->savedStacks().getLocation(cx, iter, &location)) {
                 return false;
+            }
         }
 
         RootedAtom displayAtom(cx, iter.maybeFunctionDisplayAtom());
 
         auto principals = iter.realm()->principals();
         MOZ_ASSERT_IF(framePtr && !iter.isWasm(), iter.pc());
 
         if (!stackChain->emplaceBack(location.source(),
@@ -1480,63 +1532,69 @@ SavedStacks::insertFrames(JSContext* cx,
             activation.asyncStack() &&
             (activation.asyncCallIsExplicit() || iter.done()) &&
             !capture.is<JS::FirstSubsumedFrame>())
         {
             // Atomize the async cause string. There should only be a few
             // different strings used.
             const char* cause = activation.asyncCause();
             RootedAtom causeAtom(cx, AtomizeUTF8Chars(cx, cause, strlen(cause)));
-            if (!causeAtom)
+            if (!causeAtom) {
                 return false;
+            }
 
             // Translate our capture into a frame count limit for
             // adoptAsyncStack, which will impose further limits.
             Maybe<size_t> maxFrames =
                 !capture.is<JS::MaxFrames>() ? Nothing()
                 : capture.as<JS::MaxFrames>().maxFrames == 0 ? Nothing()
                 : Some(capture.as<JS::MaxFrames>().maxFrames);
 
             // Clip the stack if needed, attach the async cause string to the
             // top frame, and copy it into our compartment if necessary.
             parent.set(activation.asyncStack());
-            if (!adoptAsyncStack(cx, &parent, causeAtom, maxFrames))
+            if (!adoptAsyncStack(cx, &parent, causeAtom, maxFrames)) {
                 return false;
+            }
             break;
         }
 
-        if (capture.is<JS::MaxFrames>())
+        if (capture.is<JS::MaxFrames>()) {
             capture.as<JS::MaxFrames>().maxFrames--;
+        }
     }
 
     // Iterate through |stackChain| in reverse order and get or create the
     // actual SavedFrame instances.
     frame.set(parent);
     for (size_t i = stackChain->length(); i != 0; i--) {
         SavedFrame::HandleLookup lookup = stackChain[i-1];
         lookup->parent = frame;
 
         // If necessary, adjust the parent of a debugger eval frame to point to
         // the frame in whose scope the eval occurs - if we're using
         // LiveSavedFrameCache. Otherwise, we simply ask the FrameIter to follow
         // evalInFramePrev links, so that the parent is always the last frame we
         // created.
         if (capture.is<JS::AllFrames>() && lookup->framePtr) {
-            if (!checkForEvalInFramePrev(cx, lookup))
+            if (!checkForEvalInFramePrev(cx, lookup)) {
                 return false;
+            }
         }
 
         frame.set(getOrCreateSavedFrame(cx, lookup));
-        if (!frame)
+        if (!frame) {
             return false;
+        }
 
         if (capture.is<JS::AllFrames>() && lookup->framePtr) {
             auto* cache = lookup->activation->getLiveSavedFrameCache(cx);
-            if (!cache || !cache->insert(cx, *lookup->framePtr, lookup->pc, frame))
+            if (!cache || !cache->insert(cx, *lookup->framePtr, lookup->pc, frame)) {
                 return false;
+            }
         }
     }
 
     return true;
 }
 
 bool
 SavedStacks::adoptAsyncStack(JSContext* cx, MutableHandleSavedFrame asyncStack,
@@ -1580,28 +1638,30 @@ SavedStacks::adoptAsyncStack(JSContext* 
         asyncStack.set(getOrCreateSavedFrame(cx, lookup));
         return !!asyncStack;
     }
 
     // If we captured the maximum number of frames and the caller requested no
     // specific limit, we only return half of them. This means that if we do
     // many subsequent captures with the same async stack, it's likely we can
     // use the optimization above.
-    if (maxFrameCount.isNothing() && currentSavedFrame)
+    if (maxFrameCount.isNothing() && currentSavedFrame) {
         stackChain->shrinkBy(ASYNC_STACK_MAX_FRAME_COUNT / 2);
+    }
 
     // Iterate through |stackChain| in reverse order and get or create the
     // actual SavedFrame instances.
     asyncStack.set(nullptr);
     while (!stackChain->empty()) {
         SavedFrame::HandleLookup lookup = stackChain.back();
         lookup->parent = asyncStack;
         asyncStack.set(getOrCreateSavedFrame(cx, lookup));
-        if (!asyncStack)
+        if (!asyncStack) {
             return false;
+        }
         stackChain->popBack();
     }
 
     return true;
 }
 
 // Given a |lookup| for which we're about to construct a SavedFrame, if it
 // refers to a Debugger eval frame, adjust |lookup|'s parent to be the frame's
@@ -1614,44 +1674,48 @@ SavedStacks::adoptAsyncStack(JSContext* 
 // eval frames the right parents as we encounter them.
 //
 // Call this function only if we are using the LiveSavedFrameCache; otherwise,
 // FrameIter has already taken care of getting us the right parent.
 bool
 SavedStacks::checkForEvalInFramePrev(JSContext* cx, SavedFrame::HandleLookup lookup)
 {
     MOZ_ASSERT(lookup->framePtr);
-    if (!lookup->framePtr->isInterpreterFrame())
+    if (!lookup->framePtr->isInterpreterFrame()) {
         return true;
+    }
 
     InterpreterFrame& interpreterFrame = lookup->framePtr->asInterpreterFrame();
-    if (!interpreterFrame.isDebuggerEvalFrame())
+    if (!interpreterFrame.isDebuggerEvalFrame()) {
         return true;
+    }
 
     LiveSavedFrameCache::FramePtr target =
         LiveSavedFrameCache::FramePtr::create(interpreterFrame.evalInFramePrev());
 
     // If we're caching the frame to which |lookup| refers, then we should
     // definitely have the target frame in the cache as well.
     MOZ_ASSERT(target.hasCachedSavedFrame());
 
     // Search the chain of activations for a LiveSavedFrameCache that has an
     // entry for target.
     RootedSavedFrame saved(cx, nullptr);
     for (Activation* act = lookup->activation; act; act = act->prev()) {
         // It's okay to force allocation of a cache here; we're about to put
         // something in the top cache, and all the lower ones should exist
         // already.
         auto* cache = act->getLiveSavedFrameCache(cx);
-        if (!cache)
+        if (!cache) {
             return false;
+        }
 
         cache->findWithoutInvalidation(target, &saved);
-        if (saved)
+        if (saved) {
             break;
+        }
     }
 
     // Since |target| has its cached bit set, we should have found it.
     MOZ_ALWAYS_TRUE(saved);
 
     lookup->parent = saved;
     return true;
 }
@@ -1662,35 +1726,39 @@ SavedStacks::getOrCreateSavedFrame(JSCon
     const SavedFrame::Lookup& lookupInstance = lookup.get();
     DependentAddPtr<SavedFrame::Set> p(cx, frames, lookupInstance);
     if (p) {
         MOZ_ASSERT(*p);
         return *p;
     }
 
     RootedSavedFrame frame(cx, createFrameFromLookup(cx, lookup));
-    if (!frame)
+    if (!frame) {
         return nullptr;
+    }
 
-    if (!p.add(cx, frames, lookupInstance, frame))
+    if (!p.add(cx, frames, lookupInstance, frame)) {
         return nullptr;
+    }
 
     return frame;
 }
 
 SavedFrame*
 SavedStacks::createFrameFromLookup(JSContext* cx, SavedFrame::HandleLookup lookup)
 {
     RootedSavedFrame frame(cx, SavedFrame::create(cx));
-    if (!frame)
+    if (!frame) {
         return nullptr;
+    }
     frame->initFromLookup(cx, lookup);
 
-    if (!FreezeObject(cx, frame))
+    if (!FreezeObject(cx, frame)) {
         return nullptr;
+    }
 
     return frame;
 }
 
 bool
 SavedStacks::getLocation(JSContext* cx, const FrameIter& iter,
                          MutableHandle<LocationValue> locationp)
 {
@@ -1709,18 +1777,19 @@ SavedStacks::getLocation(JSContext* cx, 
     if (iter.isWasm()) {
         // Only asm.js has a displayURL.
         if (const char16_t* displayURL = iter.displayURL()) {
             locationp.setSource(AtomizeChars(cx, displayURL, js_strlen(displayURL)));
         } else {
             const char* filename = iter.filename() ? iter.filename() : "";
             locationp.setSource(Atomize(cx, filename, strlen(filename)));
         }
-        if (!locationp.source())
+        if (!locationp.source()) {
             return false;
+        }
 
         // See WasmFrameIter::computeLine() comment.
         uint32_t column = 0;
         locationp.setLine(iter.computeLine(&column));
         locationp.setColumn(column);
         return true;
     }
 
@@ -1733,18 +1802,19 @@ SavedStacks::getLocation(JSContext* cx, 
     if (!p) {
         RootedAtom source(cx);
         if (const char16_t* displayURL = iter.displayURL()) {
             source = AtomizeChars(cx, displayURL, js_strlen(displayURL));
         } else {
             const char* filename = script->filename() ? script->filename() : "";
             source = Atomize(cx, filename, strlen(filename));
         }
-        if (!source)
+        if (!source) {
             return false;
+        }
 
         uint32_t column;
         uint32_t line = PCToLineNumber(script, pc, &column);
 
         // Make the column 1-based. See comment above.
         LocationValue value(source, line, column + 1);
         if (!pcLocationMap.add(p, key, value)) {
             ReportOutOfMemory(cx);
@@ -1755,22 +1825,24 @@ SavedStacks::getLocation(JSContext* cx, 
     locationp.set(p->value());
     return true;
 }
 
 void
 SavedStacks::chooseSamplingProbability(Realm* realm)
 {
     GlobalObject* global = realm->maybeGlobal();
-    if (!global)
+    if (!global) {
         return;
+    }
 
     GlobalObject::DebuggerVector* dbgs = global->getDebuggers();
-    if (!dbgs || dbgs->empty())
+    if (!dbgs || dbgs->empty()) {
         return;
+    }
 
     mozilla::DebugOnly<ReadBarriered<Debugger*>*> begin = dbgs->begin();
     mozilla::DebugOnly<bool> foundAnyDebuggers = false;
 
     double probability = 0;
     for (auto dbgp = dbgs->begin(); dbgp < dbgs->end(); dbgp++) {
         // The set of debuggers had better not change while we're iterating,
         // such that the vector gets reallocated.
@@ -1796,55 +1868,60 @@ SavedStacks::chooseSamplingProbability(R
 
 JSObject*
 SavedStacks::MetadataBuilder::build(JSContext* cx, HandleObject target,
                                     AutoEnterOOMUnsafeRegion& oomUnsafe) const
 {
     RootedObject obj(cx, target);
 
     SavedStacks& stacks = cx->realm()->savedStacks();
-    if (!stacks.bernoulli.trial())
+    if (!stacks.bernoulli.trial()) {
         return nullptr;
+    }
 
     RootedSavedFrame frame(cx);
-    if (!stacks.saveCurrentStack(cx, &frame))
+    if (!stacks.saveCurrentStack(cx, &frame)) {
         oomUnsafe.crash("SavedStacksMetadataBuilder");
+    }
 
-    if (!Debugger::onLogAllocationSite(cx, obj, frame, mozilla::TimeStamp::Now()))
+    if (!Debugger::onLogAllocationSite(cx, obj, frame, mozilla::TimeStamp::Now())) {
         oomUnsafe.crash("SavedStacksMetadataBuilder");
+    }
 
     MOZ_ASSERT_IF(frame, !frame->is<WrapperObject>());
     return frame;
 }
 
 const SavedStacks::MetadataBuilder SavedStacks::metadataBuilder;
 
 /* static */ ReconstructedSavedFramePrincipals ReconstructedSavedFramePrincipals::IsSystem;
 /* static */ ReconstructedSavedFramePrincipals ReconstructedSavedFramePrincipals::IsNotSystem;
 
 UniqueChars
 BuildUTF8StackString(JSContext* cx, JSPrincipals* principals, HandleObject stack)
 {
     RootedString stackStr(cx);
-    if (!JS::BuildStackString(cx, principals, stack, &stackStr))
+    if (!JS::BuildStackString(cx, principals, stack, &stackStr)) {
         return nullptr;
+    }
 
     return JS_EncodeStringToUTF8(cx, stackStr);
 }
 
 uint32_t
 FixupColumnForDisplay(uint32_t column)
 {
     // As described in WasmFrameIter::computeLine(), for wasm frames, the
     // function index is returned as the column with the high bit set. In paths
     // that format error stacks into strings, this information can be used to
     // synthesize a proper wasm frame. But when raw column numbers are handed
     // out, we just fix them to 1 to avoid confusion.
-    if (column & wasm::WasmFrameIter::ColumnBit)
+    if (column & wasm::WasmFrameIter::ColumnBit) {
         return 1;
+    }
 
     // XXX: Make the column 1-based as in other browsers, instead of 0-based
     // which is how SpiderMonkey stores it internally. This will be
     // unnecessary once bug 1144340 is fixed.
     return column + 1;
 }
 
 } /* namespace js */
@@ -1904,26 +1981,28 @@ ConstructSavedFrameStackSlow(JSContext* 
     Rooted<JS::ubi::StackFrame> ubiFrame(cx, frame);
 
     while (ubiFrame.get()) {
         // Convert the source and functionDisplayName strings to atoms.
 
         js::RootedAtom source(cx);
         AtomizingMatcher atomizer(cx, ubiFrame.get().sourceLength());
         source = ubiFrame.get().source().match(atomizer);
-        if (!source)
+        if (!source) {
             return false;
+        }
 
         js::RootedAtom functionDisplayName(cx);
         auto nameLength = ubiFrame.get().functionDisplayNameLength();
         if (nameLength > 0) {
             AtomizingMatcher atomizer(cx, nameLength);
             functionDisplayName = ubiFrame.get().functionDisplayName().match(atomizer);
-            if (!functionDisplayName)
+            if (!functionDisplayName) {
                 return false;
+            }
         }
 
         auto principals = js::ReconstructedSavedFramePrincipals::getSingleton(ubiFrame.get());
 
         if (!stackChain->emplaceBack(source, ubiFrame.get().line(), ubiFrame.get().column(),
                                      functionDisplayName, /* asyncCause */ nullptr,
                                      /* parent */ nullptr, principals))
         {
@@ -1934,18 +2013,19 @@ ConstructSavedFrameStackSlow(JSContext* 
         ubiFrame = ubiFrame.get().parent();
     }
 
     js::RootedSavedFrame parentFrame(cx);
     for (size_t i = stackChain->length(); i != 0; i--) {
         SavedFrame::HandleLookup lookup = stackChain[i-1];
         lookup->parent = parentFrame;
         parentFrame = cx->realm()->savedStacks().getOrCreateSavedFrame(cx, lookup);
-        if (!parentFrame)
+        if (!parentFrame) {
             return false;
+        }
     }
 
     outSavedFrameStack.set(parentFrame);
     return true;
 }
 
 
 } // namespace ubi
--- a/js/src/vm/Scope.cpp
+++ b/js/src/vm/Scope.cpp
@@ -95,18 +95,19 @@ EmptyEnvironmentShape(JSContext* cx, con
                                        baseShapeFlags);
 }
 
 static Shape*
 NextEnvironmentShape(JSContext* cx, HandleAtom name, BindingKind bindKind, uint32_t slot,
                      StackBaseShape& stackBase, HandleShape shape)
 {
     UnownedBaseShape* base = BaseShape::getUnowned(cx, stackBase);
-    if (!base)
+    if (!base) {
         return nullptr;
+    }
 
     unsigned attrs = JSPROP_PERMANENT | JSPROP_ENUMERATE;
     switch (bindKind) {
       case BindingKind::Const:
       case BindingKind::NamedLambdaCallee:
         attrs |= JSPROP_READONLY;
         break;
       default:
@@ -118,52 +119,56 @@ NextEnvironmentShape(JSContext* cx, Hand
     return cx->zone()->propertyTree().getChild(cx, shape, child);
 }
 
 static Shape*
 CreateEnvironmentShape(JSContext* cx, BindingIter& bi, const Class* cls,
                        uint32_t numSlots, uint32_t baseShapeFlags)
 {
     RootedShape shape(cx, EmptyEnvironmentShape(cx, cls, numSlots, baseShapeFlags));
-    if (!shape)
+    if (!shape) {
         return nullptr;
+    }
 
     RootedAtom name(cx);
     StackBaseShape stackBase(cls, baseShapeFlags);
     for (; bi; bi++) {
         BindingLocation loc = bi.location();
         if (loc.kind() == BindingLocation::Kind::Environment) {
             name = bi.name();
             cx->markAtom(name);
             shape = NextEnvironmentShape(cx, name, bi.kind(), loc.slot(), stackBase, shape);
-            if (!shape)
+            if (!shape) {
                 return nullptr;
+            }
         }
     }
 
     return shape;
 }
 
 template <typename ConcreteScope>
 static UniquePtr<typename ConcreteScope::Data>
 CopyScopeData(JSContext* cx, typename ConcreteScope::Data* data)
 {
     // Make sure the binding names are marked in the context's zone, if we are
     // copying data from another zone.
     BindingName* names = data->trailingNames.start();
     uint32_t length = data->length;
     for (size_t i = 0; i < length; i++) {
-        if (JSAtom* name = names[i].name())
+        if (JSAtom* name = names[i].name()) {
             cx->markAtom(name);
+        }
     }
 
     size_t size = SizeOfData<typename ConcreteScope::Data>(data->length);
     void* bytes = cx->pod_malloc<char>(size);
-    if (!bytes)
+    if (!bytes) {
         return nullptr;
+    }
 
     auto* dataCopy = new (bytes) typename ConcreteScope::Data(*data);
 
     std::uninitialized_copy_n(names, length, dataCopy->trailingNames.start());
 
     return UniquePtr<typename ConcreteScope::Data>(dataCopy);
 }
 
@@ -172,42 +177,45 @@ static bool
 PrepareScopeData(JSContext* cx, BindingIter& bi, Handle<UniquePtr<typename ConcreteScope::Data>> data,
                  const Class* cls, uint32_t baseShapeFlags, MutableHandleShape envShape)
 {
     // Copy a fresh BindingIter for use below.
     BindingIter freshBi(bi);
 
     // Iterate through all bindings. This counts the number of environment
     // slots needed and computes the maximum frame slot.
-    while (bi)
+    while (bi) {
         bi++;
+    }
     data->nextFrameSlot = bi.canHaveFrameSlots() ? bi.nextFrameSlot() : LOCALNO_LIMIT;
 
     // Make a new environment shape if any environment slots were used.
     if (bi.nextEnvironmentSlot() == JSSLOT_FREE(cls)) {
         envShape.set(nullptr);
     } else {
         envShape.set(CreateEnvironmentShape(cx, freshBi, cls, bi.nextEnvironmentSlot(),
                                             baseShapeFlags));
-        if (!envShape)
+        if (!envShape) {
             return false;
+        }
     }
 
     return true;
 }
 
 template <typename ConcreteScope>
 static UniquePtr<typename ConcreteScope::Data>
 NewEmptyScopeData(JSContext* cx, uint32_t length = 0)
 {
     size_t dataSize = SizeOfData<typename ConcreteScope::Data>(length);
     uint8_t* bytes = cx->pod_malloc<uint8_t>(dataSize);
     auto data = reinterpret_cast<typename ConcreteScope::Data*>(bytes);
-    if (data)
+    if (data) {
         new (data) typename ConcreteScope::Data(length);
+    }
     return UniquePtr<typename ConcreteScope::Data>(data);
 }
 
 static constexpr size_t HasAtomMask = 1;
 static constexpr size_t HasAtomShift = 1;
 
 static XDRResult
 XDRBindingName(XDRState<XDR_ENCODE>* xdr, BindingName* bindingName)
@@ -217,34 +225,36 @@ XDRBindingName(XDRState<XDR_ENCODE>* xdr
     RootedAtom atom(cx, bindingName->name());
     bool hasAtom = !!atom;
 
     uint8_t flags = bindingName->flagsForXDR();
     MOZ_ASSERT(((flags << HasAtomShift) >> HasAtomShift) == flags);
     uint8_t u8 = (flags << HasAtomShift) | uint8_t(hasAtom);
     MOZ_TRY(xdr->codeUint8(&u8));
 
-    if (hasAtom)
+    if (hasAtom) {
         MOZ_TRY(XDRAtom(xdr, &atom));
+    }
 
     return Ok();
 }
 
 static XDRResult
 XDRBindingName(XDRState<XDR_DECODE>* xdr, BindingName* bindingName)
 {
     JSContext* cx = xdr->cx();
 
     uint8_t u8;
     MOZ_TRY(xdr->codeUint8(&u8));
 
     bool hasAtom = u8 & HasAtomMask;
     RootedAtom atom(cx);
-    if (hasAtom)
+    if (hasAtom) {
         MOZ_TRY(XDRAtom(xdr, &atom));
+    }
 
     uint8_t flags = u8 >> HasAtomShift;
     *bindingName = BindingName::fromXDR(atom, flags);
 
     return Ok();
 }
 
 template <typename ConcreteScopeData>
@@ -261,60 +271,65 @@ template <typename ConcreteScope, XDRMod
 Scope::XDRSizedBindingNames(XDRState<mode>* xdr, Handle<ConcreteScope*> scope,
                             MutableHandle<typename ConcreteScope::Data*> data)
 {
     MOZ_ASSERT(!data);
 
     JSContext* cx = xdr->cx();
 
     uint32_t length;
-    if (mode == XDR_ENCODE)
+    if (mode == XDR_ENCODE) {
         length = scope->data().length;
+    }
     MOZ_TRY(xdr->codeUint32(&length));
 
     if (mode == XDR_ENCODE) {
         data.set(&scope->data());
     } else {
         data.set(NewEmptyScopeData<ConcreteScope>(cx, length).release());
-        if (!data)
+        if (!data) {
             return xdr->fail(JS::TranscodeResult_Throw);
+        }
         data->length = length;
     }
 
     auto dataGuard = mozilla::MakeScopeExit([&] () {
         if (mode == XDR_DECODE) {
             DeleteScopeData(data.get());
             data.set(nullptr);
         }
     });
 
-    for (uint32_t i = 0; i < length; i++)
+    for (uint32_t i = 0; i < length; i++) {
         MOZ_TRY(XDRBindingName(xdr, &data->trailingNames[i]));
+    }
 
     dataGuard.release();
     return Ok();
 }
 
 /* static */ Scope*
 Scope::create(JSContext* cx, ScopeKind kind, HandleScope enclosing, HandleShape envShape)
 {
     Scope* scope = Allocate<Scope>(cx);
-    if (scope)
+    if (scope) {
         new (scope) Scope(kind, enclosing, envShape);
+    }
     return scope;
 }
 
 template <typename ConcreteScope>
 /* static */ ConcreteScope*
 Scope::create(JSContext* cx, ScopeKind kind, HandleScope enclosing,
               HandleShape envShape, MutableHandle<UniquePtr<typename ConcreteScope::Data>> data)
 {
     Scope* scope = create(cx, kind, enclosing, envShape);
-    if (!scope)
+    if (!scope) {
         return nullptr;
+    }
 
     // It is an invariant that all Scopes that have data (currently, all
     // ScopeKinds except With) must have non-null data.
     MOZ_ASSERT(data);
     scope->initData<ConcreteScope>(data);
 
     return &scope->as<ConcreteScope>();
 }
@@ -326,28 +341,30 @@ Scope::initData(MutableHandle<UniquePtr<
     MOZ_ASSERT(!data_);
     data_ = data.get().release();
 }
 
 uint32_t
 Scope::chainLength() const
 {
     uint32_t length = 0;
-    for (ScopeIter si(const_cast<Scope*>(this)); si; si++)
+    for (ScopeIter si(const_cast<Scope*>(this)); si; si++) {
         length++;
+    }
     return length;
 }
 
 uint32_t
 Scope::environmentChainLength() const
 {
     uint32_t length = 0;
     for (ScopeIter si(const_cast<Scope*>(this)); si; si++) {
-        if (si.hasSyntacticEnvironment())
+        if (si.hasSyntacticEnvironment()) {
             length++;
+        }
     }
     return length;
 }
 
 Shape*
 Scope::maybeCloneEnvironmentShape(JSContext* cx)
 {
     // Clone the environment shape if cloning into a different zone.
@@ -362,63 +379,68 @@ Scope::maybeCloneEnvironmentShape(JSCont
 }
 
 /* static */ Scope*
 Scope::clone(JSContext* cx, HandleScope scope, HandleScope enclosing)
 {
     RootedShape envShape(cx);
     if (scope->environmentShape()) {
         envShape = scope->maybeCloneEnvironmentShape(cx);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     switch (scope->kind_) {
       case ScopeKind::Function: {
         RootedScript script(cx, scope->as<FunctionScope>().script());
         const char* filename = script->filename();
         // If the script has an internal URL, include it in the crash reason. If
         // not, it may be a web URL, and therefore privacy-sensitive.
-        if (!strncmp(filename, "chrome:", 7) || !strncmp(filename, "resource:", 9))
+        if (!strncmp(filename, "chrome:", 7) || !strncmp(filename, "resource:", 9)) {
             MOZ_CRASH_UNSAFE_PRINTF("Use FunctionScope::clone (script URL: %s)", filename);
+        }
 
         MOZ_CRASH("Use FunctionScope::clone.");
         break;
       }
 
       case ScopeKind::FunctionBodyVar:
       case ScopeKind::ParameterExpressionVar: {
         Rooted<UniquePtr<VarScope::Data>> dataClone(cx);
         dataClone = CopyScopeData<VarScope>(cx, &scope->as<VarScope>().data());
-        if (!dataClone)
+        if (!dataClone) {
             return nullptr;
+        }
         return create<VarScope>(cx, scope->kind_, enclosing, envShape, &dataClone);
       }
 
       case ScopeKind::Lexical:
       case ScopeKind::SimpleCatch:
       case ScopeKind::Catch:
       case ScopeKind::NamedLambda:
       case ScopeKind::StrictNamedLambda: {
         Rooted<UniquePtr<LexicalScope::Data>> dataClone(cx);
         dataClone = CopyScopeData<LexicalScope>(cx, &scope->as<LexicalScope>().data());
-        if (!dataClone)
+        if (!dataClone) {
             return nullptr;
+        }
         return create<LexicalScope>(cx, scope->kind_, enclosing, envShape, &dataClone);
       }
 
       case ScopeKind::With:
           return create(cx, scope->kind_, enclosing, envShape);
 
       case ScopeKind::Eval:
       case ScopeKind::StrictEval: {
         Rooted<UniquePtr<EvalScope::Data>> dataClone(cx);
         dataClone = CopyScopeData<EvalScope>(cx, &scope->as<EvalScope>().data());
-        if (!dataClone)
+        if (!dataClone) {
             return nullptr;
+        }
         return create<EvalScope>(cx, scope->kind_, enclosing, envShape, &dataClone);
       }
 
       case ScopeKind::Global:
       case ScopeKind::NonSyntactic:
         MOZ_CRASH("Use GlobalScope::clone.");
         break;
 
@@ -446,28 +468,30 @@ Scope::finalize(FreeOp* fop)
         fop->free_(data_);
         data_ = nullptr;
     }
 }
 
 size_t
 Scope::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
-    if (data_)
+    if (data_) {
         return mallocSizeOf(data_);
+    }
     return 0;
 }
 
 void
 Scope::dump()
 {
     for (ScopeIter si(this); si; si++) {
         fprintf(stderr, "%s [%p]", ScopeKindString(si.kind()), si.scope());
-        if (si.scope()->enclosing())
+        if (si.scope()->enclosing()) {
             fprintf(stderr, " -> ");
+        }
     }
     fprintf(stderr, "\n");
 }
 
 uint32_t
 LexicalScope::firstFrameSlot() const
 {
     switch (kind()) {
@@ -530,18 +554,19 @@ LexicalScope::nextFrameSlot(Scope* scope
 LexicalScope::create(JSContext* cx, ScopeKind kind, Handle<Data*> data,
                      uint32_t firstFrameSlot, HandleScope enclosing)
 {
     MOZ_ASSERT(data, "LexicalScopes should not be created if there are no bindings.");
 
     // The data that's passed in is from the frontend and is LifoAlloc'd.
     // Copy it now that we're creating a permanent VM scope.
     Rooted<UniquePtr<Data>> copy(cx, CopyScopeData<LexicalScope>(cx, data));
-    if (!copy)
+    if (!copy) {
         return nullptr;
+    }
 
     return createWithData(cx, kind, &copy, firstFrameSlot, enclosing);
 }
 
 /* static */ LexicalScope*
 LexicalScope::createWithData(JSContext* cx, ScopeKind kind, MutableHandle<UniquePtr<Data>> data,
                              uint32_t firstFrameSlot, HandleScope enclosing)
 {
@@ -555,18 +580,19 @@ LexicalScope::createWithData(JSContext* 
     BindingIter bi(*data, firstFrameSlot, isNamedLambda);
     if (!PrepareScopeData<LexicalScope>(cx, bi, data, &LexicalEnvironmentObject::class_,
                                         BaseShape::NOT_EXTENSIBLE | BaseShape::DELEGATE, &envShape))
     {
         return nullptr;
     }
 
     auto scope = Scope::create<LexicalScope>(cx, kind, enclosing, envShape, data);
-    if (!scope)
+    if (!scope) {
         return nullptr;
+    }
 
     MOZ_ASSERT(scope->firstFrameSlot() == firstFrameSlot);
     return scope;
 }
 
 /* static */ Shape*
 LexicalScope::getEmptyExtensibleEnvironmentShape(JSContext* cx)
 {
@@ -581,34 +607,36 @@ LexicalScope::XDR(XDRState<mode>* xdr, S
 {
     JSContext* cx = xdr->cx();
 
     Rooted<Data*> data(cx);
     MOZ_TRY(XDRSizedBindingNames<LexicalScope>(xdr, scope.as<LexicalScope>(), &data));
 
     {
         Maybe<Rooted<UniquePtr<Data>>> uniqueData;
-        if (mode == XDR_DECODE)
+        if (mode == XDR_DECODE) {
             uniqueData.emplace(cx, data);
+        }
 
         uint32_t firstFrameSlot;
         uint32_t nextFrameSlot;
         if (mode == XDR_ENCODE) {
             firstFrameSlot = scope->as<LexicalScope>().firstFrameSlot();
             nextFrameSlot = data->nextFrameSlot;
         }
 
         MOZ_TRY(xdr->codeUint32(&data->constStart));
         MOZ_TRY(xdr->codeUint32(&firstFrameSlot));
         MOZ_TRY(xdr->codeUint32(&nextFrameSlot));
 
         if (mode == XDR_DECODE) {
             scope.set(createWithData(cx, kind, &uniqueData.ref(), firstFrameSlot, enclosing));
-            if (!scope)
+            if (!scope) {
                 return xdr->fail(JS::TranscodeResult_Throw);
+            }
 
             // nextFrameSlot is used only for this correctness check.
             MOZ_ASSERT(nextFrameSlot == scope->as<LexicalScope>().data().nextFrameSlot);
         }
     }
 
     return Ok();
 }
@@ -621,18 +649,19 @@ LexicalScope::XDR(XDRState<XDR_ENCODE>* 
 template
 /* static */ XDRResult
 LexicalScope::XDR(XDRState<XDR_DECODE>* xdr, ScopeKind kind, HandleScope enclosing,
                   MutableHandleScope scope);
 
 static inline uint32_t
 FunctionScopeEnvShapeFlags(bool hasParameterExprs)
 {
-    if (hasParameterExprs)
+    if (hasParameterExprs) {
         return BaseShape::DELEGATE;
+    }
     return BaseShape::QUALIFIED_VAROBJ | BaseShape::DELEGATE;
 }
 
 Zone*
 FunctionScope::Data::zone() const
 {
     return canonicalFunction ? canonicalFunction->zone() : nullptr;
 }
@@ -641,18 +670,19 @@ FunctionScope::Data::zone() const
 FunctionScope::create(JSContext* cx, Handle<Data*> dataArg,
                       bool hasParameterExprs, bool needsEnvironment,
                       HandleFunction fun, HandleScope enclosing)
 {
     // The data that's passed in is from the frontend and is LifoAlloc'd.
     // Copy it now that we're creating a permanent VM scope.
     Rooted<UniquePtr<Data>> data(cx, dataArg ? CopyScopeData<FunctionScope>(cx, dataArg)
                                              : NewEmptyScopeData<FunctionScope>(cx));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     return createWithData(cx, &data, hasParameterExprs, needsEnvironment, fun, enclosing);
 }
 
 /* static */ FunctionScope*
 FunctionScope::createWithData(JSContext* cx, MutableHandle<UniquePtr<Data>> data,
                               bool hasParameterExprs, bool needsEnvironment,
                               HandleFunction fun, HandleScope enclosing)
@@ -679,18 +709,19 @@ FunctionScope::createWithData(JSContext*
     // An environment may be needed regardless of existence of any closed over
     // bindings:
     //   - Extensible scopes (i.e., due to direct eval)
     //   - Needing a home object
     //   - Being a derived class constructor
     //   - Being a generator
     if (!envShape && needsEnvironment) {
         envShape = getEmptyEnvironmentShape(cx, hasParameterExprs);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     return Scope::create<FunctionScope>(cx, ScopeKind::Function, enclosing, envShape, data);
 }
 
 JSScript*
 FunctionScope::script() const
 {
@@ -720,24 +751,26 @@ FunctionScope::clone(JSContext* cx, Hand
     MOZ_ASSERT(fun != scope->canonicalFunction());
 
     // FunctionScope::Data has GCManagedDeletePolicy because it contains a
     // GCPtr. Destruction of |dataClone| below may trigger calls into the GC.
 
     RootedShape envShape(cx);
     if (scope->environmentShape()) {
         envShape = scope->maybeCloneEnvironmentShape(cx);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     Rooted<Data*> dataOriginal(cx, &scope->as<FunctionScope>().data());
     Rooted<UniquePtr<Data>> dataClone(cx, CopyScopeData<FunctionScope>(cx, dataOriginal));
-    if (!dataClone)
+    if (!dataClone) {
         return nullptr;
+    }
 
     dataClone->canonicalFunction.init(fun);
 
     return Scope::create<FunctionScope>(cx, scope->kind(), enclosing, envShape, &dataClone);
 }
 
 template <XDRMode mode>
 /* static */ XDRResult
@@ -745,18 +778,19 @@ FunctionScope::XDR(XDRState<mode>* xdr, 
                    MutableHandleScope scope)
 {
     JSContext* cx = xdr->cx();
     Rooted<Data*> data(cx);
     MOZ_TRY(XDRSizedBindingNames<FunctionScope>(xdr, scope.as<FunctionScope>(), &data));
 
     {
         Maybe<Rooted<UniquePtr<Data>>> uniqueData;
-        if (mode == XDR_DECODE)
+        if (mode == XDR_DECODE) {
             uniqueData.emplace(cx, data);
+        }
 
         uint8_t needsEnvironment;
         uint8_t hasParameterExprs;
         uint32_t nextFrameSlot;
         if (mode == XDR_ENCODE) {
             needsEnvironment = scope->hasEnvironment();
             hasParameterExprs = data->hasParameterExprs;
             nextFrameSlot = data->nextFrameSlot;
@@ -771,18 +805,19 @@ FunctionScope::XDR(XDRState<mode>* xdr, 
             if (!data->length) {
                 MOZ_ASSERT(!data->nonPositionalFormalStart);
                 MOZ_ASSERT(!data->varStart);
                 MOZ_ASSERT(!data->nextFrameSlot);
             }
 
             scope.set(createWithData(cx, &uniqueData.ref(), hasParameterExprs, needsEnvironment, fun,
                                      enclosing));
-            if (!scope)
+            if (!scope) {
                 return xdr->fail(JS::TranscodeResult_Throw);
+            }
 
             // nextFrameSlot is used only for this correctness check.
             MOZ_ASSERT(nextFrameSlot == scope->as<FunctionScope>().data().nextFrameSlot);
         }
     }
 
     return Ok();
 }
@@ -799,32 +834,34 @@ FunctionScope::XDR(XDRState<XDR_DECODE>*
 
 static const uint32_t VarScopeEnvShapeFlags =
     BaseShape::QUALIFIED_VAROBJ | BaseShape::DELEGATE;
 
 static UniquePtr<VarScope::Data>
 NewEmptyVarScopeData(JSContext* cx, uint32_t firstFrameSlot)
 {
     UniquePtr<VarScope::Data> data(NewEmptyScopeData<VarScope>(cx));
-    if (data)
+    if (data) {
         data->nextFrameSlot = firstFrameSlot;
+    }
 
     return data;
 }
 
 /* static */ VarScope*
 VarScope::create(JSContext* cx, ScopeKind kind, Handle<Data*> dataArg,
                  uint32_t firstFrameSlot, bool needsEnvironment, HandleScope enclosing)
 {
     // The data that's passed in is from the frontend and is LifoAlloc'd.
     // Copy it now that we're creating a permanent VM scope.
     Rooted<UniquePtr<Data>> data(cx, dataArg ? CopyScopeData<VarScope>(cx, dataArg)
                                              : NewEmptyVarScopeData(cx, firstFrameSlot));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     return createWithData(cx, kind, &data, firstFrameSlot, needsEnvironment, enclosing);
 }
 
 /* static */ VarScope*
 VarScope::createWithData(JSContext* cx, ScopeKind kind, MutableHandle<UniquePtr<Data>> data,
                          uint32_t firstFrameSlot, bool needsEnvironment, HandleScope enclosing)
 {
@@ -839,51 +876,54 @@ VarScope::createWithData(JSContext* cx, 
     }
 
     // An environment may be needed regardless of existence of any closed over
     // bindings:
     //   - Extensible scopes (i.e., due to direct eval)
     //   - Being a generator
     if (!envShape && needsEnvironment) {
         envShape = getEmptyEnvironmentShape(cx);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     return Scope::create<VarScope>(cx, kind, enclosing, envShape, data);
 }
 
 /* static */ Shape*
 VarScope::getEmptyEnvironmentShape(JSContext* cx)
 {
     const Class* cls = &VarEnvironmentObject::class_;
     return EmptyEnvironmentShape(cx, cls, JSSLOT_FREE(cls), VarScopeEnvShapeFlags);
 }
 
 uint32_t
 VarScope::firstFrameSlot() const
 {
-    if (enclosing()->is<FunctionScope>())
+    if (enclosing()->is<FunctionScope>()) {
         return enclosing()->as<FunctionScope>().nextFrameSlot();
+    }
     return 0;
 }
 
 template <XDRMode mode>
 /* static */ XDRResult
 VarScope::XDR(XDRState<mode>* xdr, ScopeKind kind, HandleScope enclosing,
               MutableHandleScope scope)
 {
     JSContext* cx = xdr->cx();
     Rooted<Data*> data(cx);
     MOZ_TRY(XDRSizedBindingNames<VarScope>(xdr, scope.as<VarScope>(), &data));
 
     {
         Maybe<Rooted<UniquePtr<Data>>> uniqueData;
-        if (mode == XDR_DECODE)
+        if (mode == XDR_DECODE) {
             uniqueData.emplace(cx, data);
+        }
 
         uint8_t needsEnvironment;
         uint32_t firstFrameSlot;
         uint32_t nextFrameSlot;
         if (mode == XDR_ENCODE) {
             needsEnvironment = scope->hasEnvironment();
             firstFrameSlot = scope->as<VarScope>().firstFrameSlot();
             nextFrameSlot = data->nextFrameSlot;
@@ -894,18 +934,19 @@ VarScope::XDR(XDRState<mode>* xdr, Scope
 
         if (mode == XDR_DECODE) {
             if (!data->length) {
                 MOZ_ASSERT(!data->nextFrameSlot);
             }
 
             scope.set(createWithData(cx, kind, &uniqueData.ref(), firstFrameSlot, needsEnvironment,
                                      enclosing));
-            if (!scope)
+            if (!scope) {
                 return xdr->fail(JS::TranscodeResult_Throw);
+            }
 
             // nextFrameSlot is used only for this correctness check.
             MOZ_ASSERT(nextFrameSlot == scope->as<VarScope>().data().nextFrameSlot);
         }
     }
 
     return Ok();
 }
@@ -922,18 +963,19 @@ VarScope::XDR(XDRState<XDR_DECODE>* xdr,
 
 /* static */ GlobalScope*
 GlobalScope::create(JSContext* cx, ScopeKind kind, Handle<Data*> dataArg)
 {
     // The data that's passed in is from the frontend and is LifoAlloc'd.
     // Copy it now that we're creating a permanent VM scope.
     Rooted<UniquePtr<Data>> data(cx, dataArg ? CopyScopeData<GlobalScope>(cx, dataArg)
                                              : NewEmptyScopeData<GlobalScope>(cx));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     return createWithData(cx, kind, &data);
 }
 
 /* static */ GlobalScope*
 GlobalScope::createWithData(JSContext* cx, ScopeKind kind, MutableHandle<UniquePtr<Data>> data)
 {
     MOZ_ASSERT(data);
@@ -945,49 +987,52 @@ GlobalScope::createWithData(JSContext* c
     return Scope::create<GlobalScope>(cx, kind, nullptr, nullptr, data);
 }
 
 /* static */ GlobalScope*
 GlobalScope::clone(JSContext* cx, Handle<GlobalScope*> scope, ScopeKind kind)
 {
     Rooted<Data*> dataOriginal(cx, &scope->as<GlobalScope>().data());
     Rooted<UniquePtr<Data>> dataClone(cx, CopyScopeData<GlobalScope>(cx, dataOriginal));
-    if (!dataClone)
+    if (!dataClone) {
         return nullptr;
+    }
 
     return Scope::create<GlobalScope>(cx, kind, nullptr, nullptr, &dataClone);
 }
 
 template <XDRMode mode>
 /* static */ XDRResult
 GlobalScope::XDR(XDRState<mode>* xdr, ScopeKind kind, MutableHandleScope scope)
 {
     MOZ_ASSERT((mode == XDR_DECODE) == !scope);
 
     JSContext* cx = xdr->cx();
     Rooted<Data*> data(cx);
     MOZ_TRY(XDRSizedBindingNames<GlobalScope>(xdr, scope.as<GlobalScope>(), &data));
 
     {
         Maybe<Rooted<UniquePtr<Data>>> uniqueData;
-        if (mode == XDR_DECODE)
+        if (mode == XDR_DECODE) {
             uniqueData.emplace(cx, data);
+        }
 
         MOZ_TRY(xdr->codeUint32(&data->letStart));
         MOZ_TRY(xdr->codeUint32(&data->constStart));
 
         if (mode == XDR_DECODE) {
             if (!data->length) {
                 MOZ_ASSERT(!data->letStart);
                 MOZ_ASSERT(!data->constStart);
             }
 
             scope.set(createWithData(cx, kind, &uniqueData.ref()));
-            if (!scope)
+            if (!scope) {
                 return xdr->fail(JS::TranscodeResult_Throw);
+            }
         }
     }
 
     return Ok();
 }
 
 template
 /* static */ XDRResult
@@ -1010,18 +1055,19 @@ static const uint32_t EvalScopeEnvShapeF
 /* static */ EvalScope*
 EvalScope::create(JSContext* cx, ScopeKind scopeKind, Handle<Data*> dataArg,
                   HandleScope enclosing)
 {
     // The data that's passed in is from the frontend and is LifoAlloc'd.
     // Copy it now that we're creating a permanent VM scope.
     Rooted<UniquePtr<Data>> data(cx, dataArg ? CopyScopeData<EvalScope>(cx, dataArg)
                                              : NewEmptyScopeData<EvalScope>(cx));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     return createWithData(cx, scopeKind, &data, enclosing);
 }
 
 /* static */ EvalScope*
 EvalScope::createWithData(JSContext* cx, ScopeKind scopeKind, MutableHandle<UniquePtr<Data>> data,
                           HandleScope enclosing)
 {
@@ -1036,18 +1082,19 @@ EvalScope::createWithData(JSContext* cx,
             return nullptr;
         }
     }
 
     // Strict eval and direct eval in parameter expressions always get their own
     // var environment even if there are no bindings.
     if (!envShape && scopeKind == ScopeKind::StrictEval) {
         envShape = getEmptyEnvironmentShape(cx);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     return Scope::create<EvalScope>(cx, scopeKind, enclosing, envShape, data);
 }
 
 /* static */ Scope*
 EvalScope::nearestVarScopeForDirectEval(Scope* scope)
 {
@@ -1078,28 +1125,31 @@ template <XDRMode mode>
 EvalScope::XDR(XDRState<mode>* xdr, ScopeKind kind, HandleScope enclosing,
                MutableHandleScope scope)
 {
     JSContext* cx = xdr->cx();
     Rooted<Data*> data(cx);
 
     {
         Maybe<Rooted<UniquePtr<Data>>> uniqueData;
-        if (mode == XDR_DECODE)
+        if (mode == XDR_DECODE) {
             uniqueData.emplace(cx, data);
+        }
 
         MOZ_TRY(XDRSizedBindingNames<EvalScope>(xdr, scope.as<EvalScope>(), &data));
 
         if (mode == XDR_DECODE) {
-            if (!data->length)
+            if (!data->length) {
                 MOZ_ASSERT(!data->nextFrameSlot);
+            }
 
             scope.set(createWithData(cx, kind, &uniqueData.ref(), enclosing));
-            if (!scope)
+            if (!scope) {
                 return xdr->fail(JS::TranscodeResult_Throw);
+            }
         }
     }
 
     return Ok();
 }
 
 template
 /* static */ XDRResult
@@ -1121,18 +1171,19 @@ ModuleScope::Data::zone() const
 }
 
 /* static */ ModuleScope*
 ModuleScope::create(JSContext* cx, Handle<Data*> dataArg,
                     HandleModuleObject module, HandleScope enclosing)
 {
     Rooted<UniquePtr<Data>> data(cx, dataArg ? CopyScopeData<ModuleScope>(cx, dataArg)
                                              : NewEmptyScopeData<ModuleScope>(cx));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     return createWithData(cx, &data, module, enclosing);
 }
 
 /* static */ ModuleScope*
 ModuleScope::createWithData(JSContext* cx, MutableHandle<UniquePtr<Data>> data,
                             HandleModuleObject module, HandleScope enclosing)
 {
@@ -1150,18 +1201,19 @@ ModuleScope::createWithData(JSContext* c
                                        ModuleScopeEnvShapeFlags, &envShape))
     {
         return nullptr;
     }
 
     // Modules always need an environment object for now.
     if (!envShape) {
         envShape = getEmptyEnvironmentShape(cx);
-        if (!envShape)
+        if (!envShape) {
             return nullptr;
+        }
     }
 
     data->module.init(module);
 
     return Scope::create<ModuleScope>(cx, ScopeKind::Module, enclosing, envShape, data);
 }
 
 /* static */ Shape*
@@ -1181,20 +1233,22 @@ static const uint32_t WasmInstanceEnvSha
     BaseShape::NOT_EXTENSIBLE | BaseShape::DELEGATE;
 
 
 template <size_t ArrayLength>
 static JSAtom*
 GenerateWasmName(JSContext* cx, const char (&prefix)[ArrayLength], uint32_t index)
 {
     StringBuffer sb(cx);
-    if (!sb.append(prefix))
+    if (!sb.append(prefix)) {
         return nullptr;
-    if (!NumberValueToStringBuffer(cx, Int32Value(index), sb))
+    }
+    if (!NumberValueToStringBuffer(cx, Int32Value(index), sb)) {
         return nullptr;
+    }
 
     return sb.finishAtom();
 }
 
 /* static */ WasmInstanceScope*
 WasmInstanceScope::create(JSContext* cx, WasmInstanceObject* instance)
 {
     // WasmInstanceScope::Data has GCManagedDeletePolicy because it contains a
@@ -1204,32 +1258,35 @@ WasmInstanceScope::create(JSContext* cx,
     if (instance->instance().memory()) {
         namesCount++;
     }
     size_t globalsStart = namesCount;
     size_t globalsCount = instance->instance().metadata().globals.length();
     namesCount += globalsCount;
 
     Rooted<UniquePtr<Data>> data(cx, NewEmptyScopeData<WasmInstanceScope>(cx, namesCount));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     size_t nameIndex = 0;
     RootedAtom name(cx);
     if (instance->instance().memory()) {
         name = GenerateWasmName(cx, "memory", /* index = */ 0);
-        if (!name)
+        if (!name) {
             return nullptr;
+        }
         new (&data->trailingNames[nameIndex]) BindingName(name, false);
         nameIndex++;
     }
     for (size_t i = 0; i < globalsCount; i++) {
         name = GenerateWasmName(cx, "global", i);
-        if (!name)
+        if (!name) {
             return nullptr;
+        }
         new (&data->trailingNames[nameIndex]) BindingName(name, false);
         nameIndex++;
     }
     MOZ_ASSERT(nameIndex == namesCount);
 
     data->instance.init(instance);
     data->memoriesStart = 0;
     data->globalsStart = globalsStart;
@@ -1260,31 +1317,34 @@ WasmFunctionScope::create(JSContext* cx,
 
     Rooted<WasmFunctionScope*> wasmFunctionScope(cx);
 
     Rooted<WasmInstanceObject*> instance(cx, enclosing->as<WasmInstanceScope>().instance());
 
     // TODO pull the local variable names from the wasm function definition.
     wasm::ValTypeVector locals;
     size_t argsLength;
-    if (!instance->instance().debug().debugGetLocalTypes(funcIndex, &locals, &argsLength))
+    if (!instance->instance().debug().debugGetLocalTypes(funcIndex, &locals, &argsLength)) {
         return nullptr;
+    }
     uint32_t namesCount = locals.length();
 
     Rooted<UniquePtr<Data>> data(cx, NewEmptyScopeData<WasmFunctionScope>(cx, namesCount));
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
     data->funcIndex = funcIndex;
     data->length = namesCount;
     RootedAtom name(cx);
     for (size_t i = 0; i < namesCount; i++) {
         name = GenerateWasmName(cx, "var", i);
-        if (!name)
+        if (!name) {
             return nullptr;
+        }
         new (&data->trailingNames[i]) BindingName(name, false);
     }
 
     return Scope::create<WasmFunctionScope>(cx, ScopeKind::WasmFunction, enclosing,
                                             /* envShape = */ nullptr, &data);
 }
 
 /* static */ Shape*
@@ -1319,18 +1379,19 @@ BindingIter::BindingIter(Scope* scope)
         break;
       case ScopeKind::With:
         // With scopes do not have bindings.
         index_ = length_ = 0;
         MOZ_ASSERT(done());
         break;
       case ScopeKind::Function: {
         uint8_t flags = IgnoreDestructuredFormalParameters;
-        if (scope->as<FunctionScope>().hasParameterExprs())
+        if (scope->as<FunctionScope>().hasParameterExprs()) {
             flags |= HasFormalParameterExprs;
+        }
         init(scope->as<FunctionScope>().data(), flags);
         break;
       }
       case ScopeKind::FunctionBodyVar:
       case ScopeKind::ParameterExpressionVar:
         init(scope->as<VarScope>().data(),
              scope->as<VarScope>().firstFrameSlot());
         break;
@@ -1383,18 +1444,19 @@ BindingIter::init(LexicalScope::Data& da
              data.trailingNames.start(), data.length);
     }
 }
 
 void
 BindingIter::init(FunctionScope::Data& data, uint8_t flags)
 {
     flags = CanHaveFrameSlots | CanHaveEnvironmentSlots | flags;
-    if (!(flags & HasFormalParameterExprs))
+    if (!(flags & HasFormalParameterExprs)) {
         flags |= CanHaveArgumentSlots;
+    }
 
     //            imports - [0, 0)
     // positional formals - [0, data.nonPositionalFormalStart)
     //      other formals - [data.nonPositionalParamStart, data.varStart)
     //               vars - [data.varStart, data.length)
     //               lets - [data.length, data.length)
     //             consts - [data.length, data.length)
     init(0, data.nonPositionalFormalStart, data.varStart, data.length, data.length,
@@ -1504,36 +1566,39 @@ BindingIter::init(WasmFunctionScope::Dat
          UINT32_MAX, UINT32_MAX,
          data.trailingNames.start(), data.length);
 }
 
 PositionalFormalParameterIter::PositionalFormalParameterIter(JSScript* script)
   : BindingIter(script)
 {
     // Reinit with flags = 0, i.e., iterate over all positional parameters.
-    if (script->bodyScope()->is<FunctionScope>())
+    if (script->bodyScope()->is<FunctionScope>()) {
         init(script->bodyScope()->as<FunctionScope>().data(), /* flags = */ 0);
+    }
     settle();
 }
 
 void
 js::DumpBindings(JSContext* cx, Scope* scopeArg)
 {
     RootedScope scope(cx, scopeArg);
     for (Rooted<BindingIter> bi(cx, BindingIter(scope)); bi; bi++) {
         UniqueChars bytes = AtomToPrintableString(cx, bi.name());
-        if (!bytes)
+        if (!bytes) {
             return;
+        }
         fprintf(stderr, "%s %s ", BindingKindString(bi.kind()), bytes.get());
         switch (bi.location().kind()) {
           case BindingLocation::Kind::Global:
-            if (bi.isTopLevelFunction())
+            if (bi.isTopLevelFunction()) {
                 fprintf(stderr, "global function\n");
-            else
+            } else {
                 fprintf(stderr, "global\n");
+            }
             break;
           case BindingLocation::Kind::Argument:
             fprintf(stderr, "arg slot %u\n", bi.location().argumentSlot());
             break;
           case BindingLocation::Kind::Frame:
             fprintf(stderr, "frame slot %u\n", bi.location().slot());
             break;
           case BindingLocation::Kind::Environment:
@@ -1549,55 +1614,62 @@ js::DumpBindings(JSContext* cx, Scope* s
     }
 }
 
 static JSAtom*
 GetFrameSlotNameInScope(Scope* scope, uint32_t slot)
 {
     for (BindingIter bi(scope); bi; bi++) {
         BindingLocation loc = bi.location();
-        if (loc.kind() == BindingLocation::Kind::Frame && loc.slot() == slot)
+        if (loc.kind() == BindingLocation::Kind::Frame && loc.slot() == slot) {
             return bi.name();
+        }
     }
     return nullptr;
 }
 
 JSAtom*
 js::FrameSlotName(JSScript* script, jsbytecode* pc)
 {
     MOZ_ASSERT(IsLocalOp(JSOp(*pc)));
     uint32_t slot = GET_LOCALNO(pc);
     MOZ_ASSERT(slot < script->nfixed());
 
     // Look for it in the body scope first.
-    if (JSAtom* name = GetFrameSlotNameInScope(script->bodyScope(), slot))
+    if (JSAtom* name = GetFrameSlotNameInScope(script->bodyScope(), slot)) {
         return name;
+    }
 
     // If this is a function script and there is an extra var scope, look for
     // it there.
     if (script->functionHasExtraBodyVarScope()) {
-        if (JSAtom* name = GetFrameSlotNameInScope(script->functionExtraBodyVarScope(), slot))
+        if (JSAtom* name = GetFrameSlotNameInScope(script->functionExtraBodyVarScope(), slot)) {
             return name;
+        }
     }
 
     // If not found, look for it in a lexical scope.
     for (ScopeIter si(script->innermostScope(pc)); si; si++) {
-        if (!si.scope()->is<LexicalScope>())
+        if (!si.scope()->is<LexicalScope>()) {
             continue;
+        }
         LexicalScope& lexicalScope = si.scope()->as<LexicalScope>();
 
         // Is the slot within bounds of the current lexical scope?
-        if (slot < lexicalScope.firstFrameSlot())
+        if (slot < lexicalScope.firstFrameSlot()) {
             continue;
-        if (slot >= lexicalScope.nextFrameSlot())
+        }
+        if (slot >= lexicalScope.nextFrameSlot()) {
             break;
+        }
 
         // If so, get the name.
-        if (JSAtom* name = GetFrameSlotNameInScope(&lexicalScope, slot))
+        if (JSAtom* name = GetFrameSlotNameInScope(&lexicalScope, slot)) {
             return name;
+        }
     }
 
     MOZ_CRASH("Frame slot not found");
 }
 
 JS::ubi::Node::Size
 JS::ubi::Concrete<Scope>::size(mozilla::MallocSizeOf mallocSizeOf) const
 {
--- a/js/src/vm/Scope.h
+++ b/js/src/vm/Scope.h
@@ -198,18 +198,19 @@ class TrailingNamesArray
     }
 
   public:
     // Explicitly ensure no one accidentally allocates scope data without
     // poisoning its trailing names.
     TrailingNamesArray() = delete;
 
     explicit TrailingNamesArray(size_t nameCount) {
-        if (nameCount)
+        if (nameCount) {
             JS_POISON(&data_, 0xCC, sizeof(BindingName) * nameCount, MemCheckKind::MakeUndefined);
+        }
     }
 
     BindingName* start() { return reinterpret_cast<BindingName*>(ptr()); }
 
     BindingName& get(size_t i) { return start()[i]; }
     BindingName& operator[](size_t i) { return get(i); }
 };
 
@@ -387,26 +388,28 @@ class Scope : public js::gc::TenuredCell
     }
 
     uint32_t chainLength() const;
     uint32_t environmentChainLength() const;
 
     template <typename T>
     bool hasOnChain() const {
         for (const Scope* it = this; it; it = it->enclosing()) {
-            if (it->is<T>())
+            if (it->is<T>()) {
                 return true;
+            }
         }
         return false;
     }
 
     bool hasOnChain(ScopeKind kind) const {
         for (const Scope* it = this; it; it = it->enclosing()) {
-            if (it->kind() == kind)
+            if (it->kind() == kind) {
                 return true;
+            }
         }
         return false;
     }
 
     static Scope* clone(JSContext* cx, HandleScope scope, HandleScope enclosing);
 
     void traceChildren(JSTracer* trc);
     void finalize(FreeOp* fop);
@@ -949,18 +952,19 @@ class EvalScope : public Scope
         return kind() == ScopeKind::StrictEval;
     }
 
     bool hasBindings() const {
         return data().length > 0;
     }
 
     bool isNonGlobal() const {
-        if (strict())
+        if (strict()) {
             return true;
+        }
         return !nearestVarScopeForDirectEval(enclosing())->is<GlobalScope>();
     }
 
     static Shape* getEmptyEnvironmentShape(JSContext* cx);
 };
 
 template <>
 inline bool
@@ -1288,27 +1292,29 @@ class BindingIter
                 // indirect bindings.
                 MOZ_ASSERT(kind() != BindingKind::Import);
                 MOZ_ASSERT(canHaveEnvironmentSlots());
                 environmentSlot_++;
             } else if (canHaveFrameSlots()) {
                 // Usually positional formal parameters don't have frame
                 // slots, except when there are parameter expressions, in
                 // which case they act like lets.
-                if (index_ >= nonPositionalFormalStart_ || (hasFormalParameterExprs() && name()))
+                if (index_ >= nonPositionalFormalStart_ || (hasFormalParameterExprs() && name())) {
                     frameSlot_++;
+                }
             }
         }
         index_++;
     }
 
     void settle() {
         if (ignoreDestructuredFormalParameters()) {
-            while (!done() && !name())
+            while (!done() && !name()) {
                 increment();
+            }
         }
     }
 
   public:
     explicit BindingIter(Scope* scope);
     explicit BindingIter(JSScript* script);
 
     BindingIter(LexicalScope::Data& data, uint32_t firstFrameSlot, bool isNamedLambda) {
@@ -1380,63 +1386,73 @@ class BindingIter
 
     bool closedOver() const {
         MOZ_ASSERT(!done());
         return names_[index_].closedOver();
     }
 
     BindingLocation location() const {
         MOZ_ASSERT(!done());
-        if (!(flags_ & CanHaveSlotsMask))
+        if (!(flags_ & CanHaveSlotsMask)) {
             return BindingLocation::Global();
-        if (index_ < positionalFormalStart_)
+        }
+        if (index_ < positionalFormalStart_) {
             return BindingLocation::Import();
+        }
         if (closedOver()) {
             MOZ_ASSERT(canHaveEnvironmentSlots());
             return BindingLocation::Environment(environmentSlot_);
         }
-        if (index_ < nonPositionalFormalStart_ && canHaveArgumentSlots())
+        if (index_ < nonPositionalFormalStart_ && canHaveArgumentSlots()) {
             return BindingLocation::Argument(argumentSlot_);
-        if (canHaveFrameSlots())
+        }
+        if (canHaveFrameSlots()) {
             return BindingLocation::Frame(frameSlot_);
+        }
         MOZ_ASSERT(isNamedLambda());
         return BindingLocation::NamedLambdaCallee();
     }
 
     BindingKind kind() const {
         MOZ_ASSERT(!done());
-        if (index_ < positionalFormalStart_)
+        if (index_ < positionalFormalStart_) {
             return BindingKind::Import;
+        }
         if (index_ < varStart_) {
             // When the parameter list has expressions, the parameters act
             // like lexical bindings and have TDZ.
-            if (hasFormalParameterExprs())
+            if (hasFormalParameterExprs()) {
                 return BindingKind::Let;
+            }
             return BindingKind::FormalParameter;
         }
-        if (index_ < letStart_)
+        if (index_ < letStart_) {
             return BindingKind::Var;
-        if (index_ < constStart_)
+        }
+        if (index_ < constStart_) {
             return BindingKind::Let;
-        if (isNamedLambda())
+        }
+        if (isNamedLambda()) {
             return BindingKind::NamedLambdaCallee;
+        }
         return BindingKind::Const;
     }
 
     bool isTopLevelFunction() const {
         MOZ_ASSERT(!done());
         bool result = names_[index_].isTopLevelFunction();
         MOZ_ASSERT_IF(result, kind() == BindingKind::Var);
         return result;
     }
 
     bool hasArgumentSlot() const {
         MOZ_ASSERT(!done());
-        if (hasFormalParameterExprs())
+        if (hasFormalParameterExprs()) {
             return false;
+        }
         return index_ >= positionalFormalStart_ && index_ < nonPositionalFormalStart_;
     }
 
     uint16_t argumentSlot() const {
         MOZ_ASSERT(canHaveArgumentSlots());
         return mozilla::AssertedCast<uint16_t>(index_);
     }
 
@@ -1458,18 +1474,19 @@ JSAtom* FrameSlotName(JSScript* script, 
 
 //
 // A refinement BindingIter that only iterates over positional formal
 // parameters of a function.
 //
 class PositionalFormalParameterIter : public BindingIter
 {
     void settle() {
-        if (index_ >= nonPositionalFormalStart_)
+        if (index_ >= nonPositionalFormalStart_) {
             index_ = length_;
+        }
     }
 
   public:
     explicit PositionalFormalParameterIter(JSScript* script);
 
     void operator++(int) {
         BindingIter::operator++(1);
         settle();
@@ -1536,18 +1553,19 @@ class MOZ_STACK_CLASS ScopeIter
     }
 
     // Returns whether this scope has a syntactic environment (i.e., an
     // Environment that isn't a non-syntactic With or NonSyntacticVariables)
     // on the environment chain.
     bool hasSyntacticEnvironment() const;
 
     void trace(JSTracer* trc) {
-        if (scope_)
+        if (scope_) {
             TraceRoot(trc, &scope_, "scope iter scope");
+        }
     }
 };
 
 //
 // Specializations of Rooted containers for the iterators.
 //
 
 template <typename Wrapper>
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -86,18 +86,19 @@ selfHosting_WarningReporter(JSContext* c
     PrintError(cx, stderr, JS::ConstUTF8CharsZ(), report, true);
 }
 
 static bool
 intrinsic_ToObject(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     JSObject* obj = ToObject(cx, args[0]);
-    if (!obj)
+    if (!obj) {
         return false;
+    }
     args.rval().setObject(*obj);
     return true;
 }
 
 static bool
 intrinsic_IsObject(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
@@ -111,76 +112,82 @@ static bool
 intrinsic_IsArray(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 1);
     RootedValue val(cx, args[0]);
     if (val.isObject()) {
         RootedObject obj(cx, &val.toObject());
         bool isArray = false;
-        if (!IsArray(cx, obj, &isArray))
+        if (!IsArray(cx, obj, &isArray)) {
             return false;
+        }
         args.rval().setBoolean(isArray);
     } else {
         args.rval().setBoolean(false);
     }
     return true;
 }
 
 static bool
 intrinsic_IsCrossRealmArrayConstructor(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     bool result = false;
-    if (!IsCrossRealmArrayConstructor(cx, args[0], &result))
+    if (!IsCrossRealmArrayConstructor(cx, args[0], &result)) {
         return false;
+    }
     args.rval().setBoolean(result);
     return true;
 }
 
 static bool
 intrinsic_ToInteger(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     double result;
-    if (!ToInteger(cx, args[0], &result))
+    if (!ToInteger(cx, args[0], &result)) {
         return false;
+    }
     args.rval().setNumber(result);
     return true;
 }
 
 static bool
 intrinsic_ToString(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     JSString* str = ToString<CanGC>(cx, args[0]);
-    if (!str)
+    if (!str) {
         return false;
+    }
     args.rval().setString(str);
     return true;
 }
 
 static bool
 intrinsic_ToSource(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     JSString* str = ValueToSource(cx, args[0]);
-    if (!str)
+    if (!str) {
         return false;
+    }
     args.rval().setString(str);
     return true;
 }
 
 static bool
 intrinsic_ToPropertyKey(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     RootedId id(cx);
-    if (!ToPropertyKey(cx, args[0], &id))
+    if (!ToPropertyKey(cx, args[0], &id)) {
         return false;
+    }
 
     args.rval().set(IdToValue(id));
     return true;
 }
 
 static bool
 intrinsic_IsCallable(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -243,25 +250,27 @@ intrinsic_GetBuiltinConstructor(JSContex
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 1);
     RootedString str(cx, args[0].toString());
     JSAtom* atom;
     if (str->isAtom()) {
         atom = &str->asAtom();
     } else {
         atom = AtomizeString(cx, str);
-        if (!atom)
+        if (!atom) {
             return false;
+        }
     }
     RootedId id(cx, AtomToId(atom));
     JSProtoKey key = JS_IdToProtoKey(cx, id);
     MOZ_ASSERT(key != JSProto_Null);
     JSObject* ctor = GlobalObject::getOrCreateConstructor(cx, key);
-    if (!ctor)
+    if (!ctor) {
         return false;
+    }
     args.rval().setObject(*ctor);
     return true;
 }
 
 static bool
 intrinsic_SubstringKernel(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
@@ -269,18 +278,19 @@ intrinsic_SubstringKernel(JSContext* cx,
     MOZ_ASSERT(args[1].isInt32());
     MOZ_ASSERT(args[2].isInt32());
 
     RootedString str(cx, args[0].toString());
     int32_t begin = args[1].toInt32();
     int32_t length = args[2].toInt32();
 
     JSString* substr = SubstringKernel(cx, str, begin, length);
-    if (!substr)
+    if (!substr) {
         return false;
+    }
 
     args.rval().setString(substr);
     return true;
 }
 
 static void
 ThrowErrorWithType(JSContext* cx, JSExnType type, const CallArgs& args)
 {
@@ -292,24 +302,26 @@ ThrowErrorWithType(JSContext* cx, JSExnT
     MOZ_ASSERT(efs->exnType == type, "error-throwing intrinsic and error number are inconsistent");
 #endif
 
     UniqueChars errorArgs[3];
     for (unsigned i = 1; i < 4 && i < args.length(); i++) {
         HandleValue val = args[i];
         if (val.isInt32() || val.isString()) {
             JSString* str = ToString<CanGC>(cx, val);
-            if (!str)
+            if (!str) {
                 return;
+            }
             errorArgs[i - 1] = StringToNewUTF8CharsZ(cx, *str);
         } else {
             errorArgs[i - 1] = DecompileValueGenerator(cx, JSDVG_SEARCH_STACK, val, nullptr);
         }
-        if (!errorArgs[i - 1])
+        if (!errorArgs[i - 1]) {
             return;
+        }
     }
 
     JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, errorNumber,
                              errorArgs[0].get(), errorArgs[1].get(), errorArgs[2].get());
 }
 
 static bool
 intrinsic_ThrowRangeError(JSContext* cx, unsigned argc, Value* vp)
@@ -358,18 +370,19 @@ intrinsic_GetErrorMessage(JSContext* cx,
     MOZ_ASSERT(args.length() == 1);
     MOZ_ASSERT(args[0].isInt32());
 
     const JSErrorFormatString* errorString = GetErrorMessage(nullptr, args[0].toInt32());
     MOZ_ASSERT(errorString);
 
     MOZ_ASSERT(errorString->argCount == 0);
     RootedString message(cx, JS_NewStringCopyZ(cx, errorString->format));
-    if (!message)
+    if (!message) {
         return false;
+    }
 
     args.rval().setString(message);
     return true;
 }
 
 static bool
 intrinsic_CreateModuleSyntaxError(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -377,18 +390,19 @@ intrinsic_CreateModuleSyntaxError(JSCont
     MOZ_ASSERT(args.length() == 4);
     MOZ_ASSERT(args[0].isObject());
     MOZ_ASSERT(args[1].isInt32());
     MOZ_ASSERT(args[2].isInt32());
     MOZ_ASSERT(args[3].isString());
 
     RootedModuleObject module(cx, &args[0].toObject().as<ModuleObject>());
     RootedString filename(cx, JS_NewStringCopyZ(cx, module->script()->filename()));
-    if (!filename)
+    if (!filename) {
         return false;
+    }
 
     RootedString message(cx, args[3].toString());
 
     RootedValue error(cx);
     if (!JS::CreateError(cx, JSEXN_SYNTAXERR, nullptr, filename, args[1].toInt32(),
                          args[2].toInt32(), nullptr, message, &error))
     {
         return false;
@@ -519,61 +533,67 @@ intrinsic_FinishBoundFunctionInit(JSCont
 static bool
 intrinsic_DecompileArg(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
 
     HandleValue value = args[1];
     JSString* str = DecompileArgument(cx, args[0].toInt32(), value);
-    if (!str)
+    if (!str) {
         return false;
+    }
     args.rval().setString(str);
     return true;
 }
 
 static bool
 intrinsic_DefineDataProperty(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     // When DefineDataProperty is called with 3 arguments, it's compiled to
     // JSOP_INITELEM in the bytecode emitter so we shouldn't get here.
     MOZ_ASSERT(args.length() == 4);
     MOZ_ASSERT(args[0].isObject());
 
     RootedObject obj(cx, &args[0].toObject());
     RootedId id(cx);
-    if (!ValueToId<CanGC>(cx, args[1], &id))
+    if (!ValueToId<CanGC>(cx, args[1], &id)) {
         return false;
+    }
     RootedValue value(cx, args[2]);
 
     unsigned attrs = 0;
     unsigned attributes = args[3].toInt32();
 
     MOZ_ASSERT(bool(attributes & ATTR_ENUMERABLE) != bool(attributes & ATTR_NONENUMERABLE),
                "_DefineDataProperty must receive either ATTR_ENUMERABLE xor ATTR_NONENUMERABLE");
-    if (attributes & ATTR_ENUMERABLE)
+    if (attributes & ATTR_ENUMERABLE) {
         attrs |= JSPROP_ENUMERATE;
+    }
 
     MOZ_ASSERT(bool(attributes & ATTR_CONFIGURABLE) != bool(attributes & ATTR_NONCONFIGURABLE),
                "_DefineDataProperty must receive either ATTR_CONFIGURABLE xor "
                "ATTR_NONCONFIGURABLE");
-    if (attributes & ATTR_NONCONFIGURABLE)
+    if (attributes & ATTR_NONCONFIGURABLE) {
         attrs |= JSPROP_PERMANENT;
+    }
 
     MOZ_ASSERT(bool(attributes & ATTR_WRITABLE) != bool(attributes & ATTR_NONWRITABLE),
                "_DefineDataProperty must receive either ATTR_WRITABLE xor ATTR_NONWRITABLE");
-    if (attributes & ATTR_NONWRITABLE)
+    if (attributes & ATTR_NONWRITABLE) {
         attrs |= JSPROP_READONLY;
+    }
 
     Rooted<PropertyDescriptor> desc(cx);
     desc.setDataDescriptor(value, attrs);
-    if (!DefineProperty(cx, obj, id, desc))
+    if (!DefineProperty(cx, obj, id, desc)) {
         return false;
+    }
 
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 intrinsic_DefineProperty(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -582,89 +602,101 @@ intrinsic_DefineProperty(JSContext* cx, 
     MOZ_ASSERT(args.length() == 6);
     MOZ_ASSERT(args[0].isObject());
     MOZ_ASSERT(args[1].isString() || args[1].isNumber() || args[1].isSymbol());
     MOZ_ASSERT(args[2].isInt32());
     MOZ_ASSERT(args[5].isBoolean());
 
     RootedObject obj(cx, &args[0].toObject());
     RootedId id(cx);
-    if (!ValueToId<CanGC>(cx, args[1], &id))
+    if (!ValueToId<CanGC>(cx, args[1], &id)) {
         return false;
+    }
 
     Rooted<PropertyDescriptor> desc(cx);
 
     unsigned attributes = args[2].toInt32();
     unsigned attrs = 0;
-    if (attributes & ATTR_ENUMERABLE)
+    if (attributes & ATTR_ENUMERABLE) {
         attrs |= JSPROP_ENUMERATE;
-    else if (!(attributes & ATTR_NONENUMERABLE))
+    } else if (!(attributes & ATTR_NONENUMERABLE)) {
         attrs |= JSPROP_IGNORE_ENUMERATE;
-
-    if (attributes & ATTR_NONCONFIGURABLE)
+    }
+
+    if (attributes & ATTR_NONCONFIGURABLE) {
         attrs |= JSPROP_PERMANENT;
-    else if (!(attributes & ATTR_CONFIGURABLE))
+    } else if (!(attributes & ATTR_CONFIGURABLE)) {
         attrs |= JSPROP_IGNORE_PERMANENT;
-
-    if (attributes & ATTR_NONWRITABLE)
+    }
+
+    if (attributes & ATTR_NONWRITABLE) {
         attrs |= JSPROP_READONLY;
-    else if (!(attributes & ATTR_WRITABLE))
+    } else if (!(attributes & ATTR_WRITABLE)) {
         attrs |= JSPROP_IGNORE_READONLY;
+    }
 
     // When args[4] is |null|, the data descriptor has a value component.
-    if ((attributes & DATA_DESCRIPTOR_KIND) && args[4].isNull())
+    if ((attributes & DATA_DESCRIPTOR_KIND) && args[4].isNull()) {
         desc.value().set(args[3]);
-    else
+    } else {
         attrs |= JSPROP_IGNORE_VALUE;
+    }
 
     if (attributes & ACCESSOR_DESCRIPTOR_KIND) {
         Value getter = args[3];
         MOZ_ASSERT(getter.isObject() || getter.isNullOrUndefined());
-        if (getter.isObject())
+        if (getter.isObject()) {
             desc.setGetterObject(&getter.toObject());
-        if (!getter.isNull())
+        }
+        if (!getter.isNull()) {
             attrs |= JSPROP_GETTER;
+        }
 
         Value setter = args[4];
         MOZ_ASSERT(setter.isObject() || setter.isNullOrUndefined());
-        if (setter.isObject())
+        if (setter.isObject()) {
             desc.setSetterObject(&setter.toObject());
-        if (!setter.isNull())
+        }
+        if (!setter.isNull()) {
             attrs |= JSPROP_SETTER;
+        }
 
         // By convention, these bits are not used on accessor descriptors.
         attrs &= ~(JSPROP_IGNORE_READONLY | JSPROP_IGNORE_VALUE);
     }
 
     desc.setAttributes(attrs);
     desc.assertValid();
 
     ObjectOpResult result;
-    if (!DefineProperty(cx, obj, id, desc, result))
+    if (!DefineProperty(cx, obj, id, desc, result)) {
         return false;
+    }
 
     bool strict = args[5].toBoolean();
-    if (strict && !result.checkStrict(cx, obj, id))
+    if (strict && !result.checkStrict(cx, obj, id)) {
         return false;
+    }
 
     args.rval().setBoolean(result.reallyOk());
     return true;
 }
 
 static bool
 intrinsic_ObjectHasPrototype(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
     RootedObject obj(cx, &args[0].toObject());
     RootedObject proto(cx, &args[1].toObject());
 
     RootedObject actualProto(cx);
-    if (!GetPrototype(cx, obj, &actualProto))
+    if (!GetPrototype(cx, obj, &actualProto)) {
         return false;
+    }
 
     args.rval().setBoolean(actualProto == proto);
     return true;
 }
 
 static bool
 intrinsic_UnsafeSetReservedSlot(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -688,45 +720,49 @@ intrinsic_UnsafeGetReservedSlot(JSContex
 
     args.rval().set(args[0].toObject().as<NativeObject>().getReservedSlot(args[1].toPrivateUint32()));
     return true;
 }
 
 static bool
 intrinsic_UnsafeGetObjectFromReservedSlot(JSContext* cx, unsigned argc, Value* vp)
 {
-    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp))
+    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp)) {
         return false;
+    }
     MOZ_ASSERT(vp->isObject());
     return true;
 }
 
 static bool
 intrinsic_UnsafeGetInt32FromReservedSlot(JSContext* cx, unsigned argc, Value* vp)
 {
-    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp))
+    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp)) {
         return false;
+    }
     MOZ_ASSERT(vp->isInt32());
     return true;
 }
 
 static bool
 intrinsic_UnsafeGetStringFromReservedSlot(JSContext* cx, unsigned argc, Value* vp)
 {
-    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp))
+    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp)) {
         return false;
+    }
     MOZ_ASSERT(vp->isString());
     return true;
 }
 
 static bool
 intrinsic_UnsafeGetBooleanFromReservedSlot(JSContext* cx, unsigned argc, Value* vp)
 {
-    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp))
+    if (!intrinsic_UnsafeGetReservedSlot(cx, argc, vp)) {
         return false;
+    }
     MOZ_ASSERT(vp->isBoolean());
     return true;
 }
 
 static bool
 intrinsic_IsPackedArray(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
@@ -738,18 +774,19 @@ intrinsic_IsPackedArray(JSContext* cx, u
 
 bool
 js::intrinsic_NewArrayIterator(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 0);
 
     JSObject* obj = NewArrayIteratorObject(cx);
-    if (!obj)
+    if (!obj) {
         return false;
+    }
 
     args.rval().setObject(*obj);
     return true;
 }
 
 static bool
 intrinsic_GetNextMapEntryForIterator(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -767,18 +804,19 @@ intrinsic_GetNextMapEntryForIterator(JSC
 
 static bool
 intrinsic_CreateMapIterationResultPair(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 0);
 
     JSObject* result = MapIteratorObject::createResultPair(cx);
-    if (!result)
+    if (!result) {
         return false;
+    }
 
     args.rval().setObject(*result);
     return true;
 }
 
 static bool
 intrinsic_GetNextSetEntryForIterator(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -796,48 +834,51 @@ intrinsic_GetNextSetEntryForIterator(JSC
 
 static bool
 intrinsic_CreateSetIterationResult(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 0);
 
     JSObject* result = SetIteratorObject::createResult(cx);
-    if (!result)
+    if (!result) {
         return false;
+    }
 
     args.rval().setObject(*result);
     return true;
 }
 
 bool
 js::intrinsic_NewStringIterator(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 0);
 
     JSObject* obj = NewStringIteratorObject(cx);
-    if (!obj)
+    if (!obj) {
         return false;
+    }
 
     args.rval().setObject(*obj);
     return true;
 }
 
 static bool
 intrinsic_SetCanonicalName(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
 
     RootedFunction fun(cx, &args[0].toObject().as<JSFunction>());
     MOZ_ASSERT(fun->isSelfHostedBuiltin());
     JSAtom* atom = AtomizeString(cx, args[1].toString());
-    if (!atom)
+    if (!atom) {
         return false;
+    }
 
     fun->setAtom(atom);
 #ifdef DEBUG
     fun->setExtendedSlot(HAS_SELFHOSTED_CANONICAL_NAME_SLOT, BooleanValue(true));
 #endif
     args.rval().setUndefined();
     return true;
 }
@@ -1066,18 +1107,19 @@ intrinsic_IsPossiblyWrappedTypedArray(JS
 static bool
 intrinsic_TypedArrayBuffer(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 1);
     MOZ_ASSERT(TypedArrayObject::is(args[0]));
 
     Rooted<TypedArrayObject*> tarray(cx, &args[0].toObject().as<TypedArrayObject>());
-    if (!TypedArrayObject::ensureHasBuffer(cx, tarray))
+    if (!TypedArrayObject::ensureHasBuffer(cx, tarray)) {
         return false;
+    }
 
     args.rval().set(TypedArrayObject::bufferValue(tarray));
     return true;
 }
 
 static bool
 intrinsic_TypedArrayByteOffset(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -1259,18 +1301,19 @@ intrinsic_SetFromTypedArrayApproach(JSCo
     MOZ_ASSERT(!target->hasDetachedBuffer(),
                "something should have defended against a target viewing a "
                "detached buffer");
 
     // As directed by |DangerouslyUnwrapTypedArray|, sigil this pointer and all
     // variables derived from it to counsel extreme caution here.
     Rooted<TypedArrayObject*> unsafeTypedArrayCrossCompartment(cx);
     unsafeTypedArrayCrossCompartment = DangerouslyUnwrapTypedArray(cx, &args[1].toObject());
-    if (!unsafeTypedArrayCrossCompartment)
+    if (!unsafeTypedArrayCrossCompartment) {
         return false;
+    }
 
     double doubleTargetOffset = args[2].toNumber();
     MOZ_ASSERT(doubleTargetOffset >= 0, "caller failed to ensure |targetOffset >= 0|");
 
     uint32_t targetLength = uint32_t(args[3].toInt32());
 
     // Handle all checks preceding the actual element-setting.  A visual skim
     // of 22.2.3.22.2 should confirm these are the only steps after steps 1-11
@@ -1506,18 +1549,19 @@ intrinsic_SetDisjointTypedElements(JSCon
                "set, so it's nonsensical to be setting them");
 
     uint32_t targetOffset = uint32_t(args[1].toInt32());
 
     // As directed by |DangerouslyUnwrapTypedArray|, sigil this pointer and all
     // variables derived from it to counsel extreme caution here.
     Rooted<TypedArrayObject*> unsafeSrcCrossCompartment(cx);
     unsafeSrcCrossCompartment = DangerouslyUnwrapTypedArray(cx, &args[2].toObject());
-    if (!unsafeSrcCrossCompartment)
+    if (!unsafeSrcCrossCompartment) {
         return false;
+    }
 
     SetDisjointTypedElements(target, targetOffset, unsafeSrcCrossCompartment);
 
     args.rval().setUndefined();
     return true;
 }
 
 static bool
@@ -1532,31 +1576,33 @@ intrinsic_SetOverlappingTypedElements(JS
                "shouldn't set elements if underlying buffer is detached");
 
     uint32_t targetOffset = uint32_t(args[1].toInt32());
 
     // As directed by |DangerouslyUnwrapTypedArray|, sigil this pointer and all
     // variables derived from it to counsel extreme caution here.
     Rooted<TypedArrayObject*> unsafeSrcCrossCompartment(cx);
     unsafeSrcCrossCompartment = DangerouslyUnwrapTypedArray(cx, &args[2].toObject());
-    if (!unsafeSrcCrossCompartment)
+    if (!unsafeSrcCrossCompartment) {
         return false;
+    }
 
     // Smarter algorithms exist to perform overlapping transfers of the sort
     // this method performs (for example, v8's self-hosted implementation).
     // But it seems likely deliberate overlapping transfers are rare enough
     // that it's not worth the trouble to implement one (and worry about its
     // safety/correctness!).  Make a copy and do a disjoint set from that.
     uint32_t count = unsafeSrcCrossCompartment->length();
     Scalar::Type unsafeSrcTypeCrossCompartment = unsafeSrcCrossCompartment->type();
     size_t sourceByteLen = count * TypedArrayElemSize(unsafeSrcTypeCrossCompartment);
 
     auto copyOfSrcData = cx->make_pod_array<uint8_t>(sourceByteLen);
-    if (!copyOfSrcData)
+    if (!copyOfSrcData) {
         return false;
+    }
 
     jit::AtomicOperations::memcpySafeWhenRacy(SharedMem<uint8_t*>::unshared(copyOfSrcData.get()),
                                               unsafeSrcCrossCompartment->viewDataEither().cast<uint8_t*>(),
                                               sourceByteLen);
 
     CopyToDisjointArray(target, targetOffset, SharedMem<void*>::unshared(copyOfSrcData.get()),
                         unsafeSrcTypeCrossCompartment, count);
 
@@ -1611,18 +1657,19 @@ intrinsic_TypedArrayBitwiseSlice(JSConte
 
     Rooted<TypedArrayObject*> source(cx, &args[0].toObject().as<TypedArrayObject>());
     MOZ_ASSERT(!source->hasDetachedBuffer());
 
     // As directed by |DangerouslyUnwrapTypedArray|, sigil this pointer and all
     // variables derived from it to counsel extreme caution here.
     Rooted<TypedArrayObject*> unsafeTypedArrayCrossCompartment(cx);
     unsafeTypedArrayCrossCompartment = DangerouslyUnwrapTypedArray(cx, &args[1].toObject());
-    if (!unsafeTypedArrayCrossCompartment)
+    if (!unsafeTypedArrayCrossCompartment) {
         return false;
+    }
     MOZ_ASSERT(!unsafeTypedArrayCrossCompartment->hasDetachedBuffer());
 
     Scalar::Type sourceType = source->type();
     if (!IsTypedArrayBitwiseSlice(sourceType, unsafeTypedArrayCrossCompartment->type())) {
         args.rval().setBoolean(false);
         return true;
     }
 
@@ -1687,25 +1734,27 @@ static bool
 intrinsic_RegExpGetSubstitution(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 5);
 
     RootedArrayObject matchResult(cx, &args[0].toObject().as<ArrayObject>());
 
     RootedLinearString string(cx, args[1].toString()->ensureLinear(cx));
-    if (!string)
+    if (!string) {
         return false;
+    }
 
     int32_t position = int32_t(args[2].toNumber());
     MOZ_ASSERT(position >= 0);
 
     RootedLinearString replacement(cx, args[3].toString()->ensureLinear(cx));
-    if (!replacement)
+    if (!replacement) {
         return false;
+    }
 
     int32_t firstDollarIndex = int32_t(args[4].toNumber());
     MOZ_ASSERT(firstDollarIndex >= 0);
 
     return RegExpGetSubstitution(cx, matchResult, string, size_t(position), replacement,
                                  size_t(firstDollarIndex), args.rval());
 }
 
@@ -1714,39 +1763,42 @@ intrinsic_StringReplaceString(JSContext*
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 3);
 
     RootedString string(cx, args[0].toString());
     RootedString pattern(cx, args[1].toString());
     RootedString replacement(cx, args[2].toString());
     JSString* result = str_replace_string_raw(cx, string, pattern, replacement);
-    if (!result)
+    if (!result) {
         return false;
+    }
 
     args.rval().setString(result);
     return true;
 }
 
 bool
 js::intrinsic_StringSplitString(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
 
     RootedString string(cx, args[0].toString());
     RootedString sep(cx, args[1].toString());
 
     RootedObjectGroup group(cx, ObjectGroupRealm::getStringSplitStringGroup(cx));
-    if (!group)
+    if (!group) {
         return false;
+    }
 
     JSObject* aobj = str_split_string(cx, group, string, sep, INT32_MAX);
-    if (!aobj)
+    if (!aobj) {
         return false;
+    }
 
     args.rval().setObject(*aobj);
     return true;
 }
 
 static bool
 intrinsic_StringSplitStringLimit(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -1757,22 +1809,24 @@ intrinsic_StringSplitStringLimit(JSConte
     RootedString sep(cx, args[1].toString());
 
     // args[2] should be already in UInt32 range, but it could be double typed,
     // because of Ion optimization.
     uint32_t limit = uint32_t(args[2].toNumber());
     MOZ_ASSERT(limit > 0, "Zero limit case is already handled in self-hosted code.");
 
     RootedObjectGroup group(cx, ObjectGroupRealm::getStringSplitStringGroup(cx));
-    if (!group)
+    if (!group) {
         return false;
+    }
 
     JSObject* aobj = str_split_string(cx, group, string, sep, limit);
-    if (!aobj)
+    if (!aobj) {
         return false;
+    }
 
     args.rval().setObject(*aobj);
     return true;
 }
 
 bool
 CallSelfHostedNonGenericMethod(JSContext* cx, const CallArgs& args)
 {
@@ -1780,45 +1834,49 @@ CallSelfHostedNonGenericMethod(JSContext
     // wrapper object, like a CrossCompartmentWrapper. The last argument is
     // the name of the self-hosted function. The other arguments are the
     // arguments to pass to this function.
 
     MOZ_ASSERT(args.length() > 0);
     RootedPropertyName name(cx, args[args.length() - 1].toString()->asAtom().asPropertyName());
 
     InvokeArgs args2(cx);
-    if (!args2.init(cx, args.length() - 1))
+    if (!args2.init(cx, args.length() - 1)) {
         return false;
-
-    for (size_t i = 0; i < args.length() - 1; i++)
+    }
+
+    for (size_t i = 0; i < args.length() - 1; i++) {
         args2[i].set(args[i]);
+    }
 
     return CallSelfHostedFunction(cx, name, args.thisv(), args2, args.rval());
 }
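
CallSelfHostedNonGenericMethod above follows a simple convention: the last argument names the self-hosted callee, and every argument before it is forwarded unchanged. A hypothetical, dependency-free sketch of that convention (the registry, Args alias, and "Join" entry are made up, not the SpiderMonkey API):

#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using Args = std::vector<std::string>;
using Fn = std::function<void(const Args&)>;

// The final argument names the target; everything before it is forwarded.
void CallByTrailingName(const std::map<std::string, Fn>& registry, const Args& args) {
    const std::string& name = args.back();
    Args forwarded(args.begin(), args.end() - 1);
    registry.at(name)(forwarded);
}

int main() {
    std::map<std::string, Fn> registry{
        {"Join", [](const Args& a) { for (const auto& s : a) std::cout << s << ' '; }}};
    CallByTrailingName(registry, {"x", "y", "Join"});  // prints "x y "
}
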
 
 #ifdef DEBUG
 bool
 js::CallSelfHostedFunction(JSContext* cx, const char* name, HandleValue thisv,
                            const AnyInvokeArgs& args, MutableHandleValue rval)
 {
     JSAtom* funAtom = Atomize(cx, name, strlen(name));
-    if (!funAtom)
+    if (!funAtom) {
         return false;
+    }
     RootedPropertyName funName(cx, funAtom->asPropertyName());
     return CallSelfHostedFunction(cx, funName, thisv, args, rval);
 }
 #endif
 
 bool
 js::CallSelfHostedFunction(JSContext* cx, HandlePropertyName name, HandleValue thisv,
                            const AnyInvokeArgs& args, MutableHandleValue rval)
 {
     RootedValue fun(cx);
-    if (!GlobalObject::getIntrinsicValue(cx, cx->global(), name, &fun))
+    if (!GlobalObject::getIntrinsicValue(cx, cx->global(), name, &fun)) {
         return false;
+    }
     MOZ_ASSERT(fun.toObject().is<JSFunction>());
 
     return Call(cx, fun, thisv, args, rval);
 }
 
 template<typename T>
 bool
 Is(HandleValue v)
@@ -1857,18 +1915,19 @@ js::ReportIncompatibleSelfHostedMethod(J
     ScriptFrameIter iter(cx);
     MOZ_ASSERT(iter.isFunctionFrame());
 
     while (!iter.done()) {
         MOZ_ASSERT(iter.callee(cx)->isSelfHostedOrIntrinsic() &&
                    !iter.callee(cx)->isBoundFunction());
         UniqueChars funNameBytes;
         const char* funName = GetFunctionNameBytes(cx, iter.callee(cx), &funNameBytes);
-        if (!funName)
+        if (!funName) {
             return false;
+        }
         if (strcmp(funName, "IsTypedArrayEnsuringArrayBuffer") != 0) {
             JS_ReportErrorNumberLatin1(cx, GetErrorMessage, nullptr, JSMSG_INCOMPATIBLE_METHOD,
                                        funName, "method", InformalValueTypeName(args.thisv()));
             return false;
         }
         ++iter;
     }
 
@@ -1888,18 +1947,19 @@ intrinsic_RuntimeDefaultLocale(JSContext
 
     const char* locale = cx->runtime()->getDefaultLocale();
     if (!locale) {
         JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_DEFAULT_LOCALE_ERROR);
         return false;
     }
 
     JSString* jslocale = NewStringCopyZ<CanGC>(cx, locale);
-    if (!jslocale)
+    if (!jslocale) {
         return false;
+    }
 
     args.rval().setString(jslocale);
     return true;
 }
 
 static bool
 intrinsic_IsRuntimeDefaultLocale(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -1916,18 +1976,19 @@ intrinsic_IsRuntimeDefaultLocale(JSConte
 
     const char* locale = cx->runtime()->getDefaultLocale();
     if (!locale) {
         JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_DEFAULT_LOCALE_ERROR);
         return false;
     }
 
     JSLinearString* str = args[0].toString()->ensureLinear(cx);
-    if (!str)
+    if (!str) {
         return false;
+    }
 
     bool equals;
     if (str->length() == strlen(locale)) {
         JS::AutoCheckCannotGC nogc;
         const Latin1Char* latin1Locale = reinterpret_cast<const Latin1Char*>(locale);
         equals = str->hasLatin1Chars()
                  ? EqualChars(str->latin1Chars(nogc), latin1Locale, str->length())
                  : EqualChars(str->twoByteChars(nogc), latin1Locale, str->length());
@@ -1944,35 +2005,37 @@ using GetOrCreateIntlConstructor = JSFun
 template <GetOrCreateIntlConstructor getOrCreateIntlConstructor>
 static bool
 intrinsic_GetBuiltinIntlConstructor(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 0);
 
     JSFunction* constructor = getOrCreateIntlConstructor(cx, cx->global());
-    if (!constructor)
+    if (!constructor) {
         return false;
+    }
 
     args.rval().setObject(*constructor);
     return true;
 }
 
 static bool
 intrinsic_AddContentTelemetry(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
 
     int id = args[0].toInt32();
     MOZ_ASSERT(id < JS_TELEMETRY_END);
     MOZ_ASSERT(id >= 0);
 
-    if (!cx->realm()->isProbablySystemCode())
+    if (!cx->realm()->isProbablySystemCode()) {
         cx->runtime()->addTelemetry(id, args[1].toInt32());
+    }
 
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 intrinsic_WarnDeprecatedStringMethod(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -1982,22 +2045,24 @@ intrinsic_WarnDeprecatedStringMethod(JSC
     MOZ_ASSERT(args[1].isString());
 
     uint32_t id = uint32_t(args[0].toInt32());
     MOZ_ASSERT(id < STRING_GENERICS_METHODS_LIMIT);
 
     uint32_t mask = (1 << id);
     if (!(cx->realm()->warnedAboutStringGenericsMethods & mask)) {
         JSFlatString* name = args[1].toString()->ensureFlat(cx);
-        if (!name)
+        if (!name) {
             return false;
+        }
 
         AutoStableStringChars stableChars(cx);
-        if (!stableChars.initTwoByte(cx, name))
+        if (!stableChars.initTwoByte(cx, name)) {
             return false;
+        }
         const char16_t* nameChars = stableChars.twoByteRange().begin().get();
 
         if (!JS_ReportErrorFlagsAndNumberUC(cx, JSREPORT_WARNING, GetErrorMessage, nullptr,
                                             JSMSG_DEPRECATED_STRING_METHOD, nameChars, nameChars))
         {
             return false;
         }
         cx->realm()->warnedAboutStringGenericsMethods |= mask;
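
The warning above is reported at most once per method id per realm: a bit per id records whether it has already fired, and the bit is only set once the report succeeds. A self-contained sketch of that warn-once pattern, using stderr instead of the JS error reporter (names are illustrative):

#include <cstdint>
#include <cstdio>

// One bit per deprecation id; a set bit means "already warned".
static uint32_t gWarnedMask = 0;

bool WarnOnce(uint32_t id, const char* what) {
    uint32_t mask = 1u << id;  // assumes id < 32
    if (gWarnedMask & mask) {
        return true;           // already reported
    }
    if (std::fprintf(stderr, "deprecated: %s\n", what) < 0) {
        return false;          // report failed; leave the bit clear
    }
    gWarnedMask |= mask;       // mark only after a successful report
    return true;
}
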
@@ -2009,44 +2074,48 @@ intrinsic_WarnDeprecatedStringMethod(JSC
 
 static bool
 intrinsic_ThrowArgTypeNotObject(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
     MOZ_ASSERT(args[0].isNumber());
     MOZ_ASSERT(!args[1].isObject());
-    if (args[0].toNumber() == NOT_OBJECT_KIND_DESCRIPTOR)
+    if (args[0].toNumber() == NOT_OBJECT_KIND_DESCRIPTOR) {
         ReportNotObjectWithName(cx, "descriptor", args[1]);
-    else
+    } else {
         MOZ_CRASH("unexpected kind");
+    }
 
     return false;
 }
 
 static bool
 intrinsic_ConstructFunction(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 3);
     MOZ_ASSERT(IsConstructor(args[0]));
     MOZ_ASSERT(IsConstructor(args[1]));
     MOZ_ASSERT(args[2].toObject().is<ArrayObject>());
 
     RootedArrayObject argsList(cx, &args[2].toObject().as<ArrayObject>());
     uint32_t len = argsList->length();
     ConstructArgs constructArgs(cx);
-    if (!constructArgs.init(cx, len))
+    if (!constructArgs.init(cx, len)) {
         return false;
-    for (uint32_t index = 0; index < len; index++)
+    }
+    for (uint32_t index = 0; index < len; index++) {
         constructArgs[index].set(argsList->getDenseElement(index));
+    }
 
     RootedObject res(cx);
-    if (!Construct(cx, args[0], constructArgs, args[1], &res))
+    if (!Construct(cx, args[0], constructArgs, args[1], &res)) {
         return false;
+    }
 
     args.rval().setObject(*res);
     return true;
 }
 
 
 static bool
 intrinsic_IsConstructing(JSContext* cx, unsigned argc, Value* vp)
@@ -2079,18 +2148,19 @@ intrinsic_ConstructorForTypedArray(JSCon
     // for that type is initialized on the compartment's global, this is not
     // the case. When we construct a typed array given a cross-compartment
     // ArrayBuffer, we put the constructed TypedArray in the same compartment
     // as the ArrayBuffer. Since we use the prototype from the initial
     // compartment, and never call the constructor in the ArrayBuffer's
     // compartment from script, we are not guaranteed to have initialized
     // the constructor.
     JSObject* ctor = GlobalObject::getOrCreateConstructor(cx, protoKey);
-    if (!ctor)
+    if (!ctor) {
         return false;
+    }
 
     args.rval().setObject(*ctor);
     return true;
 }
 
 static bool
 intrinsic_NameForTypedArray(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -2120,18 +2190,19 @@ intrinsic_HostResolveImportedModule(JSCo
     if (!moduleResolveHook) {
         JS_ReportErrorASCII(cx, "Module resolve hook not set");
         return false;
     }
 
     RootedScript script(cx, module->script());
     RootedScript result(cx);
     result = moduleResolveHook(cx, script, specifier);
-    if (!result)
+    if (!result) {
         return false;
+    }
 
     if (!result->module()) {
         JS_ReportErrorASCII(cx, "Module resolve hook did not return a module script");
         return false;
     }
 
     args.rval().setObject(*result->module());
     return true;
@@ -2141,18 +2212,19 @@ static bool
 intrinsic_CreateImportBinding(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 4);
     RootedModuleEnvironmentObject environment(cx, &args[0].toObject().as<ModuleEnvironmentObject>());
     RootedAtom importedName(cx, &args[1].toString()->asAtom());
     RootedModuleObject module(cx, &args[2].toObject().as<ModuleObject>());
     RootedAtom localName(cx, &args[3].toString()->asAtom());
-    if (!environment->createImportBinding(cx, importedName, module, localName))
+    if (!environment->createImportBinding(cx, importedName, module, localName)) {
         return false;
+    }
 
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 intrinsic_CreateNamespaceBinding(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -2192,34 +2264,36 @@ intrinsic_ExecuteModule(JSContext* cx, u
 static bool
 intrinsic_NewModuleNamespace(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
     RootedModuleObject module(cx, &args[0].toObject().as<ModuleObject>());
     RootedObject exports(cx, &args[1].toObject());
     JSObject* namespace_ = ModuleObject::createNamespace(cx, module, exports);
-    if (!namespace_)
+    if (!namespace_) {
         return false;
+    }
 
     args.rval().setObject(*namespace_);
     return true;
 }
 
 static bool
 intrinsic_AddModuleNamespaceBinding(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 4);
     RootedModuleNamespaceObject namespace_(cx, &args[0].toObject().as<ModuleNamespaceObject>());
     RootedAtom exportedName(cx, &args[1].toString()->asAtom());
     RootedModuleObject targetModule(cx, &args[2].toObject().as<ModuleObject>());
     RootedAtom localName(cx, &args[3].toString()->asAtom());
-    if (!namespace_->addBinding(cx, exportedName, targetModule, localName))
+    if (!namespace_->addBinding(cx, exportedName, targetModule, localName)) {
         return false;
+    }
 
     args.rval().setUndefined();
     return true;
 }
 
 static bool
 intrinsic_ModuleNamespaceExports(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -2233,18 +2307,19 @@ intrinsic_ModuleNamespaceExports(JSConte
 static bool
 intrinsic_PromiseResolve(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
     MOZ_ASSERT(args.length() == 2);
 
     RootedObject constructor(cx, &args[0].toObject());
     JSObject* promise = js::PromiseResolve(cx, constructor, args[1]);
-    if (!promise)
+    if (!promise) {
         return false;
+    }
 
     args.rval().setObject(*promise);
     return true;
 }
 
 static bool
 intrinsic_CopyDataPropertiesOrGetOwnKeys(JSContext* cx, unsigned argc, Value* vp)
 {
@@ -2740,52 +2815,56 @@ JSRuntime::createSelfHostingGlobal(JSCon
     MOZ_ASSERT(!cx->isExceptionPending());
     MOZ_ASSERT(!cx->realm());
 
     JS::RealmOptions options;
     options.creationOptions().setNewCompartmentAndZone();
     options.behaviors().setDiscardSource(true);
 
     Realm* realm = NewRealm(cx, nullptr, options);
-    if (!realm)
+    if (!realm) {
         return nullptr;
+    }
 
     static const ClassOps shgClassOps = {
         nullptr, nullptr, nullptr, nullptr,
         nullptr, nullptr, nullptr, nullptr,
         nullptr, nullptr,
         JS_GlobalObjectTraceHook
     };
 
     static const Class shgClass = {
         "self-hosting-global", JSCLASS_GLOBAL_FLAGS,
         &shgClassOps
     };
 
     AutoRealmUnchecked ar(cx, realm);
     Rooted<GlobalObject*> shg(cx, GlobalObject::createInternal(cx, &shgClass));
-    if (!shg)
+    if (!shg) {
         return nullptr;
+    }
 
     cx->runtime()->selfHostingGlobal_ = shg;
     realm->setIsSelfHostingRealm();
 
-    if (!GlobalObject::initSelfHostingBuiltins(cx, shg, intrinsic_functions))
+    if (!GlobalObject::initSelfHostingBuiltins(cx, shg, intrinsic_functions)) {
         return nullptr;
+    }
 
     JS_FireOnNewGlobalObject(cx, shg);
 
     return shg;
 }
 
 static void
 MaybePrintAndClearPendingException(JSContext* cx, FILE* file)
 {
-    if (!cx->isExceptionPending())
+    if (!cx->isExceptionPending()) {
         return;
+    }
 
     AutoClearPendingException acpe(cx);
 
     RootedValue exn(cx);
     if (!cx->getPendingException(&exn)) {
         fprintf(file, "error getting pending exception\n");
         return;
     }
@@ -2853,18 +2932,19 @@ VerifyGlobalNames(JSContext* cx, Handle<
                     break;
                 }
             }
         }
     }
 
     if (nameMissing) {
         UniqueChars bytes = IdToPrintableUTF8(cx, id, IdToPrintableBehavior::IdIsPropertyKey);
-        if (!bytes)
+        if (!bytes) {
             return false;
+        }
 
         JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_NO_SUCH_SELF_HOSTED_PROP,
                                  bytes.get());
         return false;
     }
 #endif // DEBUG
 
     return true;
@@ -2882,18 +2962,19 @@ JSRuntime::initSelfHosting(JSContext* cx
 
     /*
      * Self hosted state can be accessed from threads for other runtimes
      * parented to this one, so cannot include state in the nursery.
      */
     JS::AutoDisableGenerationalGC disable(cx);
 
     Rooted<GlobalObject*> shg(cx, JSRuntime::createSelfHostingGlobal(cx));
-    if (!shg)
+    if (!shg) {
         return false;
+    }
 
     JSAutoRealm ar(cx, shg);
 
     /*
      * Set a temporary error reporter printing to stderr because it is too
      * early in the startup process for any other reporter to be registered
      * and we don't want errors in self-hosted code to be silently swallowed.
      *
@@ -2913,36 +2994,39 @@ JSRuntime::initSelfHosting(JSContext* cx
     uint32_t compressedLen = GetCompressedSize();
     auto src = cx->make_pod_array<char>(srcLen);
     if (!src || !DecompressString(compressed, compressedLen,
                                   reinterpret_cast<unsigned char*>(src.get()), srcLen))
     {
         return false;
     }
 
-    if (!Evaluate(cx, options, src.get(), srcLen, &rv))
+    if (!Evaluate(cx, options, src.get(), srcLen, &rv)) {
         return false;
-
-    if (!VerifyGlobalNames(cx, shg))
+    }
+
+    if (!VerifyGlobalNames(cx, shg)) {
         return false;
+    }
 
     return true;
 }
 
 void
 JSRuntime::finishSelfHosting()
 {
     selfHostingGlobal_ = nullptr;
 }
 
 void
 JSRuntime::traceSelfHostingGlobal(JSTracer* trc)
 {
-    if (selfHostingGlobal_ && !parentRuntime)
+    if (selfHostingGlobal_ && !parentRuntime) {
         TraceRoot(trc, const_cast<NativeObject**>(&selfHostingGlobal_.ref()), "self-hosting global");
+    }
 }
 
 bool
 JSRuntime::isSelfHostingZone(const JS::Zone* zone) const
 {
     return selfHostingGlobal_ && selfHostingGlobal_->zoneFromAnyThread() == zone;
 }
 
@@ -2982,50 +3066,56 @@ GetUnclonedValue(JSContext* cx, HandleNa
 static bool
 CloneProperties(JSContext* cx, HandleNativeObject selfHostedObject, HandleObject clone)
 {
     AutoIdVector ids(cx);
     Vector<uint8_t, 16> attrs(cx);
 
     for (size_t i = 0; i < selfHostedObject->getDenseInitializedLength(); i++) {
         if (!selfHostedObject->getDenseElement(i).isMagic(JS_ELEMENTS_HOLE)) {
-            if (!ids.append(INT_TO_JSID(i)))
+            if (!ids.append(INT_TO_JSID(i))) {
                 return false;
-            if (!attrs.append(JSPROP_ENUMERATE))
+            }
+            if (!attrs.append(JSPROP_ENUMERATE)) {
                 return false;
+            }
         }
     }
 
     Rooted<ShapeVector> shapes(cx, ShapeVector(cx));
     for (Shape::Range<NoGC> range(selfHostedObject->lastProperty()); !range.empty(); range.popFront()) {
         Shape& shape = range.front();
-        if (shape.enumerable() && !shapes.append(&shape))
+        if (shape.enumerable() && !shapes.append(&shape)) {
             return false;
+        }
     }
 
     // Now our shapes are in last-to-first order, so....
     Reverse(shapes.begin(), shapes.end());
     for (size_t i = 0; i < shapes.length(); ++i) {
         MOZ_ASSERT(!shapes[i]->isAccessorShape(),
                    "Can't handle cloning accessors here yet.");
-        if (!ids.append(shapes[i]->propid()))
+        if (!ids.append(shapes[i]->propid())) {
             return false;
+        }
         uint8_t shapeAttrs =
             shapes[i]->attributes() & (JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_READONLY);
-        if (!attrs.append(shapeAttrs))
+        if (!attrs.append(shapeAttrs)) {
             return false;
+        }
     }
 
     RootedId id(cx);
     RootedValue val(cx);
     RootedValue selfHostedValue(cx);
     for (uint32_t i = 0; i < ids.length(); i++) {
         id = ids[i];
-        if (!GetUnclonedValue(cx, selfHostedObject, id, &selfHostedValue))
+        if (!GetUnclonedValue(cx, selfHostedObject, id, &selfHostedValue)) {
             return false;
+        }
         if (!CloneValue(cx, selfHostedValue, &val) ||
             !JS_DefinePropertyById(cx, clone, id, val, attrs[i]))
         {
             return false;
         }
     }
 
     return true;
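
CloneProperties walks the shape lineage, which links from the most recently added property back to the oldest, so the collected shapes arrive last-to-first and are reversed before the clone's properties are defined. A generic sketch of that collect-and-reverse pattern on a plain singly linked chain (the Node type is hypothetical):

#include <algorithm>
#include <vector>

struct Node {
    int value;
    Node* prev;  // points toward older entries, like a shape's parent link
};

// Collect a newest-first chain into insertion (oldest-first) order.
std::vector<int> CollectInInsertionOrder(Node* newest) {
    std::vector<int> values;
    for (Node* n = newest; n; n = n->prev) {
        values.push_back(n->value);              // last-to-first here
    }
    std::reverse(values.begin(), values.end());  // restore first-to-last
    return values;
}
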
@@ -3033,27 +3123,30 @@ CloneProperties(JSContext* cx, HandleNat
 
 static JSString*
 CloneString(JSContext* cx, JSFlatString* selfHostedString)
 {
     size_t len = selfHostedString->length();
     {
         JS::AutoCheckCannotGC nogc;
         JSString* clone;
-        if (selfHostedString->hasLatin1Chars())
+        if (selfHostedString->hasLatin1Chars()) {
             clone = NewStringCopyN<NoGC>(cx, selfHostedString->latin1Chars(nogc), len);
-        else
+        } else {
             clone = NewStringCopyNDontDeflate<NoGC>(cx, selfHostedString->twoByteChars(nogc), len);
-        if (clone)
+        }
+        if (clone) {
             return clone;
+        }
     }
 
     AutoStableStringChars chars(cx);
-    if (!chars.init(cx, selfHostedString))
+    if (!chars.init(cx, selfHostedString)) {
         return nullptr;
+    }
 
     return chars.isLatin1()
            ? NewStringCopyN<CanGC>(cx, chars.latin1Range().begin().get(), len)
            : NewStringCopyNDontDeflate<CanGC>(cx, chars.twoByteRange().begin().get(), len);
 }
 
 static JSObject*
 CloneObject(JSContext* cx, HandleNativeObject selfHostedObject)
@@ -3061,20 +3154,22 @@ CloneObject(JSContext* cx, HandleNativeO
 #ifdef DEBUG
     // Object hash identities are owned by the hashed object, which may be on a
     // different thread than the clone target. In theory, these objects are all
     // tenured and will not be compacted; however, we simply avoid the issue
     // altogether by skipping the cycle-detection when off thread.
     mozilla::Maybe<AutoCycleDetector> detect;
     if (js::CurrentThreadCanAccessZone(selfHostedObject->zoneFromAnyThread())) {
         detect.emplace(cx, selfHostedObject);
-        if (!detect->init())
+        if (!detect->init()) {
             return nullptr;
-        if (detect->foundCycle())
+        }
+        if (detect->foundCycle()) {
             MOZ_CRASH("SelfHosted cloning cannot handle cyclic object graphs.");
+        }
     }
 #endif
 
     RootedObject clone(cx);
     if (selfHostedObject->is<JSFunction>()) {
         RootedFunction selfHostedFunction(cx, &selfHostedObject->as<JSFunction>());
         if (selfHostedFunction->isInterpreted()) {
             bool hasName = selfHostedFunction->explicitName() != nullptr;
@@ -3109,57 +3204,64 @@ CloneObject(JSContext* cx, HandleNativeO
     } else if (selfHostedObject->is<DateObject>()) {
         clone = JS::NewDateObject(cx, selfHostedObject->as<DateObject>().clippedTime());
     } else if (selfHostedObject->is<BooleanObject>()) {
         clone = BooleanObject::create(cx, selfHostedObject->as<BooleanObject>().unbox());
     } else if (selfHostedObject->is<NumberObject>()) {
         clone = NumberObject::create(cx, selfHostedObject->as<NumberObject>().unbox());
     } else if (selfHostedObject->is<StringObject>()) {
         JSString* selfHostedString = selfHostedObject->as<StringObject>().unbox();
-        if (!selfHostedString->isFlat())
+        if (!selfHostedString->isFlat()) {
             MOZ_CRASH();
+        }
         RootedString str(cx, CloneString(cx, &selfHostedString->asFlat()));
-        if (!str)
+        if (!str) {
             return nullptr;
+        }
         clone = StringObject::create(cx, str);
     } else if (selfHostedObject->is<ArrayObject>()) {
         clone = NewDenseEmptyArray(cx, nullptr, TenuredObject);
     } else {
         MOZ_ASSERT(selfHostedObject->isNative());
         clone = NewObjectWithGivenProto(cx, selfHostedObject->getClass(), nullptr,
                                         selfHostedObject->asTenured().getAllocKind(),
                                         SingletonObject);
     }
-    if (!clone)
+    if (!clone) {
         return nullptr;
-
-    if (!CloneProperties(cx, selfHostedObject, clone))
+    }
+
+    if (!CloneProperties(cx, selfHostedObject, clone)) {
         return nullptr;
+    }
     return clone;
 }
 
 static bool
 CloneValue(JSContext* cx, HandleValue selfHostedValue, MutableHandleValue vp)
 {
     if (selfHostedValue.isObject()) {
         RootedNativeObject selfHostedObject(cx, &selfHostedValue.toObject().as<NativeObject>());
         JSObject* clone = CloneObject(cx, selfHostedObject);
-        if (!clone)
+        if (!clone) {
             return false;
+        }
         vp.setObject(*clone);
     } else if (selfHostedValue.isBoolean() || selfHostedValue.isNumber() || selfHostedValue.isNullOrUndefined()) {
         // Nothing to do here: these are represented inline in the value.
         vp.set(selfHostedValue);
     } else if (selfHostedValue.isString()) {
-        if (!selfHostedValue.toString()->isFlat())
+        if (!selfHostedValue.toString()->isFlat()) {
             MOZ_CRASH();
+        }
         JSFlatString* selfHostedString = &selfHostedValue.toString()->asFlat();
         JSString* clone = CloneString(cx, selfHostedString);
-        if (!clone)
+        if (!clone) {
             return false;
+        }
         vp.setString(clone);
     } else if (selfHostedValue.isSymbol()) {
         // Well-known symbols are shared.
         mozilla::DebugOnly<JS::Symbol*> sym = selfHostedValue.toSymbol();
         MOZ_ASSERT(sym->isWellKnownSymbol());
         MOZ_ASSERT(cx->wellKnownSymbols().get(sym->code()) == sym);
         vp.set(selfHostedValue);
     } else {
@@ -3173,62 +3275,67 @@ JSRuntime::createLazySelfHostedFunctionC
                                              HandleAtom name, unsigned nargs,
                                              HandleObject proto, NewObjectKind newKind,
                                              MutableHandleFunction fun)
 {
     MOZ_ASSERT(newKind != GenericObject);
 
     RootedAtom funName(cx, name);
     JSFunction* selfHostedFun = getUnclonedSelfHostedFunction(cx, selfHostedName);
-    if (!selfHostedFun)
+    if (!selfHostedFun) {
         return false;
+    }
 
     if (!selfHostedFun->isClassConstructor() && !selfHostedFun->hasGuessedAtom() &&
         selfHostedFun->explicitName() != selfHostedName)
     {
         MOZ_ASSERT(selfHostedFun->getExtendedSlot(HAS_SELFHOSTED_CANONICAL_NAME_SLOT).toBoolean());
         funName = selfHostedFun->explicitName();
     }
 
     fun.set(NewScriptedFunction(cx, nargs, JSFunction::INTERPRETED_LAZY,
                                 funName, proto, gc::AllocKind::FUNCTION_EXTENDED, newKind));
-    if (!fun)
+    if (!fun) {
         return false;
+    }
     fun->setIsSelfHostedBuiltin();
     fun->setExtendedSlot(LAZY_FUNCTION_NAME_SLOT, StringValue(selfHostedName));
     return true;
 }
 
 bool
 JSRuntime::cloneSelfHostedFunctionScript(JSContext* cx, HandlePropertyName name,
                                          HandleFunction targetFun)
 {
     RootedFunction sourceFun(cx, getUnclonedSelfHostedFunction(cx, name));
-    if (!sourceFun)
+    if (!sourceFun) {
         return false;
+    }
     // JSFunction::generatorKind can't handle lazy self-hosted functions, so we make sure there
     // aren't any.
     MOZ_ASSERT(!sourceFun->isGenerator() && !sourceFun->isAsync());
     MOZ_ASSERT(targetFun->isExtended());
     MOZ_ASSERT(targetFun->isInterpretedLazy());
     MOZ_ASSERT(targetFun->isSelfHostedBuiltin());
 
     RootedScript sourceScript(cx, JSFunction::getOrCreateScript(cx, sourceFun));
-    if (!sourceScript)
+    if (!sourceScript) {
         return false;
+    }
 
     // Assert that there are no intervening scopes between the global scope
     // and the self-hosted script. Toplevel lexicals are explicitly forbidden
     // by the parser when parsing self-hosted code. The fact they have the
     // global lexical scope on the scope chain is for uniformity and engine
     // invariants.
     MOZ_ASSERT(sourceScript->outermostScope()->enclosing()->kind() == ScopeKind::Global);
     RootedScope emptyGlobalScope(cx, &cx->global()->emptyGlobalScope());
-    if (!CloneScriptIntoFunction(cx, emptyGlobalScope, targetFun, sourceScript))
+    if (!CloneScriptIntoFunction(cx, emptyGlobalScope, targetFun, sourceScript)) {
         return false;
+    }
     MOZ_ASSERT(!targetFun->isInterpretedLazy());
 
     MOZ_ASSERT(sourceFun->nargs() == targetFun->nargs());
     MOZ_ASSERT(sourceScript->hasRest() == targetFun->nonLazyScript()->hasRest());
 
     // The target function might have been relazified after its flags changed.
     targetFun->setFlags(targetFun->flags() | sourceFun->flags());
     return true;
@@ -3241,28 +3348,30 @@ JSRuntime::getUnclonedSelfHostedValue(JS
     RootedId id(cx, NameToId(name));
     return GetUnclonedValue(cx, HandleNativeObject::fromMarkedLocation(&selfHostingGlobal_.ref()), id, vp);
 }
 
 JSFunction*
 JSRuntime::getUnclonedSelfHostedFunction(JSContext* cx, HandlePropertyName name)
 {
     RootedValue selfHostedValue(cx);
-    if (!getUnclonedSelfHostedValue(cx, name, &selfHostedValue))
+    if (!getUnclonedSelfHostedValue(cx, name, &selfHostedValue)) {
         return nullptr;
+    }
 
     return &selfHostedValue.toObject().as<JSFunction>();
 }
 
 bool
 JSRuntime::cloneSelfHostedValue(JSContext* cx, HandlePropertyName name, MutableHandleValue vp)
 {
     RootedValue selfHostedValue(cx);
-    if (!getUnclonedSelfHostedValue(cx, name, &selfHostedValue))
+    if (!getUnclonedSelfHostedValue(cx, name, &selfHostedValue)) {
         return false;
+    }
 
     /*
      * We don't clone if we're operating in the self-hosting global, as that
      * means we're currently executing the self-hosting script while
      * initializing the runtime (see JSRuntime::initSelfHosting).
      */
     if (cx->global() == selfHostingGlobal_) {
         vp.set(selfHostedValue);
@@ -3287,18 +3396,19 @@ js::IsSelfHostedFunctionWithName(JSFunct
 {
     return fun->isSelfHostedBuiltin() && GetSelfHostedFunctionName(fun) == name;
 }
 
 JSAtom*
 js::GetSelfHostedFunctionName(JSFunction* fun)
 {
     Value name = fun->getExtendedSlot(LAZY_FUNCTION_NAME_SLOT);
-    if (!name.isString())
+    if (!name.isString()) {
         return nullptr;
+    }
     return &name.toString()->asAtom();
 }
 
 static_assert(JSString::MAX_LENGTH <= INT32_MAX,
               "StringIteratorNext in builtin/String.js assumes the stored index "
               "into the string is an Int32Value");
 
 static_assert(JSString::MAX_LENGTH == MAX_STRING_LENGTH,
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -46,39 +46,42 @@ MOZ_ALWAYS_INLINE Shape*
 Shape::search(JSContext* cx, jsid id)
 {
     return search(cx, this, id);
 }
 
 MOZ_ALWAYS_INLINE bool
 Shape::maybeCreateTableForLookup(JSContext* cx)
 {
-    if (hasTable())
+    if (hasTable()) {
         return true;
+    }
 
     if (!inDictionary() && numLinearSearches() < LINEAR_SEARCHES_MAX) {
         incrementNumLinearSearches();
         return true;
     }
 
-    if (!isBigEnoughForAShapeTable())
+    if (!isBigEnoughForAShapeTable()) {
         return true;
+    }
 
     return Shape::hashify(cx, this);
 }
 
 template<MaybeAdding Adding>
 /* static */ inline bool
 Shape::search(JSContext* cx, Shape* start, jsid id, const AutoKeepShapeTables& keep,
               Shape** pshape, ShapeTable** ptable, ShapeTable::Entry** pentry)
 {
     if (start->inDictionary()) {
         ShapeTable* table = start->ensureTableForDictionary(cx, keep);
-        if (!table)
+        if (!table) {
             return false;
+        }
         *ptable = table;
         *pentry = &table->search<Adding>(id, keep);
         *pshape = (*pentry)->shape();
         return true;
     }
 
     *ptable = nullptr;
     *pentry = nullptr;
@@ -110,114 +113,126 @@ Shape::new_(JSContext* cx, Handle<StackS
     Shape* shape = other.isAccessorShape()
                    ? js::Allocate<AccessorShape>(cx)
                    : js::Allocate<Shape>(cx);
     if (!shape) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
 
-    if (other.isAccessorShape())
+    if (other.isAccessorShape()) {
         new (shape) AccessorShape(other, nfixed);
-    else
+    } else {
         new (shape) Shape(other, nfixed);
+    }
 
     return shape;
 }
 
 inline void
 Shape::updateBaseShapeAfterMovingGC()
 {
     BaseShape* base = base_;
-    if (IsForwarded(base))
+    if (IsForwarded(base)) {
         base_.unsafeSet(Forwarded(base));
+    }
 }
 
 static inline void
 GetterSetterWriteBarrierPost(AccessorShape* shape)
 {
     // If the shape contains any nursery pointers then add it to a vector on the
     // zone that we fixup on minor GC. Prevent this vector growing too large
     // since we don't tolerate OOM here.
 
     static const size_t MaxShapeVectorLength = 5000;
 
     MOZ_ASSERT(shape);
 
     gc::StoreBuffer* sb = nullptr;
-    if (shape->hasGetterObject())
+    if (shape->hasGetterObject()) {
         sb = shape->getterObject()->storeBuffer();
-    if (!sb && shape->hasSetterObject())
+    }
+    if (!sb && shape->hasSetterObject()) {
         sb = shape->setterObject()->storeBuffer();
-    if (!sb)
+    }
+    if (!sb) {
         return;
+    }
 
     auto& nurseryShapes = shape->zone()->nurseryShapes();
 
     {
         AutoEnterOOMUnsafeRegion oomUnsafe;
-        if (!nurseryShapes.append(shape))
+        if (!nurseryShapes.append(shape)) {
             oomUnsafe.crash("GetterSetterWriteBarrierPost");
+        }
     }
 
-    if (nurseryShapes.length() == 1)
+    if (nurseryShapes.length() == 1) {
         sb->putGeneric(NurseryShapesRef(shape->zone()));
-    else if (nurseryShapes.length() == MaxShapeVectorLength)
+    } else if (nurseryShapes.length() == MaxShapeVectorLength) {
         sb->setAboutToOverflow(JS::gcreason::FULL_SHAPE_BUFFER);
+    }
 }
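
GetterSetterWriteBarrierPost above queues shapes whose getter or setter lives in the nursery onto a per-zone vector for fixup at the next minor GC, registering that buffer when the first entry is appended and flagging an early collection once the vector reaches a cap. A rough, generic sketch of that bookkeeping (Zone, the flag, and the constant here are stand-ins, not the real store-buffer API):

#include <cstddef>
#include <vector>

struct Shape;  // opaque for this sketch

struct Zone {
    std::vector<Shape*> nurseryShapes;  // fixed up during the next minor GC
    bool minorGcRequested = false;
};

constexpr size_t MaxShapeVectorLength = 5000;

void RecordShapeWithNurseryPointers(Zone& zone, Shape* shape) {
    zone.nurseryShapes.push_back(shape);
    if (zone.nurseryShapes.size() == 1) {
        // First entry: the real engine registers the zone's buffer with the
        // store buffer here so the minor GC knows to visit it.
    } else if (zone.nurseryShapes.size() == MaxShapeVectorLength) {
        zone.minorGcRequested = true;  // cap reached: trigger a minor GC soon
    }
}
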
 
 inline
 AccessorShape::AccessorShape(const StackShape& other, uint32_t nfixed)
   : Shape(other, nfixed),
     rawGetter(other.rawGetter),
     rawSetter(other.rawSetter)
 {
     MOZ_ASSERT(getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE);
     GetterSetterWriteBarrierPost(this);
 }
 
 inline void
 Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, GCPtrShape* dictp)
 {
-    if (child.isAccessorShape())
+    if (child.isAccessorShape()) {
         new (this) AccessorShape(child, nfixed);
-    else
+    } else {
         new (this) Shape(child, nfixed);
+    }
     this->immutableFlags |= IN_DICTIONARY;
 
     this->listp = nullptr;
-    if (dictp)
+    if (dictp) {
         insertIntoDictionary(dictp);
+    }
 }
 
 template<class ObjectSubclass>
 /* static */ inline bool
 EmptyShape::ensureInitialCustomShape(JSContext* cx, Handle<ObjectSubclass*> obj)
 {
     static_assert(mozilla::IsBaseOf<JSObject, ObjectSubclass>::value,
                   "ObjectSubclass must be a subclass of JSObject");
 
     // If the provided object has a non-empty shape, it was given the cached
     // initial shape when created: nothing to do.
-    if (!obj->empty())
+    if (!obj->empty()) {
         return true;
+    }
 
     // If no initial shape was assigned, do so.
     RootedShape shape(cx, ObjectSubclass::assignInitialShape(cx, obj));
-    if (!shape)
+    if (!shape) {
         return false;
+    }
     MOZ_ASSERT(!obj->empty());
 
     // If the object is a standard prototype -- |RegExp.prototype|,
     // |String.prototype|, |RangeError.prototype|, &c. -- GlobalObject.cpp's
     // |CreateBlankProto| marked it as a delegate.  These are the only objects
     // of this class that won't use the standard prototype, and there's no
     // reason to pollute the initial shape cache with entries for them.
-    if (obj->isDelegate())
+    if (obj->isDelegate()) {
         return true;
+    }
 
     // Cache the initial shape for non-prototype objects, however, so that
     // future instances will begin life with that shape.
     RootedObject proto(cx, obj->staticPrototype());
     EmptyShape::insertInitialShape(cx, shape, proto);
     return true;
 }
 
@@ -228,29 +243,31 @@ AutoRooterGetterSetter::Inner::Inner(JSC
     pgetter(pgetter_), psetter(psetter_)
 {}
 
 inline
 AutoRooterGetterSetter::AutoRooterGetterSetter(JSContext* cx, uint8_t attrs,
                                                GetterOp* pgetter, SetterOp* psetter
                                                MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
 {
-    if (attrs & (JSPROP_GETTER | JSPROP_SETTER))
+    if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) {
         inner.emplace(cx, attrs, pgetter, psetter);
+    }
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
 }
 
 static inline uint8_t
 GetPropertyAttributes(JSObject* obj, PropertyResult prop)
 {
     MOZ_ASSERT(obj->isNative());
 
     if (prop.isDenseOrTypedArrayElement()) {
-        if (obj->is<TypedArrayObject>())
+        if (obj->is<TypedArrayObject>()) {
             return JSPROP_ENUMERATE | JSPROP_PERMANENT;
+        }
         return obj->as<NativeObject>().getElementsHeader()->elementAttributes();
     }
 
     return prop.shape()->attributes();
 }
 
 /*
  * Double hashing needs the second hash code to be relatively prime to table
@@ -276,74 +293,82 @@ ShapeTable::searchUnchecked(jsid id)
     MOZ_ASSERT(!JSID_IS_EMPTY(id));
 
     /* Compute the primary hash address. */
     HashNumber hash0 = HashId(id);
     HashNumber hash1 = Hash1(hash0, hashShift_);
     Entry* entry = &getEntry(hash1);
 
     /* Miss: return space for a new entry. */
-    if (entry->isFree())
+    if (entry->isFree()) {
         return *entry;
+    }
 
     /* Hit: return entry. */
     Shape* shape = entry->shape();
-    if (shape && shape->propidRaw() == id)
+    if (shape && shape->propidRaw() == id) {
         return *entry;
+    }
 
     /* Collision: double hash. */
     uint32_t sizeLog2 = HASH_BITS - hashShift_;
     HashNumber hash2 = Hash2(hash0, sizeLog2, hashShift_);
     uint32_t sizeMask = JS_BITMASK(sizeLog2);
 
     /* Save the first removed entry pointer so we can recycle it if adding. */
     Entry* firstRemoved;
     if (Adding == MaybeAdding::Adding) {
         if (entry->isRemoved()) {
             firstRemoved = entry;
         } else {
             firstRemoved = nullptr;
-            if (!entry->hadCollision())
+            if (!entry->hadCollision()) {
                 entry->flagCollision();
+            }
         }
     }
 
 #ifdef DEBUG
     bool collisionFlag = true;
-    if (!entry->isRemoved())
+    if (!entry->isRemoved()) {
         collisionFlag = entry->hadCollision();
+    }
 #endif
 
     while (true) {
         hash1 -= hash2;
         hash1 &= sizeMask;
         entry = &getEntry(hash1);
 
-        if (entry->isFree())
+        if (entry->isFree()) {
             return (Adding == MaybeAdding::Adding && firstRemoved) ? *firstRemoved : *entry;
+        }
 
         shape = entry->shape();
         if (shape && shape->propidRaw() == id) {
             MOZ_ASSERT(collisionFlag);
             return *entry;
         }
 
         if (Adding == MaybeAdding::Adding) {
             if (entry->isRemoved()) {
-                if (!firstRemoved)
+                if (!firstRemoved) {
                     firstRemoved = entry;
+                }
             } else {
-                if (!entry->hadCollision())
+                if (!entry->hadCollision()) {
                     entry->flagCollision();
+                }
             }
         }
 
 #ifdef DEBUG
-        if (!entry->isRemoved())
+        if (!entry->isRemoved()) {
             collisionFlag &= entry->hadCollision();
+        }
 #endif
     }
 
     MOZ_CRASH("Shape::search failed to find an expected entry.");
 }
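
searchUnchecked probes the table with double hashing, and the comment preceding it notes that the second hash must be relatively prime to the table size. For a power-of-two table any odd step satisfies that, which is what this simplified standalone sketch uses (no removed-entry recycling, and it assumes the table always has at least one free slot):

#include <cstdint>
#include <vector>

// Double-hashing lookup in a power-of-two table. An odd step is relatively
// prime to the table size, so the probe sequence visits every slot once
// before repeating.
int* Find(std::vector<int*>& table, uint32_t hash, int key) {
    uint32_t mask = uint32_t(table.size()) - 1;    // size is a power of two
    uint32_t index = hash & mask;                  // primary hash
    uint32_t step = (hash >> 16) | 1;              // secondary hash, forced odd
    while (table[index] && *table[index] != key) { // nullptr marks a free slot
        index = (index - step) & mask;             // next probe
    }
    return table[index];                           // entry, or nullptr on miss
}
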
 
 template<MaybeAdding Adding>
 MOZ_ALWAYS_INLINE ShapeTable::Entry&
@@ -387,18 +412,19 @@ NativeObject::addDataProperty(JSContext*
     MOZ_ASSERT(obj->uninlinedNonProxyIsExtensible());
     MOZ_ASSERT(!obj->containsPure(id));
 
     AutoKeepShapeTables keep(cx);
     ShapeTable* table = nullptr;
     ShapeTable::Entry* entry = nullptr;
     if (obj->inDictionaryMode()) {
         table = obj->lastProperty()->ensureTableForDictionary(cx, keep);
-        if (!table)
+        if (!table) {
             return nullptr;
+        }
         entry = &table->search<MaybeAdding::Adding>(id, keep);
     }
 
     return addDataPropertyInternal(cx, obj, id, slot, attrs, table, entry, keep);
 }
 
 /* static */ MOZ_ALWAYS_INLINE Shape*
 NativeObject::addAccessorProperty(JSContext* cx, HandleNativeObject obj, HandleId id,
@@ -408,18 +434,19 @@ NativeObject::addAccessorProperty(JSCont
     MOZ_ASSERT(obj->uninlinedNonProxyIsExtensible());
     MOZ_ASSERT(!obj->containsPure(id));
 
     AutoKeepShapeTables keep(cx);
     ShapeTable* table = nullptr;
     ShapeTable::Entry* entry = nullptr;
     if (obj->inDictionaryMode()) {
         table = obj->lastProperty()->ensureTableForDictionary(cx, keep);
-        if (!table)
+        if (!table) {
             return nullptr;
+        }
         entry = &table->search<MaybeAdding::Adding>(id, keep);
     }
 
     return addAccessorPropertyInternal(cx, obj, id, getter, setter, attrs, table, entry, keep);
 }
 
 } /* namespace js */
 
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -37,39 +37,43 @@ using JS::AutoCheckCannotGC;
 
 Shape* const ShapeTable::Entry::SHAPE_REMOVED = (Shape*)ShapeTable::Entry::SHAPE_COLLISION;
 
 bool
 ShapeTable::init(JSContext* cx, Shape* lastProp)
 {
     uint32_t sizeLog2 = CeilingLog2Size(entryCount_);
     uint32_t size = JS_BIT(sizeLog2);
-    if (entryCount_ >= size - (size >> 2))
+    if (entryCount_ >= size - (size >> 2)) {
         sizeLog2++;
-    if (sizeLog2 < MIN_SIZE_LOG2)
+    }
+    if (sizeLog2 < MIN_SIZE_LOG2) {
         sizeLog2 = MIN_SIZE_LOG2;
+    }
 
     size = JS_BIT(sizeLog2);
     entries_ = cx->pod_calloc<Entry>(size);
-    if (!entries_)
+    if (!entries_) {
         return false;
+    }
 
     MOZ_ASSERT(sizeLog2 <= HASH_BITS);
     hashShift_ = HASH_BITS - sizeLog2;
 
     for (Shape::Range<NoGC> r(lastProp); !r.empty(); r.popFront()) {
         Shape& shape = r.front();
         Entry& entry = searchUnchecked<MaybeAdding::Adding>(shape.propid());
 
         /*
          * Beware duplicate args and arg vs. var conflicts: the youngest shape
          * (nearest to lastProp) must win. See bug 600067.
          */
-        if (!entry.shape())
+        if (!entry.shape()) {
             entry.setPreservingCollision(&shape);
+        }
     }
 
     MOZ_ASSERT(capacity() == size);
     MOZ_ASSERT(size >= MIN_SIZE);
     MOZ_ASSERT(!needsToGrow());
     return true;
 }
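
ShapeTable::init above sizes the table from the entry count: take the ceiling log2, bump it once if the count would exceed roughly a 3/4 load factor, and clamp to a minimum size. A small sketch of that sizing rule (the default minimum here is illustrative, not the real MIN_SIZE_LOG2):

#include <cstdint>

// Smallest power-of-two capacity that keeps |entryCount| at or below ~75%
// load, with a lower bound, mirroring the sizing logic in init().
uint32_t ChooseCapacity(uint32_t entryCount, uint32_t minSizeLog2 = 3) {
    uint32_t sizeLog2 = 0;
    while ((1u << sizeLog2) < entryCount) {  // ceiling log2 of the count
        sizeLog2++;
    }
    uint32_t size = 1u << sizeLog2;
    if (entryCount >= size - (size >> 2)) {  // would be more than 3/4 full
        sizeLog2++;
    }
    if (sizeLog2 < minSizeLog2) {
        sizeLog2 = minSizeLog2;
    }
    return 1u << sizeLog2;
}
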
 
@@ -78,18 +82,19 @@ Shape::removeFromDictionary(NativeObject
 {
     MOZ_ASSERT(inDictionary());
     MOZ_ASSERT(obj->inDictionaryMode());
     MOZ_ASSERT(listp);
 
     MOZ_ASSERT(obj->shape()->inDictionary());
     MOZ_ASSERT(obj->shape()->listp == obj->shapePtr());
 
-    if (parent)
+    if (parent) {
         parent->listp = listp;
+    }
     *listp = parent;
     listp = nullptr;
 
     obj->shape()->clearCachedBigEnoughForShapeTable();
 }
 
 void
 Shape::insertIntoDictionary(GCPtrShape* dictp)
@@ -99,47 +104,50 @@ Shape::insertIntoDictionary(GCPtrShape* 
     MOZ_ASSERT(inDictionary());
     MOZ_ASSERT(!listp);
 
     MOZ_ASSERT_IF(*dictp, (*dictp)->inDictionary());
     MOZ_ASSERT_IF(*dictp, (*dictp)->listp == dictp);
     MOZ_ASSERT_IF(*dictp, zone() == (*dictp)->zone());
 
     setParent(dictp->get());
-    if (parent)
+    if (parent) {
         parent->listp = &parent;
+    }
     listp = (GCPtrShape*) dictp;
     *dictp = this;
 }
 
 bool
 Shape::makeOwnBaseShape(JSContext* cx)
 {
     MOZ_ASSERT(!base()->isOwned());
     MOZ_ASSERT(cx->zone() == zone());
 
     BaseShape* nbase = Allocate<BaseShape, NoGC>(cx);
-    if (!nbase)
+    if (!nbase) {
         return false;
+    }
 
     new (nbase) BaseShape(StackBaseShape(this));
     nbase->setOwned(base()->toUnowned());
 
     this->base_ = nbase;
 
     return true;
 }
 
 void
 Shape::handoffTableTo(Shape* shape)
 {
     MOZ_ASSERT(inDictionary() && shape->inDictionary());
 
-    if (this == shape)
+    if (this == shape) {
         return;
+    }
 
     MOZ_ASSERT(base()->isOwned() && !shape->base()->isOwned());
 
     BaseShape* nbase = base();
 
     MOZ_ASSERT_IF(!shape->isEmptyShape() && shape->isDataProperty(),
                   nbase->slotSpan() > shape->slot());
 
@@ -149,22 +157,24 @@ Shape::handoffTableTo(Shape* shape)
     shape->base_ = nbase;
 }
 
 /* static */ bool
 Shape::hashify(JSContext* cx, Shape* shape)
 {
     MOZ_ASSERT(!shape->hasTable());
 
-    if (!shape->ensureOwnBaseShape(cx))
+    if (!shape->ensureOwnBaseShape(cx)) {
         return false;
+    }
 
     ShapeTable* table = cx->new_<ShapeTable>(shape->entryCount());
-    if (!table)
+    if (!table) {
         return false;
+    }
 
     if (!table->init(cx, shape)) {
         js_free(table);
         return false;
     }
 
     shape->base()->setTable(table);
     return true;
@@ -179,18 +189,19 @@ ShapeTable::change(JSContext* cx, int lo
     /*
      * Grow, shrink, or compress by changing this->entries_.
      */
     uint32_t oldLog2 = HASH_BITS - hashShift_;
     uint32_t newLog2 = oldLog2 + log2Delta;
     uint32_t oldSize = JS_BIT(oldLog2);
     uint32_t newSize = JS_BIT(newLog2);
     Entry* newTable = cx->maybe_pod_calloc<Entry>(newSize);
-    if (!newTable)
+    if (!newTable) {
         return false;
+    }
 
     /* Now that we have newTable allocated, update members. */
     MOZ_ASSERT(newLog2 <= HASH_BITS);
     hashShift_ = HASH_BITS - newLog2;
     removedCount_ = 0;
     Entry* oldTable = entries_;
     entries_ = newTable;
 
@@ -235,32 +246,34 @@ ShapeTable::grow(JSContext* cx)
 void
 ShapeTable::trace(JSTracer* trc)
 {
     for (size_t i = 0; i < capacity(); i++) {
         Entry& entry = getEntry(i);
         Shape* shape = entry.shape();
         if (shape) {
             TraceManuallyBarrieredEdge(trc, &shape, "ShapeTable shape");
-            if (shape != entry.shape())
+            if (shape != entry.shape()) {
                 entry.setPreservingCollision(shape);
+            }
         }
     }
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 void
 ShapeTable::checkAfterMovingGC()
 {
     for (size_t i = 0; i < capacity(); i++) {
         Entry& entry = getEntry(i);
         Shape* shape = entry.shape();
-        if (shape)
+        if (shape) {
             CheckGCThingAfterMovingGC(shape);
+        }
     }
 }
 
 #endif
 
 /* static */ Shape*
 Shape::replaceLastProperty(JSContext* cx, StackBaseShape& base,
                            TaggedProto proto, HandleShape shape)
@@ -270,18 +283,19 @@ Shape::replaceLastProperty(JSContext* cx
     if (!shape->parent) {
         /* Treat as resetting the initial property of the shape hierarchy. */
         gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
         return EmptyShape::getInitialShape(cx, base.clasp, proto, kind,
                                            base.flags & BaseShape::OBJECT_FLAG_MASK);
     }
 
     UnownedBaseShape* nbase = BaseShape::getUnowned(cx, base);
-    if (!nbase)
+    if (!nbase) {
         return nullptr;
+    }
 
     Rooted<StackShape> child(cx, StackShape(shape));
     child.setBase(nbase);
 
     return cx->zone()->propertyTree().getChild(cx, shape->parent, child);
 }
 
 /*
@@ -294,18 +308,19 @@ NativeObject::getChildDataProperty(JSCon
                                    HandleNativeObject obj, HandleShape parent,
                                    MutableHandle<StackShape> child)
 {
     MOZ_ASSERT(child.isDataProperty());
 
     if (child.hasMissingSlot()) {
         uint32_t slot;
         if (obj->inDictionaryMode()) {
-            if (!allocDictionarySlot(cx, obj, &slot))
+            if (!allocDictionarySlot(cx, obj, &slot)) {
                 return nullptr;
+            }
         } else {
             slot = obj->slotSpan();
             MOZ_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
             // Objects with many properties are converted to dictionary
             // mode, so we can't overflow SHAPE_MAXIMUM_SLOT here.
             MOZ_ASSERT(slot < JSSLOT_FREE(obj->getClass()) + PropertyTree::MAX_HEIGHT);
             MOZ_ASSERT(slot < SHAPE_MAXIMUM_SLOT);
         }
@@ -325,68 +340,74 @@ NativeObject::getChildDataProperty(JSCon
                    child.slot() == parent->maybeSlot() + 1 ||
                    (parent->maybeSlot() + 1 < JSSLOT_FREE(obj->getClass()) &&
                     child.slot() == JSSLOT_FREE(obj->getClass())));
     }
 
     if (obj->inDictionaryMode()) {
         MOZ_ASSERT(parent == obj->lastProperty());
         Shape* shape = Allocate<Shape>(cx);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
         if (child.slot() >= obj->lastProperty()->base()->slotSpan()) {
             if (!obj->setSlotSpan(cx, child.slot() + 1)) {
                 new (shape) Shape(obj->lastProperty()->base()->unowned(), 0);
                 return nullptr;
             }
         }
         shape->initDictionaryShape(child, obj->numFixedSlots(), obj->shapePtr());
         return shape;
     }
 
     Shape* shape = cx->zone()->propertyTree().inlinedGetChild(cx, parent, child);
-    if (!shape)
+    if (!shape) {
         return nullptr;
+    }
 
     MOZ_ASSERT(shape->parent == parent);
     MOZ_ASSERT_IF(parent != obj->lastProperty(), parent == obj->lastProperty()->parent);
 
-    if (!obj->setLastProperty(cx, shape))
+    if (!obj->setLastProperty(cx, shape)) {
         return nullptr;
+    }
     return shape;
 }
 
 /* static */ MOZ_ALWAYS_INLINE Shape*
 NativeObject::getChildAccessorProperty(JSContext* cx,
                                        HandleNativeObject obj, HandleShape parent,
                                        MutableHandle<StackShape> child)
 {
     MOZ_ASSERT(!child.isDataProperty());
 
     // Accessor properties have no slot, but slot_ will reflect that of parent.
     child.setSlot(parent->maybeSlot());
 
     if (obj->inDictionaryMode()) {
         MOZ_ASSERT(parent == obj->lastProperty());
         Shape* shape = Allocate<AccessorShape>(cx);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
         shape->initDictionaryShape(child, obj->numFixedSlots(), obj->shapePtr());
         return shape;
     }
 
     Shape* shape = cx->zone()->propertyTree().inlinedGetChild(cx, parent, child);
-    if (!shape)
+    if (!shape) {
         return nullptr;
+    }
 
     MOZ_ASSERT(shape->parent == parent);
     MOZ_ASSERT_IF(parent != obj->lastProperty(), parent == obj->lastProperty()->parent);
 
-    if (!obj->setLastProperty(cx, shape))
+    if (!obj->setLastProperty(cx, shape)) {
         return nullptr;
+    }
     return shape;
 }
 
 /* static */ bool
 js::NativeObject::toDictionaryMode(JSContext* cx, HandleNativeObject obj)
 {
     MOZ_ASSERT(!obj->inDictionaryMode());
     MOZ_ASSERT(cx->isInsideCurrentCompartment(obj));
@@ -408,18 +429,19 @@ js::NativeObject::toDictionaryMode(JSCon
             ReportOutOfMemory(cx);
             return false;
         }
 
         GCPtrShape* listp = dictionaryShape ? &dictionaryShape->parent : nullptr;
         StackShape child(shape);
         dprop->initDictionaryShape(child, obj->numFixedSlots(), listp);
 
-        if (!dictionaryShape)
+        if (!dictionaryShape) {
             root = dprop;
+        }
 
         MOZ_ASSERT(!dprop->hasTable());
         dictionaryShape = dprop;
         shape = shape->previous();
     }
 
     if (!Shape::hashify(cx, root)) {
         ReportOutOfMemory(cx);
@@ -445,36 +467,39 @@ js::NativeObject::toDictionaryMode(JSCon
 
 static bool
 ShouldConvertToDictionary(NativeObject* obj)
 {
     /*
      * Use a lower limit if this object is likely a hashmap (SETELEM was used
      * to set properties).
      */
-    if (obj->hadElementsAccess())
+    if (obj->hadElementsAccess()) {
         return obj->lastProperty()->entryCount() >= PropertyTree::MAX_HEIGHT_WITH_ELEMENTS_ACCESS;
+    }
     return obj->lastProperty()->entryCount() >= PropertyTree::MAX_HEIGHT;
 }
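
ShouldConvertToDictionary uses a lower property-count limit when the object has had dynamic element accesses, on the theory that such objects are probably being used as hashmaps. A trivial sketch of the heuristic with made-up constants (the real limits live on PropertyTree):

#include <cstdint>

constexpr uint32_t kMaxLinearProperties = 128;                   // hypothetical
constexpr uint32_t kMaxLinearPropertiesWithElementsAccess = 32;  // hypothetical

// Convert to dictionary mode sooner for likely-hashmap objects.
bool ShouldConvertToDictionarySketch(uint32_t propertyCount, bool hadElementsAccess) {
    uint32_t limit = hadElementsAccess ? kMaxLinearPropertiesWithElementsAccess
                                       : kMaxLinearProperties;
    return propertyCount >= limit;
}
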
 
 static MOZ_ALWAYS_INLINE UnownedBaseShape*
 GetBaseShapeForNewShape(JSContext* cx, HandleShape last, HandleId id)
 {
     uint32_t index;
     bool indexed = IdIsIndex(id, &index);
     bool interestingSymbol = JSID_IS_SYMBOL(id) && JSID_TO_SYMBOL(id)->isInterestingSymbol();
 
-    if (MOZ_LIKELY(!indexed && !interestingSymbol))
+    if (MOZ_LIKELY(!indexed && !interestingSymbol)) {
         return last->base()->unowned();
+    }
 
     StackBaseShape base(last->base());
-    if (indexed)
+    if (indexed) {
         base.flags |= BaseShape::INDEXED;
-    else if (interestingSymbol)
+    } else if (interestingSymbol) {
         base.flags |= BaseShape::HAS_INTERESTING_SYMBOL;
+    }
     return BaseShape::getUnowned(cx, base);
 }
 
 namespace js {
 
 class MOZ_RAII AutoCheckShapeConsistency
 {
 #ifdef DEBUG
@@ -502,26 +527,30 @@ NativeObject::maybeConvertToOrGrowDictio
                                                    ShapeTable** table, ShapeTable::Entry** entry,
                                                    const AutoKeepShapeTables& keep)
 {
     MOZ_ASSERT(!!*table == !!*entry);
 
     // The code below deals with either converting obj to dictionary mode or
     // growing an object that's already in dictionary mode.
     if (!obj->inDictionaryMode()) {
-        if (!ShouldConvertToDictionary(obj))
+        if (!ShouldConvertToDictionary(obj)) {
             return true;
-        if (!toDictionaryMode(cx, obj))
+        }
+        if (!toDictionaryMode(cx, obj)) {
             return false;
+        }
         *table = obj->lastProperty()->maybeTable(keep);
     } else {
-        if (!(*table)->needsToGrow())
+        if (!(*table)->needsToGrow()) {
             return true;
-        if (!(*table)->grow(cx))
+        }
+        if (!(*table)->grow(cx)) {
             return false;
+        }
     }
 
     *entry = &(*table)->search<MaybeAdding::Adding>(id, keep);
     MOZ_ASSERT(!(*entry)->shape());
     return true;
 }
 
 MOZ_ALWAYS_INLINE void
@@ -563,38 +592,42 @@ NativeObject::addAccessorPropertyInterna
                                           ShapeTable* table, ShapeTable::Entry* entry,
                                           const AutoKeepShapeTables& keep)
 {
     AutoCheckShapeConsistency check(obj);
     AutoRooterGetterSetter gsRoot(cx, attrs, &getter, &setter);
 
     AssertValidPropertyOp(obj, getter, setter, attrs);
 
-    if (!maybeConvertToOrGrowDictionaryForAdd(cx, obj, id, &table, &entry, keep))
+    if (!maybeConvertToOrGrowDictionaryForAdd(cx, obj, id, &table, &entry, keep)) {
         return nullptr;
+    }
 
     // Find or create a property tree node labeled by our arguments.
     RootedShape shape(cx);
     {
         RootedShape last(cx, obj->lastProperty());
         Rooted<UnownedBaseShape*> nbase(cx, GetBaseShapeForNewShape(cx, last, id));
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
 
         Rooted<StackShape> child(cx, StackShape(nbase, id, SHAPE_INVALID_SLOT, attrs));
         child.updateGetterSetter(getter, setter);
         shape = getChildAccessorProperty(cx, obj, last, &child);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
     }
 
     MOZ_ASSERT(shape == obj->lastProperty());
 
-    if (table)
+    if (table) {
         shape->updateDictionaryTable(table, entry, keep);
+    }
 
     return shape;
 }
 
 /* static */ Shape*
 NativeObject::addDataPropertyInternal(JSContext* cx,
                                       HandleNativeObject obj, HandleId id,
                                       uint32_t slot, unsigned attrs,
@@ -603,37 +636,41 @@ NativeObject::addDataPropertyInternal(JS
                                       const AutoKeepShapeTables& keep)
 {
     AutoCheckShapeConsistency check(obj);
 
     // The slot, if any, must be a reserved slot.
     MOZ_ASSERT(slot == SHAPE_INVALID_SLOT ||
                slot < JSCLASS_RESERVED_SLOTS(obj->getClass()));
 
-    if (!maybeConvertToOrGrowDictionaryForAdd(cx, obj, id, &table, &entry, keep))
+    if (!maybeConvertToOrGrowDictionaryForAdd(cx, obj, id, &table, &entry, keep)) {
         return nullptr;
+    }
 
     // Find or create a property tree node labeled by our arguments.
     RootedShape shape(cx);
     {
         RootedShape last(cx, obj->lastProperty());
         Rooted<UnownedBaseShape*> nbase(cx, GetBaseShapeForNewShape(cx, last, id));
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
 
         Rooted<StackShape> child(cx, StackShape(nbase, id, slot, attrs));
         shape = getChildDataProperty(cx, obj, last, &child);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
     }
 
     MOZ_ASSERT(shape == obj->lastProperty());
 
-    if (table)
+    if (table) {
         shape->updateDictionaryTable(table, entry, keep);
+    }
 
     return shape;
 }
 
 static MOZ_ALWAYS_INLINE Shape*
 PropertyTreeReadBarrier(Shape* parent, Shape* shape)
 {
     JS::Zone* zone = shape->zone();
@@ -644,18 +681,19 @@ PropertyTreeReadBarrier(Shape* parent, S
         TraceManuallyBarrieredEdge(zone->barrierTracer(), &tmp, "read barrier");
         MOZ_ASSERT(tmp == shape);
         return shape;
     }
 
     if (MOZ_LIKELY(!zone->isGCSweepingOrCompacting() ||
                    !IsAboutToBeFinalizedUnbarriered(&shape)))
     {
-        if (shape->isMarkedGray())
+        if (shape->isMarkedGray()) {
             UnmarkGrayShapeRecursively(shape);
+        }
         return shape;
     }
 
     // The shape we've found is unreachable and due to be finalized, so
     // remove our weak reference to it and don't use it.
     MOZ_ASSERT(parent->isMarkedAny());
     parent->removeChild(shape);
 
@@ -670,88 +708,98 @@ NativeObject::addEnumerableDataProperty(
 
     AutoCheckShapeConsistency check(obj);
 
     // Fast path for non-dictionary shapes with a single kid.
     do {
         AutoCheckCannotGC nogc;
 
         Shape* lastProperty = obj->lastProperty();
-        if (lastProperty->inDictionary())
+        if (lastProperty->inDictionary()) {
             break;
+        }
 
         KidsPointer* kidp = &lastProperty->kids;
-        if (!kidp->isShape())
+        if (!kidp->isShape()) {
             break;
+        }
 
         Shape* kid = kidp->toShape();
         MOZ_ASSERT(!kid->inDictionary());
 
         if (kid->propidRaw() != id ||
             kid->isAccessorShape() ||
             kid->attributes() != JSPROP_ENUMERATE ||
             kid->base()->unowned() != lastProperty->base()->unowned())
         {
             break;
         }
 
         MOZ_ASSERT(kid->isDataProperty());
 
         kid = PropertyTreeReadBarrier(lastProperty, kid);
-        if (!kid)
+        if (!kid) {
             break;
+        }
 
-        if (!obj->setLastProperty(cx, kid))
+        if (!obj->setLastProperty(cx, kid)) {
             return nullptr;
+        }
         return kid;
     } while (0);
 
     AutoKeepShapeTables keep(cx);
     ShapeTable* table = nullptr;
     ShapeTable::Entry* entry = nullptr;
 
     if (!obj->inDictionaryMode()) {
         if (MOZ_UNLIKELY(ShouldConvertToDictionary(obj))) {
-            if (!toDictionaryMode(cx, obj))
+            if (!toDictionaryMode(cx, obj)) {
                 return nullptr;
+            }
             table = obj->lastProperty()->maybeTable(keep);
             entry = &table->search<MaybeAdding::Adding>(id, keep);
         }
     } else {
         table = obj->lastProperty()->ensureTableForDictionary(cx, keep);
-        if (!table)
+        if (!table) {
             return nullptr;
+        }
         if (table->needsToGrow()) {
-            if (!table->grow(cx))
+            if (!table->grow(cx)) {
                 return nullptr;
+            }
         }
         entry = &table->search<MaybeAdding::Adding>(id, keep);
         MOZ_ASSERT(!entry->shape());
     }
 
     MOZ_ASSERT(!!table == !!entry);
 
     /* Find or create a property tree node labeled by our arguments. */
     RootedShape last(cx, obj->lastProperty());
     UnownedBaseShape* nbase = GetBaseShapeForNewShape(cx, last, id);
-    if (!nbase)
+    if (!nbase) {
         return nullptr;
+    }
 
     Shape* shape;
     if (obj->inDictionaryMode()) {
         uint32_t slot;
-        if (!allocDictionarySlot(cx, obj, &slot))
+        if (!allocDictionarySlot(cx, obj, &slot)) {
             return nullptr;
+        }
 
         Rooted<StackShape> child(cx, StackShape(nbase, id, slot, JSPROP_ENUMERATE));
 
         MOZ_ASSERT(last == obj->lastProperty());
         shape = Allocate<Shape>(cx);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
         if (slot >= obj->lastProperty()->base()->slotSpan()) {
             if (MOZ_UNLIKELY(!obj->setSlotSpan(cx, slot + 1))) {
                 new (shape) Shape(obj->lastProperty()->base()->unowned(), 0);
                 return nullptr;
             }
         }
         shape->initDictionaryShape(child, obj->numFixedSlots(), obj->shapePtr());
     } else {
@@ -759,84 +807,92 @@ NativeObject::addEnumerableDataProperty(
         MOZ_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
         // Objects with many properties are converted to dictionary
         // mode, so we can't overflow SHAPE_MAXIMUM_SLOT here.
         MOZ_ASSERT(slot < JSSLOT_FREE(obj->getClass()) + PropertyTree::MAX_HEIGHT);
         MOZ_ASSERT(slot < SHAPE_MAXIMUM_SLOT);
 
         Rooted<StackShape> child(cx, StackShape(nbase, id, slot, JSPROP_ENUMERATE));
         shape = cx->zone()->propertyTree().inlinedGetChild(cx, last, child);
-        if (!shape)
+        if (!shape) {
             return nullptr;
-        if (!obj->setLastProperty(cx, shape))
+        }
+        if (!obj->setLastProperty(cx, shape)) {
             return nullptr;
+        }
     }
 
     MOZ_ASSERT(shape == obj->lastProperty());
 
-    if (table)
+    if (table) {
         shape->updateDictionaryTable(table, entry, keep);
+    }
 
     return shape;
 }
 
 Shape*
 js::ReshapeForAllocKind(JSContext* cx, Shape* shape, TaggedProto proto,
                                  gc::AllocKind allocKind)
 {
     // Compute the number of fixed slots with the new allocation kind.
     size_t nfixed = gc::GetGCKindSlots(allocKind, shape->getObjectClass());
 
     // Get all the ids in the shape, in order.
     js::AutoIdVector ids(cx);
     {
         for (unsigned i = 0; i < shape->slotSpan(); i++) {
-            if (!ids.append(JSID_VOID))
+            if (!ids.append(JSID_VOID)) {
                 return nullptr;
+            }
         }
         Shape* nshape = shape;
         while (!nshape->isEmptyShape()) {
             ids[nshape->slot()].set(nshape->propid());
             nshape = nshape->previous();
         }
     }
 
     // Construct the new shape, without updating type information.
     RootedId id(cx);
     RootedShape newShape(cx, EmptyShape::getInitialShape(cx, shape->getObjectClass(),
                                                          proto, nfixed, shape->getObjectFlags()));
-    if (!newShape)
+    if (!newShape) {
         return nullptr;
+    }
 
     for (unsigned i = 0; i < ids.length(); i++) {
         id = ids[i];
 
         UnownedBaseShape* nbase = GetBaseShapeForNewShape(cx, newShape, id);
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
 
         Rooted<StackShape> child(cx, StackShape(nbase, id, i, JSPROP_ENUMERATE));
         newShape = cx->zone()->propertyTree().getChild(cx, newShape, child);
-        if (!newShape)
+        if (!newShape) {
             return nullptr;
+        }
     }
 
     return newShape;
 }
 
 /*
  * Assert some invariants that should hold when changing properties. It's the
  * responsibility of the callers to ensure these hold.
  */
 static void
 AssertCanChangeAttrs(Shape* shape, unsigned attrs)
 {
 #ifdef DEBUG
-    if (shape->configurable())
+    if (shape->configurable()) {
         return;
+    }
 
     /* A permanent property must stay permanent. */
     MOZ_ASSERT(attrs & JSPROP_PERMANENT);
 
     /* Reject attempts to remove a slot from the permanent data property. */
     MOZ_ASSERT_IF(shape->isDataProperty(),
                   !(attrs & (JSPROP_GETTER | JSPROP_SETTER)));
 #endif
@@ -844,35 +900,38 @@ AssertCanChangeAttrs(Shape* shape, unsig
 
 static void
 AssertValidArrayIndex(NativeObject* obj, jsid id)
 {
 #ifdef DEBUG
     if (obj->is<ArrayObject>()) {
         ArrayObject* arr = &obj->as<ArrayObject>();
         uint32_t index;
-        if (IdIsIndex(id, &index))
+        if (IdIsIndex(id, &index)) {
             MOZ_ASSERT(index < arr->length() || arr->lengthIsWritable());
+        }
     }
 #endif
 }
 
 /* static */ bool
 NativeObject::maybeToDictionaryModeForPut(JSContext* cx, HandleNativeObject obj,
                                           MutableHandleShape shape)
 {
     // Overwriting a non-last property requires switching to dictionary mode.
     // The shape tree is shared immutable, and we can't removeProperty and then
     // addAccessorPropertyInternal because a failure under add would lose data.
 
-    if (shape == obj->lastProperty() || obj->inDictionaryMode())
+    if (shape == obj->lastProperty() || obj->inDictionaryMode()) {
         return true;
+    }
 
-    if (!toDictionaryMode(cx, obj))
+    if (!toDictionaryMode(cx, obj)) {
         return false;
+    }
 
     AutoCheckCannotGC nogc;
     ShapeTable* table = obj->lastProperty()->maybeTable(nogc);
     MOZ_ASSERT(table);
     shape.set(table->search<MaybeAdding::NotAdding>(shape->propid(), nogc).shape());
     return true;
 }
 
@@ -917,69 +976,77 @@ NativeObject::putDataProperty(JSContext*
     bool hadSlot = shape->isDataProperty();
     uint32_t oldSlot = shape->maybeSlot();
     uint32_t slot = hadSlot ? oldSlot : SHAPE_INVALID_SLOT;
 
     Rooted<UnownedBaseShape*> nbase(cx);
     {
         RootedShape shape(cx, obj->lastProperty());
         nbase = GetBaseShapeForNewShape(cx, shape, id);
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
     }
 
     // Now that we've possibly preserved slot, check whether all members match.
     // If so, this is a redundant "put" and we can return without more work.
-    if (shape->matchesParamsAfterId(nbase, slot, attrs, nullptr, nullptr))
+    if (shape->matchesParamsAfterId(nbase, slot, attrs, nullptr, nullptr)) {
         return shape;
+    }
 
-    if (!maybeToDictionaryModeForPut(cx, obj, &shape))
+    if (!maybeToDictionaryModeForPut(cx, obj, &shape)) {
         return nullptr;
+    }
 
     MOZ_ASSERT_IF(shape->isDataProperty(), shape->slot() == slot);
 
     if (obj->inDictionaryMode()) {
         // Updating some property in a dictionary-mode object. Create a new
         // shape for the existing property, and also generate a new shape for
         // the last property of the dictionary (unless the modified property
         // is also the last property).
         bool updateLast = (shape == obj->lastProperty());
         shape = NativeObject::replaceWithNewEquivalentShape(cx, obj, shape, nullptr,
                                                             /* accessorShape = */ false);
-        if (!shape)
+        if (!shape) {
             return nullptr;
-        if (!updateLast && !NativeObject::generateOwnShape(cx, obj))
+        }
+        if (!updateLast && !NativeObject::generateOwnShape(cx, obj)) {
             return nullptr;
+        }
 
         if (slot == SHAPE_INVALID_SLOT) {
-            if (!allocDictionarySlot(cx, obj, &slot))
+            if (!allocDictionarySlot(cx, obj, &slot)) {
                 return nullptr;
+            }
         }
 
-        if (updateLast)
+        if (updateLast) {
             shape->base()->adoptUnowned(nbase);
-        else
+        } else {
             shape->base_ = nbase;
+        }
 
         shape->setSlot(slot);
         shape->attrs = uint8_t(attrs);
         shape->immutableFlags &= ~Shape::ACCESSOR_SHAPE;
         shape->immutableFlags |= Shape::IN_DICTIONARY;
     } else {
         // Updating the last property in a non-dictionary-mode object. Find an
         // alternate shared child of the last property's previous shape.
 
         MOZ_ASSERT(shape == obj->lastProperty());
 
         // Find or create a property tree node labeled by our arguments.
         Rooted<StackShape> child(cx, StackShape(nbase, id, slot, attrs));
         RootedShape parent(cx, shape->parent);
         shape = getChildDataProperty(cx, obj, parent, &child);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
     }
 
     MOZ_ASSERT(shape->isDataProperty());
     return shape;
 }
 
 /* static */ Shape*
 NativeObject::putAccessorProperty(JSContext* cx, HandleNativeObject obj, HandleId id,
@@ -1021,45 +1088,51 @@ NativeObject::putAccessorProperty(JSCont
 
     bool hadSlot = shape->isDataProperty();
     uint32_t oldSlot = shape->maybeSlot();
 
     Rooted<UnownedBaseShape*> nbase(cx);
     {
         RootedShape shape(cx, obj->lastProperty());
         nbase = GetBaseShapeForNewShape(cx, shape, id);
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
     }
 
     // Check whether all members match. If so, this is a redundant "put" and we can
     // return without more work.
-    if (shape->matchesParamsAfterId(nbase, SHAPE_INVALID_SLOT, attrs, getter, setter))
+    if (shape->matchesParamsAfterId(nbase, SHAPE_INVALID_SLOT, attrs, getter, setter)) {
         return shape;
+    }
 
-    if (!maybeToDictionaryModeForPut(cx, obj, &shape))
+    if (!maybeToDictionaryModeForPut(cx, obj, &shape)) {
         return nullptr;
+    }
 
     if (obj->inDictionaryMode()) {
         // Updating some property in a dictionary-mode object. Create a new
         // shape for the existing property, and also generate a new shape for
         // the last property of the dictionary (unless the modified property
         // is also the last property).
         bool updateLast = (shape == obj->lastProperty());
         shape = NativeObject::replaceWithNewEquivalentShape(cx, obj, shape, nullptr,
                                                             /* accessorShape = */ true);
-        if (!shape)
+        if (!shape) {
             return nullptr;
-        if (!updateLast && !NativeObject::generateOwnShape(cx, obj))
+        }
+        if (!updateLast && !NativeObject::generateOwnShape(cx, obj)) {
             return nullptr;
+        }
 
-        if (updateLast)
+        if (updateLast) {
             shape->base()->adoptUnowned(nbase);
-        else
+        } else {
             shape->base_ = nbase;
+        }
 
         shape->setSlot(SHAPE_INVALID_SLOT);
         shape->attrs = uint8_t(attrs);
         shape->immutableFlags |= Shape::IN_DICTIONARY | Shape::ACCESSOR_SHAPE;
 
         AccessorShape& accShape = shape->asAccessorShape();
         accShape.rawGetter = getter;
         accShape.rawSetter = setter;
@@ -1070,26 +1143,28 @@ NativeObject::putAccessorProperty(JSCont
 
         MOZ_ASSERT(shape == obj->lastProperty());
 
         // Find or create a property tree node labeled by our arguments.
         Rooted<StackShape> child(cx, StackShape(nbase, id, SHAPE_INVALID_SLOT, attrs));
         child.updateGetterSetter(getter, setter);
         RootedShape parent(cx, shape->parent);
         shape = getChildAccessorProperty(cx, obj, parent, &child);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
     }
 
     // Can't fail now, so free the previous incarnation's slot. But we do not
     // need to free oldSlot (and must not, as trying to will botch an assertion
     // in NativeObject::freeSlot) if the new last property (shape here) has a
     // slotSpan that does not cover it.
-    if (hadSlot && oldSlot < obj->slotSpan())
+    if (hadSlot && oldSlot < obj->slotSpan()) {
         obj->freeSlot(cx, oldSlot);
+    }
 
     MOZ_ASSERT(!shape->isDataProperty());
     return shape;
 }
 
 /* static */ Shape*
 NativeObject::changeProperty(JSContext* cx, HandleNativeObject obj, HandleShape shape,
                              unsigned attrs, GetterOp getter, SetterOp setter)
@@ -1103,45 +1178,49 @@ NativeObject::changeProperty(JSContext* 
     bool needSlot = Shape::isDataProperty(attrs, getter, setter);
     MOZ_ASSERT_IF(shape->isDataProperty() != needSlot, needSlot);
 #endif
 
     MarkTypePropertyNonData(cx, obj, shape->propid());
 
     AssertCanChangeAttrs(shape, attrs);
 
-    if (shape->attrs == attrs && shape->getter() == getter && shape->setter() == setter)
+    if (shape->attrs == attrs && shape->getter() == getter && shape->setter() == setter) {
         return shape;
+    }
 
     RootedId propid(cx, shape->propid());
     return putAccessorProperty(cx, obj, propid, getter, setter, attrs);
 }
 
 /* static */ bool
 NativeObject::removeProperty(JSContext* cx, HandleNativeObject obj, jsid id_)
 {
     RootedId id(cx, id_);
 
     AutoKeepShapeTables keep(cx);
     ShapeTable* table;
     ShapeTable::Entry* entry;
     RootedShape shape(cx);
-    if (!Shape::search(cx, obj->lastProperty(), id, keep, shape.address(), &table, &entry))
+    if (!Shape::search(cx, obj->lastProperty(), id, keep, shape.address(), &table, &entry)) {
         return false;
+    }
 
-    if (!shape)
+    if (!shape) {
         return true;
+    }
 
     /*
      * If shape is not the last property added, or the last property cannot
      * be removed, switch to dictionary mode.
      */
     if (!obj->inDictionaryMode() && (shape != obj->lastProperty() || !obj->canRemoveLastProperty())) {
-        if (!toDictionaryMode(cx, obj))
+        if (!toDictionaryMode(cx, obj)) {
             return false;
+        }
         table = obj->lastProperty()->maybeTable(keep);
         MOZ_ASSERT(table);
         entry = &table->search<MaybeAdding::NotAdding>(shape->propid(), keep);
         shape = entry->shape();
     }
 
     /*
      * If in dictionary mode, get a new shape for the last property after the
@@ -1149,38 +1228,41 @@ NativeObject::removeProperty(JSContext* 
      * the last property. Otherwise, a shape could replay and caches might
      * return deleted DictionaryShapes! See bug 595365. Do this before changing
      * the object or table, so the remaining removal is infallible.
      */
     RootedShape spare(cx);
     if (obj->inDictionaryMode()) {
         /* For simplicity, always allocate an accessor shape for now. */
         spare = Allocate<AccessorShape>(cx);
-        if (!spare)
+        if (!spare) {
             return false;
+        }
         new (spare) Shape(shape->base()->unowned(), 0);
         if (shape == obj->lastProperty()) {
             /*
              * Get an up to date unowned base shape for the new last property
              * when removing the dictionary's last property. Information in
              * base shapes for non-last properties may be out of sync with the
              * object's state.
              */
             RootedShape previous(cx, obj->lastProperty()->parent);
             StackBaseShape base(obj->lastProperty()->base());
             BaseShape* nbase = BaseShape::getUnowned(cx, base);
-            if (!nbase)
+            if (!nbase) {
                 return false;
+            }
             previous->base_ = nbase;
         }
     }
 
     /* If shape has a slot, free its slot number. */
-    if (shape->isDataProperty())
+    if (shape->isDataProperty()) {
         obj->freeSlot(cx, shape->slot());
+    }
 
     /*
      * A dictionary-mode object owns mutable, unique shapes on a non-circular
      * doubly linked list, hashed by lastProperty()->table. So we can edit the
      * list and hash in place.
      */
     if (obj->inDictionaryMode()) {
         MOZ_ASSERT(obj->lastProperty()->maybeTable(keep) == table);
@@ -1195,18 +1277,19 @@ NativeObject::removeProperty(JSContext* 
 
 #ifdef DEBUG
             /*
              * Check the consistency of the table, but limit the number of
              * checks so as not to significantly alter the complexity of the
              * delete in debug builds; see bug 534493.
              */
             Shape* aprop = obj->lastProperty();
-            for (int n = 50; --n >= 0 && aprop->parent; aprop = aprop->parent)
+            for (int n = 50; --n >= 0 && aprop->parent; aprop = aprop->parent) {
                 MOZ_ASSERT_IF(aprop != shape, obj->contains(cx, aprop));
+            }
 #endif
         }
 
         {
             /* Remove shape from its non-circular doubly linked list. */
             Shape* oldLastProp = obj->lastProperty();
             shape->removeFromDictionary(obj);
 
@@ -1214,18 +1297,19 @@ NativeObject::removeProperty(JSContext* 
             oldLastProp->handoffTableTo(obj->lastProperty());
         }
 
         /* Generate a new shape for the object, infallibly. */
         MOZ_ALWAYS_TRUE(NativeObject::generateOwnShape(cx, obj, spare));
 
         /* Consider shrinking table if its load factor is <= .25. */
         uint32_t size = table->capacity();
-        if (size > ShapeTable::MIN_SIZE && table->entryCount() <= size >> 2)
+        if (size > ShapeTable::MIN_SIZE && table->entryCount() <= size >> 2) {
             (void) table->change(cx, -1);
+        }
     } else {
         /*
          * Non-dictionary-mode shape tables are shared immutables, so all we
          * need to do is retract the last property; we will either get the
          * exact table for the new property lineage or lazily make it via a
          * later hashify.
          */
         MOZ_ASSERT(shape == obj->lastProperty());
@@ -1243,18 +1327,19 @@ NativeObject::clear(JSContext* cx, Handl
     MOZ_ASSERT(obj->inDictionaryMode() == shape->inDictionary());
 
     while (shape->parent) {
         shape = shape->parent;
         MOZ_ASSERT(obj->inDictionaryMode() == shape->inDictionary());
     }
     MOZ_ASSERT(shape->isEmptyShape());
 
-    if (obj->inDictionaryMode())
+    if (obj->inDictionaryMode()) {
         shape->listp = obj->shapePtr();
+    }
 
     MOZ_ALWAYS_TRUE(obj->setLastProperty(cx, shape));
 
     obj->checkShapeConsistency();
 }
 
 /* static */ bool
 NativeObject::rollbackProperties(JSContext* cx, HandleNativeObject obj, uint32_t slotSpan)
@@ -1266,142 +1351,157 @@ NativeObject::rollbackProperties(JSConte
      */
     MOZ_ASSERT(!obj->inDictionaryMode() && slotSpan <= obj->slotSpan());
     while (true) {
         if (obj->lastProperty()->isEmptyShape()) {
             MOZ_ASSERT(slotSpan == 0);
             break;
         } else {
             uint32_t slot = obj->lastProperty()->slot();
-            if (slot < slotSpan)
+            if (slot < slotSpan) {
                 break;
+            }
         }
-        if (!NativeObject::removeProperty(cx, obj, obj->lastProperty()->propid()))
+        if (!NativeObject::removeProperty(cx, obj, obj->lastProperty()->propid())) {
             return false;
+        }
     }
 
     return true;
 }
 
 /* static */ Shape*
 NativeObject::replaceWithNewEquivalentShape(JSContext* cx, HandleNativeObject obj,
                                             Shape* oldShape, Shape* newShape, bool accessorShape)
 {
     MOZ_ASSERT(cx->isInsideCurrentZone(oldShape));
     MOZ_ASSERT_IF(oldShape != obj->lastProperty(),
                   obj->inDictionaryMode() && obj->lookup(cx, oldShape->propidRef()) == oldShape);
 
     if (!obj->inDictionaryMode()) {
         RootedShape newRoot(cx, newShape);
-        if (!toDictionaryMode(cx, obj))
+        if (!toDictionaryMode(cx, obj)) {
             return nullptr;
+        }
         oldShape = obj->lastProperty();
         newShape = newRoot;
     }
 
     if (!newShape) {
         RootedShape oldRoot(cx, oldShape);
         newShape = (oldShape->isAccessorShape() || accessorShape)
                    ? Allocate<AccessorShape>(cx)
                    : Allocate<Shape>(cx);
-        if (!newShape)
+        if (!newShape) {
             return nullptr;
+        }
         new (newShape) Shape(oldRoot->base()->unowned(), 0);
         oldShape = oldRoot;
     }
 
     AutoCheckCannotGC nogc;
     ShapeTable* table = obj->lastProperty()->ensureTableForDictionary(cx, nogc);
-    if (!table)
+    if (!table) {
         return nullptr;
+    }
 
     ShapeTable::Entry* entry = oldShape->isEmptyShape()
         ? nullptr
         : &table->search<MaybeAdding::NotAdding>(oldShape->propidRef(), nogc);
 
     /*
      * Splice the new shape into the same position as the old shape, preserving
      * enumeration order (see bug 601399).
      */
     StackShape nshape(oldShape);
     newShape->initDictionaryShape(nshape, obj->numFixedSlots(), oldShape->listp);
 
     MOZ_ASSERT(newShape->parent == oldShape);
     oldShape->removeFromDictionary(obj);
 
-    if (newShape == obj->lastProperty())
+    if (newShape == obj->lastProperty()) {
         oldShape->handoffTableTo(newShape);
+    }
 
-    if (entry)
+    if (entry) {
         entry->setPreservingCollision(newShape);
+    }
     return newShape;
 }
 
 /* static */ bool
 JSObject::setFlags(JSContext* cx, HandleObject obj, BaseShape::Flag flags,
                    GenerateShape generateShape)
 {
-    if (obj->hasAllFlags(flags))
+    if (obj->hasAllFlags(flags)) {
         return true;
+    }
 
     Shape* existingShape = obj->ensureShape(cx);
-    if (!existingShape)
+    if (!existingShape) {
         return false;
+    }
 
     if (obj->isNative() && obj->as<NativeObject>().inDictionaryMode()) {
         if (generateShape == GENERATE_SHAPE) {
-            if (!NativeObject::generateOwnShape(cx, obj.as<NativeObject>()))
+            if (!NativeObject::generateOwnShape(cx, obj.as<NativeObject>())) {
                 return false;
+            }
         }
         StackBaseShape base(obj->as<NativeObject>().lastProperty());
         base.flags |= flags;
         UnownedBaseShape* nbase = BaseShape::getUnowned(cx, base);
-        if (!nbase)
+        if (!nbase) {
             return false;
+        }
 
         obj->as<NativeObject>().lastProperty()->base()->adoptUnowned(nbase);
         return true;
     }
 
     Shape* newShape = Shape::setObjectFlags(cx, flags, obj->taggedProto(), existingShape);
-    if (!newShape)
+    if (!newShape) {
         return false;
+    }
 
     // The success of the |JSObject::ensureShape| call above means that |obj|
     // can be assumed to have a shape.
     obj->as<ShapedObject>().setShape(newShape);
 
     return true;
 }
 
 /* static */ bool
 NativeObject::clearFlag(JSContext* cx, HandleNativeObject obj, BaseShape::Flag flag)
 {
     MOZ_ASSERT(obj->lastProperty()->getObjectFlags() & flag);
 
     if (!obj->inDictionaryMode()) {
-        if (!toDictionaryMode(cx, obj))
+        if (!toDictionaryMode(cx, obj)) {
             return false;
+        }
     }
 
     StackBaseShape base(obj->lastProperty());
     base.flags &= ~flag;
     UnownedBaseShape* nbase = BaseShape::getUnowned(cx, base);
-    if (!nbase)
+    if (!nbase) {
         return false;
+    }
 
     obj->lastProperty()->base()->adoptUnowned(nbase);
     return true;
 }
 
 /* static */ Shape*
 Shape::setObjectFlags(JSContext* cx, BaseShape::Flag flags, TaggedProto proto, Shape* last)
 {
-    if ((last->getObjectFlags() & flags) == flags)
+    if ((last->getObjectFlags() & flags) == flags) {
         return last;
+    }
 
     StackBaseShape base(last);
     base.flags |= flags;
 
     RootedShape lastRoot(cx, last);
     return replaceLastProperty(cx, base, proto, lastRoot);
 }
 
@@ -1440,29 +1540,32 @@ BaseShape::adoptUnowned(UnownedBaseShape
 }
 
 /* static */ UnownedBaseShape*
 BaseShape::getUnowned(JSContext* cx, StackBaseShape& base)
 {
     auto& table = cx->zone()->baseShapes();
 
     auto p = MakeDependentAddPtr(cx, table, base);
-    if (p)
+    if (p) {
         return *p;
+    }
 
     BaseShape* nbase_ = Allocate<BaseShape>(cx);
-    if (!nbase_)
+    if (!nbase_) {
         return nullptr;
+    }
 
     new (nbase_) BaseShape(base);
 
     UnownedBaseShape* nbase = static_cast<UnownedBaseShape*>(nbase_);
 
-    if (!p.add(cx, table, base, nbase))
+    if (!p.add(cx, table, base, nbase)) {
         return nullptr;
+    }
 
     return nbase;
 }
 
 void
 BaseShape::assertConsistency()
 {
 #ifdef DEBUG
@@ -1478,48 +1581,52 @@ BaseShape::traceChildren(JSTracer* trc)
 {
     traceChildrenSkipShapeTable(trc);
     traceShapeTable(trc);
 }
 
 void
 BaseShape::traceChildrenSkipShapeTable(JSTracer* trc)
 {
-    if (isOwned())
+    if (isOwned()) {
         TraceEdge(trc, &unowned_, "base");
+    }
 
     assertConsistency();
 }
 
 void
 BaseShape::traceShapeTable(JSTracer* trc)
 {
     AutoCheckCannotGC nogc;
-    if (ShapeTable* table = maybeTable(nogc))
+    if (ShapeTable* table = maybeTable(nogc)) {
         table->trace(trc);
+    }
 }
 
 #ifdef DEBUG
 bool
 BaseShape::canSkipMarkingShapeTable(Shape* lastShape)
 {
     // Check that every shape in the shape table will be marked by marking
     // |lastShape|.
 
     AutoCheckCannotGC nogc;
     ShapeTable* table = maybeTable(nogc);
-    if (!table)
+    if (!table) {
         return true;
+    }
 
     uint32_t count = 0;
     for (Shape::Range<NoGC> r(lastShape); !r.empty(); r.popFront()) {
         Shape* shape = &r.front();
         ShapeTable::Entry& entry = table->search<MaybeAdding::NotAdding>(shape->propid(), nogc);
-        if (entry.isLive())
+        if (entry.isLive()) {
             count++;
+        }
     }
 
     return count == table->entryCount();
 }
 #endif
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 
@@ -1569,18 +1676,19 @@ Zone::checkInitialShapesTableAfterMoving
      */
     for (auto r = initialShapes().all(); !r.empty(); r.popFront()) {
         InitialShapeEntry entry = r.front();
         JSProtoKey protoKey = entry.proto.key();
         TaggedProto proto = entry.proto.proto().unbarrieredGet();
         Shape* shape = entry.shape.unbarrieredGet();
 
         CheckGCThingAfterMovingGC(shape);
-        if (proto.isObject())
+        if (proto.isObject()) {
             CheckGCThingAfterMovingGC(proto.toObject());
+        }
 
         using Lookup = InitialShapeEntry::Lookup;
         Lookup lookup(shape->getObjectClass(),
                       Lookup::ShapeProto(protoKey, proto),
                       shape->numFixedSlots(),
                       shape->getObjectFlags());
         InitialShapeSet::Ptr ptr = initialShapes().lookup(lookup);
         MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
@@ -1613,18 +1721,19 @@ ShapeHasher::match(const Key k, const Lo
 {
     return k->matches(l);
 }
 
 static KidsHash*
 HashChildren(Shape* kid1, Shape* kid2)
 {
     auto hash = MakeUnique<KidsHash>();
-    if (!hash || !hash->reserve(2))
+    if (!hash || !hash->reserve(2)) {
         return nullptr;
+    }
 
     hash->putNewInfallible(StackShape(kid1), kid1);
     hash->putNewInfallible(StackShape(kid2), kid2);
     return hash.release();
 }
 
 bool
 PropertyTree::insertChild(JSContext* cx, Shape* parent, Shape* child)
@@ -1717,38 +1826,43 @@ PropertyTree::inlinedGetChild(JSContext*
      * defining closures that capture a constructor's environment as
      * getters or setters on the new object that is passed in as
      * |this| can significantly increase fan-out below the property
      * tree root -- see bug 335700 for details.
      */
     KidsPointer* kidp = &parent->kids;
     if (kidp->isShape()) {
         Shape* kid = kidp->toShape();
-        if (kid->matches(child))
+        if (kid->matches(child)) {
             existingShape = kid;
+        }
     } else if (kidp->isHash()) {
-        if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
+        if (KidsHash::Ptr p = kidp->toHash()->lookup(child)) {
             existingShape = *p;
+        }
     } else {
         /* If kidp->isNull(), we always insert. */
     }
 
     if (existingShape) {
         existingShape = PropertyTreeReadBarrier(parent, existingShape);
-        if (existingShape)
+        if (existingShape) {
             return existingShape;
+        }
     }
 
     RootedShape parentRoot(cx, parent);
     Shape* shape = Shape::new_(cx, child, parentRoot->numFixedSlots());
-    if (!shape)
+    if (!shape) {
         return nullptr;
+    }
 
-    if (!insertChild(cx, parentRoot, shape))
+    if (!insertChild(cx, parentRoot, shape)) {
         return nullptr;
+    }
 
     return shape;
 }
 
 Shape*
 PropertyTree::getChild(JSContext* cx, Shape* parent, Handle<StackShape> child)
 {
     return inlinedGetChild(cx, parent, child);
@@ -1763,36 +1877,39 @@ Shape::sweep()
      * This test depends on shape arenas not being freed until after we finish
      * incrementally sweeping them. If that were not the case the parent pointer
      * could point to a marked cell that had been deallocated and then
      * reallocated, since allocating a cell in a zone that is being marked will
      * set the mark bit for that cell.
      */
     if (parent && parent->isMarkedAny()) {
         if (inDictionary()) {
-            if (parent->listp == &parent)
+            if (parent->listp == &parent) {
                 parent->listp = nullptr;
+            }
         } else {
             parent->removeChild(this);
         }
     }
 }
 
 void
 Shape::finalize(FreeOp* fop)
 {
-    if (!inDictionary() && kids.isHash())
+    if (!inDictionary() && kids.isHash()) {
         fop->delete_(kids.toHash());
+    }
 }
 
 void
 Shape::fixupDictionaryShapeAfterMovingGC()
 {
-    if (!listp)
+    if (!listp) {
         return;
+    }
 
     // The listp field points either to the parent field of the next shape in
     // the list, if there is one, or, if this shape is the last in the list,
     // to the shape_ field of the object the list is for.
     // We can tell which it is because the base shape is owned if this is the
     // last property and not otherwise.
     bool listpPointsIntoShape = !MaybeForwarded(base())->isOwned();
 
@@ -1805,106 +1922,122 @@ Shape::fixupDictionaryShapeAfterMovingGC
     gc::AllocKind kind = TenuredCell::fromPointer(cell)->getAllocKind();
     MOZ_ASSERT_IF(listpPointsIntoShape, IsShapeAllocKind(kind));
     MOZ_ASSERT_IF(!listpPointsIntoShape, IsObjectAllocKind(kind));
 #endif
 
     if (listpPointsIntoShape) {
         // listp points to the parent field of the next shape.
         Shape* next = Shape::fromParentFieldPointer(uintptr_t(listp));
-        if (gc::IsForwarded(next))
+        if (gc::IsForwarded(next)) {
             listp = &gc::Forwarded(next)->parent;
+        }
     } else {
         // listp points to the shape_ field of an object.
         JSObject* last = ShapedObject::fromShapeFieldPointer(uintptr_t(listp));
-        if (gc::IsForwarded(last))
+        if (gc::IsForwarded(last)) {
             listp = gc::Forwarded(last)->as<NativeObject>().shapePtr();
+        }
     }
 }
 
 void
 Shape::fixupShapeTreeAfterMovingGC()
 {
-    if (kids.isNull())
+    if (kids.isNull()) {
         return;
+    }
 
     if (kids.isShape()) {
-        if (gc::IsForwarded(kids.toShape()))
+        if (gc::IsForwarded(kids.toShape())) {
             kids.setShape(gc::Forwarded(kids.toShape()));
+        }
         return;
     }
 
     MOZ_ASSERT(kids.isHash());
     KidsHash* kh = kids.toHash();
     for (KidsHash::Enum e(*kh); !e.empty(); e.popFront()) {
         Shape* key = e.front();
-        if (IsForwarded(key))
+        if (IsForwarded(key)) {
             key = Forwarded(key);
+        }
 
         BaseShape* base = key->base();
-        if (IsForwarded(base))
+        if (IsForwarded(base)) {
             base = Forwarded(base);
+        }
         UnownedBaseShape* unowned = base->unowned();
-        if (IsForwarded(unowned))
+        if (IsForwarded(unowned)) {
             unowned = Forwarded(unowned);
+        }
 
         GetterOp getter = key->getter();
-        if (key->hasGetterObject())
+        if (key->hasGetterObject()) {
             getter = GetterOp(MaybeForwarded(key->getterObject()));
+        }
 
         SetterOp setter = key->setter();
-        if (key->hasSetterObject())
+        if (key->hasSetterObject()) {
             setter = SetterOp(MaybeForwarded(key->setterObject()));
+        }
 
         StackShape lookup(unowned,
                           const_cast<Shape*>(key)->propidRef(),
                           key->immutableFlags & Shape::SLOT_MASK,
                           key->attrs);
         lookup.updateGetterSetter(getter, setter);
         e.rekeyFront(lookup, key);
     }
 }
 
 void
 Shape::fixupAfterMovingGC()
 {
-    if (inDictionary())
+    if (inDictionary()) {
         fixupDictionaryShapeAfterMovingGC();
-    else
+    } else {
         fixupShapeTreeAfterMovingGC();
+    }
 }
 
 void
 NurseryShapesRef::trace(JSTracer* trc)
 {
     auto& shapes = zone_->nurseryShapes();
-    for (auto shape : shapes)
+    for (auto shape : shapes) {
         shape->fixupGetterSetterForBarrier(trc);
+    }
     shapes.clearAndFree();
 }
 
 void
 Shape::fixupGetterSetterForBarrier(JSTracer* trc)
 {
-    if (!hasGetterValue() && !hasSetterValue())
+    if (!hasGetterValue() && !hasSetterValue()) {
         return;
+    }
 
     JSObject* priorGetter = asAccessorShape().getterObj;
     JSObject* priorSetter = asAccessorShape().setterObj;
-    if (!priorGetter && !priorSetter)
+    if (!priorGetter && !priorSetter) {
         return;
+    }
 
     JSObject* postGetter = priorGetter;
     JSObject* postSetter = priorSetter;
-    if (priorGetter)
+    if (priorGetter) {
         TraceManuallyBarrieredEdge(trc, &postGetter, "getterObj");
-    if (priorSetter)
+    }
+    if (priorSetter) {
         TraceManuallyBarrieredEdge(trc, &postSetter, "setterObj");
-    if (priorGetter == postGetter && priorSetter == postSetter)
+    }
+    if (priorGetter == postGetter && priorSetter == postSetter) {
         return;
+    }
 
     if (parent && !parent->inDictionary() && parent->kids.isHash()) {
         // Relocating the getterObj or setterObj will have changed our location
         // in our parent's KidsHash, so take care to update it.  We must do this
         // before we update the shape itself, since the shape is used to match
         // the original entry in the hash set.
 
         StackShape original(this);
@@ -1943,20 +2076,21 @@ Shape::dump(js::GenericPrinter& out) con
 {
     jsid propid = this->propid();
 
     MOZ_ASSERT(!JSID_IS_VOID(propid));
 
     if (JSID_IS_INT(propid)) {
         out.printf("[%ld]", (long) JSID_TO_INT(propid));
     } else if (JSID_IS_ATOM(propid)) {
-        if (JSLinearString* str = JSID_TO_ATOM(propid))
+        if (JSLinearString* str = JSID_TO_ATOM(propid)) {
             EscapedStringPrinter(out, str, '"');
-        else
+        } else {
             out.put("<error>");
+        }
     } else {
         MOZ_ASSERT(JSID_IS_SYMBOL(propid));
         JSID_TO_SYMBOL(propid)->dump(out);
     }
 
     out.printf(" g/s %p/%p slot %d attrs %x ",
                JS_FUNC_TO_DATA_PTR(void*, getter()),
                JS_FUNC_TO_DATA_PTR(void*, setter()),
@@ -2023,76 +2157,83 @@ Shape::dumpSubtree(int level, js::Generi
     }
 }
 
 #endif
 
 static bool
 IsOriginalProto(GlobalObject* global, JSProtoKey key, NativeObject& proto)
 {
-    if (global->getPrototype(key) != ObjectValue(proto))
+    if (global->getPrototype(key) != ObjectValue(proto)) {
         return false;
+    }
 
     MOZ_ASSERT(&proto.global() == global);
 
     if (key == JSProto_Object) {
         MOZ_ASSERT(proto.staticPrototypeIsImmutable(),
                    "proto should be Object.prototype, whose prototype is "
                    "immutable");
         MOZ_ASSERT(proto.staticPrototype() == nullptr,
                    "Object.prototype must have null prototype");
         return true;
     }
 
     // Check that other prototypes still have Object.prototype as proto.
     JSObject* protoProto = proto.staticPrototype();
-    if (!protoProto || global->getPrototype(JSProto_Object) != ObjectValue(*protoProto))
+    if (!protoProto || global->getPrototype(JSProto_Object) != ObjectValue(*protoProto)) {
         return false;
+    }
 
     MOZ_ASSERT(protoProto->staticPrototypeIsImmutable(),
                "protoProto should be Object.prototype, whose prototype is "
                "immutable");
     MOZ_ASSERT(protoProto->staticPrototype() == nullptr,
                "Object.prototype must have null prototype");
     return true;
 }
 
 static JSProtoKey
 GetInitialShapeProtoKey(TaggedProto proto, JSContext* cx)
 {
     if (proto.isObject() && proto.toObject()->isNative()) {
         GlobalObject* global = cx->global();
         NativeObject& obj = proto.toObject()->as<NativeObject>();
 
-        if (IsOriginalProto(global, JSProto_Object, obj))
+        if (IsOriginalProto(global, JSProto_Object, obj)) {
             return JSProto_Object;
-        if (IsOriginalProto(global, JSProto_Function, obj))
+        }
+        if (IsOriginalProto(global, JSProto_Function, obj)) {
             return JSProto_Function;
-        if (IsOriginalProto(global, JSProto_Array, obj))
+        }
+        if (IsOriginalProto(global, JSProto_Array, obj)) {
             return JSProto_Array;
-        if (IsOriginalProto(global, JSProto_RegExp, obj))
+        }
+        if (IsOriginalProto(global, JSProto_RegExp, obj)) {
             return JSProto_RegExp;
+        }
     }
     return JSProto_LIMIT;
 }
 
 /* static */ Shape*
 EmptyShape::getInitialShape(JSContext* cx, const Class* clasp, TaggedProto proto,
                             size_t nfixed, uint32_t objectFlags)
 {
     MOZ_ASSERT_IF(proto.isObject(), cx->isInsideCurrentCompartment(proto.toObject()));
 
     auto& table = cx->zone()->initialShapes();
 
     using Lookup = InitialShapeEntry::Lookup;
     auto protoPointer = MakeDependentAddPtr(cx, table,
                                             Lookup(clasp, Lookup::ShapeProto(proto),
                                                    nfixed, objectFlags));
-    if (protoPointer)
+    if (protoPointer) {
         return protoPointer->shape;
+    }
 
     // No entry for this proto. If the proto is one of a few common builtin
     // prototypes, try to do a lookup based on the JSProtoKey, so we can share
     // shapes across globals.
     Rooted<TaggedProto> protoRoot(cx, proto);
     Shape* shape = nullptr;
     bool insertKey = false;
     mozilla::Maybe<DependentAddPtr<InitialShapeSet>> keyPointer;
@@ -2108,35 +2249,39 @@ EmptyShape::getInitialShape(JSContext* c
         } else {
             insertKey = true;
         }
     }
 
     if (!shape) {
         StackBaseShape base(clasp, objectFlags);
         Rooted<UnownedBaseShape*> nbase(cx, BaseShape::getUnowned(cx, base));
-        if (!nbase)
+        if (!nbase) {
             return nullptr;
+        }
 
         shape = EmptyShape::new_(cx, nbase, nfixed);
-        if (!shape)
+        if (!shape) {
             return nullptr;
+        }
     }
 
     Lookup::ShapeProto shapeProto(protoRoot);
     Lookup lookup(clasp, shapeProto, nfixed, objectFlags);
-    if (!protoPointer.add(cx, table, lookup, InitialShapeEntry(shape, shapeProto)))
+    if (!protoPointer.add(cx, table, lookup, InitialShapeEntry(shape, shapeProto))) {
         return nullptr;
+    }
 
     // Also add an entry based on the JSProtoKey, if needed.
     if (insertKey) {
         Lookup::ShapeProto shapeProto(key);
         Lookup lookup(clasp, shapeProto, nfixed, objectFlags);
-        if (!keyPointer->add(cx, table, lookup, InitialShapeEntry(shape, shapeProto)))
+        if (!keyPointer->add(cx, table, lookup, InitialShapeEntry(shape, shapeProto))) {
             return nullptr;
+        }
     }
 
     return shape;
 }
 
 /* static */ Shape*
 EmptyShape::getInitialShape(JSContext* cx, const Class* clasp, TaggedProto proto,
                             gc::AllocKind kind, uint32_t objectFlags)
@@ -2145,92 +2290,100 @@ EmptyShape::getInitialShape(JSContext* c
 }
 
 void
 NewObjectCache::invalidateEntriesForShape(JSContext* cx, HandleShape shape, HandleObject proto)
 {
     const Class* clasp = shape->getObjectClass();
 
     gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
-    if (CanBeFinalizedInBackground(kind, clasp))
+    if (CanBeFinalizedInBackground(kind, clasp)) {
         kind = GetBackgroundAllocKind(kind);
+    }
 
     RootedObjectGroup group(cx, ObjectGroup::defaultNewGroup(cx, clasp, TaggedProto(proto)));
     if (!group) {
         purge();
         cx->recoverFromOutOfMemory();
         return;
     }
 
     EntryIndex entry;
     for (RealmsInZoneIter realm(shape->zone()); !realm.done(); realm.next()) {
         if (GlobalObject* global = realm->unsafeUnbarrieredMaybeGlobal()) {
-            if (lookupGlobal(clasp, global, kind, &entry))
+            if (lookupGlobal(clasp, global, kind, &entry)) {
                 PodZero(&entries[entry]);
+            }
         }
     }
-    if (!proto->is<GlobalObject>() && lookupProto(clasp, proto, kind, &entry))
+    if (!proto->is<GlobalObject>() && lookupProto(clasp, proto, kind, &entry)) {
         PodZero(&entries[entry]);
-    if (lookupGroup(group, kind, &entry))
+    }
+    if (lookupGroup(group, kind, &entry)) {
         PodZero(&entries[entry]);
+    }
 }
 
 /* static */ void
 EmptyShape::insertInitialShape(JSContext* cx, HandleShape shape, HandleObject proto)
 {
     using Lookup = InitialShapeEntry::Lookup;
     Lookup lookup(shape->getObjectClass(), Lookup::ShapeProto(TaggedProto(proto)),
                   shape->numFixedSlots(), shape->getObjectFlags());
 
     InitialShapeSet::Ptr p = cx->zone()->initialShapes().lookup(lookup);
     MOZ_ASSERT(p);
 
     InitialShapeEntry& entry = const_cast<InitialShapeEntry&>(*p);
 
     // The metadata callback can end up causing redundant changes of the initial shape.
-    if (entry.shape == shape)
+    if (entry.shape == shape) {
         return;
+    }
 
     // The new shape had better be rooted at the old one.
 #ifdef DEBUG
     Shape* nshape = shape;
-    while (!nshape->isEmptyShape())
+    while (!nshape->isEmptyShape()) {
         nshape = nshape->previous();
+    }
     MOZ_ASSERT(nshape == entry.shape);
 #endif
 
     entry.shape = ReadBarrieredShape(shape);
 
     // For certain prototypes -- namely, those of various builtin classes,
     // keyed by JSProtoKey |key| -- there are two entries: one for a lookup
     // via |proto|, and one for a lookup via |key|.  If this is such a
     // prototype, also update the alternate |key|-keyed shape.
     JSProtoKey key = GetInitialShapeProtoKey(TaggedProto(proto), cx);
     if (key != JSProto_LIMIT) {
         Lookup lookup(shape->getObjectClass(), Lookup::ShapeProto(key),
                       shape->numFixedSlots(), shape->getObjectFlags());
         if (InitialShapeSet::Ptr p = cx->zone()->initialShapes().lookup(lookup)) {
             InitialShapeEntry& entry = const_cast<InitialShapeEntry&>(*p);
-            if (entry.shape != shape)
+            if (entry.shape != shape) {
                 entry.shape = ReadBarrieredShape(shape);
+            }
         }
     }
 
     /*
      * This affects the shape that will be produced by the various NewObject
      * methods, so clear any cache entry referring to the old shape. This is
      * not required for correctness: the NewObject must always check for a
      * nativeEmpty() result and generate the appropriate properties if found.
      * Clearing the cache entry avoids this duplicate regeneration.
      *
      * Clearing is not necessary when this context is running off
      * thread, as it will not use the new object cache for allocations.
      */
-    if (!cx->helperThread())
+    if (!cx->helperThread()) {
         cx->caches().newObjectCache.invalidateEntriesForShape(cx, shape, proto);
+    }
 }
 
 void
 Zone::fixupInitialShapeTable()
 {
     for (InitialShapeSet::Enum e(initialShapes()); !e.empty(); e.popFront()) {
         // The shape may have been moved, but we can update that in place.
         Shape* shape = e.front().shape.unbarrieredGet();
@@ -2252,41 +2405,46 @@ Zone::fixupInitialShapeTable()
             e.rekeyFront(relookup, entry);
         }
     }
 }
 
 void
 AutoRooterGetterSetter::Inner::trace(JSTracer* trc)
 {
-    if ((attrs & JSPROP_GETTER) && *pgetter)
+    if ((attrs & JSPROP_GETTER) && *pgetter) {
         TraceRoot(trc, (JSObject**) pgetter, "AutoRooterGetterSetter getter");
-    if ((attrs & JSPROP_SETTER) && *psetter)
+    }
+    if ((attrs & JSPROP_SETTER) && *psetter) {
         TraceRoot(trc, (JSObject**) psetter, "AutoRooterGetterSetter setter");
+    }
 }
 
 JS::ubi::Node::Size
 JS::ubi::Concrete<js::Shape>::size(mozilla::MallocSizeOf mallocSizeOf) const
 {
     Size size = js::gc::Arena::thingSize(get().asTenured().getAllocKind());
 
     AutoCheckCannotGC nogc;
-    if (ShapeTable* table = get().maybeTable(nogc))
+    if (ShapeTable* table = get().maybeTable(nogc)) {
         size += table->sizeOfIncludingThis(mallocSizeOf);
+    }
 
-    if (!get().inDictionary() && get().kids.isHash())
+    if (!get().inDictionary() && get().kids.isHash()) {
         size += get().kids.toHash()->shallowSizeOfIncludingThis(mallocSizeOf);
+    }
 
     return size;
 }
 
 JS::ubi::Node::Size
 JS::ubi::Concrete<js::BaseShape>::size(mozilla::MallocSizeOf mallocSizeOf) const
 {
     return js::gc::Arena::thingSize(get().asTenured().getAllocKind());
 }
 
 void
 PropertyResult::trace(JSTracer* trc)
 {
-    if (isNativeProperty())
+    if (isNativeProperty()) {
         TraceRoot(trc, &shape_, "PropertyResult::shape_");
+    }
 }
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -651,20 +651,22 @@ struct StackBaseShape : public DefaultHa
     }
 };
 
 static MOZ_ALWAYS_INLINE js::HashNumber
 HashId(jsid id)
 {
     // HashGeneric alone would work, but bits of atom and symbol addresses
     // could then be recovered from the hash code. See bug 1330769.
-    if (MOZ_LIKELY(JSID_IS_ATOM(id)))
+    if (MOZ_LIKELY(JSID_IS_ATOM(id))) {
         return JSID_TO_ATOM(id)->hash();
-    if (JSID_IS_SYMBOL(id))
+    }
+    if (JSID_IS_SYMBOL(id)) {
         return JSID_TO_SYMBOL(id)->hash();
+    }
     return mozilla::HashGeneric(JSID_BITS(id));
 }
 
 } // namespace js
 
 namespace mozilla {
 
 template <>
@@ -794,18 +796,19 @@ class Shape : public gc::TenuredCell
         MOZ_ASSERT_IF(p && !p->hasMissingSlot() && !inDictionary(),
                       p->maybeSlot() <= maybeSlot());
         MOZ_ASSERT_IF(p && !inDictionary(),
                       isDataProperty() == (p->maybeSlot() != maybeSlot()));
         parent = p;
     }
 
     bool ensureOwnBaseShape(JSContext* cx) {
-        if (base()->isOwned())
+        if (base()->isOwned()) {
             return true;
+        }
         return makeOwnBaseShape(cx);
     }
 
     bool makeOwnBaseShape(JSContext* cx);
 
     MOZ_ALWAYS_INLINE MOZ_MUST_USE bool maybeCreateTableForLookup(JSContext* cx);
 
     MOZ_ALWAYS_INLINE void updateDictionaryTable(ShapeTable* table, ShapeTable::Entry* entry,
@@ -819,39 +822,43 @@ class Shape : public gc::TenuredCell
     }
     ShapeTable* maybeTable(const JS::AutoCheckCannotGC& check) const {
         return base()->maybeTable(check);
     }
 
     template <typename T>
     MOZ_MUST_USE ShapeTable* ensureTableForDictionary(JSContext* cx, const T& nogc) {
         MOZ_ASSERT(inDictionary());
-        if (ShapeTable* table = maybeTable(nogc))
+        if (ShapeTable* table = maybeTable(nogc)) {
             return table;
-        if (!hashify(cx, this))
+        }
+        if (!hashify(cx, this)) {
             return nullptr;
+        }
         ShapeTable* table = maybeTable(nogc);
         MOZ_ASSERT(table);
         return table;
     }
 
     void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                 JS::ShapeInfo* info) const
     {
         JS::AutoCheckCannotGC nogc;
         if (ShapeTable* table = maybeTable(nogc)) {
-            if (inDictionary())
+            if (inDictionary()) {
                 info->shapesMallocHeapDictTables += table->sizeOfIncludingThis(mallocSizeOf);
-            else
+            } else {
                 info->shapesMallocHeapTreeTables += table->sizeOfIncludingThis(mallocSizeOf);
+            }
         }
 
-        if (!inDictionary() && kids.isHash())
+        if (!inDictionary() && kids.isHash()) {
             info->shapesMallocHeapTreeKids +=
                 kids.toHash()->shallowSizeOfIncludingThis(mallocSizeOf);
+        }
     }
 
     bool isAccessorShape() const {
         MOZ_ASSERT_IF(immutableFlags & ACCESSOR_SHAPE,
                       getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE);
         return immutableFlags & ACCESSOR_SHAPE;
     }
     AccessorShape& asAccessorShape() const {
@@ -937,36 +944,38 @@ class Shape : public gc::TenuredCell
     bool hasDefaultGetter() const { return !getter(); }
     GetterOp getterOp() const { MOZ_ASSERT(!hasGetterValue()); return getter(); }
     inline JSObject* getterObject() const;
     bool hasGetterObject() const { return hasGetterValue() && getterObject(); }
 
     // Per ES5, decode null getterObj as the undefined value, which encodes as null.
     Value getterValue() const {
         MOZ_ASSERT(hasGetterValue());
-        if (JSObject* getterObj = getterObject())
+        if (JSObject* getterObj = getterObject()) {
             return ObjectValue(*getterObj);
+        }
         return UndefinedValue();
     }
 
     Value getterOrUndefined() const {
         return hasGetterValue() ? getterValue() : UndefinedValue();
     }
 
     inline SetterOp setter() const;
     bool hasDefaultSetter() const { return !setter(); }
     SetterOp setterOp() const { MOZ_ASSERT(!hasSetterValue()); return setter(); }
     inline JSObject* setterObject() const;
     bool hasSetterObject() const { return hasSetterValue() && setterObject(); }
 
     // Per ES5, decode null setterObj as the undefined value, which encodes as null.
     Value setterValue() const {
         MOZ_ASSERT(hasSetterValue());
-        if (JSObject* setterObj = setterObject())
+        if (JSObject* setterObj = setterObject()) {
             return ObjectValue(*setterObj);
+        }
         return UndefinedValue();
     }
 
     Value setterOrUndefined() const {
         return hasSetterValue() ? setterValue() : UndefinedValue();
     }
 
     void setOverwritten() {
@@ -1076,31 +1085,34 @@ class Shape : public gc::TenuredCell
         return (attrs & (JSPROP_SETTER | JSPROP_GETTER)) == 0;
     }
     bool isAccessorDescriptor() const {
         return (attrs & (JSPROP_SETTER | JSPROP_GETTER)) != 0;
     }
 
     uint32_t entryCount() {
         JS::AutoCheckCannotGC nogc;
-        if (ShapeTable* table = maybeTable(nogc))
+        if (ShapeTable* table = maybeTable(nogc)) {
             return table->entryCount();
+        }
         uint32_t count = 0;
-        for (Shape::Range<NoGC> r(this); !r.empty(); r.popFront())
+        for (Shape::Range<NoGC> r(this); !r.empty(); r.popFront()) {
             ++count;
+        }
         return count;
     }
 
   private:
     bool isBigEnoughForAShapeTableSlow() {
         uint32_t count = 0;
         for (Shape::Range<NoGC> r(this); !r.empty(); r.popFront()) {
             ++count;
-            if (count >= ShapeTable::MIN_ENTRIES)
+            if (count >= ShapeTable::MIN_ENTRIES) {
                 return true;
+            }
         }
         return false;
     }
     void clearCachedBigEnoughForShapeTable() {
         mutableFlags &= ~(HAS_CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE | CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE);
     }
 
   public:
@@ -1114,18 +1126,19 @@ class Shape : public gc::TenuredCell
             bool res = mutableFlags & CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE;
             MOZ_ASSERT(res == isBigEnoughForAShapeTableSlow());
             return res;
         }
 
         MOZ_ASSERT(!(mutableFlags & CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE));
 
         bool res = isBigEnoughForAShapeTableSlow();
-        if (res)
+        if (res) {
             mutableFlags |= CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE;
+        }
         mutableFlags |= HAS_CACHED_BIG_ENOUGH_FOR_SHAPE_TABLE;
         return res;
     }
 
 #ifdef DEBUG
     void dump(js::GenericPrinter& out) const;
     void dump() const;
     void dumpSubtree(int level, js::GenericPrinter& out) const;
@@ -1447,20 +1460,21 @@ struct StackShape
         rawGetter(shape->getter()),
         rawSetter(shape->setter()),
         immutableFlags(shape->immutableFlags),
         attrs(shape->attrs),
         mutableFlags(shape->mutableFlags)
     {}
 
     void updateGetterSetter(GetterOp rawGetter, SetterOp rawSetter) {
-        if (rawGetter || rawSetter || (attrs & (JSPROP_GETTER|JSPROP_SETTER)))
+        if (rawGetter || rawSetter || (attrs & (JSPROP_GETTER|JSPROP_SETTER))) {
             immutableFlags |= Shape::ACCESSOR_SHAPE;
-        else
+        } else {
             immutableFlags &= ~Shape::ACCESSOR_SHAPE;
+        }
 
         this->rawGetter = rawGetter;
         this->rawSetter = rawSetter;
     }
 
     bool isDataProperty() const {
         MOZ_ASSERT(!JSID_IS_EMPTY(propid));
         return Shape::isDataProperty(attrs, rawGetter, rawSetter);
@@ -1597,18 +1611,19 @@ Shape::setterObject() const
     MOZ_ASSERT(hasSetterValue());
     return asAccessorShape().setterObj;
 }
 
 inline Shape*
 Shape::searchLinear(jsid id)
 {
     for (Shape* shape = this; shape; ) {
-        if (shape->propidRef() == id)
+        if (shape->propidRef() == id) {
             return shape;
+        }
         shape = shape->parent;
     }
 
     return nullptr;
 }
 
 inline bool
 Shape::matches(const StackShape& other) const
--- a/js/src/vm/SharedArrayObject.cpp
+++ b/js/src/vm/SharedArrayObject.cpp
@@ -47,33 +47,36 @@ SharedArrayAccessibleSize(uint32_t lengt
 SharedArrayRawBuffer*
 SharedArrayRawBuffer::Allocate(uint32_t length, const Maybe<uint32_t>& max)
 {
     MOZ_RELEASE_ASSERT(length <= ArrayBufferObject::MaxBufferByteLength);
 
     bool preparedForWasm = max.isSome();
 
     uint32_t accessibleSize = SharedArrayAccessibleSize(length);
-    if (accessibleSize < length)
+    if (accessibleSize < length) {
         return nullptr;
+    }
 
     uint32_t maxSize = max.isSome() ? *max : accessibleSize;
 
     size_t mappedSize;
-    if (preparedForWasm)
+    if (preparedForWasm) {
         mappedSize = SharedArrayMappedSizeForWasm(maxSize);
-    else
+    } else {
         mappedSize = accessibleSize;
+    }
 
     uint64_t mappedSizeWithHeader = mappedSize + gc::SystemPageSize();
     uint64_t accessibleSizeWithHeader = accessibleSize + gc::SystemPageSize();
 
     void* p = MapBufferMemory(mappedSizeWithHeader, accessibleSizeWithHeader);
-    if (!p)
+    if (!p) {
         return nullptr;
+    }
 
     uint8_t* buffer = reinterpret_cast<uint8_t*>(p) + gc::SystemPageSize();
     uint8_t* base = buffer - sizeof(SharedArrayRawBuffer);
     SharedArrayRawBuffer* rawbuf = new (base) SharedArrayRawBuffer(buffer,
                                                                    length,
                                                                    maxSize,
                                                                    mappedSize,
                                                                    preparedForWasm);
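
Allocate() above maps one extra system page in front of the accessible data and constructs the SharedArrayRawBuffer header at the tail of that page with placement new, so a single mapping carries both header and buffer and the final dropReference() can unmap everything starting at basePointer(). A simplified sketch of the same layout arithmetic, using plain malloc/free in place of MapBufferMemory/UnmapBufferMemory and a hypothetical Header type:

    #include <cstdint>
    #include <cstdlib>
    #include <new>

    struct Header {
        static constexpr size_t kPageSize = 4096;  // stand-in for gc::SystemPageSize()
        uint8_t* data;
        uint32_t length;
        Header(uint8_t* data, uint32_t length) : data(data), length(length) {}
        uint8_t* basePointer() { return data - kPageSize; }  // start of the whole block
    };

    Header* AllocateWithHeaderPage(uint32_t length) {
        // One leading "header page" followed by the user-visible data.
        uint8_t* p = static_cast<uint8_t*>(malloc(Header::kPageSize + length));
        if (!p) {
            return nullptr;
        }
        uint8_t* data = p + Header::kPageSize;  // data begins after the page
        uint8_t* base = data - sizeof(Header);  // header sits at the end of that page
        return new (base) Header(data, length);
    }

    void Release(Header* h) {
        uint8_t* base = h->basePointer();
        h->~Header();  // trivial here, but mirrors the teardown order
        free(base);    // analogous to UnmapBufferMemory(basePointer(), ...)
    }
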
@@ -87,83 +90,91 @@ SharedArrayRawBuffer::tryGrowMaxSizeInPl
 {
     CheckedInt<uint32_t> newMaxSize = maxSize_;
     newMaxSize += deltaMaxSize;
     MOZ_ASSERT(newMaxSize.isValid());
     MOZ_ASSERT(newMaxSize.value() % wasm::PageSize == 0);
 
     size_t newMappedSize = SharedArrayMappedSizeForWasm(newMaxSize.value());
     MOZ_ASSERT(mappedSize_ <= newMappedSize);
-    if (mappedSize_ == newMappedSize)
+    if (mappedSize_ == newMappedSize) {
         return;
+    }
 
-    if (!ExtendBufferMapping(basePointer(), mappedSize_, newMappedSize))
+    if (!ExtendBufferMapping(basePointer(), mappedSize_, newMappedSize)) {
         return;
+    }
 
     mappedSize_ = newMappedSize;
     maxSize_ = newMaxSize.value();
 }
 #endif
 
 bool
 SharedArrayRawBuffer::wasmGrowToSizeInPlace(const Lock&, uint32_t newLength)
 {
-    if (newLength > ArrayBufferObject::MaxBufferByteLength)
+    if (newLength > ArrayBufferObject::MaxBufferByteLength) {
         return false;
+    }
 
     MOZ_ASSERT(newLength >= length_);
 
-    if (newLength == length_)
+    if (newLength == length_) {
         return true;
+    }
 
     uint32_t delta = newLength - length_;
     MOZ_ASSERT(delta % wasm::PageSize == 0);
 
     uint8_t* dataEnd = dataPointerShared().unwrap(/* for resize */) + length_;
     MOZ_ASSERT(uintptr_t(dataEnd) % gc::SystemPageSize() == 0);
 
     // The ordering of committing memory and changing length does not matter
     // since all clients take the lock.
 
-    if (!CommitBufferMemory(dataEnd, delta))
+    if (!CommitBufferMemory(dataEnd, delta)) {
         return false;
+    }
 
     length_ = newLength;
 
     return true;
 }
 
 bool
 SharedArrayRawBuffer::addReference()
 {
     MOZ_RELEASE_ASSERT(refcount_ > 0);
 
     // Be careful never to overflow the refcount field.
     for (;;) {
         uint32_t old_refcount = refcount_;
         uint32_t new_refcount = old_refcount + 1;
-        if (new_refcount == 0)
+        if (new_refcount == 0) {
             return false;
-        if (refcount_.compareExchange(old_refcount, new_refcount))
+        }
+        if (refcount_.compareExchange(old_refcount, new_refcount)) {
             return true;
+        }
     }
 }
 
 void
 SharedArrayRawBuffer::dropReference()
 {
     // Normally if the refcount is zero then the memory will have been unmapped
     // and this test may just crash, but if the memory has been retained for any
     // reason we will catch the underflow here.
     MOZ_RELEASE_ASSERT(refcount_ > 0);
 
     // Drop the reference to the buffer.
     uint32_t new_refcount = --refcount_; // Atomic.
-    if (new_refcount)
+    if (new_refcount) {
         return;
+    }
 
     size_t mappedSizeWithHeader = mappedSize_ + gc::SystemPageSize();
 
     // This was the final reference, so release the buffer.
     UnmapBufferMemory(basePointer(), mappedSizeWithHeader);
 }
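
addReference() above refuses to let the 32-bit refcount wrap: it recomputes old + 1 and retries a compare-exchange until it either detects saturation (a new count of 0) or wins the race; dropReference() then unmaps the buffer once the count reaches zero. A standalone sketch of the same increment loop with std::atomic, omitting the unmapping (hypothetical type):

    #include <atomic>
    #include <cstdint>

    class RefCounted {
        std::atomic<uint32_t> refcount_{1};

      public:
        // Returns false rather than letting the count overflow to zero.
        bool addReference() {
            uint32_t old_count = refcount_.load();
            for (;;) {
                uint32_t new_count = old_count + 1;
                if (new_count == 0) {
                    return false;  // counter is saturated
                }
                // On failure, compare_exchange_weak reloads old_count and we retry.
                if (refcount_.compare_exchange_weak(old_count, new_count)) {
                    return true;
                }
            }
        }

        // Returns true when this call dropped the final reference.
        bool dropReference() {
            return --refcount_ == 0;
        }
    };
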
 
 
@@ -185,51 +196,56 @@ SharedArrayBufferObject::byteLengthGette
 // ES2017 draft rev 6390c2f1b34b309895d31d8c0512eac8660a0210
 // 24.2.2.1 SharedArrayBuffer( length )
 bool
 SharedArrayBufferObject::class_constructor(JSContext* cx, unsigned argc, Value* vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
 
     // Step 1.
-    if (!ThrowIfNotConstructing(cx, args, "SharedArrayBuffer"))
+    if (!ThrowIfNotConstructing(cx, args, "SharedArrayBuffer")) {
         return false;
+    }
 
     // Step 2.
     uint64_t byteLength;
-    if (!ToIndex(cx, args.get(0), &byteLength))
+    if (!ToIndex(cx, args.get(0), &byteLength)) {
         return false;
+    }
 
     // Step 3 (Inlined 24.2.1.1 AllocateSharedArrayBuffer).
     // 24.2.1.1, step 1 (Inlined 9.1.14 OrdinaryCreateFromConstructor).
     RootedObject proto(cx);
-    if (!GetPrototypeFromBuiltinConstructor(cx, args, &proto))
+    if (!GetPrototypeFromBuiltinConstructor(cx, args, &proto)) {
         return false;
+    }
 
     // 24.2.1.1, step 3 (Inlined 6.2.7.2 CreateSharedByteDataBlock, step 2).
     // Refuse to allocate too large buffers, currently limited to ~2 GiB.
     if (byteLength > INT32_MAX) {
         JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_SHARED_ARRAY_BAD_LENGTH);
         return false;
     }
 
     // 24.2.1.1, steps 1 and 4-6.
     JSObject* bufobj = New(cx, uint32_t(byteLength), proto);
-    if (!bufobj)
+    if (!bufobj) {
         return false;
+    }
     args.rval().setObject(*bufobj);
     return true;
 }
 
 SharedArrayBufferObject*
 SharedArrayBufferObject::New(JSContext* cx, uint32_t length, HandleObject proto)
 {
     SharedArrayRawBuffer* buffer = SharedArrayRawBuffer::Allocate(length, Nothing());
-    if (!buffer)
+    if (!buffer) {
         return nullptr;
+    }
 
     SharedArrayBufferObject* obj = New(cx, buffer, length, proto);
     if (!obj) {
         buffer->dropReference();
         return nullptr;
     }
 
     return obj;
@@ -239,18 +255,19 @@ SharedArrayBufferObject*
 SharedArrayBufferObject::New(JSContext* cx, SharedArrayRawBuffer* buffer, uint32_t length,
                              HandleObject proto)
 {
     MOZ_ASSERT(cx->realm()->creationOptions().getSharedMemoryAndAtomicsEnabled());
 
     AutoSetNewObjectMetadata metadata(cx);
     Rooted<SharedArrayBufferObject*> obj(cx,
         NewObjectWithClassProto<SharedArrayBufferObject>(cx, proto));
-    if (!obj)
+    if (!obj) {
         return nullptr;
+    }
 
     MOZ_ASSERT(obj->getClass() == &class_);
 
     obj->acceptRawBuffer(buffer, length);
 
     return obj;
 }
 
@@ -290,18 +307,19 @@ SharedArrayBufferObject::Finalize(FreeOp
         buf.dropRawBuffer();
     }
 }
 
 #ifndef WASM_HUGE_MEMORY
 uint32_t
 SharedArrayBufferObject::wasmBoundsCheckLimit() const
 {
-    if (isWasm())
+    if (isWasm()) {
         return rawBufferObject()->boundsCheckLimit();
+    }
     return byteLength();
 }
 #endif
 
 /* static */ void
 SharedArrayBufferObject::addSizeOfExcludingThis(JSObject* obj, mozilla::MallocSizeOf mallocSizeOf,
                                                 JS::ClassInfo* info)
 {
@@ -473,13 +491,14 @@ JS_IsSharedArrayBufferObject(JSObject* o
     obj = CheckedUnwrap(obj);
     return obj ? obj->is<SharedArrayBufferObject>() : false;
 }
 
 JS_FRIEND_API(uint8_t*)
 JS_GetSharedArrayBufferData(JSObject* obj, bool* isSharedMemory, const JS::AutoRequireNoGC&)
 {
     obj = CheckedUnwrap(obj);
-    if (!obj)
+    if (!obj) {
         return nullptr;
+    }
     *isSharedMemory = true;
     return obj->as<SharedArrayBufferObject>().dataPointerShared().unwrap(/*safe - caller knows*/);
 }
--- a/js/src/vm/SharedImmutableStringsCache-inl.h
+++ b/js/src/vm/SharedImmutableStringsCache-inl.h
@@ -19,23 +19,25 @@ SharedImmutableStringsCache::getOrCreate
     MOZ_ASSERT(inner_);
     MOZ_ASSERT(chars);
     Hasher::Lookup lookup(Hasher::hashLongString(chars, length), chars, length);
 
     auto locked = inner_->lock();
     auto entry = locked->set.lookupForAdd(lookup);
     if (!entry) {
         OwnedChars ownedChars(intoOwnedChars());
-        if (!ownedChars)
+        if (!ownedChars) {
             return mozilla::Nothing();
+        }
         MOZ_ASSERT(ownedChars.get() == chars ||
                    memcmp(ownedChars.get(), chars, length) == 0);
         auto box = StringBox::Create(std::move(ownedChars), length);
-        if (!box || !locked->set.add(entry, std::move(box)))
+        if (!box || !locked->set.add(entry, std::move(box))) {
             return mozilla::Nothing();
+        }
     }
 
     MOZ_ASSERT(entry && *entry);
     return mozilla::Some(SharedImmutableString(locked, entry->get()));
 }
 
 template <typename IntoOwnedTwoByteChars>
 MOZ_MUST_USE mozilla::Maybe<SharedImmutableTwoByteString>
@@ -46,24 +48,26 @@ SharedImmutableStringsCache::getOrCreate
     auto hash = Hasher::hashLongString(reinterpret_cast<const char*>(chars),
                                        length * sizeof(char16_t));
     Hasher::Lookup lookup(hash, chars, length);
 
     auto locked = inner_->lock();
     auto entry = locked->set.lookupForAdd(lookup);
     if (!entry) {
         OwnedTwoByteChars ownedTwoByteChars(intoOwnedTwoByteChars());
-        if (!ownedTwoByteChars)
+        if (!ownedTwoByteChars) {
             return mozilla::Nothing();
+        }
         MOZ_ASSERT(ownedTwoByteChars.get() == chars ||
                    memcmp(ownedTwoByteChars.get(), chars, length * sizeof(char16_t)) == 0);
         OwnedChars ownedChars(reinterpret_cast<char*>(ownedTwoByteChars.release()));
         auto box = StringBox::Create(std::move(ownedChars), length * sizeof(char16_t));
-        if (!box || !locked->set.add(entry, std::move(box)))
+        if (!box || !locked->set.add(entry, std::move(box))) {
             return mozilla::Nothing();
+        }
     }
 
     MOZ_ASSERT(entry && *entry);
     return mozilla::Some(SharedImmutableTwoByteString(locked, entry->get()));
 }
 
 } // namespace js
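
Both getOrCreate() specializations above follow the same shape: hash the incoming characters, look them up under the cache lock, and only run the into-owned-chars callback (which may take ownership of the caller's buffer) when the string is not already interned; lookupForAdd() lets the later add() reuse the computed hash. A much simplified, single-threaded analogue of that deduplication with std::unordered_map, which, unlike the real cache, hashes twice (names hypothetical):

    #include <cstddef>
    #include <memory>
    #include <string>
    #include <unordered_map>

    class StringInterner {
        // Maps string contents to a shared, immutable copy.
        std::unordered_map<std::string, std::shared_ptr<const std::string>> set_;

      public:
        // Returns a shared handle; allocates only on first insertion.
        std::shared_ptr<const std::string> getOrCreate(const char* chars, std::size_t length) {
            std::string key(chars, length);
            auto it = set_.find(key);
            if (it == set_.end()) {
                auto box = std::make_shared<const std::string>(key);
                it = set_.emplace(std::move(key), std::move(box)).first;
            }
            return it->second;
        }
    };
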
 
--- a/js/src/vm/SharedImmutableStringsCache.cpp
+++ b/js/src/vm/SharedImmutableStringsCache.cpp
@@ -61,26 +61,28 @@ SharedImmutableTwoByteString&
 SharedImmutableTwoByteString::operator=(SharedImmutableTwoByteString&& rhs)
 {
     this->~SharedImmutableTwoByteString();
     new (this) SharedImmutableTwoByteString(std::move(rhs));
     return *this;
 }
 
 SharedImmutableString::~SharedImmutableString() {
-    if (!box_)
+    if (!box_) {
         return;
+    }
 
     auto locked = cache_.inner_->lock();
 
     MOZ_ASSERT(box_->refcount > 0);
 
     box_->refcount--;
-    if (box_->refcount == 0)
+    if (box_->refcount == 0) {
         box_->chars_.reset(nullptr);
+    }
 }
 
 SharedImmutableString
 SharedImmutableString::clone() const
 {
     auto locked = cache_.inner_->lock();
     MOZ_ASSERT(box_);
     MOZ_ASSERT(box_->refcount > 0);
--- a/js/src/vm/SharedImmutableStringsCache.h
+++ b/js/src/vm/SharedImmutableStringsCache.h
@@ -141,31 +141,33 @@ class SharedImmutableStringsCache
         auto locked = inner_->lock();
 
         // Size of the table.
         n += locked->set.shallowSizeOfExcludingThis(mallocSizeOf);
 
         // Sizes of the strings and their boxes.
         for (auto r = locked->set.all(); !r.empty(); r.popFront()) {
             n += mallocSizeOf(r.front().get());
-            if (const char* chars = r.front()->chars())
+            if (const char* chars = r.front()->chars()) {
                 n += mallocSizeOf(chars);
+            }
         }
 
         return n;
     }
 
     /**
      * Construct a new cache of shared, immutable strings. Returns
      * `mozilla::Nothing` on out of memory failure.
      */
     static mozilla::Maybe<SharedImmutableStringsCache> Create() {
         auto inner = js_new<ExclusiveData<Inner>>(mutexid::SharedImmutableStringsCache);
-        if (!inner)
+        if (!inner) {
             return mozilla::Nothing();
+        }
 
         auto locked = inner->lock();
         return mozilla::Some(SharedImmutableStringsCache(locked));
     }
 
     SharedImmutableStringsCache(SharedImmutableStringsCache&& rhs)
       : inner_(rhs.inner_)
     {
@@ -183,31 +185,34 @@ class SharedImmutableStringsCache
 
     SharedImmutableStringsCache clone() {
         MOZ_ASSERT(inner_);
         auto locked = inner_->lock();
         return SharedImmutableStringsCache(locked);
     }
 
     ~SharedImmutableStringsCache() {
-        if (!inner_)
+        if (!inner_) {
             return;
+        }
 
         bool shouldDestroy = false;
         {
             // ~ExclusiveData takes the lock, so be sure to drop the lock before
             // attempting to destroy the inner.
             auto locked = inner_->lock();
             MOZ_ASSERT(locked->refcount > 0);
             locked->refcount--;
-            if (locked->refcount == 0)
+            if (locked->refcount == 0) {
                 shouldDestroy = true;
+            }
         }
-        if (shouldDestroy)
+        if (shouldDestroy) {
             js_delete(inner_);
+        }
     }
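
The destructor above decrements the shared refcount while holding the inner lock, but defers js_delete until the lock guard has left scope, because ~ExclusiveData itself takes the lock. The same shape with standard primitives, using std::mutex as a stand-in for ExclusiveData (hypothetical names):

    #include <mutex>

    struct Inner {
        std::mutex lock;
        unsigned refcount = 1;
    };

    // Releases one reference; destroys inner outside the critical section.
    void release(Inner* inner) {
        bool shouldDestroy = false;
        {
            std::lock_guard<std::mutex> guard(inner->lock);
            shouldDestroy = (--inner->refcount == 0);
        }  // the guard unlocks here, before the mutex itself can be destroyed
        if (shouldDestroy) {
            delete inner;  // no other holder remains, so destroying the mutex is safe
        }
    }
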
 
     /**
      * Purge the cache of all refcount == 0 entries.
      */
     void purge() {
         auto locked = inner_->lock();
         MOZ_ASSERT(locked->refcount > 0);
@@ -310,21 +315,23 @@ class SharedImmutableStringsCache
 
         static HashNumber hash(const Lookup& lookup) {
             return lookup.hash_;
         }
 
         static bool match(const StringBox::Ptr& key, const Lookup& lookup) {
             MOZ_ASSERT(lookup.chars_);
 
-            if (!key->chars() || key->length() != lookup.length_)
+            if (!key->chars() || key->length() != lookup.length_) {
                 return false;
+            }
 
-            if (key->chars() == lookup.chars_)
+            if (key->chars() == lookup.chars_) {
                 return true;
+            }
 
             return memcmp(key->chars(), lookup.chars_, key->length()) == 0;
         }
     };
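
match() above orders its equality checks from cheapest to most expensive: reject a purged entry or a length mismatch first, accept on pointer identity, and only then compare bytes. The same three-step comparison as a standalone helper (hypothetical Key type):

    #include <cstddef>
    #include <cstring>

    struct Key {
        const char* chars;
        std::size_t length;
    };

    bool Matches(const Key& key, const Key& lookup) {
        if (!key.chars || key.length != lookup.length) {
            return false;  // cheap rejects first
        }
        if (key.chars == lookup.chars) {
            return true;   // same buffer, no byte comparison needed
        }
        return std::memcmp(key.chars, lookup.chars, key.length) == 0;
    }
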
 
     // The `Inner` struct contains the actual cached contents, and is reference
     // counted and shared between all `SharedImmutableStringsCache` and
     // `SharedImmutable[TwoByte]String` holders.
--- a/js/src/vm/SharedMem.h
+++ b/js/src/vm/SharedMem.h
@@ -71,18 +71,19 @@ class SharedMem
     // Reinterpret-cast the pointer to type U, preserving sharedness.
     // Eg, "obj->dataPointerEither().cast<uint8_t*>()" yields a SharedMem<uint8_t*>.
     template<typename U>
     inline SharedMem<U> cast() const {
 #ifdef DEBUG
         MOZ_ASSERT(asValue() % sizeof(mozilla::Conditional<mozilla::IsVoid<typename mozilla::RemovePointer<U>::Type>::value,
                                                            char,
                                                            typename mozilla::RemovePointer<U>::Type>) == 0);
-        if (sharedness_ == IsUnshared)
+        if (sharedness_ == IsUnshared) {
             return SharedMem<U>::unshared(unwrap());
+        }
 #endif
         return SharedMem<U>::shared(unwrap());
     }
 
     explicit operator bool() { return ptr_ != nullptr; }
 
     SharedMem operator +(size_t offset) {
         return SharedMem(ptr_ + offset, *this);
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -39,18 +39,19 @@ InterpreterFrame::global() const
 {
     return script()->global();
 }
 
 inline JSObject&
 InterpreterFrame::varObj() const
 {
     JSObject* obj = environmentChain();
-    while (!obj->isQualifiedVarObj())
+    while (!obj->isQualifiedVarObj()) {
         obj = obj->enclosingEnvironment();
+    }
     return *obj;
 }
 
 inline LexicalEnvironmentObject&
 InterpreterFrame::extensibleLexicalEnvironment() const
 {
     return NearestEnclosingExtensibleLexicalEnvironment(environmentChain());
 }
@@ -59,28 +60,30 @@ inline void
 InterpreterFrame::initCallFrame(InterpreterFrame* prev, jsbytecode* prevpc,
                                 Value* prevsp, JSFunction& callee, JSScript* script, Value* argv,
                                 uint32_t nactual, MaybeConstruct constructing)
 {
     MOZ_ASSERT(callee.nonLazyScript() == script);
 
     /* Initialize stack frame members. */
     flags_ = 0;
-    if (constructing)
+    if (constructing) {
         flags_ |= CONSTRUCTING;
+    }
     argv_ = argv;
     script_ = script;
     nactual_ = nactual;
     envChain_ = callee.environment();
     prev_ = prev;
     prevpc_ = prevpc;
     prevsp_ = prevsp;
 
-    if (script->isDebuggee())
+    if (script->isDebuggee()) {
         setIsDebuggee();
+    }
 
     initLocals();
 }
 
 inline void
 InterpreterFrame::initLocals()
 {
     SetValueRangeToUndefined(slots(), script()->nfixed());
@@ -115,18 +118,19 @@ template <class Op>
 inline void
 InterpreterFrame::unaliasedForEachActual(Op op)
 {
     // Don't assert !script()->funHasAnyAliasedFormal() since this function is
     // called from ArgumentsObject::createUnexpected() which can access aliased
     // slots.
 
     const Value* argsEnd = argv() + numActualArgs();
-    for (const Value* p = argv(); p < argsEnd; ++p)
+    for (const Value* p = argv(); p < argsEnd; ++p) {
         op(*p);
+    }
 }
 
 struct CopyTo
 {
     Value* dst;
     explicit CopyTo(Value* dst) : dst(dst) {}
     void operator()(const Value& src) { *dst++ = src; }
 };
@@ -153,29 +157,31 @@ InterpreterFrame::initArgsObj(ArgumentsO
     flags_ |= HAS_ARGS_OBJ;
     argsObj_ = &argsobj;
 }
 
 inline EnvironmentObject&
 InterpreterFrame::aliasedEnvironment(EnvironmentCoordinate ec) const
 {
     JSObject* env = &environmentChain()->as<EnvironmentObject>();
-    for (unsigned i = ec.hops(); i; i--)
+    for (unsigned i = ec.hops(); i; i--) {
         env = &env->as<EnvironmentObject>().enclosingEnvironment();
+    }
     return env->as<EnvironmentObject>();
 }
 
 template <typename SpecificEnvironment>
 inline void
 InterpreterFrame::pushOnEnvironmentChain(SpecificEnvironment& env)
 {
     MOZ_ASSERT(*environmentChain() == env.enclosingEnvironment());
     envChain_ = &env;
-    if (IsFrameInitialEnvironment(this, env))
+    if (IsFrameInitialEnvironment(this, env)) {
         flags_ |= HAS_INITIAL_ENV;
+    }
 }
 
 template <typename SpecificEnvironment>
 inline void
 InterpreterFrame::popOffEnvironmentChain()
 {
     MOZ_ASSERT(envChain_->is<SpecificEnvironment>());
     envChain_ = &envChain_->as<SpecificEnvironment>().enclosingEnvironment();
@@ -197,18 +203,19 @@ InterpreterFrame::hasInitialEnvironment(
 }
 
 inline CallObject&
 InterpreterFrame::callObj() const
 {
     MOZ_ASSERT(callee().needsCallObject());
 
     JSObject* pobj = environmentChain();
-    while (MOZ_UNLIKELY(!pobj->is<CallObject>()))
+    while (MOZ_UNLIKELY(!pobj->is<CallObject>())) {
         pobj = pobj->enclosingEnvironment();
+    }
     return pobj->as<CallObject>();
 }
 
 inline void
 InterpreterFrame::unsetIsDebuggee()
 {
     MOZ_ASSERT(!script()->isDebuggee());
     flags_ &= ~DEBUGGEE;
@@ -221,20 +228,21 @@ InterpreterStack::purge(JSRuntime* rt)
 {
     rt->gc.freeUnusedLifoBlocksAfterSweeping(&allocator_);
 }
 
 uint8_t*
 InterpreterStack::allocateFrame(JSContext* cx, size_t size)
 {
     size_t maxFrames;
-    if (cx->realm()->principals() == cx->runtime()->trustedPrincipals())
+    if (cx->realm()->principals() == cx->runtime()->trustedPrincipals()) {
         maxFrames = MAX_FRAMES_TRUSTED;
-    else
+    } else {
         maxFrames = MAX_FRAMES;
+    }
 
     if (MOZ_UNLIKELY(frameCount_ >= maxFrames)) {
         ReportOverRecursed(cx);
         return nullptr;
     }
 
     uint8_t* buffer = reinterpret_cast<uint8_t*>(allocator_.alloc(size));
     if (!buffer) {
@@ -264,27 +272,29 @@ InterpreterStack::getCallFrame(JSContext
 
     // Pad any missing arguments with |undefined|.
     MOZ_ASSERT(args.length() < nformal);
 
     unsigned nfunctionState = 2 + constructing; // callee, |this|, |new.target|
 
     nvals += nformal + nfunctionState;
     uint8_t* buffer = allocateFrame(cx, sizeof(InterpreterFrame) + nvals * sizeof(Value));
-    if (!buffer)
+    if (!buffer) {
         return nullptr;
+    }
 
     Value* argv = reinterpret_cast<Value*>(buffer);
     unsigned nmissing = nformal - args.length();
 
     mozilla::PodCopy(argv, args.base(), 2 + args.length());
     SetValueRangeToUndefined(argv + 2 + args.length(), nmissing);
 
-    if (constructing)
+    if (constructing) {
         argv[2 + nformal] = args.newTarget();
+    }
 
     *pargv = argv + 2;
     return reinterpret_cast<InterpreterFrame*>(argv + nfunctionState + nformal);
 }
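
getCallFrame() above copies callee, |this| and the actual arguments, pads any missing formals with undefined, and appends |new.target| for constructing calls, so a callee can always index its formal slots. A small sketch of just the copy-and-pad step, with a stand-in Value type rather than the engine's frame layout:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    struct Value { int tag; };             // stand-in for JS::Value
    static const Value UndefinedValue{0};

    // |base| holds callee, |this| and nactual arguments, as CallArgs::base() does.
    std::vector<Value> BuildArgv(const Value* base, std::size_t nactual, std::size_t nformal) {
        std::vector<Value> argv(2 + std::max(nactual, nformal), UndefinedValue);
        std::copy(base, base + 2 + nactual, argv.begin());  // callee, this, actuals
        return argv;                                        // missing formals stay undefined
    }
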
 
 MOZ_ALWAYS_INLINE bool
 InterpreterStack::pushInlineFrame(JSContext* cx, InterpreterRegs& regs, const CallArgs& args,
                                   HandleScript script, MaybeConstruct constructing)
@@ -299,18 +309,19 @@ InterpreterStack::pushInlineFrame(JSCont
     jsbytecode* prevpc = regs.pc;
     Value* prevsp = regs.sp;
     MOZ_ASSERT(prev);
 
     LifoAlloc::Mark mark = allocator_.mark();
 
     Value* argv;
     InterpreterFrame* fp = getCallFrame(cx, args, script, constructing, &argv);
-    if (!fp)
+    if (!fp) {
         return false;
+    }
 
     fp->mark_ = mark;
 
     /* Initialize frame, locals, regs. */
     fp->initCallFrame(prev, prevpc, prevsp, *callee, script, argv, args.length(),
                       constructing);
 
     regs.prepareToRun(*fp, script);
@@ -335,18 +346,19 @@ InterpreterStack::resumeGeneratorCallFra
     // (Async) generators and async functions are not constructors.
     MOZ_ASSERT(!callee->isConstructor());
 
     // Include callee, |this|, and maybe |new.target|
     unsigned nformal = callee->nargs();
     unsigned nvals = 2 + nformal + script->nslots();
 
     uint8_t* buffer = allocateFrame(cx, sizeof(InterpreterFrame) + nvals * sizeof(Value));
-    if (!buffer)
+    if (!buffer) {
         return false;
+    }
 
     Value* argv = reinterpret_cast<Value*>(buffer) + 2;
     argv[-2] = ObjectValue(*callee);
     argv[-1] = UndefinedValue();
     SetValueRangeToUndefined(argv, nformal);
 
     InterpreterFrame* fp = reinterpret_cast<InterpreterFrame*>(argv + nformal);
     fp->mark_ = mark;
@@ -398,20 +410,22 @@ FrameIter::unaliasedForEachActual(JSCont
         return;
     }
     MOZ_CRASH("Unexpected state");
 }
 
 inline HandleValue
 AbstractFramePtr::returnValue() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->returnValue();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return asWasmDebugFrame()->returnValue();
+    }
     return asBaselineFrame()->returnValue();
 }
 
 inline void
 AbstractFramePtr::setReturnValue(const Value& rval) const
 {
     if (isInterpreterFrame()) {
         asInterpreterFrame()->setReturnValue(rval);
@@ -428,22 +442,25 @@ AbstractFramePtr::setReturnValue(const V
         return;
     }
     asRematerializedFrame()->setReturnValue(rval);
 }
 
 inline JSObject*
 AbstractFramePtr::environmentChain() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->environmentChain();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->environmentChain();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return &global()->lexicalEnvironment();
+    }
     return asRematerializedFrame()->environmentChain();
 }
 
 template <typename SpecificEnvironment>
 inline void
 AbstractFramePtr::pushOnEnvironmentChain(SpecificEnvironment& env)
 {
     if (isInterpreterFrame()) {
@@ -470,20 +487,22 @@ AbstractFramePtr::popOffEnvironmentChain
         return;
     }
     asRematerializedFrame()->popOffEnvironmentChain<SpecificEnvironment>();
 }
 
 inline CallObject&
 AbstractFramePtr::callObj() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->callObj();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->callObj();
+    }
     return asRematerializedFrame()->callObj();
 }
 
 inline bool
 AbstractFramePtr::initFunctionEnvironmentObjects(JSContext* cx)
 {
     return js::InitFunctionEnvironmentObjects(cx, *this);
 }
@@ -498,168 +517,199 @@ inline JS::Realm*
 AbstractFramePtr::realm() const
 {
     return environmentChain()->nonCCWRealm();
 }
 
 inline unsigned
 AbstractFramePtr::numActualArgs() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->numActualArgs();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->numActualArgs();
+    }
     return asRematerializedFrame()->numActualArgs();
 }
 
 inline unsigned
 AbstractFramePtr::numFormalArgs() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->numFormalArgs();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->numFormalArgs();
+    }
     return asRematerializedFrame()->numFormalArgs();
 }
 
 inline Value&
 AbstractFramePtr::unaliasedLocal(uint32_t i)
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->unaliasedLocal(i);
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->unaliasedLocal(i);
+    }
     return asRematerializedFrame()->unaliasedLocal(i);
 }
 
 inline Value&
 AbstractFramePtr::unaliasedFormal(unsigned i, MaybeCheckAliasing checkAliasing)
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->unaliasedFormal(i, checkAliasing);
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->unaliasedFormal(i, checkAliasing);
+    }
     return asRematerializedFrame()->unaliasedFormal(i, checkAliasing);
 }
 
 inline Value&
 AbstractFramePtr::unaliasedActual(unsigned i, MaybeCheckAliasing checkAliasing)
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->unaliasedActual(i, checkAliasing);
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->unaliasedActual(i, checkAliasing);
+    }
     return asRematerializedFrame()->unaliasedActual(i, checkAliasing);
 }
 
 inline bool
 AbstractFramePtr::hasInitialEnvironment() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->hasInitialEnvironment();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->hasInitialEnvironment();
+    }
     return asRematerializedFrame()->hasInitialEnvironment();
 }
 
 inline bool
 AbstractFramePtr::isGlobalFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isGlobalFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isGlobalFrame();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return false;
+    }
     return asRematerializedFrame()->isGlobalFrame();
 }
 
 inline bool
 AbstractFramePtr::isModuleFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isModuleFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isModuleFrame();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return false;
+    }
     return asRematerializedFrame()->isModuleFrame();
 }
 
 inline bool
 AbstractFramePtr::isEvalFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isEvalFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isEvalFrame();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return false;
+    }
     MOZ_ASSERT(isRematerializedFrame());
     return false;
 }
 
 inline bool
 AbstractFramePtr::isDebuggerEvalFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isDebuggerEvalFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isDebuggerEvalFrame();
+    }
     MOZ_ASSERT(isRematerializedFrame());
     return false;
 }
 
 inline bool
 AbstractFramePtr::isDebuggee() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isDebuggee();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isDebuggee();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return asWasmDebugFrame()->isDebuggee();
+    }
     return asRematerializedFrame()->isDebuggee();
 }
 
 inline void
 AbstractFramePtr::setIsDebuggee()
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         asInterpreterFrame()->setIsDebuggee();
-    else if (isBaselineFrame())
+    } else if (isBaselineFrame()) {
         asBaselineFrame()->setIsDebuggee();
-    else if (isWasmDebugFrame())
+    } else if (isWasmDebugFrame()) {
         asWasmDebugFrame()->setIsDebuggee();
-    else
+    } else {
         asRematerializedFrame()->setIsDebuggee();
+    }
 }
 
 inline void
 AbstractFramePtr::unsetIsDebuggee()
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         asInterpreterFrame()->unsetIsDebuggee();
-    else if (isBaselineFrame())
+    } else if (isBaselineFrame()) {
         asBaselineFrame()->unsetIsDebuggee();
-    else if (isWasmDebugFrame())
+    } else if (isWasmDebugFrame()) {
         asWasmDebugFrame()->unsetIsDebuggee();
-    else
+    } else {
         asRematerializedFrame()->unsetIsDebuggee();
+    }
 }
 
 inline bool
 AbstractFramePtr::isConstructing() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isConstructing();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isConstructing();
-    if (isRematerializedFrame())
+    }
+    if (isRematerializedFrame()) {
         return asRematerializedFrame()->isConstructing();
+    }
     MOZ_CRASH("Unexpected frame");
 }
 
 inline bool
 AbstractFramePtr::hasArgs() const
 {
     return isFunctionFrame();
 }
@@ -668,140 +718,163 @@ inline bool
 AbstractFramePtr::hasScript() const
 {
     return !isWasmDebugFrame();
 }
 
 inline JSScript*
 AbstractFramePtr::script() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->script();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->script();
+    }
     return asRematerializedFrame()->script();
 }
 
 inline wasm::Instance*
 AbstractFramePtr::wasmInstance() const
 {
     return asWasmDebugFrame()->instance();
 }
 
 inline GlobalObject*
 AbstractFramePtr::global() const
 {
-    if (isWasmDebugFrame())
+    if (isWasmDebugFrame()) {
         return &wasmInstance()->object()->global();
+    }
     return &script()->global();
 }
 
 inline JSFunction*
 AbstractFramePtr::callee() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return &asInterpreterFrame()->callee();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->callee();
+    }
     return asRematerializedFrame()->callee();
 }
 
 inline Value
 AbstractFramePtr::calleev() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->calleev();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->calleev();
+    }
     return asRematerializedFrame()->calleev();
 }
 
 inline bool
 AbstractFramePtr::isFunctionFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isFunctionFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isFunctionFrame();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return false;
+    }
     return asRematerializedFrame()->isFunctionFrame();
 }
 
 inline bool
 AbstractFramePtr::isNonStrictDirectEvalFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isNonStrictDirectEvalFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isNonStrictDirectEvalFrame();
+    }
     MOZ_ASSERT(isRematerializedFrame());
     return false;
 }
 
 inline bool
 AbstractFramePtr::isStrictEvalFrame() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->isStrictEvalFrame();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->isStrictEvalFrame();
+    }
     MOZ_ASSERT(isRematerializedFrame());
     return false;
 }
 
 inline Value*
 AbstractFramePtr::argv() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->argv();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->argv();
+    }
     return asRematerializedFrame()->argv();
 }
 
 inline bool
 AbstractFramePtr::hasArgsObj() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->hasArgsObj();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->hasArgsObj();
+    }
     return asRematerializedFrame()->hasArgsObj();
 }
 
 inline ArgumentsObject&
 AbstractFramePtr::argsObj() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->argsObj();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->argsObj();
+    }
     return asRematerializedFrame()->argsObj();
 }
 
 inline void
 AbstractFramePtr::initArgsObj(ArgumentsObject& argsobj) const
 {
     if (isInterpreterFrame()) {
         asInterpreterFrame()->initArgsObj(argsobj);
         return;
     }
     asBaselineFrame()->initArgsObj(argsobj);
 }
 
 inline bool
 AbstractFramePtr::prevUpToDate() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->prevUpToDate();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->prevUpToDate();
-    if (isWasmDebugFrame())
+    }
+    if (isWasmDebugFrame()) {
         return asWasmDebugFrame()->prevUpToDate();
+    }
     return asRematerializedFrame()->prevUpToDate();
 }
 
 inline void
 AbstractFramePtr::setPrevUpToDate() const
 {
     if (isInterpreterFrame()) {
         asInterpreterFrame()->setPrevUpToDate();
@@ -834,45 +907,51 @@ AbstractFramePtr::unsetPrevUpToDate() co
         return;
     }
     asRematerializedFrame()->unsetPrevUpToDate();
 }
 
 inline Value&
 AbstractFramePtr::thisArgument() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->thisArgument();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->thisArgument();
+    }
     return asRematerializedFrame()->thisArgument();
 }
 
 inline Value
 AbstractFramePtr::newTarget() const
 {
-    if (isInterpreterFrame())
+    if (isInterpreterFrame()) {
         return asInterpreterFrame()->newTarget();
-    if (isBaselineFrame())
+    }
+    if (isBaselineFrame()) {
         return asBaselineFrame()->newTarget();
+    }
     return asRematerializedFrame()->newTarget();
 }
 
 inline bool
 AbstractFramePtr::debuggerNeedsCheckPrimitiveReturn() const
 {
-    if (isWasmDebugFrame())
+    if (isWasmDebugFrame()) {
         return false;
+    }
     return script()->isDerivedClassConstructor();
 }
 
 ActivationEntryMonitor::~ActivationEntryMonitor()
 {
-    if (entryMonitor_)
+    if (entryMonitor_) {
         entryMonitor_->Exit(cx_);
+    }
 
     cx_->entryMonitor = entryMonitor_;
 }
 
 Activation::Activation(JSContext* cx, Kind kind)
   : cx_(cx),
     compartment_(cx->compartment()),
     prev_(cx->activation_),
@@ -899,35 +978,38 @@ Activation::~Activation()
     cx_->asyncCauseForNewActivations = asyncCause_;
     cx_->asyncStackForNewActivations() = asyncStack_;
     cx_->asyncCallIsExplicit = asyncCallIsExplicit_;
 }
 
 bool
 Activation::isProfiling() const
 {
-    if (isInterpreter())
+    if (isInterpreter()) {
         return asInterpreter()->isProfiling();
+    }
 
     MOZ_ASSERT(isJit());
     return asJit()->isProfiling();
 }
 
 Activation*
 Activation::mostRecentProfiling()
 {
-    if (isProfiling())
+    if (isProfiling()) {
         return this;
+    }
     return prevProfiling_;
 }
 
 inline LiveSavedFrameCache*
 Activation::getLiveSavedFrameCache(JSContext* cx) {
-    if (!frameCache_.get().initialized() && !frameCache_.get().init(cx))
+    if (!frameCache_.get().initialized() && !frameCache_.get().init(cx)) {
         return nullptr;
+    }
     return frameCache_.address();
 }
 
 InterpreterActivation::InterpreterActivation(RunState& state, JSContext* cx,
                                              InterpreterFrame* entryFrame)
   : Activation(cx, Interpreter),
     entryFrame_(entryFrame),
     opMask_(0)
@@ -938,32 +1020,35 @@ InterpreterActivation::InterpreterActiva
     regs_.prepareToRun(*entryFrame, state.script());
     MOZ_ASSERT(regs_.pc == state.script()->code());
     MOZ_ASSERT_IF(entryFrame_->isEvalFrame(), state.script()->isActiveEval());
 }
 
 InterpreterActivation::~InterpreterActivation()
 {
     // Pop all inline frames.
-    while (regs_.fp() != entryFrame_)
+    while (regs_.fp() != entryFrame_) {
         popInlineFrame(regs_.fp());
+    }
 
     MOZ_ASSERT(oldFrameCount_ == cx_->interpreterStack().frameCount_);
     MOZ_ASSERT_IF(oldFrameCount_ == 0, cx_->interpreterStack().allocator_.used() == 0);
 
-    if (entryFrame_)
+    if (entryFrame_) {
         cx_->interpreterStack().releaseFrame(entryFrame_);
+    }
 }
 
 inline bool
 InterpreterActivation::pushInlineFrame(const CallArgs& args, HandleScript script,
                                        MaybeConstruct constructing)
 {
-    if (!cx_->interpreterStack().pushInlineFrame(cx_, regs_, args, script, constructing))
+    if (!cx_->interpreterStack().pushInlineFrame(cx_, regs_, args, script, constructing)) {
         return false;
+    }
     MOZ_ASSERT(regs_.fp()->script()->compartment() == compartment());
     return true;
 }
 
 inline void
 InterpreterActivation::popInlineFrame(InterpreterFrame* frame)
 {
     (void)frame; // Quell compiler warning.
@@ -972,62 +1057,72 @@ InterpreterActivation::popInlineFrame(In
 
     cx_->interpreterStack().popInlineFrame(regs_);
 }
 
 inline bool
 InterpreterActivation::resumeGeneratorFrame(HandleFunction callee, HandleObject envChain)
 {
     InterpreterStack& stack = cx_->interpreterStack();
-    if (!stack.resumeGeneratorCallFrame(cx_, regs_, callee, envChain))
+    if (!stack.resumeGeneratorCallFrame(cx_, regs_, callee, envChain)) {
         return false;
+    }
 
     MOZ_ASSERT(regs_.fp()->script()->compartment() == compartment_);
     return true;
 }
 
 /* static */ inline mozilla::Maybe<LiveSavedFrameCache::FramePtr>
 LiveSavedFrameCache::FramePtr::create(const FrameIter& iter)
 {
-    if (iter.done())
+    if (iter.done()) {
         return mozilla::Nothing();
+    }
 
-    if (iter.isPhysicalJitFrame())
+    if (iter.isPhysicalJitFrame()) {
         return mozilla::Some(FramePtr(iter.physicalJitFrame()));
+    }
 
-    if (!iter.hasUsableAbstractFramePtr())
+    if (!iter.hasUsableAbstractFramePtr()) {
         return mozilla::Nothing();
+    }
 
     auto afp = iter.abstractFramePtr();
 
-    if (afp.isInterpreterFrame())
+    if (afp.isInterpreterFrame()) {
         return mozilla::Some(FramePtr(afp.asInterpreterFrame()));
-    if (afp.isWasmDebugFrame())
+    }
+    if (afp.isWasmDebugFrame()) {
         return mozilla::Some(FramePtr(afp.asWasmDebugFrame()));
-    if (afp.isRematerializedFrame())
+    }
+    if (afp.isRematerializedFrame()) {
         return mozilla::Some(FramePtr(afp.asRematerializedFrame()));
+    }
 
     MOZ_CRASH("unexpected frame type");
 }
 
 /* static */ inline LiveSavedFrameCache::FramePtr
 LiveSavedFrameCache::FramePtr::create(AbstractFramePtr afp)
 {
     MOZ_ASSERT(afp);
 
     if (afp.isBaselineFrame()) {
         js::jit::CommonFrameLayout *common = afp.asBaselineFrame()->framePrefix();
         return FramePtr(common);
     }
-    if (afp.isInterpreterFrame())
+    if (afp.isInterpreterFrame()) {
         return FramePtr(afp.asInterpreterFrame());
-    if (afp.isWasmDebugFrame())
+    }
+    if (afp.isWasmDebugFrame()) {
         return FramePtr(afp.asWasmDebugFrame());
-    if (afp.isRematerializedFrame())
+    }
+    if (afp.isRematerializedFrame()) {
         return FramePtr(afp.asRematerializedFrame());
+    }
 
     MOZ_CRASH("unexpected frame type");
 }
 
 struct LiveSavedFrameCache::FramePtr::HasCachedMatcher {
     template<typename Frame>
     bool match(Frame* f) const { return f->hasCachedSavedFrame(); }
 };
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -65,18 +65,19 @@ InterpreterFrame::initExecuteFrame(JSCon
     envChain_ = envChain.get();
     prev_ = nullptr;
     prevpc_ = nullptr;
     prevsp_ = nullptr;
 
     evalInFramePrev_ = evalInFramePrev;
     MOZ_ASSERT_IF(evalInFramePrev, isDebuggerEvalFrame());
 
-    if (script->isDebuggee())
+    if (script->isDebuggee()) {
         setIsDebuggee();
+    }
 
 #ifdef DEBUG
     Debug_SetValueRangeToCrashOnTouch(&rval_, 1);
 #endif
 }
 
 bool
 InterpreterFrame::isNonGlobalEvalFrame() const
@@ -182,18 +183,19 @@ AssertScopeMatchesEnvironment(Scope* sco
 }
 
 static inline void
 AssertScopeMatchesEnvironment(InterpreterFrame* fp, jsbytecode* pc)
 {
 #ifdef DEBUG
     // If we OOMed before fully initializing the environment chain, the scope
     // and environment will definitely mismatch.
-    if (fp->script()->initialEnvironmentShape() && fp->hasInitialEnvironment())
+    if (fp->script()->initialEnvironmentShape() && fp->hasInitialEnvironment()) {
         AssertScopeMatchesEnvironment(fp->script()->innermostScope(pc), fp->environmentChain());
+    }
 #endif
 }
 
 bool
 InterpreterFrame::initFunctionEnvironmentObjects(JSContext* cx)
 {
     return js::InitFunctionEnvironmentObjects(cx, this);
 }
@@ -207,51 +209,55 @@ InterpreterFrame::prologue(JSContext* cx
     MOZ_ASSERT(cx->realm() == script->realm());
 
     if (isEvalFrame()) {
         if (!script->bodyScope()->hasEnvironment()) {
             MOZ_ASSERT(!script->strict());
             // Non-strict eval may introduce var bindings that conflict with
             // lexical bindings in an enclosing lexical scope.
             RootedObject varObjRoot(cx, &varObj());
-            if (!CheckEvalDeclarationConflicts(cx, script, environmentChain(), varObjRoot))
+            if (!CheckEvalDeclarationConflicts(cx, script, environmentChain(), varObjRoot)) {
                 return false;
+            }
         }
         return probes::EnterScript(cx, script, nullptr, this);
     }
 
     if (isGlobalFrame()) {
         Rooted<LexicalEnvironmentObject*> lexicalEnv(cx);
         RootedObject varObjRoot(cx);
         if (script->hasNonSyntacticScope()) {
             lexicalEnv = &extensibleLexicalEnvironment();
             varObjRoot = &varObj();
         } else {
             lexicalEnv = &cx->global()->lexicalEnvironment();
             varObjRoot = cx->global();
         }
         if (!CheckGlobalDeclarationConflicts(cx, script, lexicalEnv, varObjRoot)) {
             // Treat this as a script entry, for consistency with Ion.
-            if (script->trackRecordReplayProgress())
+            if (script->trackRecordReplayProgress()) {
                 mozilla::recordreplay::AdvanceExecutionProgressCounter();
+            }
             return false;
         }
         return probes::EnterScript(cx, script, nullptr, this);
     }
 
-    if (isModuleFrame())
+    if (isModuleFrame()) {
         return probes::EnterScript(cx, script, nullptr, this);
+    }
 
     // At this point, we've yet to push any environments. Check that they
     // match the enclosing scope.
     AssertScopeMatchesEnvironment(script->enclosingScope(), environmentChain());
 
     MOZ_ASSERT(isFunctionFrame());
-    if (callee().needsFunctionEnvironmentObjects() && !initFunctionEnvironmentObjects(cx))
+    if (callee().needsFunctionEnvironmentObjects() && !initFunctionEnvironmentObjects(cx)) {
         return false;
+    }
 
     MOZ_ASSERT_IF(isConstructing(),
                   thisArgument().isObject() || thisArgument().isMagic(JS_UNINITIALIZED_LEXICAL));
 
     return probes::EnterScript(cx, script, script->functionNonDelazifying(), this);
 }
 
 void
@@ -287,83 +293,90 @@ InterpreterFrame::epilogue(JSContext* cx
 bool
 InterpreterFrame::checkReturn(JSContext* cx, HandleValue thisv)
 {
     MOZ_ASSERT(script()->isDerivedClassConstructor());
     MOZ_ASSERT(isFunctionFrame());
     MOZ_ASSERT(callee().isClassConstructor());
 
     HandleValue retVal = returnValue();
-    if (retVal.isObject())
+    if (retVal.isObject()) {
         return true;
+    }
 
     if (!retVal.isUndefined()) {
         ReportValueError(cx, JSMSG_BAD_DERIVED_RETURN, JSDVG_IGNORE_STACK, retVal, nullptr);
         return false;
     }
 
-    if (thisv.isMagic(JS_UNINITIALIZED_LEXICAL))
+    if (thisv.isMagic(JS_UNINITIALIZED_LEXICAL)) {
         return ThrowUninitializedThis(cx, this);
+    }
 
     setReturnValue(thisv);
     return true;
 }
 
 bool
 InterpreterFrame::pushVarEnvironment(JSContext* cx, HandleScope scope)
 {
     return js::PushVarEnvironmentObject(cx, scope, this);
 }
 
 bool
 InterpreterFrame::pushLexicalEnvironment(JSContext* cx, Handle<LexicalScope*> scope)
 {
     LexicalEnvironmentObject* env = LexicalEnvironmentObject::create(cx, scope, this);
-    if (!env)
+    if (!env) {
         return false;
+    }
 
     pushOnEnvironmentChain(*env);
     return true;
 }
 
 bool
 InterpreterFrame::freshenLexicalEnvironment(JSContext* cx)
 {
     Rooted<LexicalEnvironmentObject*> env(cx, &envChain_->as<LexicalEnvironmentObject>());
     LexicalEnvironmentObject* fresh = LexicalEnvironmentObject::clone(cx, env);
-    if (!fresh)
+    if (!fresh) {
         return false;
+    }
 
     replaceInnermostEnvironment(*fresh);
     return true;
 }
 
 bool
 InterpreterFrame::recreateLexicalEnvironment(JSContext* cx)
 {
     Rooted<LexicalEnvironmentObject*> env(cx, &envChain_->as<LexicalEnvironmentObject>());
     LexicalEnvironmentObject* fresh = LexicalEnvironmentObject::recreate(cx, env);
-    if (!fresh)
+    if (!fresh) {
         return false;
+    }
 
     replaceInnermostEnvironment(*fresh);
     return true;
 }
 
 void
 InterpreterFrame::trace(JSTracer* trc, Value* sp, jsbytecode* pc)
 {
     TraceRoot(trc, &envChain_, "env chain");
     TraceRoot(trc, &script_, "script");
 
-    if (flags_ & HAS_ARGS_OBJ)
+    if (flags_ & HAS_ARGS_OBJ) {
         TraceRoot(trc, &argsObj_, "arguments");
+    }
 
-    if (hasReturnValue())
+    if (hasReturnValue()) {
         TraceRoot(trc, &rval_, "rval");
+    }
 
     MOZ_ASSERT(sp >= slots());
 
     if (hasArgs()) {
         // Trace the callee and |this|. When we're doing a moving GC, we
         // need to fix up the callee pointer before we use it below, under
         // numFormalArgs() and script().
         TraceRootRange(trc, 2, argv_ - 2, "fp callee and this");
@@ -383,50 +396,54 @@ InterpreterFrame::trace(JSTracer* trc, V
     if (nfixed == nlivefixed) {
         // All locals are live.
         traceValues(trc, 0, sp - slots());
     } else {
         // Trace operand stack.
         traceValues(trc, nfixed, sp - slots());
 
         // Clear dead block-scoped locals.
-        while (nfixed > nlivefixed)
+        while (nfixed > nlivefixed) {
             unaliasedLocal(--nfixed).setUndefined();
+        }
 
         // Trace live locals.
         traceValues(trc, 0, nlivefixed);
     }
 
-    if (auto* debugEnvs = script->realm()->debugEnvs())
+    if (auto* debugEnvs = script->realm()->debugEnvs()) {
         debugEnvs->traceLiveFrame(trc, this);
+    }
 }
 
 void
 InterpreterFrame::traceValues(JSTracer* trc, unsigned start, unsigned end)
 {
-    if (start < end)
+    if (start < end) {
         TraceRootRange(trc, end - start, slots() + start, "vm_stack");
+    }
 }
 
 static void
 TraceInterpreterActivation(JSTracer* trc, InterpreterActivation* act)
 {
     for (InterpreterFrameIterator frames(act); !frames.done(); ++frames) {
         InterpreterFrame* fp = frames.frame();
         fp->trace(trc, frames.sp(), frames.pc());
     }
 }
 
 void
 js::TraceInterpreterActivations(JSContext* cx, JSTracer* trc)
 {
     for (ActivationIterator iter(cx); !iter.done(); ++iter) {
         Activation* act = iter.activation();
-        if (act->isInterpreter())
+        if (act->isInterpreter()) {
             TraceInterpreterActivation(trc, act->asInterpreter());
+        }
     }
 }
 
 /*****************************************************************************/
 
 // Unlike the other methods of this class, this method is defined here so that
 // we don't have to #include jsautooplen.h in vm/Stack.h.
 void
@@ -442,35 +459,37 @@ InterpreterStack::pushInvokeFrame(JSCont
 {
     LifoAlloc::Mark mark = allocator_.mark();
 
     RootedFunction fun(cx, &args.callee().as<JSFunction>());
     RootedScript script(cx, fun->nonLazyScript());
 
     Value* argv;
     InterpreterFrame* fp = getCallFrame(cx, args, script, constructing, &argv);
-    if (!fp)
+    if (!fp) {
         return nullptr;
+    }
 
     fp->mark_ = mark;
     fp->initCallFrame(nullptr, nullptr, nullptr, *fun, script, argv, args.length(),
                       constructing);
     return fp;
 }
 
 InterpreterFrame*
 InterpreterStack::pushExecuteFrame(JSContext* cx, HandleScript script, const Value& newTargetValue,
                                    HandleObject envChain, AbstractFramePtr evalInFrame)
 {
     LifoAlloc::Mark mark = allocator_.mark();
 
     unsigned nvars = 1 /* newTarget */ + script->nslots();
     uint8_t* buffer = allocateFrame(cx, sizeof(InterpreterFrame) + nvars * sizeof(Value));
-    if (!buffer)
+    if (!buffer) {
         return nullptr;
+    }
 
     InterpreterFrame* fp = reinterpret_cast<InterpreterFrame*>(buffer + 1 * sizeof(Value));
     fp->mark_ = mark;
     fp->initExecuteFrame(cx, script, evalInFrame, newTargetValue, envChain);
     fp->initLocals();
 
     return fp;
 }
@@ -485,20 +504,22 @@ JitFrameIter::JitFrameIter(const JitFram
 JitFrameIter&
 JitFrameIter::operator=(const JitFrameIter& another)
 {
     MOZ_ASSERT(this != &another);
 
     act_ = another.act_;
     mustUnwindActivation_ = another.mustUnwindActivation_;
 
-    if (isSome())
+    if (isSome()) {
         iter_.destroy();
-    if (!another.isSome())
+    }
+    if (!another.isSome()) {
         return *this;
+    }
 
     if (another.isJSJit()) {
         iter_.construct<jit::JSJitFrameIter>(another.asJSJit());
     } else {
         MOZ_ASSERT(another.isWasm());
         iter_.construct<wasm::WasmFrameIter>(another.asWasm());
     }
 
@@ -520,127 +541,138 @@ JitFrameIter::JitFrameIter(jit::JitActiv
 }
 
 void
 JitFrameIter::skipNonScriptedJSFrames()
 {
     if (isJSJit()) {
         // Stop at the first scripted frame.
         jit::JSJitFrameIter& frames = asJSJit();
-        while (!frames.isScripted() && !frames.done())
+        while (!frames.isScripted() && !frames.done()) {
             ++frames;
+        }
         settle();
     }
 }
 
 bool
 JitFrameIter::isSelfHostedIgnoringInlining() const
 {
     MOZ_ASSERT(!done());
 
-    if (isWasm())
+    if (isWasm()) {
         return false;
+    }
 
     return asJSJit().script()->selfHosted();
 }
 
 JS::Realm*
 JitFrameIter::realm() const
 {
     MOZ_ASSERT(!done());
 
-    if (isWasm())
+    if (isWasm()) {
         return asWasm().instance()->realm();
+    }
 
     return asJSJit().script()->realm();
 }
 
 bool
 JitFrameIter::done() const
 {
-    if (!isSome())
+    if (!isSome()) {
         return true;
-    if (isJSJit())
+    }
+    if (isJSJit()) {
         return asJSJit().done();
-    if (isWasm())
+    }
+    if (isWasm()) {
         return asWasm().done();
+    }
     MOZ_CRASH("unhandled case");
 }
 
 void
 JitFrameIter::settle()
 {
     if (isJSJit()) {
         const jit::JSJitFrameIter& jitFrame = asJSJit();
-        if (jitFrame.type() != jit::FrameType::WasmToJSJit)
+        if (jitFrame.type() != jit::FrameType::WasmToJSJit) {
             return;
+        }
 
         // Transition from js jit frames to wasm frames: we're on the
         // wasm-to-jit fast path. The current stack layout is as follows:
         // (stack grows downward)
         //
         // [--------------------]
         // [WASM FUNC           ]
         // [WASM JIT EXIT FRAME ]
         // [JIT WASM ENTRY FRAME] <-- we're here.
         //
         // So prevFP points to the wasm jit exit FP, maintaining the invariant in
         // WasmFrameIter that the first frame is an exit frame and can be
         // popped.
 
         wasm::Frame* prevFP = (wasm::Frame*) jitFrame.prevFp();
 
-        if (mustUnwindActivation_)
+        if (mustUnwindActivation_) {
             act_->setWasmExitFP(prevFP);
+        }
 
         iter_.destroy();
         iter_.construct<wasm::WasmFrameIter>(act_, prevFP);
         MOZ_ASSERT(!asWasm().done());
         return;
     }
 
     if (isWasm()) {
         const wasm::WasmFrameIter& wasmFrame = asWasm();
-        if (!wasmFrame.unwoundIonCallerFP())
+        if (!wasmFrame.unwoundIonCallerFP()) {
             return;
+        }
 
         // Transition from wasm frames to jit frames: we're on the
         // jit-to-wasm fast path. The current stack layout is as follows:
         // (stack grows downward)
         //
         // [--------------------]
         // [JIT FRAME           ]
         // [WASM JIT ENTRY FRAME] <-- we're here
         //
         // The wasm iterator has saved the previous jit frame pointer for us.
 
         MOZ_ASSERT(wasmFrame.done());
         uint8_t* prevFP = wasmFrame.unwoundIonCallerFP();
         jit::FrameType prevFrameType = wasmFrame.unwoundIonFrameType();
 
-        if (mustUnwindActivation_)
+        if (mustUnwindActivation_) {
             act_->setJSExitFP(prevFP);
+        }
 
         iter_.destroy();
         iter_.construct<jit::JSJitFrameIter>(act_, prevFrameType, prevFP);
         MOZ_ASSERT(!asJSJit().done());
         return;
     }
 }
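
settle() above handles both fast-path boundaries by swapping the underlying iterator in place: a JS-jit iterator that stops on a WasmToJSJit frame is rebuilt as a wasm iterator starting at the recorded wasm exit FP, and a finished wasm iterator that recorded an unwound Ion caller is rebuilt as a JS-jit iterator. A stripped-down sketch of that "switch representation at a boundary frame" shape with std::variant; the frame types and boundary checks are hypothetical stand-ins:

    #include <variant>

    struct JitIter {
        bool atWasmBoundary() const { return false; }  // cf. FrameType::WasmToJSJit
        void* boundaryFp() const { return nullptr; }   // cf. prevFp()
    };

    struct WasmIter {
        explicit WasmIter(void*) {}
        bool atJitBoundary() const { return false; }   // cf. unwoundIonCallerFP()
    };

    class FrameIter {
        std::variant<JitIter, WasmIter> iter_;

      public:
        // When the current iterator stops at a cross-engine entry frame,
        // destroy it and construct the other iterator kind in its place,
        // starting from the frame pointer the boundary frame recorded.
        void settle() {
            if (auto* jit = std::get_if<JitIter>(&iter_)) {
                if (jit->atWasmBoundary()) {
                    iter_.emplace<WasmIter>(jit->boundaryFp());
                }
                return;
            }
            if (std::get<WasmIter>(iter_).atJitBoundary()) {
                iter_.emplace<JitIter>();
            }
        }
    };
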
 
 void
 JitFrameIter::operator++()
 {
     MOZ_ASSERT(isSome());
     if (isJSJit()) {
         const jit::JSJitFrameIter& jitFrame = asJSJit();
 
         jit::JitFrameLayout* prevFrame = nullptr;
-        if (mustUnwindActivation_ && jitFrame.isScripted())
+        if (mustUnwindActivation_ && jitFrame.isScripted()) {
             prevFrame = jitFrame.jsFrame();
+        }
 
         ++asJSJit();
 
         if (prevFrame) {
             // Unwind the frame by updating packedExitFP. This is necessary
             // so that (1) debugger exception unwind and leave frame hooks
             // don't see this frame when they use ScriptFrameIter, and (2)
             // ScriptFrameIter does not crash when accessing an IonScript
@@ -684,37 +716,40 @@ bool
 FrameIter::principalsSubsumeFrame() const
 {
     // If the caller supplied principals, only show frames which are
     // subsumed (of the same origin or of an origin accessible) by these
     // principals.
 
     MOZ_ASSERT(!done());
 
-    if (!data_.principals_)
+    if (!data_.principals_) {
         return true;
+    }
 
     JSSubsumesOp subsumes = data_.cx_->runtime()->securityCallbacks->subsumes;
-    if (!subsumes)
+    if (!subsumes) {
         return true;
+    }
 
     return subsumes(data_.principals_, realm()->principals());
 }
 
 void
 FrameIter::popInterpreterFrame()
 {
     MOZ_ASSERT(data_.state_ == INTERP);
 
     ++data_.interpFrames_;
 
-    if (data_.interpFrames_.done())
+    if (data_.interpFrames_.done()) {
         popActivation();
-    else
+    } else {
         data_.pc_ = data_.interpFrames_.pc();
+    }
 }
 
 void
 FrameIter::settleOnActivation()
 {
     MOZ_ASSERT(!data_.cx_->inUnsafeCallWithABI);
 
     while (true) {
@@ -804,34 +839,36 @@ FrameIter::FrameIter(JSContext* cx, Debu
                      JSPrincipals* principals)
   : data_(cx, debuggerEvalOption, principals),
     ionInlineFrames_(cx, (js::jit::JSJitFrameIter*) nullptr)
 {
     settleOnActivation();
 
     // If we're not allowed to see this frame, call operator++ to skip this (and
     // other) cross-origin frames.
-    if (!done() && !principalsSubsumeFrame())
+    if (!done() && !principalsSubsumeFrame()) {
         ++*this;
+    }
 }
 
 FrameIter::FrameIter(const FrameIter& other)
   : data_(other.data_),
     ionInlineFrames_(other.data_.cx_, isIonScripted() ? &other.ionInlineFrames_ : nullptr)
 {
 }
 
 FrameIter::FrameIter(const Data& data)
   : data_(data),
     ionInlineFrames_(data.cx_, isIonScripted() ? &jsJitFrame() : nullptr)
 {
     MOZ_ASSERT(data.cx_);
     if (isIonScripted()) {
-        while (ionInlineFrames_.frameNo() != data.ionInlineFrameNo_)
+        while (ionInlineFrames_.frameNo() != data.ionInlineFrameNo_) {
             ++ionInlineFrames_;
+        }
     }
 }
 
 void
 FrameIter::nextJitFrame()
 {
     MOZ_ASSERT(data_.jitFrames_.isSome());
 
@@ -884,61 +921,66 @@ FrameIter::operator++()
             if (interpFrame()->isDebuggerEvalFrame() &&
                 data_.debuggerEvalOption_ == FOLLOW_DEBUGGER_EVAL_PREV_LINK)
             {
                 AbstractFramePtr eifPrev = interpFrame()->evalInFramePrev();
 
                 popInterpreterFrame();
 
                 while (!hasUsableAbstractFramePtr() || abstractFramePtr() != eifPrev) {
-                    if (data_.state_ == JIT)
+                    if (data_.state_ == JIT) {
                         popJitFrame();
-                    else
+                    } else {
                         popInterpreterFrame();
+                    }
                 }
 
                 break;
             }
             popInterpreterFrame();
             break;
           case JIT:
             popJitFrame();
             break;
         }
 
-        if (done() || principalsSubsumeFrame())
+        if (done() || principalsSubsumeFrame()) {
             break;
+        }
     }
 
     return *this;
 }
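 
 // Illustrative sketch (not part of this changeset): the usual consumer-side
 // loop over FrameIter, using only members that appear above (done(),
 // operator++). The single-argument constructor with its default
 // DebuggerEvalOption is an assumption here.
 static size_t
 CountVisibleFrames(JSContext* cx)
 {
     size_t n = 0;
     for (js::FrameIter iter(cx); !iter.done(); ++iter) {
         n++;
     }
     return n;
 }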
 
 FrameIter::Data*
 FrameIter::copyData() const
 {
     Data* data = data_.cx_->new_<Data>(data_);
-    if (!data)
+    if (!data) {
         return nullptr;
+    }
 
-    if (data && isIonScripted())
+    if (data && isIonScripted()) {
         data->ionInlineFrameNo_ = ionInlineFrames_.frameNo();
+    }
     return data;
 }
 
 void*
 FrameIter::rawFramePtr() const
 {
     switch (data_.state_) {
       case DONE:
         return nullptr;
       case INTERP:
         return interpFrame();
       case JIT:
-        if (isJSJit())
+        if (isJSJit()) {
             return jsJitFrame().fp();
+        }
         MOZ_ASSERT(isWasm());
         return nullptr;
     }
     MOZ_CRASH("Unexpected state");
 }
 
 JS::Compartment*
 FrameIter::compartment() const
@@ -953,34 +995,36 @@ FrameIter::compartment() const
     MOZ_CRASH("Unexpected state");
 }
 
 Realm*
 FrameIter::realm() const
 {
     MOZ_ASSERT(!done());
 
-    if (hasScript())
+    if (hasScript()) {
         return script()->realm();
+    }
 
     return wasmInstance()->realm();
 }
 
 bool
 FrameIter::isEvalFrame() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
         return interpFrame()->isEvalFrame();
       case JIT:
         if (isJSJit()) {
-            if (jsJitFrame().isBaselineJS())
+            if (jsJitFrame().isBaselineJS()) {
                 return jsJitFrame().baselineFrame()->isEvalFrame();
+            }
             MOZ_ASSERT(!script()->isForEval());
             return false;
         }
         MOZ_ASSERT(isWasm());
         return false;
     }
     MOZ_CRASH("Unexpected state");
 }
@@ -991,38 +1035,41 @@ FrameIter::isFunctionFrame() const
     MOZ_ASSERT(!done());
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
         return interpFrame()->isFunctionFrame();
       case JIT:
         if (isJSJit()) {
-            if (jsJitFrame().isBaselineJS())
+            if (jsJitFrame().isBaselineJS()) {
                 return jsJitFrame().baselineFrame()->isFunctionFrame();
+            }
             return script()->functionNonDelazifying();
         }
         MOZ_ASSERT(isWasm());
         return false;
     }
     MOZ_CRASH("Unexpected state");
 }
 
 JSAtom*
 FrameIter::maybeFunctionDisplayAtom() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
       case JIT:
-        if (isWasm())
+        if (isWasm()) {
             return wasmFrame().functionDisplayAtom();
-        if (isFunctionFrame())
+        }
+        if (isFunctionFrame()) {
             return calleeTemplate()->displayAtom();
+        }
         return nullptr;
     }
 
     MOZ_CRASH("Unexpected state");
 }
 
 ScriptSource*
 FrameIter::scriptSource() const
@@ -1041,81 +1088,86 @@ FrameIter::scriptSource() const
 const char*
 FrameIter::filename() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
       case JIT:
-        if (isWasm())
+        if (isWasm()) {
             return wasmFrame().filename();
+        }
         return script()->filename();
     }
 
     MOZ_CRASH("Unexpected state");
 }
 
 const char16_t*
 FrameIter::displayURL() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
       case JIT:
-        if (isWasm())
+        if (isWasm()) {
             return wasmFrame().displayURL();
+        }
         ScriptSource* ss = script()->scriptSource();
         return ss->hasDisplayURL() ? ss->displayURL() : nullptr;
     }
     MOZ_CRASH("Unexpected state");
 }
 
 unsigned
 FrameIter::computeLine(uint32_t* column) const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
       case JIT:
-        if (isWasm())
+        if (isWasm()) {
             return wasmFrame().computeLine(column);
+        }
         return PCToLineNumber(script(), pc(), column);
     }
 
     MOZ_CRASH("Unexpected state");
 }
 
 bool
 FrameIter::mutedErrors() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
       case JIT:
-        if (isWasm())
+        if (isWasm()) {
             return wasmFrame().mutedErrors();
+        }
         return script()->mutedErrors();
     }
     MOZ_CRASH("Unexpected state");
 }
 
 bool
 FrameIter::isConstructing() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case JIT:
         MOZ_ASSERT(isJSJit());
-        if (jsJitFrame().isIonScripted())
+        if (jsJitFrame().isIonScripted()) {
             return ionInlineFrames_.isConstructing();
+        }
         MOZ_ASSERT(jsJitFrame().isBaselineJS());
         return jsJitFrame().isConstructing();
       case INTERP:
         return interpFrame()->isConstructing();
     }
 
     MOZ_CRASH("Unexpected state");
 }
@@ -1130,18 +1182,19 @@ FrameIter::ensureHasRematerializedFrame(
 bool
 FrameIter::hasUsableAbstractFramePtr() const
 {
     switch (data_.state_) {
       case DONE:
         return false;
       case JIT:
         if (isJSJit()) {
-            if (jsJitFrame().isBaselineJS())
+            if (jsJitFrame().isBaselineJS()) {
                 return true;
+            }
 
             MOZ_ASSERT(jsJitFrame().isIonScripted());
             return !!activation()->asJit()->lookupRematerializedFrame(jsJitFrame().fp(),
                                                                       ionInlineFrames_.frameNo());
         }
         MOZ_ASSERT(isWasm());
         return wasmFrame().debugEnabled();
       case INTERP:
@@ -1154,18 +1207,19 @@ AbstractFramePtr
 FrameIter::abstractFramePtr() const
 {
     MOZ_ASSERT(hasUsableAbstractFramePtr());
     switch (data_.state_) {
       case DONE:
         break;
       case JIT: {
         if (isJSJit()) {
-            if (jsJitFrame().isBaselineJS())
+            if (jsJitFrame().isBaselineJS()) {
                 return jsJitFrame().baselineFrame();
+            }
             MOZ_ASSERT(isIonScripted());
             return activation()->asJit()->lookupRematerializedFrame(jsJitFrame().fp(),
                                                                     ionInlineFrames_.frameNo());
         }
         MOZ_ASSERT(isWasm());
         MOZ_ASSERT(wasmFrame().debugEnabled());
         return wasmFrame().debugFrame();
       }
@@ -1183,34 +1237,36 @@ FrameIter::updatePcQuadratic()
       case DONE:
         break;
       case INTERP: {
         InterpreterFrame* frame = interpFrame();
         InterpreterActivation* activation = data_.activations_->asInterpreter();
 
         // Look for the current frame.
         data_.interpFrames_ = InterpreterFrameIterator(activation);
-        while (data_.interpFrames_.frame() != frame)
+        while (data_.interpFrames_.frame() != frame) {
             ++data_.interpFrames_;
+        }
 
         // Update the pc.
         MOZ_ASSERT(data_.interpFrames_.frame() == frame);
         data_.pc_ = data_.interpFrames_.pc();
         return;
       }
       case JIT:
         if (jsJitFrame().isBaselineJS()) {
             jit::BaselineFrame* frame = jsJitFrame().baselineFrame();
             jit::JitActivation* activation = data_.activations_->asJit();
 
             // activation's exitFP may be invalid, so create a new
             // activation iterator.
             data_.activations_ = ActivationIterator(data_.cx_);
-            while (data_.activations_.activation() != activation)
+            while (data_.activations_.activation() != activation) {
                 ++data_.activations_;
+            }
 
             // Look for the current frame.
             data_.jitFrames_ = JitFrameIter(data_.activations_->asJit());
             while (!isJSJit() ||
                    !jsJitFrame().isBaselineJS() ||
                    jsJitFrame().baselineFrame() != frame)
             {
                 ++data_.jitFrames_;
@@ -1230,34 +1286,36 @@ void
 FrameIter::wasmUpdateBytecodeOffset()
 {
     MOZ_RELEASE_ASSERT(isWasm(), "Unexpected state");
 
     wasm::DebugFrame* frame = wasmFrame().debugFrame();
 
     // Relookup the current frame, updating the bytecode offset in the process.
     data_.jitFrames_ = JitFrameIter(data_.activations_->asJit());
-    while (wasmFrame().debugFrame() != frame)
+    while (wasmFrame().debugFrame() != frame) {
         ++data_.jitFrames_;
+    }
 
     MOZ_ASSERT(wasmFrame().debugFrame() == frame);
 }
 
 JSFunction*
 FrameIter::calleeTemplate() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
         MOZ_ASSERT(isFunctionFrame());
         return &interpFrame()->callee();
       case JIT:
-        if (jsJitFrame().isBaselineJS())
+        if (jsJitFrame().isBaselineJS()) {
             return jsJitFrame().callee();
+        }
         MOZ_ASSERT(jsJitFrame().isIonScripted());
         return ionInlineFrames_.calleeTemplate();
     }
     MOZ_CRASH("Unexpected state");
 }
 
 JSFunction*
 FrameIter::callee(JSContext* cx) const
@@ -1314,18 +1372,19 @@ FrameIter::numActualArgs() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case INTERP:
         MOZ_ASSERT(isFunctionFrame());
         return interpFrame()->numActualArgs();
       case JIT:
-        if (isIonScripted())
+        if (isIonScripted()) {
             return ionInlineFrames_.numActualArgs();
+        }
         MOZ_ASSERT(jsJitFrame().isBaselineJS());
         return jsJitFrame().numActualArgs();
     }
     MOZ_CRASH("Unexpected state");
 }
 
 unsigned
 FrameIter::numFormalArgs() const
@@ -1362,18 +1421,19 @@ FrameIter::environmentChain(JSContext* c
 }
 
 CallObject&
 FrameIter::callObj(JSContext* cx) const
 {
     MOZ_ASSERT(calleeTemplate()->needsCallObject());
 
     JSObject* pobj = environmentChain(cx);
-    while (!pobj->is<CallObject>())
+    while (!pobj->is<CallObject>()) {
         pobj = pobj->enclosingEnvironment();
+    }
     return pobj->as<CallObject>();
 }
 
 bool
 FrameIter::hasArgsObj() const
 {
     return abstractFramePtr().hasArgsObj();
 }
@@ -1422,18 +1482,19 @@ FrameIter::newTarget() const
 
 Value
 FrameIter::returnValue() const
 {
     switch (data_.state_) {
       case DONE:
         break;
       case JIT:
-        if (jsJitFrame().isBaselineJS())
+        if (jsJitFrame().isBaselineJS()) {
             return jsJitFrame().baselineFrame()->returnValue();
+        }
         break;
       case INTERP:
         return interpFrame()->returnValue();
     }
     MOZ_CRASH("Unexpected state");
 }
 
 void
@@ -1510,27 +1571,29 @@ js::SelfHostedFramesVisible()
     return visible;
 }
 #endif
 
 void
 NonBuiltinFrameIter::settle()
 {
     if (!SelfHostedFramesVisible()) {
-        while (!done() && hasScript() && script()->selfHosted())
+        while (!done() && hasScript() && script()->selfHosted()) {
             FrameIter::operator++();
+        }
     }
 }
 
 void
 NonBuiltinScriptFrameIter::settle()
 {
     if (!SelfHostedFramesVisible()) {
-        while (!done() && script()->selfHosted())
+        while (!done() && script()->selfHosted()) {
             ScriptFrameIter::operator++();
+        }
     }
 }
 
 ActivationEntryMonitor::ActivationEntryMonitor(JSContext* cx)
   : cx_(cx), entryMonitor_(cx->entryMonitor)
 {
     cx->entryMonitor = nullptr;
 }
@@ -1550,36 +1613,38 @@ ActivationEntryMonitor::ActivationEntryM
   : ActivationEntryMonitor(cx)
 {
     if (entryMonitor_) {
         // The InterpreterFrame is not yet part of an Activation, so it won't
         // be traced if we trigger GC here. Suppress GC to avoid this.
         gc::AutoSuppressGC suppressGC(cx);
         RootedValue stack(cx, asyncStack(cx));
         const char* asyncCause = cx->asyncCauseForNewActivations;
-        if (entryFrame->isFunctionFrame())
+        if (entryFrame->isFunctionFrame()) {
             entryMonitor_->Entry(cx, &entryFrame->callee(), stack, asyncCause);
-        else
+        } else {
             entryMonitor_->Entry(cx, entryFrame->script(), stack, asyncCause);
+        }
     }
 }
 
 ActivationEntryMonitor::ActivationEntryMonitor(JSContext* cx, jit::CalleeToken entryToken)
   : ActivationEntryMonitor(cx)
 {
     if (entryMonitor_) {
         // The CalleeToken is not traced at this point and we also don't want
         // a GC to discard the code we're about to enter, so we suppress GC.
         gc::AutoSuppressGC suppressGC(cx);
         RootedValue stack(cx, asyncStack(cx));
         const char* asyncCause = cx->asyncCauseForNewActivations;
-        if (jit::CalleeTokenIsFunction(entryToken))
+        if (jit::CalleeTokenIsFunction(entryToken)) {
             entryMonitor_->Entry(cx_, jit::CalleeTokenToFunction(entryToken), stack, asyncCause);
-        else
+        } else {
             entryMonitor_->Entry(cx_, jit::CalleeTokenToScript(entryToken), stack, asyncCause);
+        }
     }
 }
 
 /*****************************************************************************/
 
 jit::JitActivation::JitActivation(JSContext* cx)
   : Activation(cx, Jit),
     packedExitFP_(nullptr),
@@ -1592,18 +1657,19 @@ jit::JitActivation::JitActivation(JSCont
     lastProfilingCallSite_(nullptr)
 {
     cx->jitActivation = this;
     registerProfiling();
 }
 
 jit::JitActivation::~JitActivation()
 {
-    if (isProfiling())
+    if (isProfiling()) {
         unregisterProfiling();
+    }
     cx_->jitActivation = prevJitActivation_;
 
     // All recovered values are taken from the activation during the bailout.
     MOZ_ASSERT(ionRecovery_.empty());
 
     // The BailoutFrameInfo should have unregistered itself from the
     // JitActivations.
     MOZ_ASSERT(!bailoutData_);
@@ -1626,48 +1692,51 @@ jit::JitActivation::cleanBailoutData()
 {
     MOZ_ASSERT(bailoutData_);
     bailoutData_ = nullptr;
 }
 
 void
 jit::JitActivation::removeRematerializedFrame(uint8_t* top)
 {
-    if (!rematerializedFrames_)
+    if (!rematerializedFrames_) {
         return;
+    }
 
     if (RematerializedFrameTable::Ptr p = rematerializedFrames_->lookup(top)) {
         RematerializedFrame::FreeInVector(p->value());
         rematerializedFrames_->remove(p);
     }
 }
 
 void
 jit::JitActivation::clearRematerializedFrames()
 {
-    if (!rematerializedFrames_)
+    if (!rematerializedFrames_) {
         return;
+    }
 
     for (RematerializedFrameTable::Enum e(*rematerializedFrames_); !e.empty(); e.popFront()) {
         RematerializedFrame::FreeInVector(e.front().value());
         e.removeFront();
     }
 }
 
 jit::RematerializedFrame*
 jit::JitActivation::getRematerializedFrame(JSContext* cx, const JSJitFrameIter& iter,
                                            size_t inlineDepth)
 {
     MOZ_ASSERT(iter.activation() == this);
     MOZ_ASSERT(iter.isIonScripted());
 
     if (!rematerializedFrames_) {
         rematerializedFrames_ = cx->make_unique<RematerializedFrameTable>(cx);
-        if (!rematerializedFrames_)
+        if (!rematerializedFrames_) {
             return nullptr;
+        }
     }
 
     uint8_t* top = iter.fp();
     RematerializedFrameTable::AddPtr p = rematerializedFrames_->lookupForAdd(top);
     if (!p) {
         RematerializedFrameVector frames(cx);
 
         // The unit of rematerialization is an uninlined frame and its inlined
@@ -1678,103 +1747,114 @@ jit::JitActivation::getRematerializedFra
         InlineFrameIterator inlineIter(cx, &iter);
         MaybeReadFallback recover(cx, this, &iter);
 
         // Frames are often rematerialized with the cx inside a Debugger's
         // realm. To recover slots and to create CallObjects, we need to
         // be in the script's realm.
         AutoRealmUnchecked ar(cx, iter.script()->realm());
 
-        if (!RematerializedFrame::RematerializeInlineFrames(cx, top, inlineIter, recover, frames))
+        if (!RematerializedFrame::RematerializeInlineFrames(cx, top, inlineIter, recover, frames)) {
             return nullptr;
+        }
 
         if (!rematerializedFrames_->add(p, top, std::move(frames))) {
             ReportOutOfMemory(cx);
             return nullptr;
         }
 
         // See comment in unsetPrevUpToDateUntil.
         DebugEnvironments::unsetPrevUpToDateUntil(cx, p->value()[inlineDepth]);
     }
 
     return p->value()[inlineDepth];
 }
 
 jit::RematerializedFrame*
 jit::JitActivation::lookupRematerializedFrame(uint8_t* top, size_t inlineDepth)
 {
-    if (!rematerializedFrames_)
+    if (!rematerializedFrames_) {
         return nullptr;
-    if (RematerializedFrameTable::Ptr p = rematerializedFrames_->lookup(top))
+    }
+    if (RematerializedFrameTable::Ptr p = rematerializedFrames_->lookup(top)) {
         return inlineDepth < p->value().length() ? p->value()[inlineDepth] : nullptr;
+    }
     return nullptr;
 }
 
 void
 jit::JitActivation::removeRematerializedFramesFromDebugger(JSContext* cx, uint8_t* top)
 {
     // Ion bailout can fail due to overrecursion and OOM. In such cases we
     // cannot honor any further Debugger hooks on the frame, and need to
     // ensure that its Debugger.Frame entry is cleaned up.
-    if (!cx->realm()->isDebuggee() || !rematerializedFrames_)
+    if (!cx->realm()->isDebuggee() || !rematerializedFrames_) {
         return;
+    }
     if (RematerializedFrameTable::Ptr p = rematerializedFrames_->lookup(top)) {
-        for (uint32_t i = 0; i < p->value().length(); i++)
+        for (uint32_t i = 0; i < p->value().length(); i++) {
             Debugger::handleUnrecoverableIonBailoutError(cx, p->value()[i]);
+        }
         RematerializedFrame::FreeInVector(p->value());
         rematerializedFrames_->remove(p);
     }
 }
 
 void
 jit::JitActivation::traceRematerializedFrames(JSTracer* trc)
 {
-    if (!rematerializedFrames_)
+    if (!rematerializedFrames_) {
         return;
-    for (RematerializedFrameTable::Enum e(*rematerializedFrames_); !e.empty(); e.popFront())
+    }
+    for (RematerializedFrameTable::Enum e(*rematerializedFrames_); !e.empty(); e.popFront()) {
         e.front().value().trace(trc);
+    }
 }
 
 bool
 jit::JitActivation::registerIonFrameRecovery(RInstructionResults&& results)
 {
     // Check that there is no entry in the vector yet.
     MOZ_ASSERT(!maybeIonFrameRecovery(results.frame()));
-    if (!ionRecovery_.append(std::move(results)))
+    if (!ionRecovery_.append(std::move(results))) {
         return false;
+    }
 
     return true;
 }
 
 jit::RInstructionResults*
 jit::JitActivation::maybeIonFrameRecovery(JitFrameLayout* fp)
 {
     for (RInstructionResults* it = ionRecovery_.begin(); it != ionRecovery_.end(); it++) {
-        if (it->frame() == fp)
+        if (it->frame() == fp) {
             return it;
+        }
     }
 
     return nullptr;
 }
 
 void
 jit::JitActivation::removeIonFrameRecovery(JitFrameLayout* fp)
 {
     RInstructionResults* elem = maybeIonFrameRecovery(fp);
-    if (!elem)
+    if (!elem) {
         return;
+    }
 
     ionRecovery_.erase(elem);
 }
 
 void
 jit::JitActivation::traceIonRecovery(JSTracer* trc)
 {
-    for (RInstructionResults* it = ionRecovery_.begin(); it != ionRecovery_.end(); it++)
+    for (RInstructionResults* it = ionRecovery_.begin(); it != ionRecovery_.end(); it++) {
         it->trace(trc);
+    }
 }
 
 void
 jit::JitActivation::startWasmTrap(wasm::Trap trap, uint32_t bytecodeOffset,
                                   const wasm::RegisterState& state)
 {
     MOZ_ASSERT(!isWasmTrapping());
 
@@ -1786,18 +1866,19 @@ jit::JitActivation::startWasmTrap(wasm::
     void* pc = unwindState.pc;
     wasm::Frame* fp = unwindState.fp;
 
     const wasm::Code& code = fp->tls->instance->code();
     MOZ_RELEASE_ASSERT(&code == wasm::LookupCode(pc));
 
     // If the frame was unwound, the bytecodeOffset must be recovered from the
     // callsite so that it is accurate.
-    if (unwound)
+    if (unwound) {
         bytecodeOffset = code.lookupCallSite(pc)->lineOrBytecode();
+    }
 
     setWasmExitFP(fp);
     wasmTrapData_.emplace();
     wasmTrapData_->resumePC = ((uint8_t*)state.pc) + jit::WasmTrapInstructionLength;
     wasmTrapData_->unwoundPC = pc;
     wasmTrapData_->trap = trap;
     wasmTrapData_->bytecodeOffset = bytecodeOffset;
 
@@ -1859,25 +1940,28 @@ ActivationIterator::operator++()
 }
 
 JS::ProfilingFrameIterator::ProfilingFrameIterator(JSContext* cx, const RegisterState& state,
                                                    const Maybe<uint64_t>& samplePositionInProfilerBuffer)
   : cx_(cx),
     samplePositionInProfilerBuffer_(samplePositionInProfilerBuffer),
     activation_(nullptr)
 {
-    if (!cx->runtime()->geckoProfiler().enabled())
+    if (!cx->runtime()->geckoProfiler().enabled()) {
         MOZ_CRASH("ProfilingFrameIterator called when geckoProfiler not enabled for runtime.");
+    }
 
-    if (!cx->profilingActivation())
+    if (!cx->profilingActivation()) {
         return;
+    }
 
     // If profiler sampling is not enabled, skip.
-    if (!cx->isProfilerSamplingEnabled())
+    if (!cx->isProfilerSamplingEnabled()) {
         return;
+    }
 
     activation_ = cx->profilingActivation();
 
     MOZ_ASSERT(activation_->isProfiling());
 
     static_assert(sizeof(wasm::ProfilingFrameIterator) <= StorageSpace &&
                   sizeof(jit::JSJitProfilingFrameIterator) <= StorageSpace,
                   "ProfilingFrameIterator::storage_ is too small");
@@ -1897,29 +1981,29 @@ JS::ProfilingFrameIterator::~ProfilingFr
     }
 }
 
 void
 JS::ProfilingFrameIterator::operator++()
 {
     MOZ_ASSERT(!done());
     MOZ_ASSERT(activation_->isJit());
-    if (isWasm())
+    if (isWasm()) {
         ++wasmIter();
-    else
+    } else {
         ++jsJitIter();
+    }
     settle();
 }
 
 void
 JS::ProfilingFrameIterator::settleFrames()
 {
     // Handle transition frames (see comment in JitFrameIter::operator++).
-    if (isJSJit() && !jsJitIter().done() && jsJitIter().frameType() == jit::FrameType::WasmToJSJit)
-    {
+    if (isJSJit() && !jsJitIter().done() && jsJitIter().frameType() == jit::FrameType::WasmToJSJit) {
         wasm::Frame* fp = (wasm::Frame*) jsJitIter().fp();
         iteratorDestroy();
         new (storage()) wasm::ProfilingFrameIterator(*activation_->asJit(), fp);
         kind_ = Kind::Wasm;
         MOZ_ASSERT(!wasmIter().done());
         return;
     }
 
@@ -1938,18 +2022,19 @@ JS::ProfilingFrameIterator::settleFrames
 
 void
 JS::ProfilingFrameIterator::settle()
 {
     settleFrames();
     while (iteratorDone()) {
         iteratorDestroy();
         activation_ = activation_->prevProfiling();
-        if (!activation_)
+        if (!activation_) {
             return;
+        }
         iteratorConstruct();
         settleFrames();
     }
 }
 
 void
 JS::ProfilingFrameIterator::iteratorConstruct(const RegisterState& state)
 {
@@ -2012,30 +2097,32 @@ JS::ProfilingFrameIterator::iteratorDest
 }
 
 bool
 JS::ProfilingFrameIterator::iteratorDone()
 {
     MOZ_ASSERT(!done());
     MOZ_ASSERT(activation_->isJit());
 
-    if (isWasm())
+    if (isWasm()) {
         return wasmIter().done();
+    }
 
     return jsJitIter().done();
 }
 
 void*
 JS::ProfilingFrameIterator::stackAddress() const
 {
     MOZ_ASSERT(!done());
     MOZ_ASSERT(activation_->isJit());
 
-    if (isWasm())
+    if (isWasm()) {
         return wasmIter().stackAddress();
+    }
 
     return jsJitIter().stackAddress();
 }
 
 Maybe<JS::ProfilingFrameIterator::Frame>
 JS::ProfilingFrameIterator::getPhysicalFrameAndEntry(jit::JitcodeGlobalEntry* entry) const
 {
     void* stackAddr = stackAddress();
@@ -2051,65 +2138,70 @@ JS::ProfilingFrameIterator::getPhysicalF
         return mozilla::Some(frame);
     }
 
     MOZ_ASSERT(isJSJit());
 
     // Look up an entry for the return address.
     void* returnAddr = jsJitIter().returnAddressToFp();
     jit::JitcodeGlobalTable* table = cx_->runtime()->jitRuntime()->getJitcodeGlobalTable();
-    if (samplePositionInProfilerBuffer_)
+    if (samplePositionInProfilerBuffer_) {
         *entry = table->lookupForSamplerInfallible(returnAddr, cx_->runtime(),
                                                    *samplePositionInProfilerBuffer_);
-    else
+    } else {
         *entry = table->lookupInfallible(returnAddr);
+    }
 
     MOZ_ASSERT(entry->isIon() || entry->isIonCache() || entry->isBaseline() || entry->isDummy());
 
     // Dummy frames produce no stack frames.
-    if (entry->isDummy())
+    if (entry->isDummy()) {
         return mozilla::Nothing();
+    }
 
     Frame frame;
     frame.kind = entry->isBaseline() ? Frame_Baseline : Frame_Ion;
     frame.stackAddress = stackAddr;
     frame.returnAddress = returnAddr;
     frame.activation = activation_;
     frame.label = nullptr;
     frame.endStackAddress = activation_->asJit()->jsOrWasmExitFP();
     return mozilla::Some(frame);
 }
 
 uint32_t
 JS::ProfilingFrameIterator::extractStack(Frame* frames, uint32_t offset, uint32_t end) const
 {
-    if (offset >= end)
+    if (offset >= end) {
         return 0;
+    }
 
     jit::JitcodeGlobalEntry entry;
     Maybe<Frame> physicalFrame = getPhysicalFrameAndEntry(&entry);
 
     // Dummy frames produce no stack frames.
-    if (physicalFrame.isNothing())
+    if (physicalFrame.isNothing()) {
         return 0;
+    }
 
     if (isWasm()) {
         frames[offset] = physicalFrame.value();
         frames[offset].label = wasmIter().label();
         return 1;
     }
 
     // Extract the stack for the entry.  Assume maximum inlining depth is <64
     const char* labels[64];
     uint32_t depth = entry.callStackAtAddr(cx_->runtime(), jsJitIter().returnAddressToFp(),
                                            labels, ArrayLength(labels));
     MOZ_ASSERT(depth < ArrayLength(labels));
     for (uint32_t i = 0; i < depth; i++) {
-        if (offset + i >= end)
+        if (offset + i >= end) {
             return i;
+        }
         frames[offset + i] = physicalFrame.value();
         frames[offset + i].label = labels[i];
     }
 
     return depth;
 }
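 
 // Illustrative sketch (not part of this changeset): how a sampler-style
 // caller drains extractStack() into a fixed-size buffer. The nested
 // RegisterState and Frame types and the three-argument constructor are
 // assumptions taken from the surrounding code; the sample position is simply
 // passed as Nothing here.
 static uint32_t
 SampleStack(JSContext* cx, const JS::ProfilingFrameIterator::RegisterState& state,
             JS::ProfilingFrameIterator::Frame* frames, uint32_t capacity)
 {
     uint32_t offset = 0;
     for (JS::ProfilingFrameIterator iter(cx, state, mozilla::Nothing()); !iter.done(); ++iter) {
         offset += iter.extractStack(frames, offset, capacity);
         if (offset >= capacity) {
             break;
         }
     }
     return offset;
 }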
 
 Maybe<JS::ProfilingFrameIterator::Frame>
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -641,23 +641,25 @@ class InterpreterFrame
     /*
      * New Target
      *
      * Only function frames have a meaningful newTarget. An eval frame in a
      * function will have a copy of the newTarget of the enclosing function
      * frame.
      */
     Value newTarget() const {
-        if (isEvalFrame())
+        if (isEvalFrame()) {
             return ((Value*)this)[-1];
+        }
 
         MOZ_ASSERT(isFunctionFrame());
 
-        if (callee().isArrow())
+        if (callee().isArrow()) {
             return callee().getExtendedSlot(FunctionExtended::ARROW_NEWTARGET_SLOT);
+        }
 
         if (isConstructing()) {
             unsigned pushedArgs = Max(numFormalArgs(), numActualArgs());
             return argv()[pushedArgs];
         }
         return UndefinedValue();
     }
 
@@ -677,18 +679,19 @@ class InterpreterFrame
 
     /* Return value */
 
     bool hasReturnValue() const {
         return flags_ & HAS_RVAL;
     }
 
     MutableHandleValue returnValue() {
-        if (!hasReturnValue())
+        if (!hasReturnValue()) {
             rval_.setUndefined();
+        }
         return MutableHandleValue::fromMarkedLocation(&rval_);
     }
 
     void markReturnValue() {
         flags_ |= HAS_RVAL;
     }
 
     void setReturnValue(const Value& v) {
@@ -963,23 +966,25 @@ class GenericArgsBase
         if (argc > ARGS_LENGTH_MAX) {
             JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_TOO_MANY_ARGUMENTS);
             return false;
         }
 
         // callee, this, arguments[, new.target iff constructing]
         size_t len = 2 + argc + uint32_t(Construct);
         MOZ_ASSERT(len > argc);  // no overflow
-        if (!v_.resize(len))
+        if (!v_.resize(len)) {
             return false;
+        }
 
         *static_cast<JS::CallArgs*>(this) = CallArgsFromVp(argc, v_.begin());
         this->constructing_ = Construct;
-        if (Construct)
+        if (Construct) {
             this->CallArgs::setThis(MagicValue(JS_IS_CONSTRUCTING));
+        }
         return true;
     }
 };
 
 /** Function call/construct args of statically-known count. */
 template <MaybeConstruct Construct, size_t N>
 class FixedArgsBase
   : public mozilla::Conditional<Construct, AnyConstructArgs, AnyInvokeArgs>::Type
@@ -987,18 +992,19 @@ class FixedArgsBase
     static_assert(N <= ARGS_LENGTH_MAX, "o/~ too many args o/~");
 
   protected:
     JS::AutoValueArray<2 + N + uint32_t(Construct)> v_;
 
     explicit FixedArgsBase(JSContext* cx) : v_(cx) {
         *static_cast<JS::CallArgs*>(this) = CallArgsFromVp(N, v_.begin());
         this->constructing_ = Construct;
-        if (Construct)
+        if (Construct) {
             this->CallArgs::setThis(MagicValue(JS_IS_CONSTRUCTING));
+        }
     }
 };
 
 } // namespace detail
 
 /** Function call args of statically-unknown count. */
 class InvokeArgs : public detail::GenericArgsBase<NO_CONSTRUCT>
 {
@@ -1048,21 +1054,23 @@ class FixedConstructArgs : public detail
     explicit FixedConstructArgs(JSContext* cx) : Base(cx) {}
 };
 
 template <class Args, class Arraylike>
 inline bool
 FillArgumentsFromArraylike(JSContext* cx, Args& args, const Arraylike& arraylike)
 {
     uint32_t len = arraylike.length();
-    if (!args.init(cx, len))
+    if (!args.init(cx, len)) {
         return false;
-
-    for (uint32_t i = 0; i < len; i++)
+    }
+
+    for (uint32_t i = 0; i < len; i++) {
         args[i].set(arraylike[i]);
+    }
 
     return true;
 }
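 
 // Illustrative sketch (not part of this changeset): a typical caller that
 // pairs InvokeArgs with FillArgumentsFromArraylike. The js::Call overload
 // taking an AnyInvokeArgs (declared elsewhere) is assumed to be visible to
 // such a caller.
 template <class Arraylike>
 static bool
 CallWithElements(JSContext* cx, HandleValue fval, HandleValue thisv,
                  const Arraylike& elements, MutableHandleValue rval)
 {
     InvokeArgs args(cx);
     if (!FillArgumentsFromArraylike(cx, args, elements)) {
         return false;
     }
     return Call(cx, fval, thisv, args, rval);
 }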
 
 } // namespace js
 
 namespace mozilla {
 
@@ -1586,18 +1594,19 @@ class InterpreterActivation : public Act
     }
 
     bool isProfiling() const {
         return false;
     }
 
     // If this js::Interpret frame is running |script|, enable interrupts.
     void enableInterruptsIfRunning(JSScript* script) {
-        if (regs_.fp()->script() == script)
+        if (regs_.fp()->script() == script) {
             enableInterruptsUnconditionally();
+        }
     }
     void enableInterruptsUnconditionally() {
         opMask_ = EnableInterruptsPseudoOpcode;
     }
     void clearInterruptsMask() {
         opMask_ = 0;
     }
 };
@@ -1836,18 +1845,19 @@ class JitActivation : public Activation
     bool isWasmTrapping() const { return !!wasmTrapData_; }
     const wasm::TrapData& wasmTrapData() { return *wasmTrapData_; }
 };
 
 // A filtering of the ActivationIterator to only stop at JitActivations.
 class JitActivationIterator : public ActivationIterator
 {
     void settle() {
-        while (!done() && !activation_->isJit())
+        while (!done() && !activation_->isJit()) {
             ActivationIterator::operator++();
+        }
     }
 
   public:
     explicit JitActivationIterator(JSContext* cx)
       : ActivationIterator(cx)
     {
         settle();
     }
@@ -1975,18 +1985,19 @@ class JitFrameIter
 };
 
 // A JitFrameIter that skips all the non-JSJit frames, skipping interleaved
 // frames of any another kind.
 
 class OnlyJSJitFrameIter : public JitFrameIter
 {
     void settle() {
-        while (!done() && !isJSJit())
+        while (!done() && !isJSJit()) {
             JitFrameIter::operator++();
+        }
     }
 
   public:
     explicit OnlyJSJitFrameIter(jit::JitActivation* act);
     explicit OnlyJSJitFrameIter(JSContext* cx);
     explicit OnlyJSJitFrameIter(const ActivationIterator& cx);
 
     void operator++() {
@@ -2205,18 +2216,19 @@ class FrameIter
     void nextJitFrame();
     void popJitFrame();
     void settleOnActivation();
 };
 
 class ScriptFrameIter : public FrameIter
 {
     void settle() {
-        while (!done() && !hasScript())
+        while (!done() && !hasScript()) {
             FrameIter::operator++();
+        }
     }
 
   public:
     explicit ScriptFrameIter(JSContext* cx,
                              DebuggerEvalOption debuggerEvalOption = FOLLOW_DEBUGGER_EVAL_PREV_LINK)
       : FrameIter(cx, debuggerEvalOption)
     {
         settle();
@@ -2348,20 +2360,22 @@ class AllScriptFramesIter : public Scrip
 
 /* Popular inline definitions. */
 
 inline JSScript*
 FrameIter::script() const
 {
     MOZ_ASSERT(!done());
     MOZ_ASSERT(hasScript());
-    if (data_.state_ == INTERP)
+    if (data_.state_ == INTERP) {
         return interpFrame()->script();
-    if (jsJitFrame().isIonJS())
+    }
+    if (jsJitFrame().isIonJS()) {
         return ionInlineFrames_.script();
+    }
     return jsJitFrame().script();
 }
 
 inline bool
 FrameIter::wasmDebugEnabled() const
 {
     MOZ_ASSERT(!done());
     MOZ_ASSERT(isWasm());
@@ -2409,23 +2423,25 @@ FrameIter::interpFrame() const
 {
     MOZ_ASSERT(data_.state_ == INTERP);
     return data_.interpFrames_.frame();
 }
 
 inline bool
 FrameIter::isPhysicalJitFrame() const
 {
-    if (!isJSJit())
+    if (!isJSJit()) {
         return false;
+    }
 
     auto& jitFrame = jsJitFrame();
 
-    if (jitFrame.isBaselineJS())
+    if (jitFrame.isBaselineJS()) {
         return true;
+    }
 
     if (jitFrame.isIonScripted()) {
         // Only the bottom of a group of inlined Ion frames is a physical frame.
         return ionInlineFrames_.frameNo() == 0;
     }
 
     return false;
 }
--- a/js/src/vm/Stopwatch.cpp
+++ b/js/src/vm/Stopwatch.cpp
@@ -19,18 +19,19 @@
 #include "vm/Realm.h"
 #include "vm/Runtime.h"
 
 namespace js {
 
 bool
 PerformanceMonitoring::addRecentGroup(PerformanceGroup* group)
 {
-    if (group->isUsedInThisIteration())
+    if (group->isUsedInThisIteration()) {
         return true;
+    }
 
     group->setIsUsedInThisIteration(true);
     return recentGroups_.append(group);
 }
 
 void
 PerformanceMonitoring::reset()
 {
@@ -47,27 +48,29 @@ PerformanceMonitoring::reset()
     // valid sets of measures just because we are on a CPU that has a
     // lower RDTSC.
     highestTimestampCounter_ = 0;
 }
 
 void
 PerformanceMonitoring::start()
 {
-    if (!isMonitoringJank_)
+    if (!isMonitoringJank_) {
         return;
+    }
 
     if (iteration_ == startedAtIteration_) {
         // The stopwatch is already started for this iteration.
         return;
     }
 
     startedAtIteration_ = iteration_;
-    if (stopwatchStartCallback)
+    if (stopwatchStartCallback) {
         stopwatchStartCallback(iteration_, stopwatchStartClosure);
+    }
 }
 
 // Commit the data that has been collected during the iteration
 // into the actual `PerformanceData`.
 //
 // We use the proportion of cycles-spent-in-group over
 // cycles-spent-in-toplevel-group as an approximation to allocate
 // system (kernel) time and user (CPU) time to each group. Note
@@ -152,18 +155,19 @@ PerformanceMonitoring::commit()
 
     // The move operation is generally constant time, unless
     // `recentGroups_.length()` is very small, in which case
     // it's fast just because it's small.
     PerformanceGroupVector recentGroups(std::move(recentGroups_));
     recentGroups_ = PerformanceGroupVector(); // Reconstruct after `Move`.
 
     bool success = true;
-    if (stopwatchCommitCallback)
+    if (stopwatchCommitCallback) {
         success = stopwatchCommitCallback(iteration_, recentGroups, stopwatchCommitClosure);
+    }
 
     // Heuristic: we expect to have roughly the same number of groups as in
     // the previous iteration.
     const size_t capacity = std::min(recentGroups.capacity(), MAX_GROUPS_INIT_CAPACITY);
     success = recentGroups_.reserve(capacity)
             && success;
 
     // Reset immediately, to make sure that we're not hit by the end
@@ -177,30 +181,32 @@ PerformanceMonitoring::commit()
 #endif // defined(MOZ_HAVE_RDTSC)
 }
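 
 // Illustrative sketch (not part of this changeset): the proportional
 // attribution described in the comment above commit(). The helper name is
 // hypothetical; the real accounting happens in the embedding's
 // stopwatchCommitCallback.
 static uint64_t
 ApproximateGroupShare(uint64_t totalTime, uint64_t groupCycles, uint64_t topGroupCycles)
 {
     // Guard against a zero or missing top-group measurement.
     if (topGroupCycles == 0) {
         return 0;
     }
     // E.g. a group charged 2M of the top group's 10M cycles is attributed 20%
     // of the measured system and user time for this iteration.
     return totalTime * groupCycles / topGroupCycles;
 }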
 
 uint64_t
 PerformanceMonitoring::monotonicReadTimestampCounter()
 {
 #if defined(MOZ_HAVE_RDTSC)
     const uint64_t hardware = ReadTimestampCounter();
-    if (highestTimestampCounter_ < hardware)
+    if (highestTimestampCounter_ < hardware) {
         highestTimestampCounter_ = hardware;
+    }
     return highestTimestampCounter_;
 #else
     return 0;
 #endif // defined(MOZ_HAVE_RDTSC)
 }
 
 void
 PerformanceMonitoring::dispose(JSRuntime* rt)
 {
     reset();
-    for (RealmsIter r(rt); !r.done(); r.next())
+    for (RealmsIter r(rt); !r.done(); r.next()) {
         r->performanceMonitoring.unlink();
+    }
 }
 
 PerformanceGroupHolder::~PerformanceGroupHolder()
 {
     unlink();
 }
 
 void
@@ -208,58 +214,63 @@ PerformanceGroupHolder::unlink()
 {
     initialized_ = false;
     groups_.clear();
 }
 
 const PerformanceGroupVector*
 PerformanceGroupHolder::getGroups(JSContext* cx)
 {
-    if (initialized_)
+    if (initialized_) {
         return &groups_;
+    }
 
-    if (!runtime_->performanceMonitoring().getGroupsCallback)
+    if (!runtime_->performanceMonitoring().getGroupsCallback) {
         return nullptr;
+    }
 
-    if (!runtime_->performanceMonitoring().getGroupsCallback(cx, groups_, runtime_->performanceMonitoring().getGroupsClosure))
+    if (!runtime_->performanceMonitoring().getGroupsCallback(cx, groups_, runtime_->performanceMonitoring().getGroupsClosure)) {
         return nullptr;
+    }
 
     initialized_ = true;
     return &groups_;
 }
 
 AutoStopwatch::AutoStopwatch(JSContext* cx MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : cx_(cx)
   , iteration_(0)
   , isMonitoringJank_(false)
   , isMonitoringCPOW_(false)
   , cyclesStart_(0)
   , CPOWTimeStart_(0)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
 
     JS::Compartment* compartment = cx_->compartment();
-    if (MOZ_UNLIKELY(compartment->gcState.scheduledForDestruction))
+    if (MOZ_UNLIKELY(compartment->gcState.scheduledForDestruction)) {
         return;
+    }
 
     JSRuntime* runtime = cx_->runtime();
     iteration_ = runtime->performanceMonitoring().iteration();
 
     const PerformanceGroupVector* groups = cx_->realm()->performanceMonitoring.getGroups(cx);
     if (!groups) {
       // Either the embedding has not provided any performance
       // monitoring logistics or there was an error that prevents
       // performance monitoring.
       return;
     }
     for (auto group = groups->begin(); group < groups->end(); group++) {
       auto acquired = acquireGroup(*group);
       if (acquired) {
-          if (!groups_.append(acquired))
+          if (!groups_.append(acquired)) {
               MOZ_CRASH();
+          }
       }
     }
     if (groups_.length() == 0) {
       // We are not in charge of monitoring anything.
       return;
     }
 
     // Now that we are sure that JS code is being executed,
@@ -271,30 +282,32 @@ AutoStopwatch::AutoStopwatch(JSContext* 
 AutoStopwatch::~AutoStopwatch()
 {
     if (groups_.length() == 0) {
         // We are not in charge of monitoring anything.
         return;
     }
 
     JS::Compartment* compartment = cx_->compartment();
-    if (MOZ_UNLIKELY(compartment->gcState.scheduledForDestruction))
+    if (MOZ_UNLIKELY(compartment->gcState.scheduledForDestruction)) {
         return;
+    }
 
     JSRuntime* runtime = cx_->runtime();
     if (MOZ_UNLIKELY(iteration_ != runtime->performanceMonitoring().iteration())) {
         // We have entered a nested event loop at some point.
         // Any information we may have is obsolete.
         return;
     }
 
     mozilla::Unused << exit(); // Sadly, there is nothing we can do about an error at this point.
 
-    for (auto group = groups_.begin(); group < groups_.end(); group++)
+    for (auto group = groups_.begin(); group < groups_.end(); group++) {
         releaseGroup(*group);
+    }
 }
 
 void
 AutoStopwatch::enter()
 {
     JSRuntime* runtime = cx_->runtime();
 
     if (runtime->performanceMonitoring().isMonitoringCPOW()) {
@@ -346,32 +359,35 @@ AutoStopwatch::exit()
     return addToGroups(cyclesDelta, CPOWTimeDelta);
 }
 
 void
 AutoStopwatch::updateTelemetry(const cpuid_t& cpuStart_, const cpuid_t& cpuEnd)
 {
   JSRuntime* runtime = cx_->runtime();
 
-    if (isSameCPU(cpuStart_, cpuEnd))
+    if (isSameCPU(cpuStart_, cpuEnd)) {
         runtime->performanceMonitoring().testCpuRescheduling.stayed += 1;
-    else
+    } else {
         runtime->performanceMonitoring().testCpuRescheduling.moved += 1;
+    }
 }
 
 PerformanceGroup*
 AutoStopwatch::acquireGroup(PerformanceGroup* group)
 {
     MOZ_ASSERT(group);
 
-    if (group->isAcquired(iteration_))
+    if (group->isAcquired(iteration_)) {
         return nullptr;
+    }
 
-    if (!group->isActive())
+    if (!group->isActive()) {
         return nullptr;
+    }
 
     group->acquire(iteration_, this);
     return group;
 }
 
 void
 AutoStopwatch::releaseGroup(PerformanceGroup* group)
 {
@@ -380,41 +396,44 @@ AutoStopwatch::releaseGroup(PerformanceG
 }
 
 bool
 AutoStopwatch::addToGroups(uint64_t cyclesDelta, uint64_t CPOWTimeDelta)
 {
   JSRuntime* runtime = cx_->runtime();
 
     for (auto group = groups_.begin(); group < groups_.end(); ++group) {
-      if (!addToGroup(runtime, cyclesDelta, CPOWTimeDelta, *group))
+      if (!addToGroup(runtime, cyclesDelta, CPOWTimeDelta, *group)) {
         return false;
+      }
     }
     return true;
 }
 
 bool
 AutoStopwatch::addToGroup(JSRuntime* runtime, uint64_t cyclesDelta, uint64_t CPOWTimeDelta, PerformanceGroup* group)
 {
     MOZ_ASSERT(group);
     MOZ_ASSERT(group->isAcquired(iteration_, this));
 
-    if (!runtime->performanceMonitoring().addRecentGroup(group))
+    if (!runtime->performanceMonitoring().addRecentGroup(group)) {
       return false;
+    }
     group->addRecentTicks(iteration_, 1);
     group->addRecentCycles(iteration_, cyclesDelta);
     group->addRecentCPOW(iteration_, CPOWTimeDelta);
     return true;
 }
 
 uint64_t
 AutoStopwatch::getDelta(const uint64_t end, const uint64_t start) const
 {
-    if (start >= end)
+    if (start >= end) {
       return 0;
+    }
     return end - start;
 }
 
 uint64_t
 AutoStopwatch::getCycles(JSRuntime* runtime) const
 {
     return runtime->performanceMonitoring().monotonicReadTimestampCounter();
 }
@@ -485,18 +504,19 @@ PerformanceGroup::acquire(uint64_t it, c
     }
     iteration_ = it;
     owner_ = owner;
 }
 
 void
 PerformanceGroup::release(uint64_t it, const AutoStopwatch* owner)
 {
-    if (iteration_ != it)
+    if (iteration_ != it) {
         return;
+    }
 
     MOZ_ASSERT(owner == owner_ || owner_ == nullptr);
     owner_ = nullptr;
 }
 
 void
 PerformanceGroup::resetRecentData()
 {
@@ -580,18 +600,19 @@ PerformanceGroup::AddRef()
     ++refCount_;
 }
 
 void
 PerformanceGroup::Release()
 {
     MOZ_ASSERT(refCount_ > 0);
     --refCount_;
-    if (refCount_ > 0)
+    if (refCount_ > 0) {
         return;
+    }
 
     JS::AutoSuppressGCAnalysis nogc;
     this->Delete();
 }
 
 JS_PUBLIC_API(bool)
 SetStopwatchStartCallback(JSContext* cx, StopwatchStartCallback cb, void* closure)
 {
--- a/js/src/vm/Stopwatch.h
+++ b/js/src/vm/Stopwatch.h
@@ -124,18 +124,19 @@ struct PerformanceMonitoring {
      * is not measuring jank.
      *
      * Otherwise, any pending measurements are dropped, but previous
      * measurements remain stored.
      *
      * May return `false` if the underlying hashtable cannot be allocated.
      */
     bool setIsMonitoringJank(bool value) {
-        if (isMonitoringJank_ != value)
+        if (isMonitoringJank_ != value) {
             reset();
+        }
 
         isMonitoringJank_ = value;
         return true;
     }
     bool isMonitoringJank() const {
         return isMonitoringJank_;
     }
 
@@ -152,18 +153,19 @@ struct PerformanceMonitoring {
      * is not measuring CPOW.
      *
      * Otherwise, any pending measurements are dropped, but previous
      * measurements remain stored.
      *
      * May return `false` if the underlying hashtable cannot be allocated.
      */
     bool setIsMonitoringCPOW(bool value) {
-        if (isMonitoringCPOW_ != value)
+        if (isMonitoringCPOW_ != value) {
             reset();
+        }
 
         isMonitoringCPOW_ = value;
         return true;
     }
 
     bool isMonitoringCPOW() const {
         return isMonitoringCPOW_;
     }
--- a/js/src/vm/StringObject-inl.h
+++ b/js/src/vm/StringObject-inl.h
@@ -14,32 +14,35 @@
 
 namespace js {
 
 /* static */ inline bool
 StringObject::init(JSContext* cx, Handle<StringObject*> obj, HandleString str)
 {
     MOZ_ASSERT(obj->numFixedSlots() == 2);
 
-    if (!EmptyShape::ensureInitialCustomShape<StringObject>(cx, obj))
+    if (!EmptyShape::ensureInitialCustomShape<StringObject>(cx, obj)) {
         return false;
+    }
 
     MOZ_ASSERT(obj->lookup(cx, NameToId(cx->names().length))->slot() == LENGTH_SLOT);
 
     obj->setStringThis(str);
 
     return true;
 }
 
 /* static */ inline StringObject*
 StringObject::create(JSContext* cx, HandleString str, HandleObject proto, NewObjectKind newKind)
 {
     Rooted<StringObject*> obj(cx, NewObjectWithClassProto<StringObject>(cx, proto, newKind));
-    if (!obj)
+    if (!obj) {
         return nullptr;
-    if (!StringObject::init(cx, obj, str))
+    }
+    if (!StringObject::init(cx, obj, str)) {
         return nullptr;
+    }
     return obj;
 }
 
 } // namespace js
 
 #endif /* vm_StringObject_inl_h */
--- a/js/src/vm/StringType-inl.h
+++ b/js/src/vm/StringType-inl.h
@@ -27,25 +27,27 @@ namespace js {
 template <AllowGC allowGC, typename CharT>
 static MOZ_ALWAYS_INLINE JSInlineString*
 AllocateInlineString(JSContext* cx, size_t len, CharT** chars)
 {
     MOZ_ASSERT(JSInlineString::lengthFits<CharT>(len));
 
     if (JSThinInlineString::lengthFits<CharT>(len)) {
         JSThinInlineString* str = JSThinInlineString::new_<allowGC>(cx);
-        if (!str)
+        if (!str) {
             return nullptr;
+        }
         *chars = str->init<CharT>(len);
         return str;
     }
 
     JSFatInlineString* str = JSFatInlineString::new_<allowGC>(cx);
-    if (!str)
+    if (!str) {
         return nullptr;
+    }
     *chars = str->init<CharT>(len);
     return str;
 }
 
 // Create a thin inline string if possible, and a fat inline string if not.
 template <AllowGC allowGC, typename CharT>
 static MOZ_ALWAYS_INLINE JSInlineString*
 NewInlineString(JSContext* cx, mozilla::Range<const CharT> chars)
@@ -53,35 +55,37 @@ NewInlineString(JSContext* cx, mozilla::
     /*
      * Don't bother trying to find a static atom; measurement shows that not
      * many get here (for one, Atomize is catching them).
      */
 
     size_t len = chars.length();
     CharT* storage;
     JSInlineString* str = AllocateInlineString<allowGC>(cx, len, &storage);
-    if (!str)
+    if (!str) {
         return nullptr;
+    }
 
     mozilla::PodCopy(storage, chars.begin().get(), len);
     storage[len] = 0;
     return str;
 }
 
 // Create a thin inline string if possible, and a fat inline string if not.
 template <typename CharT>
 static MOZ_ALWAYS_INLINE JSInlineString*
 NewInlineString(JSContext* cx, HandleLinearString base, size_t start, size_t length)
 {
     MOZ_ASSERT(JSInlineString::lengthFits<CharT>(length));
 
     CharT* chars;
     JSInlineString* s = AllocateInlineString<CanGC>(cx, length, &chars);
-    if (!s)
+    if (!s) {
         return nullptr;
+    }
 
     JS::AutoCheckCannotGC nogc;
     mozilla::PodCopy(chars, base->chars<CharT>(nogc) + start, length);
     chars[length] = 0;
     return s;
 }
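 
 // Illustrative sketch (not part of this changeset): a caller-side use of
 // NewInlineString for a short literal; the two-character length is assumed to
 // satisfy JSInlineString::lengthFits<char16_t>.
 static JSInlineString*
 NewTwoCharInlineString(JSContext* cx)
 {
     static const char16_t chars[] = { u'h', u'i' };
     return NewInlineString<CanGC>(cx, mozilla::Range<const char16_t>(chars, 2));
 }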
 
 } /* namespace js */
@@ -109,46 +113,51 @@ MOZ_ALWAYS_INLINE const JS::Latin1Char*
 JSString::nonInlineCharsRaw() const
 {
     return d.s.u2.nonInlineCharsLatin1;
 }
 
 MOZ_ALWAYS_INLINE void
 JSRope::init(JSContext* cx, JSString* left, JSString* right, size_t length)
 {
-    if (left->hasLatin1Chars() && right->hasLatin1Chars())
+    if (left->hasLatin1Chars() && right->hasLatin1Chars()) {
         setLengthAndFlags(length, INIT_ROPE_FLAGS | LATIN1_CHARS_BIT);
-    else
+    } else {
         setLengthAndFlags(length, INIT_ROPE_FLAGS);
+    }
     d.s.u2.left = left;
     d.s.u3.right = right;
 
     // Post-barrier by inserting into the whole cell buffer if either
     // this -> left or this -> right is a tenured -> nursery edge.
     if (isTenured()) {
         js::gc::StoreBuffer* sb = left->storeBuffer();
-        if (!sb)
+        if (!sb) {
             sb = right->storeBuffer();
-        if (sb)
+        }
+        if (sb) {
             sb->putWholeCell(this);
+        }
     }
 }
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSRope*
 JSRope::new_(JSContext* cx,
              typename js::MaybeRooted<JSString*, allowGC>::HandleType left,
              typename js::MaybeRooted<JSString*, allowGC>::HandleType right,
              size_t length, js::gc::InitialHeap heap)
 {
-    if (!validateLength(cx, length))
+    if (!validateLength(cx, length)) {
         return nullptr;
+    }
     JSRope* str = js::Allocate<JSRope, allowGC>(cx, heap);
-    if (!str)
+    if (!str) {
         return nullptr;
+    }
     str->init(cx, left, right, length);
     return str;
 }
 
 MOZ_ALWAYS_INLINE void
 JSDependentString::init(JSContext* cx, JSLinearString* base, size_t start,
                         size_t length)
 {
@@ -157,18 +166,19 @@ JSDependentString::init(JSContext* cx, J
     if (base->hasLatin1Chars()) {
         setLengthAndFlags(length, DEPENDENT_FLAGS | LATIN1_CHARS_BIT);
         d.s.u2.nonInlineCharsLatin1 = base->latin1Chars(nogc) + start;
     } else {
         setLengthAndFlags(length, DEPENDENT_FLAGS);
         d.s.u2.nonInlineCharsTwoByte = base->twoByteChars(nogc) + start;
     }
     d.s.u3.base = base;
-    if (isTenured() && !base->isTenured())
+    if (isTenured() && !base->isTenured()) {
         base->storeBuffer()->putWholeCell(this);
+    }
 }
 
 MOZ_ALWAYS_INLINE JSLinearString*
 JSDependentString::new_(JSContext* cx, JSLinearString* baseArg, size_t start,
                         size_t length)
 {
     /*
      * Try to avoid long chains of dependent strings. We can't avoid these
@@ -193,30 +203,32 @@ JSDependentString::new_(JSContext* cx, J
                      : JSInlineString::lengthFits<JS::Latin1Char>(length);
     if (useInline) {
         js::RootedLinearString base(cx, baseArg);
         return baseArg->hasLatin1Chars()
                ? js::NewInlineString<JS::Latin1Char>(cx, base, start, length)
                : js::NewInlineString<char16_t>(cx, base, start, length);
     }
 
-    if (baseArg->isExternal() && !baseArg->ensureFlat(cx))
+    if (baseArg->isExternal() && !baseArg->ensureFlat(cx)) {
         return nullptr;
+    }
 
     JSDependentString* str = js::Allocate<JSDependentString, js::NoGC>(cx, js::gc::DefaultHeap);
     if (str) {
         str->init(cx, baseArg, start, length);
         return str;
     }
 
     js::RootedLinearString base(cx, baseArg);
 
     str = js::Allocate<JSDependentString>(cx, js::gc::DefaultHeap);
-    if (!str)
+    if (!str) {
         return nullptr;
+    }
     str->init(cx, base, start, length);
     return str;
 }
 
 MOZ_ALWAYS_INLINE void
 JSFlatString::init(const char16_t* chars, size_t length)
 {
     setLengthAndFlags(length, INIT_FLAT_FLAGS);
@@ -231,76 +243,84 @@ JSFlatString::init(const JS::Latin1Char*
 }
 
 template <js::AllowGC allowGC, typename CharT>
 MOZ_ALWAYS_INLINE JSFlatString*
 JSFlatString::new_(JSContext* cx, const CharT* chars, size_t length)
 {
     MOZ_ASSERT(chars[length] == CharT(0));
 
-    if (!validateLength(cx, length))
+    if (!validateLength(cx, length)) {
         return nullptr;
+    }
 
     JSFlatString* str;
-    if (cx->zone()->isAtomsZone())
+    if (cx->zone()->isAtomsZone()) {
         str = js::Allocate<js::NormalAtom, allowGC>(cx);
-    else
+    } else {
         str = js::Allocate<JSFlatString, allowGC>(cx, js::gc::DefaultHeap);
-    if (!str)
+    }
+    if (!str) {
         return nullptr;
+    }
 
     if (!str->isTenured()) {
         // The chars pointer is only considered to be handed over to this
         // function on a successful return. If the following registration
         // fails, the string is partially initialized and must be made valid,
         // or its finalizer may attempt to free uninitialized memory.
         void* ptr = const_cast<void*>(static_cast<const void*>(chars));
         if (!cx->runtime()->gc.nursery().registerMallocedBuffer(ptr)) {
             str->init((JS::Latin1Char*)nullptr, 0);
-            if (allowGC)
+            if (allowGC) {
                 ReportOutOfMemory(cx);
+            }
             return nullptr;
         }
     }
 
     str->init(chars, length);
     return str;
 }
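 
 // Illustrative sketch (not part of this changeset): the ownership contract
 // spelled out in the comment above, where the caller keeps ownership of
 // |chars| until new_ succeeds. cx->pod_malloc and js_free are assumed to be
 // the usual SpiderMonkey allocation helpers.
 static JSFlatString*
 NewFlatFromLatin1(JSContext* cx, const JS::Latin1Char* src, size_t len)
 {
     JS::Latin1Char* chars = cx->pod_malloc<JS::Latin1Char>(len + 1);
     if (!chars) {
         return nullptr;
     }
     mozilla::PodCopy(chars, src, len);
     chars[len] = 0;
     JSFlatString* str = JSFlatString::new_<js::CanGC>(cx, chars, len);
     if (!str) {
         js_free(chars);
     }
     return str;
 }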
 
 inline js::PropertyName*
 JSFlatString::toPropertyName(JSContext* cx)
 {
 #ifdef DEBUG
     uint32_t dummy;
     MOZ_ASSERT(!isIndex(&dummy));
 #endif
-    if (isAtom())
+    if (isAtom()) {
         return asAtom().asPropertyName();
+    }
     JSAtom* atom = js::AtomizeString(cx, this);
-    if (!atom)
+    if (!atom) {
         return nullptr;
+    }
     return atom->asPropertyName();
 }
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSThinInlineString*
 JSThinInlineString::new_(JSContext* cx)
 {
-    if (cx->zone()->isAtomsZone())
+    if (cx->zone()->isAtomsZone()) {
         return (JSThinInlineString*)(js::Allocate<js::NormalAtom, allowGC>(cx));
+    }
 
     return js::Allocate<JSThinInlineString, allowGC>(cx, js::gc::DefaultHeap);
 }
 
 template <js::AllowGC allowGC>
 MOZ_ALWAYS_INLINE JSFatInlineString*
 JSFatInlineString::new_(JSContext* cx)
 {
-    if (cx->zone()->isAtomsZone())
+    if (cx->zone()->isAtomsZone()) {
         return (JSFatInlineString*)(js::Allocate<js::FatInlineAtom, allowGC>(cx));
+    }
 
     return js::Allocate<JSFatInlineString, allowGC>(cx, js::gc::DefaultHeap);
 }
 
 template<>
 MOZ_ALWAYS_INLINE JS::Latin1Char*
 JSThinInlineString::init<JS::Latin1Char>(size_t length)
 {
@@ -345,60 +365,66 @@ JSExternalString::init(const char16_t* c
     d.s.u2.nonInlineCharsTwoByte = chars;
     d.s.u3.externalFinalizer = fin;
 }
 
 MOZ_ALWAYS_INLINE JSExternalString*
 JSExternalString::new_(JSContext* cx, const char16_t* chars, size_t length,
                        const JSStringFinalizer* fin)
 {
-    if (!validateLength(cx, length))
+    if (!validateLength(cx, length)) {
         return nullptr;
+    }
     JSExternalString* str = js::Allocate<JSExternalString>(cx);
-    if (!str)
+    if (!str) {
         return nullptr;
+    }
     str->init(chars, length, fin);
     cx->updateMallocCounter((length + 1) * sizeof(char16_t));
     return str;
 }
 
 inline JSLinearString*
 js::StaticStrings::getUnitStringForElement(JSContext* cx, JSString* str, size_t index)
 {
     MOZ_ASSERT(index < str->length());
 
     char16_t c;
-    if (!str->getChar(cx, index, &c))
+    if (!str->getChar(cx, index, &c)) {
         return nullptr;
-    if (c < UNIT_STATIC_LIMIT)
+    }
+    if (c < UNIT_STATIC_LIMIT) {
         return getUnit(c);
+    }
     return js::NewInlineString<CanGC>(cx, mozilla::Range<const char16_t>(&c, 1));
 }
 
 MOZ_ALWAYS_INLINE void
 JSString::finalize(js::FreeOp* fop)
 {
     /* FatInline strings are in a different arena. */
     MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_STRING);
     MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_ATOM);
 
-    if (isFlat())
+    if (isFlat()) {
         asFlat().finalize(fop);
-    else
+    } else {
         MOZ_ASSERT(isDependent() || isRope());
+    }
 }
 
 inline void
 JSFlatString::finalize(js::FreeOp* fop)
 {
     MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_STRING);
     MOZ_ASSERT(getAllocKind() != js::gc::AllocKind::FAT_INLINE_ATOM);
 
-    if (!isInline())
+    if (!isInline()) {
         fop->free_(nonInlineCharsRaw());
+    }
 }
 
 inline void
 JSFatInlineString::finalize(js::FreeOp* fop)
 {
     MOZ_ASSERT(getAllocKind() == js::gc::AllocKind::FAT_INLINE_STRING);
     MOZ_ASSERT(isInline());
 
@@ -407,18 +433,19 @@ JSFatInlineString::finalize(js::FreeOp* 
 
 inline void
 JSAtom::finalize(js::FreeOp* fop)
 {
     MOZ_ASSERT(JSString::isAtom());
     MOZ_ASSERT(JSString::isFlat());
     MOZ_ASSERT(getAllocKind() == js::gc::AllocKind::ATOM);
 
-    if (!isInline())
+    if (!isInline()) {
         fop->free_(nonInlineCharsRaw());
+    }
 }
 
 inline void
 js::FatInlineAtom::finalize(js::FreeOp* fop)
 {
     MOZ_ASSERT(JSString::isAtom());
     MOZ_ASSERT(getAllocKind() == js::gc::AllocKind::FAT_INLINE_ATOM);
 
--- a/js/src/vm/StringType.cpp
+++ b/js/src/vm/StringType.cpp
@@ -49,24 +49,26 @@ using JS::AutoCheckCannotGC;
 using JS::AutoStableStringChars;
 
 using UniqueLatin1Chars = UniquePtr<Latin1Char[], JS::FreePolicy>;
 
 size_t
 JSString::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
     // JSRope: do nothing, we'll count all children chars when we hit the leaf strings.
-    if (isRope())
+    if (isRope()) {
         return 0;
+    }
 
     MOZ_ASSERT(isLinear());
 
     // JSDependentString: do nothing, we'll count the chars when we hit the base string.
-    if (isDependent())
+    if (isDependent()) {
         return 0;
+    }
 
     // JSExternalString: Ask the embedding to tell us what's going on.  If it
     // doesn't want to say, don't count, the chars could be stored anywhere.
     if (isExternal()) {
         if (auto* cb = runtimeFromMainThread()->externalStringSizeofCallback.ref()) {
             // Our callback isn't supposed to cause GC.
             JS::AutoSuppressGCAnalysis nogc;
             return cb(this, mallocSizeOf);
@@ -80,92 +82,98 @@ JSString::sizeOfExcludingThis(mozilla::M
     if (isExtensible()) {
         JSExtensibleString& extensible = asExtensible();
         return extensible.hasLatin1Chars()
                ? mallocSizeOf(extensible.rawLatin1Chars())
                : mallocSizeOf(extensible.rawTwoByteChars());
     }
 
     // JSInlineString, JSFatInlineString [JSInlineAtom, JSFatInlineAtom]: the chars are inline.
-    if (isInline())
+    if (isInline()) {
         return 0;
+    }
 
     // JSAtom, JSUndependedString: measure the space for the chars.  For
     // JSUndependedString, there is no need to count the base string, for the
     // same reason as JSDependentString above.
     JSFlatString& flat = asFlat();
     return flat.hasLatin1Chars()
            ? mallocSizeOf(flat.rawLatin1Chars())
            : mallocSizeOf(flat.rawTwoByteChars());
 }
 
 JS::ubi::Node::Size
 JS::ubi::Concrete<JSString>::size(mozilla::MallocSizeOf mallocSizeOf) const
 {
     JSString& str = get();
     size_t size;
-    if (str.isAtom())
+    if (str.isAtom()) {
         size = str.isFatInline() ? sizeof(js::FatInlineAtom) : sizeof(js::NormalAtom);
-    else
+    } else {
         size = str.isFatInline() ? sizeof(JSFatInlineString) : sizeof(JSString);
+    }
 
-    if (IsInsideNursery(&str))
+    if (IsInsideNursery(&str)) {
         size += Nursery::stringHeaderSize();
+    }
 
     size += str.sizeOfExcludingThis(mallocSizeOf);
 
     return size;
 }
 
 const char16_t JS::ubi::Concrete<JSString>::concreteTypeName[] = u"JSString";
 
 #if defined(DEBUG) || defined(JS_JITSPEW)
 
 template <typename CharT>
 /*static */ void
 JSString::dumpChars(const CharT* s, size_t n, js::GenericPrinter& out)
 {
     if (n == SIZE_MAX) {
         n = 0;
-        while (s[n])
+        while (s[n]) {
             n++;
+        }
     }
 
     out.put("\"");
     for (size_t i = 0; i < n; i++) {
         char16_t c = s[i];
-        if (c == '\n')
+        if (c == '\n') {
             out.put("\\n");
-        else if (c == '\t')
+        } else if (c == '\t') {
             out.put("\\t");
-        else if (c >= 32 && c < 127)
+        } else if (c >= 32 && c < 127) {
             out.putChar((char)s[i]);
-        else if (c <= 255)
+        } else if (c <= 255) {
             out.printf("\\x%02x", unsigned(c));
-        else
+        } else {
             out.printf("\\u%04x", unsigned(c));
+        }
     }
     out.putChar('"');
 }
 
 template void
 JSString::dumpChars(const Latin1Char* s, size_t n, js::GenericPrinter& out);
 
 template void
 JSString::dumpChars(const char16_t* s, size_t n, js::GenericPrinter& out);
 
 void
 JSString::dumpCharsNoNewline(js::GenericPrinter& out)
 {
     if (JSLinearString* linear = ensureLinear(nullptr)) {
         AutoCheckCannotGC nogc;
-        if (hasLatin1Chars())
+        if (hasLatin1Chars()) {
             dumpChars(linear->latin1Chars(nogc), length(), out);
-        else
+        } else {
             dumpChars(linear->twoByteChars(nogc), length(), out);
+        }
     } else {
         out.put("(oom in JSString::dumpCharsNoNewline)");
     }
 }
 
 void
 JSString::dump()
 {
@@ -200,24 +208,31 @@ JSString::dumpNoNewline(js::GenericPrint
         out.put("(oom in JSString::dump)");
     }
 }
 
 
 void
 JSString::dumpRepresentation(js::GenericPrinter& out, int indent) const
 {
-    if      (isRope())          asRope()        .dumpRepresentation(out, indent);
-    else if (isDependent())     asDependent()   .dumpRepresentation(out, indent);
-    else if (isExternal())      asExternal()    .dumpRepresentation(out, indent);
-    else if (isExtensible())    asExtensible()  .dumpRepresentation(out, indent);
-    else if (isInline())        asInline()      .dumpRepresentation(out, indent);
-    else if (isFlat())          asFlat()        .dumpRepresentation(out, indent);
-    else
+    if (isRope()) {
+        asRope().dumpRepresentation(out, indent);
+    } else if (isDependent()) {
+        asDependent().dumpRepresentation(out, indent);
+    } else if (isExternal()) {
+        asExternal().dumpRepresentation(out, indent);
+    } else if (isExtensible()) {
+        asExtensible().dumpRepresentation(out, indent);
+    } else if (isInline()) {
+        asInline().dumpRepresentation(out, indent);
+    } else if (isFlat()) {
+        asFlat().dumpRepresentation(out, indent);
+    } else {
         MOZ_CRASH("Unexpected JSString representation");
+    }
 }
 
 void
 JSString::dumpRepresentationHeader(js::GenericPrinter& out, const char* subclass) const
 {
     uint32_t flags = JSString::flags();
     // Print the string's address as an actual C++ expression, to facilitate
     // copy-and-paste into a debugger.
@@ -318,48 +333,53 @@ JSRope::copyCharsInternal(JSContext* may
 {
     // Left-leaning ropes are far more common than right-leaning ropes, so
     // perform a non-destructive traversal of the rope, right node first,
     // splatting each node's characters into a contiguous buffer.
 
     size_t n = length();
 
     UniquePtr<CharT[], JS::FreePolicy> out;
-    if (maybecx)
+    if (maybecx) {
         out.reset(maybecx->pod_malloc<CharT>(n + 1));
-    else
+    } else {
         out.reset(js_pod_malloc<CharT>(n + 1));
+    }
 
-    if (!out)
+    if (!out) {
         return nullptr;
+    }
 
     Vector<const JSString*, 8, SystemAllocPolicy> nodeStack;
     const JSString* str = this;
     CharT* end = out.get() + str->length();
     while (true) {
         if (str->isRope()) {
             if (!nodeStack.append(str->asRope().leftChild())) {
-                if (maybecx)
+                if (maybecx) {
                     ReportOutOfMemory(maybecx);
+                }
                 return nullptr;
             }
             str = str->asRope().rightChild();
         } else {
             end -= str->length();
             CopyChars(end, str->asLinear());
-            if (nodeStack.empty())
+            if (nodeStack.empty()) {
                 break;
+            }
             str = nodeStack.popCopy();
         }
     }
 
     MOZ_ASSERT(end == out.get());
 
-    if (nullTerminate)
+    if (nullTerminate) {
         out[n] = 0;
+    }
 
     return out;
 }
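
The comment at the top of JSRope::copyCharsInternal describes the traversal order: right children are visited first while left children are parked on an explicit stack, so leaf characters are written back to front into a preallocated buffer. A standalone sketch of the same pattern on a toy tree (illustrative only; the node type and names are assumptions, not SpiderMonkey types):

    #include <cstddef>
    #include <string>
    #include <vector>

    struct Node {
        const Node* left = nullptr;   // non-null for interior (rope) nodes
        const Node* right = nullptr;
        std::string text;             // leaf payload
        bool isLeaf() const { return !left; }
    };

    // `out` is already sized to the total length; fill it from the back, the way
    // `end` walks backwards in copyCharsInternal above.
    void CopyRightFirst(const Node* node, std::string& out) {
        std::vector<const Node*> stack;
        std::size_t end = out.size();
        while (true) {
            if (!node->isLeaf()) {
                stack.push_back(node->left);   // revisit the left child later
                node = node->right;            // descend the right spine first
            } else {
                end -= node->text.size();
                out.replace(end, node->text.size(), node->text);
                if (stack.empty()) {
                    break;
                }
                node = stack.back();
                stack.pop_back();
            }
        }
    }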
 
 template <typename CharT>
 void AddStringToHash(uint32_t* hash, const CharT* chars, size_t len)
 {
     // It's tempting to use |HashString| instead of this loop, but that's
@@ -371,39 +391,42 @@ void AddStringToHash(uint32_t* hash, con
         *hash = mozilla::AddToHash(*hash, chars[i]);
     }
 }
 
 void AddStringToHash(uint32_t* hash, const JSString* str)
 {
     AutoCheckCannotGC nogc;
     const auto& s = str->asLinear();
-    if (s.hasLatin1Chars())
+    if (s.hasLatin1Chars()) {
         AddStringToHash(hash, s.latin1Chars(nogc), s.length());
-    else
+    } else {
         AddStringToHash(hash, s.twoByteChars(nogc), s.length());
+    }
 }
 
 bool
 JSRope::hash(uint32_t* outHash) const
 {
     Vector<const JSString*, 8, SystemAllocPolicy> nodeStack;
     const JSString* str = this;
 
     *outHash = 0;
 
     while (true) {
         if (str->isRope()) {
-            if (!nodeStack.append(str->asRope().rightChild()))
+            if (!nodeStack.append(str->asRope().rightChild())) {
                 return false;
+            }
             str = str->asRope().leftChild();
         } else {
             AddStringToHash(outHash, str);
-            if (nodeStack.empty())
+            if (nodeStack.empty()) {
                 break;
+            }
             str = nodeStack.popCopy();
         }
     }
 
     return true;
 }
 
 #if defined(DEBUG) || defined(JS_JITSPEW)
@@ -423,20 +446,21 @@ JSRope::dumpRepresentation(js::GenericPr
 
 namespace js {
 
 template <>
 void
 CopyChars(char16_t* dest, const JSLinearString& str)
 {
     AutoCheckCannotGC nogc;
-    if (str.hasTwoByteChars())
+    if (str.hasTwoByteChars()) {
         PodCopy(dest, str.twoByteChars(nogc), str.length());
-    else
+    } else {
         CopyAndInflateChars(dest, str.latin1Chars(nogc), str.length());
+    }
 }
 
 template <>
 void
 CopyChars(Latin1Char* dest, const JSLinearString& str)
 {
     AutoCheckCannotGC nogc;
     if (str.hasLatin1Chars()) {
@@ -531,18 +555,19 @@ JSRope::flattenInternal(JSContext* maybe
     static const uintptr_t Tag_VisitRightChild = 0x1;
 
     AutoCheckCannotGC nogc;
 
     gc::StoreBuffer* bufferIfNursery = storeBuffer();
 
     /* Find the left most string, containing the first string. */
     JSRope* leftMostRope = this;
-    while (leftMostRope->leftChild()->isRope())
+    while (leftMostRope->leftChild()->isRope()) {
         leftMostRope = &leftMostRope->leftChild()->asRope();
+    }
 
     if (leftMostRope->leftChild()->isExtensible()) {
         JSExtensibleString& left = leftMostRope->leftChild()->asExtensible();
         size_t capacity = left.capacity();
         if (capacity >= wholeLength && left.hasTwoByteChars() == IsSame<CharT, char16_t>::value) {
             wholeChars = const_cast<CharT*>(left.nonInlineChars<CharT>(nogc));
             wholeCapacity = capacity;
 
@@ -565,20 +590,21 @@ JSRope::flattenInternal(JSContext* maybe
             }
             if (b == WithIncrementalBarrier) {
                 JSString::writeBarrierPre(str->d.s.u2.left);
                 JSString::writeBarrierPre(str->d.s.u3.right);
             }
             str->setNonInlineChars(wholeChars);
             uint32_t left_len = left.length();
             pos = wholeChars + left_len;
-            if (IsSame<CharT, char16_t>::value)
+            if (IsSame<CharT, char16_t>::value) {
                 left.setLengthAndFlags(left_len, DEPENDENT_FLAGS);
-            else
+            } else {
                 left.setLengthAndFlags(left_len, DEPENDENT_FLAGS | LATIN1_CHARS_BIT);
+            }
             left.d.s.u3.base = (JSLinearString*)this;  /* will be true on exit */
             Nursery& nursery = runtimeFromMainThread()->gc.nursery();
             bool inTenured = !bufferIfNursery;
             if (!inTenured && left.isTenured()) {
                 // tenured leftmost child is giving its chars buffer to the
                 // nursery-allocated root node.
                 nursery.registerMallocedBuffer(wholeChars);
                 // leftmost child -> root is a tenured -> nursery edge.
@@ -588,27 +614,29 @@ JSRope::flattenInternal(JSContext* maybe
                 // tenured string.
                 nursery.removeMallocedBuffer(wholeChars);
             }
             goto visit_right_child;
         }
     }
 
     if (!AllocChars(this, wholeLength, &wholeChars, &wholeCapacity)) {
-        if (maybecx)
+        if (maybecx) {
             ReportOutOfMemory(maybecx);
+        }
         return nullptr;
     }
 
     if (!isTenured()) {
         Nursery& nursery = runtimeFromMainThread()->gc.nursery();
         if (!nursery.registerMallocedBuffer(wholeChars)) {
             js_free(wholeChars);
-            if (maybecx)
+            if (maybecx) {
                 ReportOutOfMemory(maybecx);
+            }
             return nullptr;
         }
     }
 
     pos = wholeChars;
     first_visit_node: {
         if (b == WithIncrementalBarrier) {
             JSString::writeBarrierPre(str->d.s.u2.left);
@@ -637,143 +665,159 @@ JSRope::flattenInternal(JSContext* maybe
         CopyChars(pos, right.asLinear());
         pos += right.length();
     }
 
     finish_node: {
         if (str == this) {
             MOZ_ASSERT(pos == wholeChars + wholeLength);
             *pos = '\0';
-            if (IsSame<CharT, char16_t>::value)
+            if (IsSame<CharT, char16_t>::value) {
                 str->setLengthAndFlags(wholeLength, EXTENSIBLE_FLAGS);
-            else
+            } else {
                 str->setLengthAndFlags(wholeLength, EXTENSIBLE_FLAGS | LATIN1_CHARS_BIT);
+            }
             str->setNonInlineChars(wholeChars);
             str->d.s.u3.capacity = wholeCapacity;
             return &this->asFlat();
         }
         uintptr_t flattenData;
         uint32_t len = pos - str->nonInlineCharsRaw<CharT>();
-        if (IsSame<CharT, char16_t>::value)
+        if (IsSame<CharT, char16_t>::value) {
             flattenData = str->unsetFlattenData(len, DEPENDENT_FLAGS);
-        else
+        } else {
             flattenData = str->unsetFlattenData(len, DEPENDENT_FLAGS | LATIN1_CHARS_BIT);
+        }
         str->d.s.u3.base = (JSLinearString*)this;       /* will be true on exit */
 
         // Every interior (rope) node in the rope's tree will be visited during
         // the traversal and post-barriered here, so earlier additions of
         // dependent.base -> root pointers are handled by this barrier as well.
         //
         // The only time post-barriers need do anything is when the root is in
         // the nursery. Note that the root was a rope but will be an extensible
         // string when we return, so it will not point to any strings and need
         // not be barriered.
         gc::StoreBuffer* bufferIfNursery = storeBuffer();
-        if (bufferIfNursery && str->isTenured())
+        if (bufferIfNursery && str->isTenured()) {
             bufferIfNursery->putWholeCell(str);
+        }
 
         str = (JSString*)(flattenData & ~Tag_Mask);
-        if ((flattenData & Tag_Mask) == Tag_VisitRightChild)
+        if ((flattenData & Tag_Mask) == Tag_VisitRightChild) {
             goto visit_right_child;
+        }
         MOZ_ASSERT((flattenData & Tag_Mask) == Tag_FinishNode);
         goto finish_node;
     }
 }
 
 template<JSRope::UsingBarrier b>
 JSFlatString*
 JSRope::flattenInternal(JSContext* maybecx)
 {
-    if (hasTwoByteChars())
+    if (hasTwoByteChars()) {
         return flattenInternal<b, char16_t>(maybecx);
+    }
     return flattenInternal<b, Latin1Char>(maybecx);
 }
 
 JSFlatString*
 JSRope::flatten(JSContext* maybecx)
 {
     mozilla::Maybe<AutoGeckoProfilerEntry> entry;
-    if (maybecx && !maybecx->helperThread())
+    if (maybecx && !maybecx->helperThread()) {
         entry.emplace(maybecx, "JSRope::flatten");
+    }
 
-    if (zone()->needsIncrementalBarrier())
+    if (zone()->needsIncrementalBarrier()) {
         return flattenInternal<WithIncrementalBarrier>(maybecx);
+    }
     return flattenInternal<NoBarrier>(maybecx);
 }
 
 template <AllowGC allowGC>
 static JSLinearString*
 EnsureLinear(JSContext* cx, typename MaybeRooted<JSString*, allowGC>::HandleType string)
 {
     JSLinearString* linear = string->ensureLinear(cx);
     // Don't report an exception if GC is not allowed, just return nullptr.
-    if (!linear && !allowGC)
+    if (!linear && !allowGC) {
         cx->recoverFromOutOfMemory();
+    }
     return linear;
 }
 
 template <AllowGC allowGC>
 JSString*
 js::ConcatStrings(JSContext* cx,
                   typename MaybeRooted<JSString*, allowGC>::HandleType left,
                   typename MaybeRooted<JSString*, allowGC>::HandleType right)
 {
     MOZ_ASSERT_IF(!left->isAtom(), cx->isInsideCurrentZone(left));
     MOZ_ASSERT_IF(!right->isAtom(), cx->isInsideCurrentZone(right));
 
     size_t leftLen = left->length();
-    if (leftLen == 0)
+    if (leftLen == 0) {
         return right;
+    }
 
     size_t rightLen = right->length();
-    if (rightLen == 0)
+    if (rightLen == 0) {
         return left;
+    }
 
     size_t wholeLength = leftLen + rightLen;
     if (MOZ_UNLIKELY(wholeLength > JSString::MAX_LENGTH)) {
         // Don't report an exception if GC is not allowed, just return nullptr.
-        if (allowGC)
+        if (allowGC) {
             js::ReportAllocationOverflow(cx);
+        }
         return nullptr;
     }
 
     bool isLatin1 = left->hasLatin1Chars() && right->hasLatin1Chars();
     bool canUseInline = isLatin1
                         ? JSInlineString::lengthFits<Latin1Char>(wholeLength)
                         : JSInlineString::lengthFits<char16_t>(wholeLength);
     if (canUseInline) {
         Latin1Char* latin1Buf = nullptr;  // initialize to silence GCC warning
         char16_t* twoByteBuf = nullptr;  // initialize to silence GCC warning
         JSInlineString* str = isLatin1
             ? AllocateInlineString<allowGC>(cx, wholeLength, &latin1Buf)
             : AllocateInlineString<allowGC>(cx, wholeLength, &twoByteBuf);
-        if (!str)
+        if (!str) {
             return nullptr;
+        }
 
         AutoCheckCannotGC nogc;
         JSLinearString* leftLinear = EnsureLinear<allowGC>(cx, left);
-        if (!leftLinear)
+        if (!leftLinear) {
             return nullptr;
+        }
         JSLinearString* rightLinear = EnsureLinear<allowGC>(cx, right);
-        if (!rightLinear)
+        if (!rightLinear) {
             return nullptr;
+        }
 
         if (isLatin1) {
             PodCopy(latin1Buf, leftLinear->latin1Chars(nogc), leftLen);
             PodCopy(latin1Buf + leftLen, rightLinear->latin1Chars(nogc), rightLen);
             latin1Buf[wholeLength] = 0;
         } else {
-            if (leftLinear->hasTwoByteChars())
+            if (leftLinear->hasTwoByteChars()) {
                 PodCopy(twoByteBuf, leftLinear->twoByteChars(nogc), leftLen);
-            else
+            } else {
                 CopyAndInflateChars(twoByteBuf, leftLinear->latin1Chars(nogc), leftLen);
-            if (rightLinear->hasTwoByteChars())
+            }
+            if (rightLinear->hasTwoByteChars()) {
                 PodCopy(twoByteBuf + leftLen, rightLinear->twoByteChars(nogc), rightLen);
-            else
+            } else {
                 CopyAndInflateChars(twoByteBuf + leftLen, rightLinear->latin1Chars(nogc), rightLen);
+            }
             twoByteBuf[wholeLength] = 0;
         }
 
         return str;
     }
 
     return JSRope::new_<allowGC>(cx, left, right, wholeLength);
 }
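
js::ConcatStrings above chooses between three outcomes: reuse one operand when the other is empty, copy both operands into an inline string when the combined length fits, and otherwise defer all copying by allocating a rope node. A sketch of just that decision (illustrative only; the function, enum, and parameters are assumptions for the example, and the real inline capacity depends on whether the result is Latin-1 or two-byte):

    #include <cstddef>

    enum class ConcatPath { ReuseRight, ReuseLeft, Overflow, InlineCopy, Rope };

    ConcatPath ChooseConcatPath(std::size_t leftLen, std::size_t rightLen,
                                std::size_t maxLength, std::size_t inlineCapacity) {
        if (leftLen == 0) {
            return ConcatPath::ReuseRight;   // "" + s is just s
        }
        if (rightLen == 0) {
            return ConcatPath::ReuseLeft;    // s + "" is just s
        }
        std::size_t wholeLength = leftLen + rightLen;
        if (wholeLength > maxLength) {
            return ConcatPath::Overflow;     // report allocation overflow, fail
        }
        if (wholeLength <= inlineCapacity) {
            return ConcatPath::InlineCopy;   // eagerly copy both sides inline
        }
        return ConcatPath::Rope;             // lazy: flattening happens on demand
    }
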
@@ -785,18 +829,19 @@ template JSString*
 js::ConcatStrings<NoGC>(JSContext* cx, JSString* const& left, JSString* const& right);
 
 template <typename CharT>
 JSFlatString*
 JSDependentString::undependInternal(JSContext* cx)
 {
     size_t n = length();
     auto s = cx->make_pod_array<CharT>(n + 1);
-    if (!s)
+    if (!s) {
         return nullptr;
+    }
 
     if (!isTenured()) {
         if (!cx->runtime()->gc.nursery().registerMallocedBuffer(s.get())) {
             ReportOutOfMemory(cx);
             return nullptr;
         }
     }
 
@@ -804,20 +849,21 @@ JSDependentString::undependInternal(JSCo
     PodCopy(s.get(), nonInlineChars<CharT>(nogc), n);
     s[n] = '\0';
     setNonInlineChars<CharT>(s.release());
 
     /*
      * Transform *this into an undepended string so 'base' will remain rooted
      * for the benefit of any other dependent string that depends on *this.
      */
-    if (IsSame<CharT, Latin1Char>::value)
+    if (IsSame<CharT, Latin1Char>::value) {
         setLengthAndFlags(n, UNDEPENDED_FLAGS | LATIN1_CHARS_BIT);
-    else
+    } else {
         setLengthAndFlags(n, UNDEPENDED_FLAGS);
+    }
 
     return &this->asFlat();
 }
 
 JSFlatString*
 JSDependentString::undepend(JSContext* cx)
 {
     MOZ_ASSERT(JSString::isDependent());
@@ -828,64 +874,69 @@ JSDependentString::undepend(JSContext* c
 
 #if defined(DEBUG) || defined(JS_JITSPEW)
 void
 JSDependentString::dumpRepresentation(js::GenericPrinter& out, int indent) const
 {
     dumpRepresentationHeader(out, "JSDependentString");
     indent += 2;
 
-    if (mozilla::Maybe<size_t> offset = baseOffset())
+    if (mozilla::Maybe<size_t> offset = baseOffset()) {
         out.printf("%*soffset: %zu\n", indent, "", *offset);
+    }
 
     out.printf("%*sbase: ", indent, "");
     base()->dumpRepresentation(out, indent);
 }
 #endif
 
 bool
 js::EqualChars(JSLinearString* str1, JSLinearString* str2)
 {
     MOZ_ASSERT(str1->length() == str2->length());
 
     size_t len = str1->length();
 
     AutoCheckCannotGC nogc;
     if (str1->hasTwoByteChars()) {
-        if (str2->hasTwoByteChars())
+        if (str2->hasTwoByteChars()) {
             return ArrayEqual(str1->twoByteChars(nogc), str2->twoByteChars(nogc), len);
+        }
 
         return EqualChars(str2->latin1Chars(nogc), str1->twoByteChars(nogc), len);
     }
 
-    if (str2->hasLatin1Chars())
+    if (str2->hasLatin1Chars()) {
         return ArrayEqual(str1->latin1Chars(nogc), str2->latin1Chars(nogc), len);
+    }
 
     return EqualChars(str1->latin1Chars(nogc), str2->twoByteChars(nogc), len);
 }
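
js::EqualChars above collapses the four character-width combinations into three comparisons: same-width buffers go through ArrayEqual, and mixed Latin-1/two-byte buffers go through an element-by-element widening compare. A sketch of the mixed-width case (illustrative only; the name and the unsigned char parameter are stand-ins for the EqualChars overload used above):

    #include <cstddef>

    bool EqualCharsMixedSketch(const unsigned char* latin1, const char16_t* twoByte,
                               std::size_t len) {
        for (std::size_t i = 0; i < len; i++) {
            if (char16_t(latin1[i]) != twoByte[i]) {
                return false;
            }
        }
        return true;
    }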
 
 bool
 js::HasSubstringAt(JSLinearString* text, JSLinearString* pat, size_t start)
 {
     MOZ_ASSERT(start + pat->length() <= text->length());
 
     size_t patLen = pat->length();
 
     AutoCheckCannotGC nogc;
     if (text->hasLatin1Chars()) {
         const Latin1Char* textChars = text->latin1Chars(nogc) + start;
-        if (pat->hasLatin1Chars())
+        if (pat->hasLatin1Chars()) {
             return ArrayEqual(textChars, pat->latin1Chars(nogc), patLen);
+        }
 
         return EqualChars(textChars, pat->twoByteChars(nogc), patLen);
     }
 
     const char16_t* textChars = text->twoByteChars(nogc) + start;
-    if (pat->hasTwoByteChars())
+    if (pat->hasTwoByteChars()) {
         return ArrayEqual(textChars, pat->twoByteChars(nogc), patLen);
+    }
 
     return EqualChars(pat->latin1Chars(nogc), textChars, patLen);
 }
 
 bool
 js::EqualStrings(JSContext* cx, JSString* str1, JSString* str2, bool* result)
 {
     if (str1 == str2) {
@@ -895,35 +946,39 @@ js::EqualStrings(JSContext* cx, JSString
 
     size_t length1 = str1->length();
     if (length1 != str2->length()) {
         *result = false;
         return true;
     }
 
     JSLinearString* linear1 = str1->ensureLinear(cx);
-    if (!linear1)
+    if (!linear1) {
         return false;
+    }
     JSLinearString* linear2 = str2->ensureLinear(cx);
-    if (!linear2)
+    if (!linear2) {
         return false;
+    }
 
     *result = EqualChars(linear1, linear2);
     return true;
 }
 
 bool
 js::EqualStrings(JSLinearString* str1, JSLinearString* str2)
 {
-    if (str1 == str2)
+    if (str1 == str2) {
         return true;
+    }
 
     size_t length1 = str1->length();
-    if (length1 != str2->length())
+    if (length1 != str2->length()) {
         return false;
+    }
 
     return EqualChars(str1, str2);
 }
 
 int32_t
 js::CompareChars(const char16_t* s1, size_t len1, JSLinearString* s2)
 {
     AutoCheckCannotGC nogc;
@@ -959,63 +1014,69 @@ js::CompareStrings(JSContext* cx, JSStri
     MOZ_ASSERT(str2);
 
     if (str1 == str2) {
         *result = 0;
         return true;
     }
 
     JSLinearString* linear1 = str1->ensureLinear(cx);
-    if (!linear1)
+    if (!linear1) {
         return false;
+    }
 
     JSLinearString* linear2 = str2->ensureLinear(cx);
-    if (!linear2)
+    if (!linear2) {
         return false;
+    }
 
     *result = CompareStringsImpl(linear1, linear2);
     return true;
 }
 
 int32_t
 js::CompareAtoms(JSAtom* atom1, JSAtom* atom2)
 {
     return CompareStringsImpl(atom1, atom2);
 }
 
 bool
 js::StringEqualsAscii(JSLinearString* str, const char* asciiBytes)
 {
     size_t length = strlen(asciiBytes);
 #ifdef DEBUG
-    for (size_t i = 0; i != length; ++i)
+    for (size_t i = 0; i != length; ++i) {
         MOZ_ASSERT(unsigned(asciiBytes[i]) <= 127);
+    }
 #endif
-    if (length != str->length())
+    if (length != str->length()) {
         return false;
+    }
 
     const Latin1Char* latin1 = reinterpret_cast<const Latin1Char*>(asciiBytes);
 
     AutoCheckCannotGC nogc;
     return str->hasLatin1Chars()
            ? ArrayEqual(latin1, str->latin1Chars(nogc), length)
            : EqualChars(latin1, str->twoByteChars(nogc), length);
 }
 
 template <typename CharT>
 /* static */ bool
 JSFlatString::isIndexSlow(const CharT* s, size_t length, uint32_t* indexp)
 {
     CharT ch = *s;
 
-    if (!IsAsciiDigit(ch))
+    if (!IsAsciiDigit(ch)) {
         return false;
+    }
 
-    if (length > UINT32_CHAR_BUFFER_LENGTH)
+    if (length > UINT32_CHAR_BUFFER_LENGTH) {
         return false;
+    }
 
     /*
      * Make sure to account for the '\0' at the end of characters, dereferenced
      * in the loop below.
      */
     RangedPtr<const CharT> cp(s, length + 1);
     const RangedPtr<const CharT> end(s + length, s, length + 1);
 
@@ -1028,18 +1089,19 @@ JSFlatString::isIndexSlow(const CharT* s
             oldIndex = index;
             c = JS7_UNDEC(*cp);
             index = 10 * index + c;
             cp++;
         }
     }
 
     /* It's not an element if there are characters after the number. */
-    if (cp != end)
+    if (cp != end) {
         return false;
+    }
 
     /*
      * Look out for "4294967296" and larger-number strings that fit in
      * UINT32_CHAR_BUFFER_LENGTH: only unsigned 32-bit integers shall pass.
      */
     if (oldIndex < UINT32_MAX / 10 || (oldIndex == UINT32_MAX / 10 && c <= (UINT32_MAX % 10))) {
         *indexp = index;
         return true;
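
The guard above is the only overflow check the digit loop needs: the length test a few lines up caps the number of digits at UINT32_CHAR_BUFFER_LENGTH, so in practice only the final "multiply by 10 and add" step can wrap past UINT32_MAX. A sketch of that per-step check in isolation (illustrative only; the helper name is an assumption):

    #include <cstdint>

    bool AppendDigitChecked(uint32_t oldIndex, uint32_t digit, uint32_t* out) {
        if (oldIndex < UINT32_MAX / 10 ||
            (oldIndex == UINT32_MAX / 10 && digit <= UINT32_MAX % 10)) {
            *out = oldIndex * 10 + digit;   // cannot exceed UINT32_MAX
            return true;
        }
        return false;                       // e.g. "4294967296" is rejected here
    }
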
@@ -1101,27 +1163,29 @@ StaticStrings::init(JSContext* cx)
     static_assert(UNIT_STATIC_LIMIT - 1 <= JSString::MAX_LATIN1_CHAR,
                   "Unit strings must fit in Latin1Char.");
 
     using Latin1Range = mozilla::Range<const Latin1Char>;
 
     for (uint32_t i = 0; i < UNIT_STATIC_LIMIT; i++) {
         Latin1Char buffer[] = { Latin1Char(i), '\0' };
         JSFlatString* s = NewInlineString<NoGC>(cx, Latin1Range(buffer, 1));
-        if (!s)
+        if (!s) {
             return false;
+        }
         HashNumber hash = mozilla::HashString(buffer, 1);
         unitStaticTable[i] = s->morphAtomizedStringIntoPermanentAtom(hash);
     }
 
     for (uint32_t i = 0; i < NUM_SMALL_CHARS * NUM_SMALL_CHARS; i++) {
         Latin1Char buffer[] = { FROM_SMALL_CHAR(i >> 6), FROM_SMALL_CHAR(i & 0x3F), '\0' };
         JSFlatString* s = NewInlineString<NoGC>(cx, Latin1Range(buffer, 2));
-        if (!s)
+        if (!s) {
             return false;
+        }
         HashNumber hash = mozilla::HashString(buffer, 2);
         length2StaticTable[i] = s->morphAtomizedStringIntoPermanentAtom(hash);
     }
 
     for (uint32_t i = 0; i < INT_STATIC_LIMIT; i++) {
         if (i < 10) {
             intStaticTable[i] = unitStaticTable[i + '0'];
         } else if (i < 100) {
@@ -1129,18 +1193,19 @@ StaticStrings::init(JSContext* cx)
                 TO_SMALL_CHAR((i % 10) + '0');
             intStaticTable[i] = length2StaticTable[index];
         } else {
             Latin1Char buffer[] = { Latin1Char('0' + (i / 100)),
                                     Latin1Char('0' + ((i / 10) % 10)),
                                     Latin1Char('0' + (i % 10)),
                                     '\0' };
             JSFlatString* s = NewInlineString<NoGC>(cx, Latin1Range(buffer, 3));
-            if (!s)
+            if (!s) {
                 return false;
+            }
             HashNumber hash = mozilla::HashString(buffer, 3);
             intStaticTable[i] = s->morphAtomizedStringIntoPermanentAtom(hash);
         }
 
         // Static string initialization can not race, so allow even without the lock.
         intStaticTable[i]->maybeInitializeIndex(i, true);
     }
 
@@ -1154,25 +1219,28 @@ TraceStaticString(JSTracer* trc, JSAtom*
     TraceProcessGlobalRoot(trc, atom, name);
 }
 
 void
 StaticStrings::trace(JSTracer* trc)
 {
     /* These strings never change, so barriers are not needed. */
 
-    for (uint32_t i = 0; i < UNIT_STATIC_LIMIT; i++)
+    for (uint32_t i = 0; i < UNIT_STATIC_LIMIT; i++) {
         TraceStaticString(trc, unitStaticTable[i], "unit-static-string");
+    }
 
-    for (uint32_t i = 0; i < NUM_SMALL_CHARS * NUM_SMALL_CHARS; i++)
+    for (uint32_t i = 0; i < NUM_SMALL_CHARS * NUM_SMALL_CHARS; i++) {
         TraceStaticString(trc, length2StaticTable[i], "length2-static-string");
+    }
 
     /* This may mark some strings more than once, but so be it. */
-    for (uint32_t i = 0; i < INT_STATIC_LIMIT; i++)
+    for (uint32_t i = 0; i < INT_STATIC_LIMIT; i++) {
         TraceStaticString(trc, intStaticTable[i], "int-static-string");
+    }
 }
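
The three tables traced above are the unit strings (one Latin-1 code unit each), the length-2 strings, and the small integers. The length-2 table is indexed by packing two 6-bit "small char" codes, which is why init() unpacks an index with FROM_SMALL_CHAR(i >> 6) and FROM_SMALL_CHAR(i & 0x3F). A sketch of that packing (illustrative only; the helper names are stand-ins for the real TO_SMALL_CHAR / FROM_SMALL_CHAR macros):

    #include <cstdint>

    // Each "small char" code is < 64, so two of them fit in a 12-bit table index.
    constexpr uint32_t PackLength2Index(uint32_t first, uint32_t second) {
        return (first << 6) | second;
    }
    constexpr uint32_t FirstSmallChar(uint32_t index) { return index >> 6; }
    constexpr uint32_t SecondSmallChar(uint32_t index) { return index & 0x3F; }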
 
 template <typename CharT>
 /* static */ bool
 StaticStrings::isStatic(const CharT* chars, size_t length)
 {
     switch (length) {
       case 1: {
@@ -1205,23 +1273,25 @@ StaticStrings::isStatic(JSAtom* atom)
            ? isStatic(atom->latin1Chars(nogc), atom->length())
            : isStatic(atom->twoByteChars(nogc), atom->length());
 }
 
 bool
 AutoStableStringChars::init(JSContext* cx, JSString* s)
 {
     RootedLinearString linearString(cx, s->ensureLinear(cx));
-    if (!linearString)
+    if (!linearString) {
         return false;
+    }
 
     MOZ_ASSERT(state_ == Uninitialized);
 
-    if (linearString->isExternal() && !linearString->ensureFlat(cx))
+    if (linearString->isExternal() && !linearString->ensureFlat(cx)) {
         return false;
+    }
 
     // If the chars are inline then we need to copy them since they may be moved
     // by a compacting GC.
     if (baseIsInline(linearString)) {
         return linearString->hasTwoByteChars() ? copyTwoByteChars(cx, linearString)
                                                : copyLatin1Chars(cx, linearString);
     }
 
@@ -1236,43 +1306,48 @@ AutoStableStringChars::init(JSContext* c
     s_ = linearString;
     return true;
 }
 
 bool
 AutoStableStringChars::initTwoByte(JSContext* cx, JSString* s)