Bug 1286948 - Renames Profiling to Instrumentation for prolog/epilogue. draft
authorYury Delendik <ydelendik@mozilla.com>
Fri, 07 Oct 2016 18:21:23 -0500
changeset 422584 0949d27b62946dd1a5c9b53bf7490b59e7d1287a
parent 422256 49fe455cac957808ed4a5d1685c3a1938dac1d31
child 422585 03af032f7c8cde6f5af9434757b36858c83f0e13
push id31754
push userydelendik@mozilla.com
push dateFri, 07 Oct 2016 23:25:30 +0000
bugs1286948
milestone52.0a1
Bug 1286948 - Renames Profiling to Instrumentation for prolog/epilogue. We will use the same prolog/epilogue to track frame pointer and return address for profiling and during debugging. Reflecting the same in the code and comments by using "instrumentation" vs "profile". MozReview-Commit-ID: APa1j1haLZd
js/src/asmjs/WasmCode.cpp
js/src/asmjs/WasmCode.h
js/src/asmjs/WasmFrameIterator.cpp
js/src/asmjs/WasmFrameIterator.h
js/src/asmjs/WasmGenerator.cpp
js/src/asmjs/WasmInstance.cpp
js/src/asmjs/WasmModule.cpp
js/src/asmjs/WasmStubs.cpp
js/src/asmjs/WasmStubs.h
js/src/asmjs/WasmTypes.h
--- a/js/src/asmjs/WasmCode.cpp
+++ b/js/src/asmjs/WasmCode.cpp
@@ -332,69 +332,69 @@ FuncImport::deserialize(const uint8_t* c
 size_t
 FuncImport::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
 {
     return sig_.sizeOfExcludingThis(mallocSizeOf);
 }
 
 CodeRange::CodeRange(Kind kind, Offsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(0),
+    instrumentationReturn_(0),
     end_(offsets.end),
     funcDefIndex_(0),
     funcLineOrBytecode_(0),
     funcBeginToTableEntry_(0),
-    funcBeginToTableProfilingJump_(0),
-    funcBeginToNonProfilingEntry_(0),
-    funcProfilingJumpToProfilingReturn_(0),
-    funcProfilingEpilogueToProfilingReturn_(0),
+    funcBeginToTableInstrumentationJump_(0),
+    funcBeginToNonInstrumentationEntry_(0),
+    funcInstrumentationJumpToInstrumentationReturn_(0),
+    funcInstrumentationEpilogueToInstrumentationReturn_(0),
     kind_(kind)
 {
     MOZ_ASSERT(begin_ <= end_);
     MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == CallThunk);
 }
 
-CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
+CodeRange::CodeRange(Kind kind, InstrumentationOffsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(offsets.profilingReturn),
+    instrumentationReturn_(offsets.instrumentationReturn),
     end_(offsets.end),
     funcDefIndex_(0),
     funcLineOrBytecode_(0),
     funcBeginToTableEntry_(0),
-    funcBeginToTableProfilingJump_(0),
-    funcBeginToNonProfilingEntry_(0),
-    funcProfilingJumpToProfilingReturn_(0),
-    funcProfilingEpilogueToProfilingReturn_(0),
+    funcBeginToTableInstrumentationJump_(0),
+    funcBeginToNonInstrumentationEntry_(0),
+    funcInstrumentationJumpToInstrumentationReturn_(0),
+    funcInstrumentationEpilogueToInstrumentationReturn_(0),
     kind_(kind)
 {
-    MOZ_ASSERT(begin_ < profilingReturn_);
-    MOZ_ASSERT(profilingReturn_ < end_);
+    MOZ_ASSERT(begin_ < instrumentationReturn_);
+    MOZ_ASSERT(instrumentationReturn_ < end_);
     MOZ_ASSERT(kind_ == ImportJitExit || kind_ == ImportInterpExit);
 }
 
 CodeRange::CodeRange(uint32_t funcDefIndex, uint32_t funcLineOrBytecode, FuncOffsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(offsets.profilingReturn),
+    instrumentationReturn_(offsets.instrumentationReturn),
     end_(offsets.end),
     funcDefIndex_(funcDefIndex),
     funcLineOrBytecode_(funcLineOrBytecode),
     funcBeginToTableEntry_(offsets.tableEntry - begin_),
-    funcBeginToTableProfilingJump_(offsets.tableProfilingJump - begin_),
-    funcBeginToNonProfilingEntry_(offsets.nonProfilingEntry - begin_),
-    funcProfilingJumpToProfilingReturn_(profilingReturn_ - offsets.profilingJump),
-    funcProfilingEpilogueToProfilingReturn_(profilingReturn_ - offsets.profilingEpilogue),
+    funcBeginToTableInstrumentationJump_(offsets.tableInstrumentationJump - begin_),
+    funcBeginToNonInstrumentationEntry_(offsets.nonInstrumentationEntry - begin_),
+    funcInstrumentationJumpToInstrumentationReturn_(instrumentationReturn_ - offsets.instrumentationJump),
+    funcInstrumentationEpilogueToInstrumentationReturn_(instrumentationReturn_ - offsets.instrumentationEpilogue),
     kind_(Function)
 {
-    MOZ_ASSERT(begin_ < profilingReturn_);
-    MOZ_ASSERT(profilingReturn_ < end_);
+    MOZ_ASSERT(begin_ < instrumentationReturn_);
+    MOZ_ASSERT(instrumentationReturn_ < end_);
     MOZ_ASSERT(funcBeginToTableEntry_ == offsets.tableEntry - begin_);
-    MOZ_ASSERT(funcBeginToTableProfilingJump_ == offsets.tableProfilingJump - begin_);
-    MOZ_ASSERT(funcBeginToNonProfilingEntry_ == offsets.nonProfilingEntry - begin_);
-    MOZ_ASSERT(funcProfilingJumpToProfilingReturn_ == profilingReturn_ - offsets.profilingJump);
-    MOZ_ASSERT(funcProfilingEpilogueToProfilingReturn_ == profilingReturn_ - offsets.profilingEpilogue);
+    MOZ_ASSERT(funcBeginToTableInstrumentationJump_ == offsets.tableInstrumentationJump - begin_);
+    MOZ_ASSERT(funcBeginToNonInstrumentationEntry_ == offsets.nonInstrumentationEntry - begin_);
+    MOZ_ASSERT(funcInstrumentationJumpToInstrumentationReturn_ == instrumentationReturn_ - offsets.instrumentationJump);
+    MOZ_ASSERT(funcInstrumentationEpilogueToInstrumentationReturn_ == instrumentationReturn_ - offsets.instrumentationEpilogue);
 }
 
 static size_t
 StringLengthWithNullChar(const char* chars)
 {
     return chars ? strlen(chars) + 1 : 0;
 }
 
@@ -584,16 +584,17 @@ Metadata::getFuncDefName(JSContext* cx, 
 }
 
 Code::Code(UniqueCodeSegment segment,
            const Metadata& metadata,
            const ShareableBytes* maybeBytecode)
   : segment_(Move(segment)),
     metadata_(&metadata),
     maybeBytecode_(maybeBytecode),
+    instrumentationModeCounter_(0),
     profilingEnabled_(false)
 {}
 
 struct CallSiteRetAddrOffset
 {
     const CallSiteVector& callSites;
     explicit CallSiteRetAddrOffset(const CallSiteVector& callSites) : callSites(callSites) {}
     uint32_t operator[](size_t index) const {
@@ -811,31 +812,58 @@ Code::ensureProfilingState(JSContext* cx
         }
     } else {
         funcLabels_.clear();
     }
 
     // Only mutate the code after the fallible operations are complete to avoid
     // the need to rollback.
     profilingEnabled_ = newProfilingEnabled;
+    if (newProfilingEnabled)
+        incrementInstrumentationMode(cx);
+    else
+        decrementInstrumentationMode(cx);
+    return true;
+}
 
-    {
-        AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
-        AutoFlushICache afc("Code::ensureProfilingState");
-        AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
+void
+Code::incrementInstrumentationMode(JSContext* cx)
+{
+    if (instrumentationModeCounter_++)
+        return;
+
+    AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
+    AutoFlushICache afc("Code::incrementInstrumentationMode");
+    AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
 
-        for (const CallSite& callSite : metadata_->callSites)
-            ToggleProfiling(*this, callSite, newProfilingEnabled);
-        for (const CallThunk& callThunk : metadata_->callThunks)
-            ToggleProfiling(*this, callThunk, newProfilingEnabled);
-        for (const CodeRange& codeRange : metadata_->codeRanges)
-            ToggleProfiling(*this, codeRange, newProfilingEnabled);
-    }
+    for (const CallSite& callSite : metadata_->callSites)
+        ToggleInstrumentation(*this, callSite, true);
+    for (const CallThunk& callThunk : metadata_->callThunks)
+        ToggleInstrumentation(*this, callThunk, true);
+    for (const CodeRange& codeRange : metadata_->codeRanges)
+        ToggleInstrumentation(*this, codeRange, true);
+}
 
-    return true;
+void
+Code::decrementInstrumentationMode(JSContext* cx)
+{
+    MOZ_ASSERT(instrumentationModeCounter_ > 0);
+    if (--instrumentationModeCounter_)
+        return;
+
+    AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
+    AutoFlushICache afc("Code::decrementInstrumentationMode");
+    AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
+
+    for (const CallSite& callSite : metadata_->callSites)
+        ToggleInstrumentation(*this, callSite, false);
+    for (const CallThunk& callThunk : metadata_->callThunks)
+        ToggleInstrumentation(*this, callThunk, false);
+    for (const CodeRange& codeRange : metadata_->codeRanges)
+        ToggleInstrumentation(*this, codeRange, false);
 }
 
 void
 Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                     Metadata::SeenSet* seenMetadata,
                     ShareableBytes::SeenSet* seenBytes,
                     size_t* code,
                     size_t* data) const
--- a/js/src/asmjs/WasmCode.h
+++ b/js/src/asmjs/WasmCode.h
@@ -52,19 +52,16 @@ class CodeSegment
     uint32_t globalDataLength_;
 
     // These are pointers into code for stubs used for asynchronous
     // signal-handler control-flow transfer.
     uint8_t* interruptCode_;
     uint8_t* outOfBoundsCode_;
     uint8_t* unalignedAccessCode_;
 
-    // The profiling mode may be changed dynamically.
-    bool profilingEnabled_;
-
     CodeSegment() { PodZero(this); }
     template <class> friend struct js::MallocProvider;
 
     CodeSegment(const CodeSegment&) = delete;
     CodeSegment(CodeSegment&&) = delete;
     void operator=(const CodeSegment&) = delete;
     void operator=(CodeSegment&&) = delete;
 
@@ -235,31 +232,31 @@ typedef Vector<FuncImport, 0, SystemAllo
 class CodeRange
 {
   public:
     enum Kind { Function, Entry, ImportJitExit, ImportInterpExit, Inline, CallThunk };
 
   private:
     // All fields are treated as cacheable POD:
     uint32_t begin_;
-    uint32_t profilingReturn_;
+    uint32_t instrumentationReturn_;
     uint32_t end_;
     uint32_t funcDefIndex_;
     uint32_t funcLineOrBytecode_;
     uint8_t funcBeginToTableEntry_;
-    uint8_t funcBeginToTableProfilingJump_;
-    uint8_t funcBeginToNonProfilingEntry_;
-    uint8_t funcProfilingJumpToProfilingReturn_;
-    uint8_t funcProfilingEpilogueToProfilingReturn_;
+    uint8_t funcBeginToTableInstrumentationJump_;
+    uint8_t funcBeginToNonInstrumentationEntry_;
+    uint8_t funcInstrumentationJumpToInstrumentationReturn_;
+    uint8_t funcInstrumentationEpilogueToInstrumentationReturn_;
     Kind kind_ : 8;
 
   public:
     CodeRange() = default;
     CodeRange(Kind kind, Offsets offsets);
-    CodeRange(Kind kind, ProfilingOffsets offsets);
+    CodeRange(Kind kind, InstrumentationOffsets offsets);
     CodeRange(uint32_t funcDefIndex, uint32_t lineOrBytecode, FuncOffsets offsets);
 
     // All CodeRanges have a begin and end.
 
     uint32_t begin() const {
         return begin_;
     }
     uint32_t end() const {
@@ -277,50 +274,50 @@ class CodeRange
     }
     bool isImportExit() const {
         return kind() == ImportJitExit || kind() == ImportInterpExit;
     }
     bool isInline() const {
         return kind() == Inline;
     }
 
-    // Every CodeRange except entry and inline stubs has a profiling return
-    // which is used for asynchronous profiling to determine the frame pointer.
+// Every CodeRange except entry and inline stubs has an instrumentation return
+    // which is used for asynchronous instrumentation to determine the frame pointer.
 
-    uint32_t profilingReturn() const {
+    uint32_t instrumentationReturn() const {
         MOZ_ASSERT(isFunction() || isImportExit());
-        return profilingReturn_;
+        return instrumentationReturn_;
     }
 
     // Functions have offsets which allow patching to selectively execute
-    // profiling prologues/epilogues.
+    // instrumentation prologues/epilogues.
 
-    uint32_t funcProfilingEntry() const {
+    uint32_t funcInstrumentationEntry() const {
         MOZ_ASSERT(isFunction());
         return begin();
     }
     uint32_t funcTableEntry() const {
         MOZ_ASSERT(isFunction());
         return begin_ + funcBeginToTableEntry_;
     }
-    uint32_t funcTableProfilingJump() const {
+    uint32_t funcTableInstrumentationJump() const {
         MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToTableProfilingJump_;
+        return begin_ + funcBeginToTableInstrumentationJump_;
     }
-    uint32_t funcNonProfilingEntry() const {
+    uint32_t funcNonInstrumentationEntry() const {
         MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToNonProfilingEntry_;
+        return begin_ + funcBeginToNonInstrumentationEntry_;
     }
-    uint32_t funcProfilingJump() const {
+    uint32_t funcInstrumentationJump() const {
         MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingJumpToProfilingReturn_;
+        return instrumentationReturn_ - funcInstrumentationJumpToInstrumentationReturn_;
     }
-    uint32_t funcProfilingEpilogue() const {
+    uint32_t funcInstrumentationEpilogue() const {
         MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingEpilogueToProfilingReturn_;
+        return instrumentationReturn_ - funcInstrumentationEpilogueToInstrumentationReturn_;
     }
     uint32_t funcDefIndex() const {
         MOZ_ASSERT(isFunction());
         return funcDefIndex_;
     }
     uint32_t funcLineOrBytecode() const {
         MOZ_ASSERT(isFunction());
         return funcLineOrBytecode_;
@@ -338,17 +335,17 @@ class CodeRange
             return offset < rhs.begin();
         }
     };
 };
 
 WASM_DECLARE_POD_VECTOR(CodeRange, CodeRangeVector)
 
 // A CallThunk describes the offset and target of thunks so that they may be
-// patched at runtime when profiling is toggled. Thunks are emitted to connect
+// patched at runtime when instrumentation is toggled. Thunks are emitted to connect
 // callsites that are too far away from callees to fit in a single call
 // instruction's relative offset.
 
 struct CallThunk
 {
     uint32_t offset;
     union {
         uint32_t funcDefIndex;
@@ -508,16 +505,17 @@ typedef RefPtr<const Metadata> SharedMet
 
 class Code
 {
     const UniqueCodeSegment  segment_;
     const SharedMetadata     metadata_;
     const SharedBytes        maybeBytecode_;
     UniqueGeneratedSourceMap maybeSourceMap_;
     CacheableCharsVector     funcLabels_;
+    uint32_t                 instrumentationModeCounter_;
     bool                     profilingEnabled_;
 
   public:
     Code(UniqueCodeSegment segment,
          const Metadata& metadata,
          const ShareableBytes* maybeBytecode);
 
     CodeSegment& segment() { return *segment_; }
@@ -559,16 +557,20 @@ class Code
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
                        Metadata::SeenSet* seenMetadata,
                        ShareableBytes::SeenSet* seenBytes,
                        size_t* code,
                        size_t* data) const;
 
     WASM_DECLARE_SERIALIZABLE(Code);
+
+private:
+    void incrementInstrumentationMode(JSContext* cx);
+    void decrementInstrumentationMode(JSContext* cx);
 };
 
 typedef UniquePtr<Code> UniqueCode;
 
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_code_h
--- a/js/src/asmjs/WasmFrameIterator.cpp
+++ b/js/src/asmjs/WasmFrameIterator.cpp
@@ -264,18 +264,18 @@ PushRetAddr(MacroAssembler& masm)
     // The x86/x64 call instruction pushes the return address.
 #endif
 }
 
 // Generate a prologue that maintains WasmActivation::fp as the virtual frame
 // pointer so that ProfilingFrameIterator can walk the stack at any pc in
 // generated code.
 static void
-GenerateProfilingPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                          ProfilingOffsets* offsets)
+GenerateInstrumentationPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
+                                InstrumentationOffsets* offsets)
 {
     Register scratch = ABINonArgReg0;
 
     // ProfilingFrameIterator needs to know the offsets of several key
     // instructions from entry. To save space, we make these offsets static
     // constants and assert that they match the actual codegen below. On ARM,
     // this requires AutoForbidPools to prevent a constant pool from being
     // randomly inserted between two instructions.
@@ -299,20 +299,20 @@ GenerateProfilingPrologue(MacroAssembler
 
     if (reason != ExitReason::None)
         masm.store32(Imm32(int32_t(reason)), Address(scratch, WasmActivation::offsetOfExitReason()));
 
     if (framePushed)
         masm.subFromStackPtr(Imm32(framePushed));
 }
 
-// Generate the inverse of GenerateProfilingPrologue.
+// Generate the inverse of GenerateInstrumentationPrologue.
 static void
-GenerateProfilingEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                          ProfilingOffsets* offsets)
+GenerateInstrumentationEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
+                                InstrumentationOffsets* offsets)
 {
     Register scratch = ABINonArgReturnReg0;
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
     defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
     Register scratch2 = ABINonArgReturnReg1;
 #endif
 
     if (framePushed)
@@ -321,17 +321,17 @@ GenerateProfilingEpilogue(MacroAssembler
     masm.loadWasmActivationFromTls(scratch);
 
     if (reason != ExitReason::None) {
         masm.store32(Imm32(int32_t(ExitReason::None)),
                      Address(scratch, WasmActivation::offsetOfExitReason()));
     }
 
     // ProfilingFrameIterator assumes fixed offsets of the last few
-    // instructions from profilingReturn, so AutoForbidPools to ensure that
+    // instructions from instrumentationReturn, so AutoForbidPools to ensure that
     // unintended instructions are not automatically inserted.
     {
 #if defined(JS_CODEGEN_ARM)
         AutoForbidPools afp(&masm, /* number of instructions in scope = */ 4);
 #endif
 
         // sp protects the stack from clobber via asynchronous signal handlers
         // and the async interrupt exit. Since activation.fp can be read at any
@@ -344,41 +344,41 @@ GenerateProfilingEpilogue(MacroAssembler
         DebugOnly<uint32_t> prePop = masm.currentOffset();
         masm.addToStackPtr(Imm32(sizeof(void *)));
         MOZ_ASSERT_IF(!masm.oom(), PostStorePrePopFP == masm.currentOffset() - prePop);
 #else
         masm.pop(Address(scratch, WasmActivation::offsetOfFP()));
         MOZ_ASSERT(PostStorePrePopFP == 0);
 #endif
 
-        offsets->profilingReturn = masm.currentOffset();
+        offsets->instrumentationReturn = masm.currentOffset();
         masm.ret();
     }
 }
 
-// In profiling mode, we need to maintain fp so that we can unwind the stack at
-// any pc. In non-profiling mode, the only way to observe WasmActivation::fp is
+// In instrumentation mode, we need to maintain fp so that we can unwind the stack at
+// any pc. In non-instrumentation mode, the only way to observe WasmActivation::fp is
 // to call out to C++ so, as an optimization, we don't update fp. To avoid
-// recompilation when the profiling mode is toggled, we generate both prologues
-// a priori and switch between prologues when the profiling mode is toggled.
-// Specifically, ToggleProfiling patches all callsites to either call the
-// profiling or non-profiling entry point.
+// recompilation when the instrumentation mode is toggled, we generate both prologues
+// a priori and switch between prologues when the instrumentation mode is toggled.
+// Specifically, ToggleInstrumentation patches all callsites to either call the
+// instrumentation or non-instrumentation entry point.
 void
 wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
                                FuncOffsets* offsets)
 {
 #if defined(JS_CODEGEN_ARM)
     // Flush pending pools so they do not get dumped between the 'begin' and
     // 'entry' offsets since the difference must be less than UINT8_MAX.
     masm.flushBuffer();
 #endif
 
     masm.haltingAlign(CodeAlignment);
 
-    GenerateProfilingPrologue(masm, framePushed, ExitReason::None, offsets);
+    GenerateInstrumentationPrologue(masm, framePushed, ExitReason::None, offsets);
     Label body;
     masm.jump(&body);
 
     // Generate table entry thunk:
     masm.haltingAlign(CodeAlignment);
     offsets->tableEntry = masm.currentOffset();
     switch (sigId.kind()) {
       case SigIdDesc::Kind::Global: {
@@ -390,76 +390,76 @@ wasm::GenerateFunctionPrologue(MacroAsse
       }
       case SigIdDesc::Kind::Immediate:
         masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, Imm32(sigId.immediate()),
                       JumpTarget::IndirectCallBadSig);
         break;
       case SigIdDesc::Kind::None:
         break;
     }
-    offsets->tableProfilingJump = masm.nopPatchableToNearJump().offset();
+    offsets->tableInstrumentationJump = masm.nopPatchableToNearJump().offset();
 
     // Generate normal prologue:
     masm.nopAlign(CodeAlignment);
-    offsets->nonProfilingEntry = masm.currentOffset();
+    offsets->nonInstrumentationEntry = masm.currentOffset();
     PushRetAddr(masm);
     masm.subFromStackPtr(Imm32(framePushed + AsmJSFrameBytesAfterReturnAddress));
 
     // Prologue join point, body begin:
     masm.bind(&body);
     masm.setFramePushed(framePushed);
 }
 
 // Similar to GenerateFunctionPrologue (see comment), we generate both a
-// profiling and non-profiling epilogue a priori. When the profiling mode is
-// toggled, ToggleProfiling patches the 'profiling jump' to either be a nop
-// (falling through to the normal prologue) or a jump (jumping to the profiling
+// instrumentation and non-instrumentation epilogue a priori. When the instrumentation mode is
+// toggled, ToggleInstrumentation patches the 'instrumentation jump' to either be a nop
+// (falling through to the normal epilogue) or a jump (jumping to the instrumentation
 // epilogue).
 void
 wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets)
 {
     MOZ_ASSERT(masm.framePushed() == framePushed);
 
 #if defined(JS_CODEGEN_ARM)
-    // Flush pending pools so they do not get dumped between the profilingReturn
-    // and profilingJump/profilingEpilogue offsets since the difference must be
+    // Flush pending pools so they do not get dumped between the instrumentationReturn
+    // and instrumentationJump/instrumentationEpilogue offsets since the difference must be
     // less than UINT8_MAX.
     masm.flushBuffer();
 #endif
 
-    // Generate a nop that is overwritten by a jump to the profiling epilogue
-    // when profiling is enabled.
-    offsets->profilingJump = masm.nopPatchableToNearJump().offset();
+    // Generate a nop that is overwritten by a jump to the instrumentation epilogue
+    // when instrumentation is enabled.
+    offsets->instrumentationJump = masm.nopPatchableToNearJump().offset();
 
     // Normal epilogue:
     masm.addToStackPtr(Imm32(framePushed + AsmJSFrameBytesAfterReturnAddress));
     masm.ret();
     masm.setFramePushed(0);
 
-    // Profiling epilogue:
-    offsets->profilingEpilogue = masm.currentOffset();
-    GenerateProfilingEpilogue(masm, framePushed, ExitReason::None, offsets);
+    // Instrumentation epilogue:
+    offsets->instrumentationEpilogue = masm.currentOffset();
+    GenerateInstrumentationEpilogue(masm, framePushed, ExitReason::None, offsets);
 }
 
 void
 wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                           ProfilingOffsets* offsets)
+                           InstrumentationOffsets* offsets)
 {
     masm.haltingAlign(CodeAlignment);
-    GenerateProfilingPrologue(masm, framePushed, reason, offsets);
+    GenerateInstrumentationPrologue(masm, framePushed, reason, offsets);
     masm.setFramePushed(framePushed);
 }
 
 void
 wasm::GenerateExitEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                           ProfilingOffsets* offsets)
+                           InstrumentationOffsets* offsets)
 {
     // Inverse of GenerateExitPrologue:
     MOZ_ASSERT(masm.framePushed() == framePushed);
-    GenerateProfilingEpilogue(masm, framePushed, reason, offsets);
+    GenerateInstrumentationEpilogue(masm, framePushed, reason, offsets);
     masm.setFramePushed(0);
 }
 
 /*****************************************************************************/
 // ProfilingFrameIterator
 
 ProfilingFrameIterator::ProfilingFrameIterator()
   : activation_(nullptr),
@@ -582,17 +582,17 @@ typedef JS::ProfilingFrameIterator::Regi
 static bool
 InThunk(const CodeRange& codeRange, uint32_t offsetInModule)
 {
     if (codeRange.kind() == CodeRange::CallThunk)
         return true;
 
     return codeRange.isFunction() &&
            offsetInModule >= codeRange.funcTableEntry() &&
-           offsetInModule < codeRange.funcNonProfilingEntry();
+           offsetInModule < codeRange.funcNonInstrumentationEntry();
 }
 
 ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation,
                                                const RegisterState& state)
   : activation_(&activation),
     code_(nullptr),
     codeRange_(nullptr),
     callerFP_(nullptr),
@@ -641,25 +641,25 @@ ProfilingFrameIterator::ProfilingFrameIt
         void** sp = (void**)state.sp;
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
         if (offsetInCodeRange < PushedRetAddr || InThunk(*codeRange, offsetInModule)) {
             // First instruction of the ARM/MIPS function; the return address is
             // still in lr and fp still holds the caller's fp.
             callerPC_ = state.lr;
             callerFP_ = fp;
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp - 2);
-        } else if (offsetInModule == codeRange->profilingReturn() - PostStorePrePopFP) {
+        } else if (offsetInModule == codeRange->instrumentationReturn() - PostStorePrePopFP) {
             // Second-to-last instruction of the ARM/MIPS function; fp points to
             // the caller's fp; have not yet popped AsmJSFrame.
             callerPC_ = ReturnAddressFromFP(sp);
             callerFP_ = CallerFPFromFP(sp);
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp);
         } else
 #endif
-        if (offsetInCodeRange < PushedFP || offsetInModule == codeRange->profilingReturn() ||
+        if (offsetInCodeRange < PushedFP || offsetInModule == codeRange->instrumentationReturn() ||
             InThunk(*codeRange, offsetInModule))
         {
             // The return address has been pushed on the stack but not fp; fp
             // still points to the caller's fp.
             callerPC_ = *sp;
             callerFP_ = fp;
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp - 1);
         } else if (offsetInCodeRange < StoredFP) {
@@ -789,17 +789,17 @@ ProfilingFrameIterator::label() const
 
     MOZ_CRASH("bad code range kind");
 }
 
 /*****************************************************************************/
 // Runtime patching to enable/disable profiling
 
 void
-wasm::ToggleProfiling(const Code& code, const CallSite& callSite, bool enabled)
+wasm::ToggleInstrumentation(const Code& code, const CallSite& callSite, bool enabled)
 {
     if (callSite.kind() != CallSite::Relative)
         return;
 
     uint8_t* callerRetAddr = code.segment().base() + callSite.returnAddressOffset();
 
 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
     void* callee = X86Encoding::GetRel32Target(callerRetAddr);
@@ -825,18 +825,18 @@ wasm::ToggleProfiling(const Code& code, 
 #else
 # error "Missing architecture"
 #endif
 
     const CodeRange* codeRange = code.lookupRange(callee);
     if (!codeRange->isFunction())
         return;
 
-    uint8_t* from = code.segment().base() + codeRange->funcNonProfilingEntry();
-    uint8_t* to = code.segment().base() + codeRange->funcProfilingEntry();
+    uint8_t* from = code.segment().base() + codeRange->funcNonInstrumentationEntry();
+    uint8_t* to = code.segment().base() + codeRange->funcInstrumentationEntry();
     if (!enabled)
         Swap(from, to);
 
     MOZ_ASSERT(callee == from);
 
 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
     X86Encoding::SetRel32(callerRetAddr, to);
 #elif defined(JS_CODEGEN_ARM)
@@ -849,35 +849,35 @@ wasm::ToggleProfiling(const Code& code, 
 #elif defined(JS_CODEGEN_NONE)
     MOZ_CRASH();
 #else
 # error "Missing architecture"
 #endif
 }
 
 void
-wasm::ToggleProfiling(const Code& code, const CallThunk& callThunk, bool enabled)
+wasm::ToggleInstrumentation(const Code& code, const CallThunk& callThunk, bool enabled)
 {
     const CodeRange& cr = code.metadata().codeRanges[callThunk.u.codeRangeIndex];
-    uint32_t calleeOffset = enabled ? cr.funcProfilingEntry() : cr.funcNonProfilingEntry();
+    uint32_t calleeOffset = enabled ? cr.funcInstrumentationEntry() : cr.funcNonInstrumentationEntry();
     MacroAssembler::repatchThunk(code.segment().base(), callThunk.offset, calleeOffset);
 }
 
 void
-wasm::ToggleProfiling(const Code& code, const CodeRange& codeRange, bool enabled)
+wasm::ToggleInstrumentation(const Code& code, const CodeRange& codeRange, bool enabled)
 {
     if (!codeRange.isFunction())
         return;
 
     uint8_t* codeBase = code.segment().base();
-    uint8_t* profilingEntry     = codeBase + codeRange.funcProfilingEntry();
-    uint8_t* tableProfilingJump = codeBase + codeRange.funcTableProfilingJump();
-    uint8_t* profilingJump      = codeBase + codeRange.funcProfilingJump();
-    uint8_t* profilingEpilogue  = codeBase + codeRange.funcProfilingEpilogue();
+    uint8_t* instrumentationEntry     = codeBase + codeRange.funcInstrumentationEntry();
+    uint8_t* tableInstrumentationJump = codeBase + codeRange.funcTableInstrumentationJump();
+    uint8_t* instrumentationJump      = codeBase + codeRange.funcInstrumentationJump();
+    uint8_t* instrumentationEpilogue  = codeBase + codeRange.funcInstrumentationEpilogue();
 
     if (enabled) {
-        MacroAssembler::patchNopToNearJump(tableProfilingJump, profilingEntry);
-        MacroAssembler::patchNopToNearJump(profilingJump, profilingEpilogue);
+        MacroAssembler::patchNopToNearJump(tableInstrumentationJump, instrumentationEntry);
+        MacroAssembler::patchNopToNearJump(instrumentationJump, instrumentationEpilogue);
+        MacroAssembler::patchNopToNearJump(instrumentationJump, profilingEpilogue);
     } else {
-        MacroAssembler::patchNearJumpToNop(tableProfilingJump);
-        MacroAssembler::patchNearJumpToNop(profilingJump);
+        MacroAssembler::patchNearJumpToNop(tableInstrumentationJump);
+        MacroAssembler::patchNearJumpToNop(instrumentationJump);
     }
 }
--- a/js/src/asmjs/WasmFrameIterator.h
+++ b/js/src/asmjs/WasmFrameIterator.h
@@ -33,17 +33,17 @@ namespace wasm {
 class CallSite;
 class Code;
 class CodeRange;
 class Instance;
 class SigIdDesc;
 struct CallThunk;
 struct FuncOffsets;
 struct Metadata;
-struct ProfilingOffsets;
+struct InstrumentationOffsets;
 
 // Iterates over the frames of a single WasmActivation, called synchronously
 // from C++ in the thread of the asm.js.
 //
 // The one exception is that this iterator may be called from the interrupt
 // callback which may be called asynchronously from asm.js code; in this case,
 // the backtrace may not be correct. That being said, we try our best printing
 // an informative message to the user and at least the name of the innermost
@@ -110,33 +110,33 @@ class ProfilingFrameIterator
     void* stackAddress() const { MOZ_ASSERT(!done()); return stackAddress_; }
     const char* label() const;
 };
 
 // Prologue/epilogue code generation
 
 void
 GenerateExitPrologue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                     ProfilingOffsets* offsets);
+                     InstrumentationOffsets* offsets);
 void
 GenerateExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                     ProfilingOffsets* offsets);
+                     InstrumentationOffsets* offsets);
 void
 GenerateFunctionPrologue(jit::MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
                          FuncOffsets* offsets);
 void
 GenerateFunctionEpilogue(jit::MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets);
 
 // Runtime patching to enable/disable profiling
 
 void
-ToggleProfiling(const Code& code, const CallSite& callSite, bool enabled);
+ToggleInstrumentation(const Code& code, const CallSite& callSite, bool enabled);
 
 void
-ToggleProfiling(const Code& code, const CallThunk& callThunk, bool enabled);
+ToggleInstrumentation(const Code& code, const CallThunk& callThunk, bool enabled);
 
 void
-ToggleProfiling(const Code& code, const CodeRange& codeRange, bool enabled);
+ToggleInstrumentation(const Code& code, const CodeRange& codeRange, bool enabled);
 
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_frame_iterator_h
--- a/js/src/asmjs/WasmGenerator.cpp
+++ b/js/src/asmjs/WasmGenerator.cpp
@@ -278,17 +278,17 @@ ModuleGenerator::convertOutOfRangeBranch
         const CallSiteAndTarget& cs = masm_.callSites()[lastPatchedCallsite_];
         if (!cs.isDefinition())
             continue;
 
         uint32_t callerOffset = cs.returnAddressOffset();
         MOZ_RELEASE_ASSERT(callerOffset < INT32_MAX);
 
         if (funcIsDefined(cs.funcDefIndex())) {
-            uint32_t calleeOffset = funcDefCodeRange(cs.funcDefIndex()).funcNonProfilingEntry();
+            uint32_t calleeOffset = funcDefCodeRange(cs.funcDefIndex()).funcNonInstrumentationEntry();
             MOZ_RELEASE_ASSERT(calleeOffset < INT32_MAX);
 
             if (uint32_t(abs(int32_t(calleeOffset) - int32_t(callerOffset))) < JumpRange()) {
                 masm_.patchCall(callerOffset, calleeOffset);
                 continue;
             }
         }
 
@@ -418,33 +418,33 @@ ModuleGenerator::finishFuncDefExports()
                                                         funcDefIndex,
                                                         funcDefIndexToCodeRange_[funcDefIndex]);
     }
 
     return true;
 }
 
 typedef Vector<Offsets, 0, SystemAllocPolicy> OffsetVector;
-typedef Vector<ProfilingOffsets, 0, SystemAllocPolicy> ProfilingOffsetVector;
+typedef Vector<InstrumentationOffsets, 0, SystemAllocPolicy> InstrumentationOffsetVector;
 
 bool
 ModuleGenerator::finishCodegen()
 {
     uint32_t offsetInWhole = masm_.size();
 
     uint32_t numFuncDefExports = metadata_->funcDefExports.length();
     MOZ_ASSERT(numFuncDefExports == exportedFuncDefs_.count());
 
     // Generate stubs in a separate MacroAssembler since, otherwise, for modules
     // larger than the JumpImmediateRange, even local uses of Label will fail
     // due to the large absolute offsets temporarily stored by Label::bind().
 
     OffsetVector entries;
-    ProfilingOffsetVector interpExits;
-    ProfilingOffsetVector jitExits;
+    InstrumentationOffsetVector interpExits;
+    InstrumentationOffsetVector jitExits;
     EnumeratedArray<JumpTarget, JumpTarget::Limit, Offsets> jumpTargets;
     Offsets interruptExit;
 
     {
         TempAllocator alloc(&lifo_);
         MacroAssembler masm(MacroAssembler::AsmJSToken(), alloc);
 
         if (!entries.resize(numFuncDefExports))
@@ -514,17 +514,17 @@ ModuleGenerator::finishCodegen()
     if (!convertOutOfRangeBranchesToThunks())
         return false;
 
     // Now that all thunks have been generated, patch all the thunks.
 
     for (CallThunk& callThunk : metadata_->callThunks) {
         uint32_t funcDefIndex = callThunk.u.funcDefIndex;
         callThunk.u.codeRangeIndex = funcDefIndexToCodeRange_[funcDefIndex];
-        masm_.patchThunk(callThunk.offset, funcDefCodeRange(funcDefIndex).funcNonProfilingEntry());
+        masm_.patchThunk(callThunk.offset, funcDefCodeRange(funcDefIndex).funcNonInstrumentationEntry());
     }
 
     for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit)) {
         for (uint32_t thunkOffset : jumpThunks_[target])
             masm_.patchThunk(thunkOffset, jumpTargets[target].begin);
     }
 
     // Code-generation is complete!
--- a/js/src/asmjs/WasmInstance.cpp
+++ b/js/src/asmjs/WasmInstance.cpp
@@ -330,17 +330,17 @@ Instance::Instance(JSContext* cx,
         HandleFunction f = funcImports[i];
         const FuncImport& fi = metadata().funcImports[i];
         FuncImportTls& import = funcImportTls(fi);
         if (!isAsmJS() && IsExportedWasmFunction(f)) {
             WasmInstanceObject* calleeInstanceObj = ExportedFunctionToInstanceObject(f);
             const CodeRange& codeRange = calleeInstanceObj->getExportedFunctionCodeRange(f);
             Instance& calleeInstance = calleeInstanceObj->instance();
             import.tls = &calleeInstance.tlsData_;
-            import.code = calleeInstance.codeSegment().base() + codeRange.funcNonProfilingEntry();
+            import.code = calleeInstance.codeSegment().base() + codeRange.funcNonInstrumentationEntry();
             import.baselineScript = nullptr;
             import.obj = calleeInstanceObj;
         } else {
             import.tls = &tlsData_;
             import.code = codeBase() + fi.interpExitCodeOffset();
             import.baselineScript = nullptr;
             import.obj = f;
         }
@@ -764,18 +764,18 @@ Instance::deoptimizeImportExit(uint32_t 
     import.code = codeBase() + fi.interpExitCodeOffset();
     import.baselineScript = nullptr;
 }
 
 static void
 UpdateEntry(const Code& code, bool profilingEnabled, void** entry)
 {
     const CodeRange& codeRange = *code.lookupRange(*entry);
-    void* from = code.segment().base() + codeRange.funcNonProfilingEntry();
-    void* to = code.segment().base() + codeRange.funcProfilingEntry();
+    void* from = code.segment().base() + codeRange.funcNonInstrumentationEntry();
+    void* to = code.segment().base() + codeRange.funcInstrumentationEntry();
 
     if (!profilingEnabled)
         Swap(from, to);
 
     MOZ_ASSERT(*entry == from);
     *entry = to;
 }
 
--- a/js/src/asmjs/WasmModule.cpp
+++ b/js/src/asmjs/WasmModule.cpp
@@ -449,18 +449,18 @@ Module::initSegments(JSContext* cx,
                 Instance& exportInstance = exportInstanceObj->instance();
                 table.set(offset + i, exportInstance.codeBase() + cr.funcTableEntry(), exportInstance);
             } else {
                 MOZ_ASSERT(seg.elemCodeRangeIndices[i] != UINT32_MAX);
 
                 const CodeRange& cr = codeRanges[seg.elemCodeRangeIndices[i]];
                 uint32_t entryOffset = table.isTypedFunction()
                                        ? profilingEnabled
-                                         ? cr.funcProfilingEntry()
-                                         : cr.funcNonProfilingEntry()
+                                         ? cr.funcInstrumentationEntry()
+                                         : cr.funcNonInstrumentationEntry()
                                        : cr.funcTableEntry();
                 table.set(offset + i, codeBase + entryOffset, instance);
             }
         }
     }
 
     if (memoryObj) {
         uint8_t* memoryBase = memoryObj->buffer().dataPointerEither().unwrap(/* memcpy */);
--- a/js/src/asmjs/WasmStubs.cpp
+++ b/js/src/asmjs/WasmStubs.cpp
@@ -428,17 +428,17 @@ FillArgumentArray(MacroAssembler& masm, 
             break;
         }
     }
 }
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into an appropriate callImport C++
 // function, having boxed all the ABI arguments into a homogeneous Value array.
-ProfilingOffsets
+InstrumentationOffsets
 wasm::GenerateInterpExit(MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex)
 {
     const Sig& sig = fi.sig();
 
     masm.setFramePushed(0);
 
     // Argument types for Module::callImport_*:
     static const MIRType typeArray[] = { MIRType::Pointer,   // Instance*
@@ -451,17 +451,17 @@ wasm::GenerateInterpExit(MacroAssembler&
     // At the point of the call, the stack layout shall be (sp grows to the left):
     //   | stack args | padding | Value argv[] | padding | retaddr | caller stack args |
     // The padding between stack args and argv ensures that argv is aligned. The
     // padding between argv and retaddr ensures that sp is aligned.
     unsigned argOffset = AlignBytes(StackArgBytes(invokeArgTypes), sizeof(double));
     unsigned argBytes = Max<size_t>(1, sig.args().length()) * sizeof(Value);
     unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, argOffset + argBytes);
 
-    ProfilingOffsets offsets;
+    InstrumentationOffsets offsets;
     GenerateExitPrologue(masm, framePushed, ExitReason::ImportInterp, &offsets);
 
     // Fill the argument array.
     unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed();
     Register scratch = ABINonArgReturnReg0;
     FillArgumentArray(masm, sig.args(), argOffset, offsetToCallerStackArgs, scratch, ToValue(false));
 
     // Prepare the arguments for the call to Module::callImport_*.
@@ -562,17 +562,17 @@ wasm::GenerateInterpExit(MacroAssembler&
     return offsets;
 }
 
 static const unsigned SavedTlsReg = sizeof(void*);
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into a compatible JIT function,
 // having boxed all the ABI arguments into the JIT stack frame layout.
-ProfilingOffsets
+InstrumentationOffsets
 wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi)
 {
     const Sig& sig = fi.sig();
 
     masm.setFramePushed(0);
 
     // JIT calls use the following stack layout (sp grows to the left):
     //   | retaddr | descriptor | callee | argc | this | arg1..N |
@@ -582,17 +582,17 @@ wasm::GenerateJitExit(MacroAssembler& ma
     // the return address.
     static_assert(AsmJSStackAlignment >= JitStackAlignment, "subsumes");
     unsigned sizeOfRetAddr = sizeof(void*);
     unsigned jitFrameBytes = 3 * sizeof(void*) + (1 + sig.args().length()) * sizeof(Value);
     unsigned totalJitFrameBytes = sizeOfRetAddr + jitFrameBytes + SavedTlsReg;
     unsigned jitFramePushed = StackDecrementForCall(masm, JitStackAlignment, totalJitFrameBytes) -
                               sizeOfRetAddr;
 
-    ProfilingOffsets offsets;
+    InstrumentationOffsets offsets;
     GenerateExitPrologue(masm, jitFramePushed, ExitReason::ImportJit, &offsets);
 
     // 1. Descriptor
     size_t argOffset = 0;
     uint32_t descriptor = MakeFrameDescriptor(jitFramePushed, JitFrame_Entry,
                                               JitFrameLayout::Size());
     masm.storePtr(ImmWord(uintptr_t(descriptor)), Address(masm.getStackPointer(), argOffset));
     argOffset += sizeof(size_t);
--- a/js/src/asmjs/WasmStubs.h
+++ b/js/src/asmjs/WasmStubs.h
@@ -28,20 +28,20 @@ namespace jit { class MacroAssembler; }
 namespace wasm {
 
 class FuncDefExport;
 class FuncImport;
 
 extern Offsets
 GenerateEntry(jit::MacroAssembler& masm, const FuncDefExport& func);
 
-extern ProfilingOffsets
+extern InstrumentationOffsets
 GenerateInterpExit(jit::MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex);
 
-extern ProfilingOffsets
+extern InstrumentationOffsets
 GenerateJitExit(jit::MacroAssembler& masm, const FuncImport& fi);
 
 extern Offsets
 GenerateJumpTarget(jit::MacroAssembler& masm, JumpTarget target);
 
 extern Offsets
 GenerateInterruptStub(jit::MacroAssembler& masm);
 
--- a/js/src/asmjs/WasmTypes.h
+++ b/js/src/asmjs/WasmTypes.h
@@ -624,17 +624,17 @@ struct SigWithId : Sig
     void operator=(Sig&& rhs) { Sig::operator=(Move(rhs)); }
 
     WASM_DECLARE_SERIALIZABLE(SigWithId)
 };
 
 typedef Vector<SigWithId, 0, SystemAllocPolicy> SigWithIdVector;
 typedef Vector<const SigWithId*, 0, SystemAllocPolicy> SigWithIdPtrVector;
 
-// The (,Profiling,Func)Offsets classes are used to record the offsets of
+// The (,Instrumentation,Func)Offsets classes are used to record the offsets of
 // different key points in a CodeRange during compilation.
 
 struct Offsets
 {
     explicit Offsets(uint32_t begin = 0, uint32_t end = 0)
       : begin(begin), end(end)
     {}
 
@@ -644,72 +644,72 @@ struct Offsets
     uint32_t end;
 
     void offsetBy(uint32_t offset) {
         begin += offset;
         end += offset;
     }
 };
 
-struct ProfilingOffsets : Offsets
+struct InstrumentationOffsets : Offsets
 {
-    MOZ_IMPLICIT ProfilingOffsets(uint32_t profilingReturn = 0)
-      : Offsets(), profilingReturn(profilingReturn)
+    MOZ_IMPLICIT InstrumentationOffsets(uint32_t instrumentationReturn = 0)
+      : Offsets(), instrumentationReturn(instrumentationReturn)
     {}
 
-    // For CodeRanges with ProfilingOffsets, 'begin' is the offset of the
-    // profiling entry.
-    uint32_t profilingEntry() const { return begin; }
+    // For CodeRanges with InstrumentationOffsets, 'begin' is the offset of the
+    // instrumentation entry.
+    uint32_t instrumentationEntry() const { return begin; }
 
-    // The profiling return is the offset of the return instruction, which
+    // The instrumentation return is the offset of the return instruction, which
     // precedes the 'end' by a variable number of instructions due to
     // out-of-line codegen.
-    uint32_t profilingReturn;
+    uint32_t instrumentationReturn;
 
     void offsetBy(uint32_t offset) {
         Offsets::offsetBy(offset);
-        profilingReturn += offset;
+        instrumentationReturn += offset;
     }
 };
 
-struct FuncOffsets : ProfilingOffsets
+struct FuncOffsets : InstrumentationOffsets
 {
     MOZ_IMPLICIT FuncOffsets()
-      : ProfilingOffsets(),
+      : InstrumentationOffsets(),
         tableEntry(0),
-        tableProfilingJump(0),
-        nonProfilingEntry(0),
-        profilingJump(0),
-        profilingEpilogue(0)
+        tableInstrumentationJump(0),
+        nonInstrumentationEntry(0),
+        instrumentationJump(0),
+        instrumentationEpilogue(0)
     {}
 
     // Function CodeRanges have a table entry which takes an extra signature
     // argument which is checked against the callee's signature before falling
-    // through to the normal prologue. When profiling is enabled, a nop on the
-    // fallthrough is patched to instead jump to the profiling epilogue.
+    // through to the normal prologue. When instrumentation is enabled, a nop on the
+    // fallthrough is patched to instead jump to the instrumentation prologue.
     uint32_t tableEntry;
-    uint32_t tableProfilingJump;
+    uint32_t tableInstrumentationJump;
 
-    // Function CodeRanges have an additional non-profiling entry that comes
-    // after the profiling entry and a non-profiling epilogue that comes before
-    // the profiling epilogue.
-    uint32_t nonProfilingEntry;
+    // Function CodeRanges have an additional non-instrumentation entry that comes
+    // after the instrumentation entry and a non-instrumentation epilogue that comes before
+    // the instrumentation epilogue.
+    uint32_t nonInstrumentationEntry;
 
-    // When profiling is enabled, the 'nop' at offset 'profilingJump' is
-    // overwritten to be a jump to 'profilingEpilogue'.
-    uint32_t profilingJump;
-    uint32_t profilingEpilogue;
+    // When instrumentation is enabled, the 'nop' at offset 'instrumentationJump' is
+    // overwritten to be a jump to 'instrumentationEpilogue'.
+    uint32_t instrumentationJump;
+    uint32_t instrumentationEpilogue;
 
     void offsetBy(uint32_t offset) {
-        ProfilingOffsets::offsetBy(offset);
+        InstrumentationOffsets::offsetBy(offset);
         tableEntry += offset;
-        tableProfilingJump += offset;
-        nonProfilingEntry += offset;
-        profilingJump += offset;
-        profilingEpilogue += offset;
+        tableInstrumentationJump += offset;
+        nonInstrumentationEntry += offset;
+        instrumentationJump += offset;
+        instrumentationEpilogue += offset;
     }
 };
 
 // While the frame-pointer chain allows the stack to be unwound without
 // metadata, Error.stack still needs to know the line/column of every call in
 // the chain. A CallSiteDesc describes a single callsite to which CallSite adds
 // the metadata necessary to walk up to the next frame. Lastly CallSiteAndTarget
 // adds the function index of the callee.