Backed out changeset a9a241b0c121 (bug 1207827) for Windows SM bustage CLOSED TREE
author Wes Kocher <wkocher@mozilla.com>
Tue, 20 Oct 2015 13:53:27 -0700
changeset 303835 73f8bb57540110d2b80fa3ccc4dbf936fa394e39
parent 303834 c7cedbb6e3fb4662698c587bfad980d1113485ff
child 303836 f7e6d2a69f8c3b9ec43336aeb42e530690163a92
push id 1001
push user raliiev@mozilla.com
push date Mon, 18 Jan 2016 19:06:03 +0000
treeherder mozilla-release@8b89261f3ac4 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
bugs 1207827
milestone 44.0a1
backs out a9a241b0c121d47f4fe71a9cdd3feeb7cc552dee
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Backed out changeset a9a241b0c121 (bug 1207827) for Windows SM bustage CLOSED TREE
js/src/asmjs/AsmJSModule.cpp
js/src/asmjs/AsmJSModule.h
js/src/asmjs/AsmJSValidate.cpp
js/src/irregexp/NativeRegExpMacroAssembler.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineDebugModeOSR.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonCaches.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitFrames.h
js/src/jit/LIR.h
js/src/jit/MacroAssembler.cpp
js/src/jit/PerfSpewer.cpp
js/src/jit/SharedIC.h
js/src/jit/arm/Assembler-arm.h
js/src/jit/arm/Trampoline-arm.cpp
js/src/jit/arm64/Assembler-arm64.h
js/src/jit/arm64/Trampoline-arm64.cpp
js/src/jit/mips-shared/Assembler-mips-shared.cpp
js/src/jit/mips-shared/Assembler-mips-shared.h
js/src/jit/mips32/Trampoline-mips32.cpp
js/src/jit/none/MacroAssembler-none.h
js/src/jit/shared/Assembler-shared.h
js/src/jit/shared/BaselineCompiler-shared.h
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/x64/Trampoline-x64.cpp
js/src/jit/x86-shared/Assembler-x86-shared.h
--- a/js/src/asmjs/AsmJSModule.cpp
+++ b/js/src/asmjs/AsmJSModule.cpp
@@ -309,46 +309,71 @@ AsmJSModule::finish(ExclusiveContext* cx
     masm.executableCopy(code_);
 
     // c.f. JitCode::copyFrom
     MOZ_ASSERT(masm.jumpRelocationTableBytes() == 0);
     MOZ_ASSERT(masm.dataRelocationTableBytes() == 0);
     MOZ_ASSERT(masm.preBarrierTableBytes() == 0);
     MOZ_ASSERT(!masm.hasSelfReference());
 
-    // Copy over metadata.
-    staticLinkData_.interruptExitOffset = interruptLabel.offset();
-    staticLinkData_.outOfBoundsExitOffset = outOfBoundsLabel.offset();
+    // Copy over metadata, making sure to update all offsets on ARM.
+
+    staticLinkData_.interruptExitOffset = masm.actualOffset(interruptLabel.offset());
+    staticLinkData_.outOfBoundsExitOffset = masm.actualOffset(outOfBoundsLabel.offset());
 
     // Heap-access metadata used for link-time patching and fault-handling.
     heapAccesses_ = masm.extractAsmJSHeapAccesses();
 
     // Call-site metadata used for stack unwinding.
     callSites_ = masm.extractCallSites();
 
+#if defined(JS_CODEGEN_ARM)
+    // ARM requires the offsets to be updated.
+    pod.functionBytes_ = masm.actualOffset(pod.functionBytes_);
+    for (size_t i = 0; i < heapAccesses_.length(); i++) {
+        AsmJSHeapAccess& a = heapAccesses_[i];
+        a.setInsnOffset(masm.actualOffset(a.insnOffset()));
+    }
+    for (unsigned i = 0; i < numExportedFunctions(); i++) {
+        if (!exportedFunction(i).isChangeHeap())
+            exportedFunction(i).updateCodeOffset(masm);
+    }
+    for (unsigned i = 0; i < numExits(); i++)
+        exit(i).updateOffsets(masm);
+    for (size_t i = 0; i < callSites_.length(); i++) {
+        CallSite& c = callSites_[i];
+        c.setReturnAddressOffset(masm.actualOffset(c.returnAddressOffset()));
+    }
+    for (size_t i = 0; i < codeRanges_.length(); i++) {
+        codeRanges_[i].updateOffsets(masm);
+        MOZ_ASSERT_IF(i > 0, codeRanges_[i - 1].end() <= codeRanges_[i].begin());
+    }
+    for (size_t i = 0; i < builtinThunkOffsets_.length(); i++)
+        builtinThunkOffsets_[i] = masm.actualOffset(builtinThunkOffsets_[i]);
+#endif
     MOZ_ASSERT(pod.functionBytes_ % AsmJSPageSize == 0);
 
     // Absolute link metadata: absolute addresses that refer to some fixed
     // address in the address space.
     AbsoluteLinkArray& absoluteLinks = staticLinkData_.absoluteLinks;
     for (size_t i = 0; i < masm.numAsmJSAbsoluteLinks(); i++) {
         AsmJSAbsoluteLink src = masm.asmJSAbsoluteLink(i);
-        if (!absoluteLinks[src.target].append(src.patchAt.offset()))
+        if (!absoluteLinks[src.target].append(masm.actualOffset(src.patchAt.offset())))
             return false;
     }
 
     // Relative link metadata: absolute addresses that refer to another point within
     // the asm.js module.
 
     // CodeLabels are used for switch cases and loads from floating-point /
     // SIMD values in the constant pool.
     for (size_t i = 0; i < masm.numCodeLabels(); i++) {
         CodeLabel src = masm.codeLabel(i);
         int32_t labelOffset = src.dest()->offset();
-        int32_t targetOffset = src.src()->offset();
+        int32_t targetOffset = masm.actualOffset(src.src()->offset());
         // The patched uses of a label embed a linked list where the
         // to-be-patched immediate is the offset of the next to-be-patched
         // instruction.
         while (labelOffset != LabelBase::INVALID_OFFSET) {
             size_t patchAtOffset = masm.labelOffsetToPatchOffset(labelOffset);
             RelativeLink link(RelativeLink::CodeLabel);
             link.patchAtOffset = patchAtOffset;
             link.targetOffset = targetOffset;
@@ -390,16 +415,25 @@ AsmJSModule::finish(ExclusiveContext* cx
     // Global data accesses on x64 use rip-relative addressing and thus do
     // not need patching after deserialization.
     for (size_t i = 0; i < masm.numAsmJSGlobalAccesses(); i++) {
         AsmJSGlobalAccess a = masm.asmJSGlobalAccess(i);
         masm.patchAsmJSGlobalAccess(a.patchAt, code_, globalData(), a.globalDataOffset);
     }
 #endif
 
+#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
+    // Fix up the code offsets.
+    for (size_t i = 0; i < profiledFunctions_.length(); i++) {
+        ProfiledFunction& pf = profiledFunctions_[i];
+        pf.pod.startCodeOffset = masm.actualOffset(pf.pod.startCodeOffset);
+        pf.pod.endCodeOffset = masm.actualOffset(pf.pod.endCodeOffset);
+    }
+#endif
+
     return true;
 }
 
 void
 AsmJSModule::setAutoFlushICacheRange()
 {
     MOZ_ASSERT(isFinished());
     AutoFlushICache::setRange(uintptr_t(code_), pod.codeBytes_);
@@ -1398,16 +1432,39 @@ AsmJSModule::CodeRange::CodeRange(AsmJSE
     PodZero(&u);  // zero padding for Valgrind
     u.kind_ = Thunk;
     u.thunk.target_ = builtin;
 
     MOZ_ASSERT(begin_ < profilingReturn_);
     MOZ_ASSERT(profilingReturn_ < end_);
 }
 
+void
+AsmJSModule::CodeRange::updateOffsets(jit::MacroAssembler& masm)
+{
+    uint32_t entryBefore = 0;
+    uint32_t profilingJumpBefore = 0;
+    uint32_t profilingEpilogueBefore = 0;
+    if (isFunction()) {
+        entryBefore = entry();
+        profilingJumpBefore = profilingJump();
+        profilingEpilogueBefore = profilingEpilogue();
+    }
+
+    begin_ = masm.actualOffset(begin_);
+    profilingReturn_ = masm.actualOffset(profilingReturn_);
+    end_ = masm.actualOffset(end_);
+
+    if (isFunction()) {
+        setDeltas(masm.actualOffset(entryBefore),
+                  masm.actualOffset(profilingJumpBefore),
+                  masm.actualOffset(profilingEpilogueBefore));
+    }
+}
+
 #if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
 size_t
 AsmJSModule::ProfiledFunction::serializedSize() const
 {
     return SerializedNameSize(name) +
            sizeof(pod);
 }
 
--- a/js/src/asmjs/AsmJSModule.h
+++ b/js/src/asmjs/AsmJSModule.h
@@ -393,16 +393,20 @@ class AsmJSModule
         void initInterpOffset(unsigned off) {
             MOZ_ASSERT(!interpCodeOffset_);
             interpCodeOffset_ = off;
         }
         void initJitOffset(unsigned off) {
             MOZ_ASSERT(!jitCodeOffset_);
             jitCodeOffset_ = off;
         }
+        void updateOffsets(jit::MacroAssembler& masm) {
+            interpCodeOffset_ = masm.actualOffset(interpCodeOffset_);
+            jitCodeOffset_ = masm.actualOffset(jitCodeOffset_);
+        }
 
         size_t serializedSize() const;
         uint8_t* serialize(uint8_t* cursor) const;
         const uint8_t* deserialize(ExclusiveContext* cx, const uint8_t* cursor);
         bool clone(ExclusiveContext* cx, Exit* out) const;
     };
 
     struct EntryArg {
@@ -507,16 +511,20 @@ class AsmJSModule
             return pod.isChangeHeap_;
         }
 
         void initCodeOffset(unsigned off) {
             MOZ_ASSERT(!isChangeHeap());
             MOZ_ASSERT(pod.codeOffset_ == UINT32_MAX);
             pod.codeOffset_ = off;
         }
+        void updateCodeOffset(jit::MacroAssembler& masm) {
+            MOZ_ASSERT(!isChangeHeap());
+            pod.codeOffset_ = masm.actualOffset(pod.codeOffset_);
+        }
 
         unsigned numArgs() const {
             MOZ_ASSERT(!isChangeHeap());
             return argCoercions_.length();
         }
         AsmJSCoercion argCoercion(unsigned i) const {
             MOZ_ASSERT(!isChangeHeap());
             return argCoercions_[i];
@@ -561,16 +569,17 @@ class AsmJSModule
       public:
         enum Kind { Function, Entry, JitFFI, SlowFFI, Interrupt, Thunk, Inline };
 
         CodeRange() {}
         CodeRange(uint32_t nameIndex, uint32_t lineNumber, const AsmJSFunctionLabels& l);
         CodeRange(Kind kind, uint32_t begin, uint32_t end);
         CodeRange(Kind kind, uint32_t begin, uint32_t profilingReturn, uint32_t end);
         CodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin, uint32_t pret, uint32_t end);
+        void updateOffsets(jit::MacroAssembler& masm);
 
         Kind kind() const { return Kind(u.kind_); }
         bool isFunction() const { return kind() == Function; }
         bool isEntry() const { return kind() == Entry; }
         bool isFFI() const { return kind() == JitFFI || kind() == SlowFFI; }
         bool isInterrupt() const { return kind() == Interrupt; }
         bool isThunk() const { return kind() == Thunk; }
 
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -1493,17 +1493,17 @@ class MOZ_STACK_CLASS ModuleValidator
         // RelativeLinks that will be patched by AsmJSModule::staticallyLink.
         for (unsigned tableIndex = 0; tableIndex < numFuncPtrTables(); tableIndex++) {
             ModuleValidator::FuncPtrTable& table = funcPtrTable(tableIndex);
             unsigned tableBaseOffset = module_->offsetOfGlobalData() + table.globalDataOffset();
             for (unsigned elemIndex = 0; elemIndex < table.numElems(); elemIndex++) {
                 AsmJSModule::RelativeLink link(AsmJSModule::RelativeLink::RawPointer);
                 link.patchAtOffset = tableBaseOffset + elemIndex * sizeof(uint8_t*);
                 Label* entry = functionEntry(table.elem(elemIndex).funcIndex());
-                link.targetOffset = entry->offset();
+                link.targetOffset = masm().actualOffset(entry->offset());
                 if (!module_->addRelativeLink(link))
                     return false;
             }
         }
 
         *module = module_.forget();
         return true;
     }
--- a/js/src/irregexp/NativeRegExpMacroAssembler.cpp
+++ b/js/src/irregexp/NativeRegExpMacroAssembler.cpp
@@ -465,18 +465,20 @@ NativeRegExpMacroAssembler::GenerateCode
     writePerfSpewerJitCodeProfile(code, "RegExp");
 #endif
 
     AutoWritableJitCode awjc(code);
 
     for (size_t i = 0; i < labelPatches.length(); i++) {
         LabelPatch& v = labelPatches[i];
         MOZ_ASSERT(!v.label);
+        v.patchOffset.fixup(&masm);
+        uintptr_t offset = masm.actualOffset(v.labelOffset);
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, v.patchOffset),
-                                           ImmPtr(code->raw() + v.labelOffset),
+                                           ImmPtr(code->raw() + offset),
                                            ImmPtr(0));
     }
 
     JitSpew(JitSpew_Codegen, "Created RegExp (raw %p length %d)",
             (void*) code->raw(), (int) masm.bytesNeeded());
 
     RegExpCode res;
     res.jitCode = code;
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -150,16 +150,17 @@ BaselineCompiler::compile()
     // Encode the pc mapping table. See PCMappingIndexEntry for
     // more information.
     Vector<PCMappingIndexEntry> pcMappingIndexEntries(cx);
     CompactBufferWriter pcEntries;
     uint32_t previousOffset = 0;
 
     for (size_t i = 0; i < pcMappingEntries_.length(); i++) {
         PCMappingEntry& entry = pcMappingEntries_[i];
+        entry.fixupNativeOffset(masm);
 
         if (entry.addIndexEntry) {
             PCMappingIndexEntry indexEntry;
             indexEntry.pcOffset = entry.pcOffset;
             indexEntry.nativeOffset = entry.nativeOffset;
             indexEntry.bufferOffset = pcEntries.length();
             if (!pcMappingIndexEntries.append(indexEntry)) {
                 ReportOutOfMemory(cx);
@@ -184,16 +185,26 @@ BaselineCompiler::compile()
         previousOffset = entry.nativeOffset;
     }
 
     if (pcEntries.oom()) {
         ReportOutOfMemory(cx);
         return Method_Error;
     }
 
+    prologueOffset_.fixup(&masm);
+    epilogueOffset_.fixup(&masm);
+    profilerEnterFrameToggleOffset_.fixup(&masm);
+    profilerExitFrameToggleOffset_.fixup(&masm);
+#ifdef JS_TRACE_LOGGING
+    traceLoggerEnterToggleOffset_.fixup(&masm);
+    traceLoggerExitToggleOffset_.fixup(&masm);
+#endif
+    postDebugPrologueOffset_.fixup(&masm);
+
     // Note: There is an extra entry in the bytecode type map for the search hint, see below.
     size_t bytecodeTypeMapEntries = script->nTypeSets() + 1;
 
     mozilla::UniquePtr<BaselineScript, JS::DeletePolicy<BaselineScript> > baselineScript(
         BaselineScript::New(script, prologueOffset_.offset(),
                             epilogueOffset_.offset(),
                             profilerEnterFrameToggleOffset_.offset(),
                             profilerExitFrameToggleOffset_.offset(),
@@ -242,16 +253,17 @@ BaselineCompiler::compile()
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
         baselineScript->toggleProfilerInstrumentation(true);
 
     AutoWritableJitCode awjc(code);
 
     // Patch IC loads using IC entries.
     for (size_t i = 0; i < icLoadLabels_.length(); i++) {
         CodeOffsetLabel label = icLoadLabels_[i].label;
+        label.fixup(&masm);
         size_t icEntry = icLoadLabels_[i].icEntry;
         ICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
                                            ImmPtr(entryAddr),
                                            ImmPtr((void*)-1));
     }
 
     if (modifiesArguments_)
--- a/js/src/jit/BaselineDebugModeOSR.cpp
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -1163,16 +1163,17 @@ JitRuntime::generateBaselineDebugModeOSR
     masm.bind(&end);
 
     Linker linker(masm);
     AutoFlushICache afc("BaselineDebugModeOSRHandler");
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
     if (!code)
         return nullptr;
 
+    noFrameRegPopOffset.fixup(&masm);
     *noFrameRegPopOffsetOut = noFrameRegPopOffset.offset();
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "BaselineDebugModeOSRHandler");
 #endif
 
     return code;
 }
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -6255,17 +6255,19 @@ ICGetProp_Fallback::Compiler::generateSt
     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
 
     return true;
 }
 
 void
 ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
-    cx->compartment()->jitCompartment()->initBaselineGetPropReturnAddr(code->raw() + returnOffset_);
+    CodeOffsetLabel offset(returnOffset_);
+    offset.fixup(&masm);
+    cx->compartment()->jitCompartment()->initBaselineGetPropReturnAddr(code->raw() + offset.offset());
 }
 
 bool
 ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler& masm)
 {
     MOZ_ASSERT(engine_ == Engine::Baseline);
 
     Label failure;
@@ -7752,17 +7754,19 @@ ICSetProp_Fallback::Compiler::generateSt
     EmitReturnFromIC(masm);
 
     return true;
 }
 
 void
 ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
-    cx->compartment()->jitCompartment()->initBaselineSetPropReturnAddr(code->raw() + returnOffset_);
+    CodeOffsetLabel offset(returnOffset_);
+    offset.fixup(&masm);
+    cx->compartment()->jitCompartment()->initBaselineSetPropReturnAddr(code->raw() + offset.offset());
 }
 
 static void
 GuardGroupAndShapeMaybeUnboxedExpando(MacroAssembler& masm, JSObject* obj,
                                       Register object, Register scratch,
                                       size_t offsetOfGroup, size_t offsetOfShape, Label* failure)
 {
     // Guard against object group.
@@ -9528,17 +9532,19 @@ ICCall_Fallback::Compiler::generateStubC
 }
 
 void
 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
     if (MOZ_UNLIKELY(isSpread_))
         return;
 
-    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + returnOffset_,
+    CodeOffsetLabel offset(returnOffset_);
+    offset.fixup(&masm);
+    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset(),
                                                                     isConstructing_);
 }
 
 typedef bool (*CreateThisFn)(JSContext* cx, HandleObject callee, HandleObject newTarget,
                              MutableHandleValue rval);
 static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
 
 bool
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -709,16 +709,17 @@ BaselineScript::copyYieldEntries(JSScrip
 void
 BaselineScript::copyICEntries(JSScript* script, const ICEntry* entries, MacroAssembler& masm)
 {
     // Fix up the return offset in the IC entries and copy them in.
     // Also write out the IC entry ptrs in any fallback stubs that were added.
     for (uint32_t i = 0; i < numICEntries(); i++) {
         ICEntry& realEntry = icEntry(i);
         realEntry = entries[i];
+        realEntry.fixupReturnOffset(masm);
 
         if (!realEntry.hasStub()) {
             // VM call without any stubs.
             continue;
         }
 
         // If the attached stub is a fallback stub, then fix it up with
         // a pointer to the (now available) realEntry.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -8135,52 +8135,58 @@ CodeGenerator::link(JSContext* cx, Compi
 
     script->setIonScript(cx, ionScript);
 
     // Adopt fallback shared stubs from the compiler into the ion script.
     ionScript->adoptFallbackStubs(&stubSpace_);
 
     {
         AutoWritableJitCode awjc(code);
+        invalidateEpilogueData_.fixup(&masm);
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
                                            ImmPtr(ionScript),
                                            ImmPtr((void*)-1));
 
         for (size_t i = 0; i < ionScriptLabels_.length(); i++) {
+            ionScriptLabels_[i].fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, ionScriptLabels_[i]),
                                                ImmPtr(ionScript),
                                                ImmPtr((void*)-1));
         }
 
 #ifdef JS_TRACE_LOGGING
         TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
         for (uint32_t i = 0; i < patchableTraceLoggers_.length(); i++) {
+            patchableTraceLoggers_[i].fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTraceLoggers_[i]),
                                                ImmPtr(logger),
                                                ImmPtr(nullptr));
         }
 
         if (patchableTLScripts_.length() > 0) {
             MOZ_ASSERT(TraceLogTextIdEnabled(TraceLogger_Scripts));
             TraceLoggerEvent event(logger, TraceLogger_Scripts, script);
             ionScript->setTraceLoggerEvent(event);
             uint32_t textId = event.payload()->textId();
             for (uint32_t i = 0; i < patchableTLScripts_.length(); i++) {
+                patchableTLScripts_[i].fixup(&masm);
                 Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTLScripts_[i]),
                                                    ImmPtr((void*) uintptr_t(textId)),
                                                    ImmPtr((void*)0));
             }
         }
 #endif
         // Patch shared stub IC loads using IC entries
         for (size_t i = 0; i < sharedStubs_.length(); i++) {
             CodeOffsetLabel label = sharedStubs_[i].label;
+            label.fixup(&masm);
 
             IonICEntry& entry = ionScript->sharedStubList()[i];
             entry = sharedStubs_[i].entry;
+            entry.fixupReturnOffset(masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
                                                ImmPtr(&entry),
                                                ImmPtr((void*)-1));
 
             MOZ_ASSERT(entry.hasStub());
             MOZ_ASSERT(entry.firstStub()->isFallback());
 
             entry.firstStub()->toFallbackStub()->fixupICEntry(&entry);
@@ -8194,17 +8200,18 @@ CodeGenerator::link(JSContext* cx, Compi
     }
 
     JitSpew(JitSpew_Codegen, "Created IonScript %p (raw %p)",
             (void*) ionScript, (void*) code->raw());
 
     ionScript->setInvalidationEpilogueDataOffset(invalidateEpilogueData_.offset());
     ionScript->setOsrPc(gen->info().osrPc());
     ionScript->setOsrEntryOffset(getOsrEntryOffset());
-    ionScript->setInvalidationEpilogueOffset(invalidate_.offset());
+    ptrdiff_t real_invalidate = masm.actualOffset(invalidate_.offset());
+    ionScript->setInvalidationEpilogueOffset(real_invalidate);
 
     ionScript->setDeoptTable(deoptTable_);
 
 #if defined(JS_ION_PERF)
     if (PerfEnabled())
         perfSpewer_.writeProfile(script, code, masm);
 #endif
 
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1111,20 +1111,24 @@ IonScript::copyPatchableBackedges(JSCont
 {
     JitRuntime* jrt = cx->runtime()->jitRuntime();
     JitRuntime::AutoMutateBackedges amb(jrt);
 
     for (size_t i = 0; i < backedgeEntries_; i++) {
         PatchableBackedgeInfo& info = backedges[i];
         PatchableBackedge* patchableBackedge = &backedgeList()[i];
 
+        // Convert to actual offsets for the benefit of the ARM backend.
         info.backedge.fixup(&masm);
+        uint32_t loopHeaderOffset = masm.actualOffset(info.loopHeader->offset());
+        uint32_t interruptCheckOffset = masm.actualOffset(info.interruptCheck->offset());
+
         CodeLocationJump backedge(code, info.backedge);
-        CodeLocationLabel loopHeader(code, CodeOffsetLabel(info.loopHeader->offset()));
-        CodeLocationLabel interruptCheck(code, CodeOffsetLabel(info.interruptCheck->offset()));
+        CodeLocationLabel loopHeader(code, CodeOffsetLabel(loopHeaderOffset));
+        CodeLocationLabel interruptCheck(code, CodeOffsetLabel(interruptCheckOffset));
         new(patchableBackedge) PatchableBackedge(backedge, loopHeader, interruptCheck);
 
         // Point the backedge to either of its possible targets, according to
         // whether an interrupt is currently desired, matching the targets
         // established by ensureIonCodeAccessible() above. We don't handle the
         // interrupt immediately as the interrupt lock is held here.
         if (cx->runtime()->hasPendingInterrupt())
             PatchBackedge(backedge, interruptCheck, JitRuntime::BackedgeInterruptCheck);
@@ -1138,22 +1142,26 @@ IonScript::copyPatchableBackedges(JSCont
 void
 IonScript::copySafepointIndices(const SafepointIndex* si, MacroAssembler& masm)
 {
     // Jumps in the caches reflect the offset of those jumps in the compiled
     // code, not the absolute positions of the jumps. Update according to the
     // final code address now.
     SafepointIndex* table = safepointIndices();
     memcpy(table, si, safepointIndexEntries_ * sizeof(SafepointIndex));
+    for (size_t i = 0; i < safepointIndexEntries_; i++)
+        table[i].adjustDisplacement(masm.actualOffset(table[i].displacement()));
 }
 
 void
 IonScript::copyOsiIndices(const OsiIndex* oi, MacroAssembler& masm)
 {
     memcpy(osiIndices(), oi, osiIndexEntries_ * sizeof(OsiIndex));
+    for (unsigned i = 0; i < osiIndexEntries_; i++)
+        osiIndices()[i].fixUpOffset(masm);
 }
 
 void
 IonScript::copyRuntimeData(const uint8_t* data)
 {
     memcpy(runtimeData(), data, runtimeSize());
 }
 
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -46,17 +46,17 @@ CodeLocationJump::repoint(JitCode* code,
     size_t new_off = (size_t)raw_;
 #ifdef JS_SMALL_BRANCH
     size_t jumpTableEntryOffset = reinterpret_cast<size_t>(jumpTableEntry_);
 #endif
     if (masm != nullptr) {
 #ifdef JS_CODEGEN_X64
         MOZ_ASSERT((uint64_t)raw_ <= UINT32_MAX);
 #endif
-        new_off = (uintptr_t)raw_;
+        new_off = masm->actualOffset((uintptr_t)raw_);
 #ifdef JS_SMALL_BRANCH
         jumpTableEntryOffset = masm->actualIndex(jumpTableEntryOffset);
 #endif
     }
     raw_ = code->raw() + new_off;
 #ifdef JS_SMALL_BRANCH
     jumpTableEntry_ = Assembler::PatchableJumpAddress(code, (size_t) jumpTableEntryOffset);
 #endif
@@ -67,27 +67,34 @@ void
 CodeLocationLabel::repoint(JitCode* code, MacroAssembler* masm)
 {
      MOZ_ASSERT(state_ == Relative);
      size_t new_off = (size_t)raw_;
      if (masm != nullptr) {
 #ifdef JS_CODEGEN_X64
         MOZ_ASSERT((uint64_t)raw_ <= UINT32_MAX);
 #endif
-        new_off = (uintptr_t)raw_;
+        new_off = masm->actualOffset((uintptr_t)raw_);
      }
      MOZ_ASSERT(new_off < code->instructionsSize());
 
      raw_ = code->raw() + new_off;
      setAbsolute();
 }
 
 void
+CodeOffsetLabel::fixup(MacroAssembler* masm)
+{
+     offset_ = masm->actualOffset(offset_);
+}
+
+void
 CodeOffsetJump::fixup(MacroAssembler* masm)
 {
+     offset_ = masm->actualOffset(offset_);
 #ifdef JS_SMALL_BRANCH
      jumpTableIndex_ = masm->actualIndex(jumpTableIndex_);
 #endif
 }
 
 const char*
 IonCache::CacheName(IonCache::Kind kind)
 {
@@ -239,19 +246,20 @@ class IonCache::StubAttacher
 
     void patchRejoinJump(MacroAssembler& masm, JitCode* code) {
         rejoinOffset_.fixup(&masm);
         CodeLocationJump rejoinJump(code, rejoinOffset_);
         AutoWritableJitCode awjc(code);
         PatchJump(rejoinJump, rejoinLabel_);
     }
 
-    void patchStubCodePointer(JitCode* code) {
+    void patchStubCodePointer(MacroAssembler& masm, JitCode* code) {
         if (hasStubCodePatchOffset_) {
             AutoWritableJitCode awjc(code);
+            stubCodePatchOffset_.fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
                                                ImmPtr(code), STUB_ADDR);
         }
     }
 
     void patchNextStubJump(MacroAssembler& masm, JitCode* code) {
         // Patch the previous nextStubJump of the last stub, or the jump from the
         // codeGen, to jump into the newly allocated code.
@@ -292,17 +300,17 @@ IonCache::attachStub(MacroAssembler& mas
     incrementStubCount();
 
     // Update the success path to continue after the IC initial jump.
     attacher.patchRejoinJump(masm, code);
 
     // Replace the STUB_ADDR constant by the address of the generated stub, such
     // as it can be kept alive even if the cache is flushed (see
     // MarkJitExitFrame).
-    attacher.patchStubCodePointer(code);
+    attacher.patchStubCodePointer(masm, code);
 
     // Update the failure path.
     attacher.patchNextStubJump(masm, code);
 }
 
 bool
 IonCache::linkAndAttachStub(JSContext* cx, MacroAssembler& masm, StubAttacher& attacher,
                             IonScript* ion, const char* attachKind,
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1678,16 +1678,22 @@ GetPcScript(JSContext* cx, JSScript** sc
     if (pcRes)
         *pcRes = pc;
 
     // Add entry to cache.
     if (retAddr && rt->ionPcScriptCache)
         rt->ionPcScriptCache->add(hash, retAddr, pc, *scriptRes);
 }
 
+void
+OsiIndex::fixUpOffset(MacroAssembler& masm)
+{
+    callPointDisplacement_ = masm.actualOffset(callPointDisplacement_);
+}
+
 uint32_t
 OsiIndex::returnPointDisplacement() const
 {
     // In general, pointer arithmetic on code is bad, but in this case,
     // getting the return address from a call instruction, stepping over pools
     // would be wrong.
     return callPointDisplacement_ + Assembler::PatchWrite_NearCallSize();
 }
--- a/js/src/jit/JitFrames.h
+++ b/js/src/jit/JitFrames.h
@@ -162,16 +162,17 @@ class OsiIndex
 
     uint32_t returnPointDisplacement() const;
     uint32_t callPointDisplacement() const {
         return callPointDisplacement_;
     }
     uint32_t snapshotOffset() const {
         return snapshotOffset_;
     }
+    void fixUpOffset(MacroAssembler& masm);
 };
 
 // The layout of an Ion frame on the C stack is roughly:
 //      argN     _
 //      ...       \ - These are jsvals
 //      arg0      /
 //   -3 this    _/
 //   -2 callee
--- a/js/src/jit/LIR.h
+++ b/js/src/jit/LIR.h
@@ -1590,16 +1590,19 @@ class LSafepoint : public TempObject
     }
     uint32_t osiCallPointOffset() const {
         return osiCallPointOffset_;
     }
     void setOsiCallPointOffset(uint32_t osiCallPointOffset) {
         MOZ_ASSERT(!osiCallPointOffset_);
         osiCallPointOffset_ = osiCallPointOffset;
     }
+    void fixupOffset(MacroAssembler* masm) {
+        osiCallPointOffset_ = masm->actualOffset(osiCallPointOffset_);
+    }
 };
 
 class LInstruction::InputIterator
 {
   private:
     LInstruction& ins_;
     size_t idx_;
     bool snapshot_;
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -2187,16 +2187,17 @@ MacroAssembler::AutoProfilerCallInstrume
     masm.pop(reg);
 }
 
 void
 MacroAssembler::linkProfilerCallSites(JitCode* code)
 {
     for (size_t i = 0; i < profilerCallSites_.length(); i++) {
         CodeOffsetLabel offset = profilerCallSites_[i];
+        offset.fixup(this);
         CodeLocationLabel location(code, offset);
         PatchDataWithValueCheck(location, ImmPtr(location.raw()), ImmPtr((void*)-1));
     }
 }
 
 void
 MacroAssembler::alignJitStackBasedOnNArgs(Register nargs)
 {
@@ -2622,16 +2623,17 @@ MacroAssembler::linkExitFrame()
 
 void
 MacroAssembler::linkSelfReference(JitCode* code)
 {
     // If this code can transition to C++ code and witness a GC, then we need to store
     // the JitCode onto the stack in order to GC it correctly.  exitCodePatch should
     // be unset if the code never needed to push its JitCode*.
     if (hasSelfReference()) {
+        selfReferencePatch_.fixup(this);
         PatchDataWithValueCheck(CodeLocationLabel(code, selfReferencePatch_),
                                 ImmPtr(code),
                                 ImmPtr((void*)-1));
     }
 }
 
 //}}} check_macroassembler_style
 
--- a/js/src/jit/PerfSpewer.cpp
+++ b/js/src/jit/PerfSpewer.cpp
@@ -218,33 +218,33 @@ PerfSpewer::writeProfile(JSScript* scrip
     }
 
     if (PerfBlockEnabled() && basicBlocks_.length() > 0) {
         if (!lockPerfMap())
             return;
 
         uint32_t thisFunctionIndex = nextFunctionIndex++;
         uintptr_t funcStart = uintptr_t(code->raw());
-        uintptr_t funcEndInlineCode = funcStart + endInlineCode.offset();
+        uintptr_t funcEndInlineCode = funcStart + masm.actualOffset(endInlineCode.offset());
         uintptr_t funcEnd = funcStart + code->instructionsSize();
 
         // function begins with the prologue, which is located before the first basic block
-        size_t prologueSize = basicBlocks_[0].start.offset();
+        size_t prologueSize = masm.actualOffset(basicBlocks_[0].start.offset());
 
         if (prologueSize > 0) {
             fprintf(PerfFilePtr, "%" PRIxSIZE " %" PRIxSIZE " %s:%" PRIuSIZE ": Func%02d-Prologue\n",
                     funcStart, prologueSize, script->filename(), script->lineno(), thisFunctionIndex);
         }
 
         uintptr_t cur = funcStart + prologueSize;
         for (uint32_t i = 0; i < basicBlocks_.length(); i++) {
             Record& r = basicBlocks_[i];
 
-            uintptr_t blockStart = funcStart + r.start.offset();
-            uintptr_t blockEnd = funcStart + r.end.offset();
+            uintptr_t blockStart = funcStart + masm.actualOffset(r.start.offset());
+            uintptr_t blockEnd = funcStart + masm.actualOffset(r.end.offset());
 
             MOZ_ASSERT(cur <= blockStart);
             if (cur < blockStart) {
                 fprintf(PerfFilePtr, "%" PRIxPTR " %" PRIxPTR " %s:%" PRIuSIZE ": Func%02d-Block?\n",
                         cur, blockStart - cur,
                         script->filename(), script->lineno(),
                         thisFunctionIndex);
             }
--- a/js/src/jit/SharedIC.h
+++ b/js/src/jit/SharedIC.h
@@ -281,16 +281,23 @@ class ICEntry
         return CodeOffsetLabel(returnOffset_);
     }
 
     void setReturnOffset(CodeOffsetLabel offset) {
         MOZ_ASSERT(offset.offset() <= (size_t) UINT32_MAX);
         returnOffset_ = (uint32_t) offset.offset();
     }
 
+    void fixupReturnOffset(MacroAssembler& masm) {
+        CodeOffsetLabel offset = returnOffset();
+        offset.fixup(&masm);
+        MOZ_ASSERT(offset.offset() <= UINT32_MAX);
+        returnOffset_ = (uint32_t) offset.offset();
+    }
+
     uint32_t pcOffset() const {
         return pcOffset_;
     }
 
     jsbytecode* pc(JSScript* script) const {
         return script->offsetToPC(pcOffset_);
     }
 
--- a/js/src/jit/arm/Assembler-arm.h
+++ b/js/src/jit/arm/Assembler-arm.h
@@ -1266,16 +1266,17 @@ class Assembler : public AssemblerShared
     void spewData(BufferOffset addr, size_t numInstr, bool loadToPC);
     void spewLabel(Label* label);
     void spewRetarget(Label* label, Label* target);
     void spewTarget(Label* l);
 #endif
 
   public:
     void resetCounter();
+    uint32_t actualOffset(uint32_t off) const { return off; }
     uint32_t actualIndex(uint32_t) const;
     static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index);
     static uint32_t NopFill;
     static uint32_t GetNopFill();
     static uint32_t AsmPoolMaxOffset;
     static uint32_t GetPoolMaxOffset();
 
   protected:
@@ -1709,17 +1710,17 @@ class Assembler : public AssemblerShared
     void retarget(Label* label, Label* target);
     // I'm going to pretend this doesn't exist for now.
     void retarget(Label* label, void* target, Relocation::Kind reloc);
 
     void Bind(uint8_t* rawCode, AbsoluteLabel* label, const void* address);
 
     // See Bind
     size_t labelOffsetToPatchOffset(size_t offset) {
-        return offset;
+        return actualOffset(offset);
     }
 
     void as_bkpt();
 
   public:
     static void TraceJumpRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
     static void TraceDataRelocations(JSTracer* trc, JitCode* code, CompactBufferReader& reader);
 
--- a/js/src/jit/arm/Trampoline-arm.cpp
+++ b/js/src/jit/arm/Trampoline-arm.cpp
@@ -564,18 +564,20 @@ JitRuntime::generateArgumentsRectifier(J
     // Discard pushed arguments.
     masm.ma_alu(sp, lsr(r4, FRAMESIZE_SHIFT), sp, OpAdd);
 
     masm.ret();
     Linker linker(masm);
     AutoFlushICache afc("ArgumentsRectifier");
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
+    CodeOffsetLabel returnLabel(returnOffset);
+    returnLabel.fixup(&masm);
     if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
+        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
 #endif
 
     return code;
 }
 
--- a/js/src/jit/arm64/Assembler-arm64.h
+++ b/js/src/jit/arm64/Assembler-arm64.h
@@ -234,22 +234,23 @@ class Assembler : public vixl::Assembler
             jumpRelocationTableBytes() +
             dataRelocationTableBytes() +
             preBarrierTableBytes();
     }
 
     void processCodeLabels(uint8_t* rawCode) {
         for (size_t i = 0; i < codeLabels_.length(); i++) {
             CodeLabel label = codeLabels_[i];
-            Bind(rawCode, label.dest(), rawCode + label.src()->offset());
+            Bind(rawCode, label.dest(), rawCode + actualOffset(label.src()->offset()));
         }
     }
 
     void Bind(uint8_t* rawCode, AbsoluteLabel* label, const void* address) {
-        *reinterpret_cast<const void**>(rawCode + label->offset()) = address;
+        uint32_t off = actualOffset(label->offset());
+        *reinterpret_cast<const void**>(rawCode + off) = address;
     }
     bool nextLink(BufferOffset cur, BufferOffset* next) {
         Instruction* link = getInstructionAt(cur);
         uint32_t nextLinkOffset = uint32_t(link->ImmPCRawOffset());
         if (nextLinkOffset == uint32_t(LabelBase::INVALID_OFFSET))
             return false;
         *next = BufferOffset(nextLinkOffset + cur.getOffset());
         return true;
@@ -257,21 +258,24 @@ class Assembler : public vixl::Assembler
     void retarget(Label* cur, Label* next);
 
     // The buffer is about to be linked. Ensure any constant pools or
     // excess bookkeeping has been flushed to the instruction stream.
     void flush() {
         armbuffer_.flushPool();
     }
 
+    int actualOffset(int curOffset) { return curOffset; }
     int actualIndex(int curOffset) {
         ARMBuffer::PoolEntry pe(curOffset);
         return armbuffer_.poolEntryOffset(pe);
     }
-    size_t labelOffsetToPatchOffset(size_t labelOff) { return labelOff; }
+    int labelOffsetToPatchOffset(int labelOff) {
+        return actualOffset(labelOff);
+    }
     static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index) {
         return code->raw() + index;
     }
     void setPrinter(Sprinter* sp) {
     }
 
     static bool SupportsFloatingPoint() { return true; }
     static bool SupportsSimd() { return js::jit::SupportsSimd; }
--- a/js/src/jit/arm64/Trampoline-arm64.cpp
+++ b/js/src/jit/arm64/Trampoline-arm64.cpp
@@ -417,18 +417,21 @@ JitRuntime::generateArgumentsRectifier(J
              Operand(x4, vixl::LSR, FRAMESIZE_SHIFT));
 
     // Pop the return address from earlier and branch.
     masm.ret();
 
     Linker linker(masm);
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
+    if (returnAddrOut) {
+        CodeOffsetLabel returnLabel(returnOffset);
+        returnLabel.fixup(&masm);
+        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
+    }
 
     return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
 {
     // the stack should look like:
--- a/js/src/jit/mips-shared/Assembler-mips-shared.cpp
+++ b/js/src/jit/mips-shared/Assembler-mips-shared.cpp
@@ -84,16 +84,22 @@ InstImm::extractImm16(BOffImm16* dest)
 void
 AssemblerMIPSShared::finish()
 {
     MOZ_ASSERT(!isFinished);
     isFinished = true;
 }
 
 uint32_t
+AssemblerMIPSShared::actualOffset(uint32_t off_) const
+{
+    return off_;
+}
+
+uint32_t
 AssemblerMIPSShared::actualIndex(uint32_t idx_) const
 {
     return idx_;
 }
 
 uint8_t*
 AssemblerMIPSShared::PatchableJumpAddress(JitCode* code, uint32_t pe_)
 {
@@ -127,17 +133,17 @@ AssemblerMIPSShared::copyPreBarrierTable
         memcpy(dest, preBarriers_.buffer(), preBarriers_.length());
 }
 
 void
 AssemblerMIPSShared::processCodeLabels(uint8_t* rawCode)
 {
     for (size_t i = 0; i < codeLabels_.length(); i++) {
         CodeLabel label = codeLabels_[i];
-        asAsm().Bind(rawCode, label.dest(), rawCode + label.src()->offset());
+        asAsm().Bind(rawCode, label.dest(), rawCode + actualOffset(label.src()->offset()));
     }
 }
 
 AssemblerMIPSShared::Condition
 AssemblerMIPSShared::InvertCondition(Condition cond)
 {
     switch (cond) {
       case Equal:
--- a/js/src/jit/mips-shared/Assembler-mips-shared.h
+++ b/js/src/jit/mips-shared/Assembler-mips-shared.h
@@ -727,16 +727,17 @@ class AssemblerMIPSShared : public Assem
         return m_buffer.nextOffset();
     }
 
   protected:
     Instruction * editSrc (BufferOffset bo) {
         return m_buffer.getInst(bo);
     }
   public:
+    uint32_t actualOffset(uint32_t) const;
     uint32_t actualIndex(uint32_t) const;
     static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index);
   protected:
     Assembler& asAsm();
 
     // structure for fixing up pc-relative loads/jumps when a the machine code
     // gets moved (executable copy, gc, etc.)
     struct RelativePatch
@@ -1037,17 +1038,19 @@ class AssemblerMIPSShared : public Assem
     // label operations
     void bind(Label* label, BufferOffset boff = BufferOffset());
     uint32_t currentOffset() {
         return nextOffset().getOffset();
     }
     void retarget(Label* label, Label* target);
 
     // See Bind
-    size_t labelOffsetToPatchOffset(size_t offset) { return offset; }
+    size_t labelOffsetToPatchOffset(size_t offset) {
+        return actualOffset(offset);
+    }
 
     void call(Label* label);
     void call(void* target);
 
     void as_break(uint32_t code);
     void as_sync(uint32_t stype = 0);
 
   public:
--- a/js/src/jit/mips32/Trampoline-mips32.cpp
+++ b/js/src/jit/mips32/Trampoline-mips32.cpp
@@ -552,18 +552,20 @@ JitRuntime::generateArgumentsRectifier(J
     // Discard pushed arguments.
     masm.addPtr(t0, StackPointer);
 
     masm.ret();
     Linker linker(masm);
     AutoFlushICache afc("ArgumentsRectifier");
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
+    CodeOffsetLabel returnLabel(returnOffset);
+    returnLabel.fixup(&masm);
     if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
+        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
 #endif
 
     return code;
 }
 
--- a/js/src/jit/none/MacroAssembler-none.h
+++ b/js/src/jit/none/MacroAssembler-none.h
@@ -184,16 +184,17 @@ class MacroAssemblerNone : public Assemb
 
     template <typename T> void bind(T) { MOZ_CRASH(); }
     template <typename T> void j(Condition, T) { MOZ_CRASH(); }
     template <typename T> void jump(T) { MOZ_CRASH(); }
     void haltingAlign(size_t) { MOZ_CRASH(); }
     void nopAlign(size_t) { MOZ_CRASH(); }
     void checkStackAlignment() { MOZ_CRASH(); }
     uint32_t currentOffset() { MOZ_CRASH(); }
+    uint32_t actualOffset(uint32_t) { MOZ_CRASH(); }
     uint32_t labelOffsetToPatchOffset(uint32_t) { MOZ_CRASH(); }
     CodeOffsetLabel labelForPatch() { MOZ_CRASH(); }
 
     void nop() { MOZ_CRASH(); }
     void breakpoint() { MOZ_CRASH(); }
     void abiret() { MOZ_CRASH(); }
     void ret() { MOZ_CRASH(); }
 
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -490,16 +490,18 @@ class CodeOffsetLabel
 
   public:
     explicit CodeOffsetLabel(size_t offset) : offset_(offset) {}
     CodeOffsetLabel() : offset_(0) {}
 
     size_t offset() const {
         return offset_;
     }
+    void fixup(MacroAssembler* masm);
+
 };
 
 // Absolute location of a jump or a label in some generated JitCode block.
 // Can also encode a CodeOffset{Jump,Label}, such that the offset is initially
 // set and the absolute location later filled in after the final JitCode is
 // allocated.
 
 class CodeLocationJump
--- a/js/src/jit/shared/BaselineCompiler-shared.h
+++ b/js/src/jit/shared/BaselineCompiler-shared.h
@@ -38,16 +38,23 @@ class BaselineCompilerShared
     {
         uint32_t pcOffset;
         uint32_t nativeOffset;
         PCMappingSlotInfo slotInfo;
 
         // If set, insert a PCMappingIndexEntry before encoding the
         // current entry.
         bool addIndexEntry;
+
+        void fixupNativeOffset(MacroAssembler& masm) {
+            CodeOffsetLabel offset(nativeOffset);
+            offset.fixup(&masm);
+            MOZ_ASSERT(offset.offset() <= UINT32_MAX);
+            nativeOffset = (uint32_t) offset.offset();
+        }
     };
 
     js::Vector<PCMappingEntry, 16, SystemAllocPolicy> pcMappingEntries_;
 
     // Labels for the 'movWithPatch' for loading IC entry pointers in
     // the generated IC-calling code in the main jitcode.  These need
     // to be patched with the actual icEntry offsets after the BaselineScript
     // has been allocated.
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -627,18 +627,20 @@ CodeGeneratorShared::assignBailoutId(LSn
 }
 
 bool
 CodeGeneratorShared::encodeSafepoints()
 {
     for (SafepointIndex& index : safepointIndices_) {
         LSafepoint* safepoint = index.safepoint();
 
-        if (!safepoint->encoded())
+        if (!safepoint->encoded()) {
+            safepoint->fixupOffset(&masm);
             safepoints_.encode(safepoint);
+        }
 
         index.resolve();
     }
 
     return !safepoints_.oom();
 }
 
 bool
@@ -703,16 +705,24 @@ CodeGeneratorShared::generateCompactNati
 {
     MOZ_ASSERT(nativeToBytecodeScriptListLength_ == 0);
     MOZ_ASSERT(nativeToBytecodeScriptList_ == nullptr);
     MOZ_ASSERT(nativeToBytecodeMap_ == nullptr);
     MOZ_ASSERT(nativeToBytecodeMapSize_ == 0);
     MOZ_ASSERT(nativeToBytecodeTableOffset_ == 0);
     MOZ_ASSERT(nativeToBytecodeNumRegions_ == 0);
 
+    // Iterate through all nativeToBytecode entries, fix up their masm offsets.
+    for (unsigned i = 0; i < nativeToBytecodeList_.length(); i++) {
+        NativeToBytecode& entry = nativeToBytecodeList_[i];
+
+        // Fixup code offsets.
+        entry.nativeOffset = CodeOffsetLabel(masm.actualOffset(entry.nativeOffset.offset()));
+    }
+
     if (!createNativeToBytecodeScriptList(cx))
         return false;
 
     MOZ_ASSERT(nativeToBytecodeScriptListLength_ > 0);
     MOZ_ASSERT(nativeToBytecodeScriptList_ != nullptr);
 
     CompactBufferWriter writer;
     uint32_t tableOffset = 0;
@@ -848,19 +858,22 @@ CodeGeneratorShared::generateCompactTrac
 
     if (trackedOptimizations_.empty())
         return true;
 
     UniqueTrackedOptimizations unique(cx);
     if (!unique.init())
         return false;
 
-    // Iterate through all entries to deduplicate their optimization attempts.
+    // Iterate through all entries, fix up their masm offsets and deduplicate
+    // their optimization attempts.
     for (size_t i = 0; i < trackedOptimizations_.length(); i++) {
         NativeToTrackedOptimizations& entry = trackedOptimizations_[i];
+        entry.startOffset = CodeOffsetLabel(masm.actualOffset(entry.startOffset.offset()));
+        entry.endOffset = CodeOffsetLabel(masm.actualOffset(entry.endOffset.offset()));
         if (!unique.add(entry.optimizations))
             return false;
     }
 
     // Sort the unique optimization attempts by frequency to stabilize the
     // attempts' indices in the compact table we will write later.
     if (!unique.sortByFrequency(cx))
         return false;
--- a/js/src/jit/x64/Trampoline-x64.cpp
+++ b/js/src/jit/x64/Trampoline-x64.cpp
@@ -538,18 +538,20 @@ JitRuntime::generateArgumentsRectifier(J
 
     Linker linker(masm);
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
 #endif
 
+    CodeOffsetLabel returnLabel(returnOffset);
+    returnLabel.fixup(&masm);
     if (returnAddrOut)
-        *returnAddrOut = (void*)(code->raw() + returnOffset);
+        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
     return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, Register spArg)
 {
     // Push registers such that we can access them from [base + code].
     if (JitSupportsSimd()) {
--- a/js/src/jit/x86-shared/Assembler-x86-shared.h
+++ b/js/src/jit/x86-shared/Assembler-x86-shared.h
@@ -3118,16 +3118,20 @@ class AssemblerX86Shared : public Assemb
             masm.fstp32_m(src.disp(), src.base());
             break;
           default:
             MOZ_CRASH("unexpected operand kind");
         }
     }
 
     // Defined for compatibility with ARM's assembler
+    uint32_t actualOffset(uint32_t x) {
+        return x;
+    }
+
     uint32_t actualIndex(uint32_t x) {
         return x;
     }
 
     void flushBuffer() {
     }
 
     // Patching.