Bug 1417398 - Use a single JitCode instance for JitRuntime trampolines. r=nbp
author       Jan de Mooij <jdemooij@mozilla.com>
date         Thu, 16 Nov 2017 18:32:35 +0100
changeset    392267 30b2f19116ba3546cd0e6fc917edc05e012a56c8
parent       392266 8d18d31fd747bca1b949e48a7cab353b9bdd5abf
child        392268 9ff1b67407688bc646c3e9fc04ead8a201feab86
push id      32916
push user    aciure@mozilla.com
push date    Fri, 17 Nov 2017 09:59:52 +0000
reviewers    nbp
bugs         1417398
milestone    59.0a1
Bug 1417398 - Use a single JitCode instance for JitRuntime trampolines. r=nbp
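
This patch makes JitRuntime emit all of its trampolines (exception, bailout and profiler-exit tails, bailout tables and handler, invalidator, arguments rectifier, EnterJIT, pre-barriers, malloc/free stubs, lazy link stub) and all VMFunction wrappers into one shared MacroAssembler, linked once into a single JitCode (trampolineCode_). The per-trampoline JitCode* fields become uint32_t offsets, and the getters hand out TrampolinePtr values pointing into that code. Because the shared trampoline code is owned by the runtime and stays alive for its lifetime, jitted code no longer needs GC edges to individual trampolines: IonScript drops its deoptTable_ edge, and the lazy-link exit frame no longer stores a traced stub JitCode* (PushStubCode/linkSelfReference go away).

TrampolinePtr itself is added in js/src/jit/shared/Assembler-shared.h, whose diff is not part of this excerpt; the following is only a sketch of the shape implied by its uses below (constructible from a raw uint8_t*, exposing a .value member):

    // Sketch only -- the real definition lives in Assembler-shared.h and is
    // not shown in this excerpt. Unlike JitCode*, this is not a GC thing; it
    // is a raw pointer into the runtime's shared trampoline code.
    struct TrampolinePtr
    {
        uint8_t* value;

        TrampolinePtr()
          : value(nullptr)
        { }
        explicit TrampolinePtr(uint8_t* value)
          : value(value)
        {
            MOZ_ASSERT(value);
        }
    };
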
js/src/jit/Bailouts.cpp
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonCacheIRCompiler.cpp
js/src/jit/IonCode.h
js/src/jit/Jit.cpp
js/src/jit/JitCompartment.h
js/src/jit/JitFrames.cpp
js/src/jit/JitFrames.h
js/src/jit/MacroAssembler-inl.h
js/src/jit/MacroAssembler.cpp
js/src/jit/MacroAssembler.h
js/src/jit/SharedIC.cpp
js/src/jit/arm/Bailouts-arm.cpp
js/src/jit/arm/CodeGenerator-arm.cpp
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/arm/MacroAssembler-arm.h
js/src/jit/arm/SharedICHelpers-arm.h
js/src/jit/arm/Trampoline-arm.cpp
js/src/jit/arm64/MacroAssembler-arm64.cpp
js/src/jit/arm64/MacroAssembler-arm64.h
js/src/jit/arm64/SharedICHelpers-arm64.h
js/src/jit/arm64/Trampoline-arm64.cpp
js/src/jit/mips-shared/CodeGenerator-mips-shared.cpp
js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
js/src/jit/mips32/Bailouts-mips32.cpp
js/src/jit/mips32/MacroAssembler-mips32.cpp
js/src/jit/mips32/MacroAssembler-mips32.h
js/src/jit/mips32/Trampoline-mips32.cpp
js/src/jit/mips64/MacroAssembler-mips64.cpp
js/src/jit/mips64/MacroAssembler-mips64.h
js/src/jit/mips64/Trampoline-mips64.cpp
js/src/jit/none/SharedICHelpers-none.h
js/src/jit/none/Trampoline-none.cpp
js/src/jit/shared/Assembler-shared.h
js/src/jit/shared/BaselineCompiler-shared.cpp
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/shared/CodeGenerator-shared.h
js/src/jit/x64/MacroAssembler-x64.cpp
js/src/jit/x64/MacroAssembler-x64.h
js/src/jit/x64/SharedICHelpers-x64.h
js/src/jit/x64/Trampoline-x64.cpp
js/src/jit/x86-shared/CodeGenerator-x86-shared.cpp
js/src/jit/x86-shared/MacroAssembler-x86-shared.h
js/src/jit/x86/Bailouts-x86.cpp
js/src/jit/x86/MacroAssembler-x86.cpp
js/src/jit/x86/MacroAssembler-x86.h
js/src/jit/x86/SharedICHelpers-x86.h
js/src/jit/x86/Trampoline-x86.cpp
js/src/jsscript.cpp
js/src/wasm/WasmInstance.cpp
js/src/wasm/WasmInstance.h
js/src/wasm/WasmStubs.cpp
--- a/js/src/jit/Bailouts.cpp
+++ b/js/src/jit/Bailouts.cpp
@@ -224,17 +224,17 @@ jit::ExceptionHandlerBailout(JSContext* 
         MOZ_ASSERT(bailoutInfo);
 
         // Overwrite the kind so HandleException after the bailout returns
         // false, jumping directly to the exception tail.
         if (excInfo.propagatingIonExceptionForDebugMode())
             bailoutInfo->bailoutKind = Bailout_IonExceptionDebugMode;
 
         rfe->kind = ResumeFromException::RESUME_BAILOUT;
-        rfe->target = cx->runtime()->jitRuntime()->getBailoutTail()->raw();
+        rfe->target = cx->runtime()->jitRuntime()->getBailoutTail().value;
         rfe->bailoutInfo = bailoutInfo;
     } else {
         // Bailout failed. If the overrecursion check failed, clear the
         // exception to turn this into an uncatchable error, continue popping
         // all inline frames and have the caller report the error.
         MOZ_ASSERT(!bailoutInfo);
 
         if (retval == BAILOUT_RETURN_OVERRECURSED) {
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -1498,17 +1498,17 @@ InitFromBailout(JSContext* cx, HandleScr
         return false;
 
     // Push rectifier frame descriptor
     if (!builder.writeWord(rectifierFrameDescr, "Descriptor"))
         return false;
 
     // Push return address into the ArgumentsRectifier code, immediately after the ioncode
     // call.
-    void* rectReturnAddr = cx->runtime()->jitRuntime()->getArgumentsRectifierReturnAddr();
+    void* rectReturnAddr = cx->runtime()->jitRuntime()->getArgumentsRectifierReturnAddr().value;
     MOZ_ASSERT(rectReturnAddr);
     if (!builder.writePtr(rectReturnAddr, "ReturnAddr"))
         return false;
     MOZ_ASSERT(builder.framePushed() % JitStackAlignment == 0);
 
     return true;
 }
 
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -144,17 +144,17 @@ class MOZ_RAII AutoStubFrame
 #endif
 };
 
 bool
 BaselineCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
 {
     MOZ_ASSERT(inStubFrame_);
 
-    uint8_t* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
     MOZ_ASSERT(fun.expectTailCall == NonTailCall);
     MOZ_ASSERT(engine_ == ICStubEngine::Baseline);
 
     EmitBaselineCallVM(code, masm);
     return true;
 }
 
 JitCode*
@@ -647,19 +647,18 @@ BaselineCacheIRCompiler::emitCallScripte
     masm.Push(scratch);
 
     // Handle arguments underflow.
     Label noUnderflow;
     masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
     masm.branch32(Assembler::Equal, callee, Imm32(0), &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier = cx_->runtime()->jitRuntime()->getArgumentsRectifier();
-        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
-        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
+        TrampolinePtr argumentsRectifier = cx_->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, code);
     }
 
     masm.bind(&noUnderflow);
     masm.callJit(code);
 
     stubFrame.leave(masm, true);
     return true;
 }
@@ -1758,19 +1757,18 @@ BaselineCacheIRCompiler::emitCallScripte
     masm.load16ZeroExtend(Address(scratch1, JSFunction::offsetOfNargs()), scratch2);
     masm.loadPtr(Address(scratch1, JSFunction::offsetOfNativeOrScript()), scratch1);
     masm.loadBaselineOrIonRaw(scratch1, scratch1, nullptr);
 
     // Handle arguments underflow.
     masm.branch32(Assembler::BelowOrEqual, scratch2, Imm32(1), &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier = cx_->runtime()->jitRuntime()->getArgumentsRectifier();
-        masm.movePtr(ImmGCPtr(argumentsRectifier), scratch1);
-        masm.loadPtr(Address(scratch1, JitCode::offsetOfCode()), scratch1);
+        TrampolinePtr argumentsRectifier = cx_->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, scratch1);
     }
 
     masm.bind(&noUnderflow);
     masm.callJit(scratch1);
 
     stubFrame.leave(masm, true);
     return true;
 }
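
The two rectifier-call conversions above recur in BaselineIC.cpp and CodeGenerator.cpp below. A commented sketch of why the old two-instruction sequence disappears (register names as in the surrounding stubs):

    // Before this patch the rectifier was its own JitCode*, i.e. a GC thing:
    // it had to be moved as an ImmGCPtr so the GC could keep it alive, and the
    // executable entry point then loaded through JitCode::offsetOfCode():
    //     masm.movePtr(ImmGCPtr(argumentsRectifier), code);
    //     masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
    //
    // A TrampolinePtr is already the raw entry point into runtime-lifetime
    // trampoline code, so a single movePtr of the address is enough:
    TrampolinePtr argumentsRectifier = cx_->runtime()->jitRuntime()->getArgumentsRectifier();
    masm.movePtr(argumentsRectifier, code);
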
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -4862,32 +4862,32 @@ BaselineCompiler::emit_JSOP_RESUME()
         masm.loadBaselineFramePtr(BaselineFrameReg, scratch2);
 
         prepareVMCall();
         pushArg(Imm32(resumeKind));
         pushArg(retVal);
         pushArg(genObj);
         pushArg(scratch2);
 
-        uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(GeneratorThrowInfo);
+        TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(GeneratorThrowInfo);
 
         // Create the frame descriptor.
         masm.subStackPtrFrom(scratch1);
         masm.makeFrameDescriptor(scratch1, JitFrame_BaselineJS, ExitFrameLayout::Size());
 
         // Push the frame descriptor and a dummy return address (it doesn't
         // matter what we push here, frame iterators will use the frame pc
         // set in jit::GeneratorThrowOrReturn).
         masm.push(scratch1);
 
         // On ARM64, the callee will push the return address.
 #ifndef JS_CODEGEN_ARM64
         masm.push(ImmWord(0));
 #endif
-        masm.jump(ImmPtr(code));
+        masm.jump(code);
     }
 
     // If the generator script has no JIT code, call into the VM.
     masm.bind(&interpret);
 
     prepareVMCall();
     if (resumeKind == GeneratorObject::NEXT) {
         pushArg(ImmGCPtr(cx->names().next));
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -3262,21 +3262,18 @@ ICCallScriptedCompiler::generateStubCode
     masm.Push(scratch);
 
     // Handle arguments underflow.
     Label noUnderflow;
     masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
     masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier =
-            cx->runtime()->jitRuntime()->getArgumentsRectifier();
-
-        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
-        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
+        TrampolinePtr argumentsRectifier = cx->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, code);
     }
 
     masm.bind(&noUnderflow);
     masm.callJit(code);
 
     // If this is a constructing call, and the callee returns a non-object, replace it with
     // the |this| object passed in.
     if (isConstructing_) {
@@ -3754,21 +3751,18 @@ ICCall_ScriptedApplyArray::Compiler::gen
     masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
     masm.loadBaselineOrIonRaw(target, target, nullptr);
 
     // Handle arguments underflow.
     Label noUnderflow;
     masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier =
-            cx->runtime()->jitRuntime()->getArgumentsRectifier();
-
-        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
-        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
+        TrampolinePtr argumentsRectifier = cx->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, target);
     }
     masm.bind(&noUnderflow);
     regs.add(argcReg);
 
     // Do call
     masm.callJit(target);
     leaveStubFrame(masm, true);
 
@@ -3847,21 +3841,18 @@ ICCall_ScriptedApplyArguments::Compiler:
     masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
     masm.loadBaselineOrIonRaw(target, target, nullptr);
 
     // Handle arguments underflow.
     Label noUnderflow;
     masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier =
-            cx->runtime()->jitRuntime()->getArgumentsRectifier();
-
-        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
-        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
+        TrampolinePtr argumentsRectifier = cx->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, target);
     }
     masm.bind(&noUnderflow);
     regs.add(argcReg);
 
     // Do call
     masm.callJit(target);
     leaveStubFrame(masm, true);
 
@@ -3971,21 +3962,18 @@ ICCall_ScriptedFunCall::Compiler::genera
     masm.Push(scratch);
 
     // Handle arguments underflow.
     Label noUnderflow;
     masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
     masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
     {
         // Call the arguments rectifier.
-        JitCode* argumentsRectifier =
-            cx->runtime()->jitRuntime()->getArgumentsRectifier();
-
-        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
-        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
+        TrampolinePtr argumentsRectifier = cx->runtime()->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, code);
     }
 
     masm.bind(&noUnderflow);
     masm.callJit(code);
 
     leaveStubFrame(masm, true);
 
     // Enter type monitor IC to type-check result.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -4248,19 +4248,16 @@ CodeGenerator::visitCallGeneric(LCallGen
     Register objreg    = ToRegister(call->getTempObject());
     Register nargsreg  = ToRegister(call->getNargsReg());
     uint32_t unusedStack = StackOffsetOfPassedArg(call->argslot());
     Label invoke, thunk, makeCall, end;
 
     // Known-target case is handled by LCallKnown.
     MOZ_ASSERT(!call->hasSingleTarget());
 
-    // Generate an ArgumentsRectifier.
-    JitCode* argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
-
     masm.checkStackAlignment();
 
     // Guard that calleereg is actually a function object.
     masm.branchTestObjClass(Assembler::NotEqual, calleereg, nargsreg, &JSFunction::class_, &invoke);
 
     // Guard that calleereg is an interpreted function with a JSScript.
     // If we are constructing, also ensure the callee is a constructor.
     if (call->mir()->isConstructing()) {
@@ -4293,18 +4290,18 @@ CodeGenerator::visitCallGeneric(LCallGen
     MOZ_ASSERT(call->numActualArgs() == call->mir()->numStackArgs() - numNonArgsOnStack);
     masm.load16ZeroExtend(Address(calleereg, JSFunction::offsetOfNargs()), nargsreg);
     masm.branch32(Assembler::Above, nargsreg, Imm32(call->numActualArgs()), &thunk);
     masm.jump(&makeCall);
 
     // Argument fixed needed. Load the ArgumentsRectifier.
     masm.bind(&thunk);
     {
-        masm.movePtr(ImmGCPtr(argumentsRectifier), objreg); // Necessary for GC marking.
-        masm.loadPtr(Address(objreg, JitCode::offsetOfCode()), objreg);
+        TrampolinePtr argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
+        masm.movePtr(argumentsRectifier, objreg);
     }
 
     // Finally call the function in objreg.
     masm.bind(&makeCall);
     uint32_t callOffset = masm.callJit(objreg);
     markSafepointAt(callOffset, call);
 
     // Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
@@ -4722,20 +4719,18 @@ CodeGenerator::emitApplyGeneric(T* apply
         // underflow.
         masm.jump(&rejoin);
 
         // Argument fixup needed. Get ready to call the argumentsRectifier.
         {
             masm.bind(&underflow);
 
             // Hardcode the address of the argumentsRectifier code.
-            JitCode* argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
-
-            masm.movePtr(ImmGCPtr(argumentsRectifier), objreg); // Necessary for GC marking.
-            masm.loadPtr(Address(objreg, JitCode::offsetOfCode()), objreg);
+            TrampolinePtr argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
+            masm.movePtr(argumentsRectifier, objreg);
         }
 
         masm.bind(&rejoin);
 
         // Finally call the function in objreg, as assigned by one of the paths above.
         uint32_t callOffset = masm.callJit(objreg);
         markSafepointAt(callOffset, apply);
 
@@ -7946,24 +7941,24 @@ JitCompartment::generateStringConcatStub
 #endif
 #ifdef MOZ_VTUNE
     vtune::MarkStub(code, "StringConcatStub");
 #endif
 
     return code;
 }
 
-JitCode*
-JitRuntime::generateMallocStub(JSContext* cx)
+void
+JitRuntime::generateMallocStub(MacroAssembler& masm)
 {
     const Register regReturn = CallTempReg0;
     const Register regZone = CallTempReg0;
     const Register regNBytes = CallTempReg1;
 
-    MacroAssembler masm(cx);
+    mallocStubOffset_ = startTrampolineCode(masm);
 
     AllocatableRegisterSet regs(RegisterSet::Volatile());
 #ifdef JS_USE_LINK_REGISTER
     masm.pushReturnAddress();
 #endif
     regs.takeUnchecked(regZone);
     regs.takeUnchecked(regNBytes);
     LiveRegisterSet save(regs.asLiveSet());
@@ -7975,37 +7970,25 @@ JitRuntime::generateMallocStub(JSContext
     masm.setupUnalignedABICall(regTemp);
     masm.passABIArg(regZone);
     masm.passABIArg(regNBytes);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, MallocWrapper));
     masm.storeCallWordResult(regReturn);
 
     masm.PopRegsInMask(save);
     masm.ret();
-
-    Linker linker(masm);
-    AutoFlushICache afc("MallocStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "MallocStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "MallocStub");
-#endif
-
-    return code;
-}
-
-JitCode*
-JitRuntime::generateFreeStub(JSContext* cx)
+}
+
+void
+JitRuntime::generateFreeStub(MacroAssembler& masm)
 {
     const Register regSlots = CallTempReg0;
 
-    MacroAssembler masm(cx);
+    freeStubOffset_ = startTrampolineCode(masm);
+
 #ifdef JS_USE_LINK_REGISTER
     masm.pushReturnAddress();
 #endif
     AllocatableRegisterSet regs(RegisterSet::Volatile());
     regs.takeUnchecked(regSlots);
     LiveRegisterSet save(regs.asLiveSet());
     masm.PushRegsInMask(save);
 
@@ -8015,72 +7998,48 @@ JitRuntime::generateFreeStub(JSContext* 
     masm.setupUnalignedABICall(regTemp);
     masm.passABIArg(regSlots);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, js_free), MoveOp::GENERAL,
                      CheckUnsafeCallWithABI::DontCheckOther);
 
     masm.PopRegsInMask(save);
 
     masm.ret();
-
-    Linker linker(masm);
-    AutoFlushICache afc("FreeStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "FreeStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "FreeStub");
-#endif
-
-    return code;
-}
-
-
-JitCode*
-JitRuntime::generateLazyLinkStub(JSContext* cx)
-{
-    MacroAssembler masm(cx);
+}
+
+void
+JitRuntime::generateLazyLinkStub(MacroAssembler& masm)
+{
+    lazyLinkStubOffset_ = startTrampolineCode(masm);
+
 #ifdef JS_USE_LINK_REGISTER
     masm.pushReturnAddress();
 #endif
 
     AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
     Register temp0 = regs.takeAny();
 
     masm.loadJSContext(temp0);
     masm.enterFakeExitFrame(temp0, temp0, ExitFrameType::LazyLink);
-    masm.PushStubCode();
 
     masm.setupUnalignedABICall(temp0);
     masm.passABIArg(temp0);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, LazyLinkTopActivation), MoveOp::GENERAL,
                      CheckUnsafeCallWithABI::DontCheckHasExitFrame);
 
-    masm.leaveExitFrame(/* stub code */ sizeof(JitCode*));
+    masm.leaveExitFrame();
 
 #ifdef JS_USE_LINK_REGISTER
     // Restore the return address such that the emitPrologue function of the
     // CodeGenerator can push it back on the stack with pushReturnAddress.
     masm.popReturnAddress();
 #endif
     masm.jump(ReturnReg);
 
-    Linker linker(masm);
-    AutoFlushICache afc("LazyLinkStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "LazyLinkStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "LazyLinkStub");
-#endif
-    return code;
+    lazyLinkStubEndOffset_ = masm.currentOffset();
 }
 
 bool
 JitRuntime::generateTLEventVM(JSContext* cx, MacroAssembler& masm, const VMFunction& f,
                               bool enter)
 {
 #ifdef JS_TRACE_LOGGING
     bool vmEventEnabled = TraceLogTextIdEnabled(TraceLogger_VM);
@@ -9558,21 +9517,18 @@ CodeGenerator::generate()
     if (!generatePrologue())
         return false;
 
     // Before generating any code, we generate type checks for all parameters.
     // This comes before deoptTable_, because we can't use deopt tables without
     // creating the actual frame.
     generateArgumentsChecks();
 
-    if (frameClass_ != FrameSizeClass::None()) {
-        deoptTable_ = gen->jitRuntime()->getBailoutTable(frameClass_);
-        if (!deoptTable_)
-            return false;
-    }
+    if (frameClass_ != FrameSizeClass::None())
+        deoptTable_.emplace(gen->jitRuntime()->getBailoutTable(frameClass_));
 
     // Skip over the alternative entry to IonScript code.
     Label skipPrologue;
     masm.jump(&skipPrologue);
 
     // An alternative entry to the IonScript code, which doesn't test the
     // arguments.
     masm.flushBuffer();
@@ -9930,18 +9886,16 @@ CodeGenerator::link(JSContext* cx, Compi
     JitSpew(JitSpew_Codegen, "Created IonScript %p (raw %p)",
             (void*) ionScript, (void*) code->raw());
 
     ionScript->setInvalidationEpilogueDataOffset(invalidateEpilogueData_.offset());
     ionScript->setOsrPc(gen->info().osrPc());
     ionScript->setOsrEntryOffset(getOsrEntryOffset());
     ionScript->setInvalidationEpilogueOffset(invalidate_.offset());
 
-    ionScript->setDeoptTable(deoptTable_);
-
 #if defined(JS_ION_PERF)
     if (PerfEnabled())
         perfSpewer_.writeProfile(script, code, masm);
 #endif
 
 #ifdef MOZ_VTUNE
     vtune::MarkScript(code, script, "ion");
 #endif
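
generateMallocStub, generateFreeStub and generateLazyLinkStub above all follow the same conversion; a sketch of the resulting pattern (the stub name and offset field here are hypothetical, the structure mirrors the generators above):

    // Sketch of the new trampoline-generator shape: append to the shared
    // MacroAssembler and record a start offset; no per-stub Linker,
    // AutoFlushICache, or perf/vtune markup -- that now happens once for the
    // whole trampoline buffer in JitRuntime::initialize.
    void
    JitRuntime::generateExampleStub(MacroAssembler& masm)   // hypothetical name
    {
        exampleStubOffset_ = startTrampolineCode(masm);      // hypothetical field

    #ifdef JS_USE_LINK_REGISTER
        masm.pushReturnAddress();
    #endif
        // ... emit the stub body ...
        masm.ret();
    }
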
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -189,189 +189,164 @@ jit::InitializeIon()
 #endif
     CheckPerf();
     return true;
 }
 
 JitRuntime::JitRuntime(JSRuntime* rt)
   : execAlloc_(rt),
     backedgeExecAlloc_(rt),
-    exceptionTail_(nullptr),
-    bailoutTail_(nullptr),
-    profilerExitFrameTail_(nullptr),
-    enterJIT_(nullptr),
-    bailoutHandler_(nullptr),
-    argumentsRectifier_(nullptr),
-    argumentsRectifierReturnAddr_(nullptr),
-    invalidator_(nullptr),
+    exceptionTailOffset_(0),
+    bailoutTailOffset_(0),
+    profilerExitFrameTailOffset_(0),
+    enterJITOffset_(0),
+    bailoutHandlerOffset_(0),
+    argumentsRectifierOffset_(0),
+    argumentsRectifierReturnOffset_(0),
+    invalidatorOffset_(0),
     debugTrapHandler_(nullptr),
     baselineDebugModeOSRHandler_(nullptr),
-    functionWrapperCode_(nullptr),
+    trampolineCode_(nullptr),
     functionWrappers_(nullptr),
     preventBackedgePatching_(false),
     jitcodeGlobalTable_(nullptr)
 {
 }
 
 JitRuntime::~JitRuntime()
 {
     js_delete(functionWrappers_.ref());
 
     // By this point, the jitcode global table should be empty.
     MOZ_ASSERT_IF(jitcodeGlobalTable_, jitcodeGlobalTable_->empty());
     js_delete(jitcodeGlobalTable_.ref());
 }
 
+uint32_t
+JitRuntime::startTrampolineCode(MacroAssembler& masm)
+{
+    masm.assumeUnreachable("Shouldn't get here");
+    masm.flushBuffer();
+    masm.haltingAlign(CodeAlignment);
+    return masm.currentOffset();
+}
+
 bool
 JitRuntime::initialize(JSContext* cx, AutoLockForExclusiveAccess& lock)
 {
     AutoAtomsCompartment ac(cx, lock);
 
     JitContext jctx(cx, nullptr);
 
     if (!cx->compartment()->ensureJitCompartmentExists(cx))
         return false;
 
     functionWrappers_ = cx->new_<VMWrapperMap>(cx);
     if (!functionWrappers_ || !functionWrappers_->init())
         return false;
 
-    JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
-    profilerExitFrameTail_ = generateProfilerExitFrameTailStub(cx);
-    if (!profilerExitFrameTail_)
-        return false;
-
-    JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
-
-    void* handler = JS_FUNC_TO_DATA_PTR(void*, jit::HandleException);
-
-    exceptionTail_ = generateExceptionTailStub(cx, handler);
-    if (!exceptionTail_)
-        return false;
-
+    MacroAssembler masm;
+
+    Label bailoutTail;
     JitSpew(JitSpew_Codegen, "# Emitting bailout tail stub");
-    bailoutTail_ = generateBailoutTailStub(cx);
-    if (!bailoutTail_)
-        return false;
+    generateBailoutTailStub(masm, &bailoutTail);
 
     if (cx->runtime()->jitSupportsFloatingPoint) {
         JitSpew(JitSpew_Codegen, "# Emitting bailout tables");
 
         // Initialize some Ion-only stubs that require floating-point support.
         BailoutTableVector& bailoutTables = bailoutTables_.writeRef();
         if (!bailoutTables.reserve(FrameSizeClass::ClassLimit().classId()))
             return false;
 
         for (uint32_t id = 0;; id++) {
             FrameSizeClass class_ = FrameSizeClass::FromClass(id);
             if (class_ == FrameSizeClass::ClassLimit())
                 break;
-            bailoutTables.infallibleAppend((JitCode*)nullptr);
             JitSpew(JitSpew_Codegen, "# Bailout table");
-            bailoutTables[id] = generateBailoutTable(cx, id);
-            if (!bailoutTables[id])
-                return false;
+            bailoutTables.infallibleAppend(generateBailoutTable(masm, &bailoutTail, id));
         }
 
         JitSpew(JitSpew_Codegen, "# Emitting bailout handler");
-        bailoutHandler_ = generateBailoutHandler(cx);
-        if (!bailoutHandler_)
-            return false;
+        generateBailoutHandler(masm, &bailoutTail);
 
         JitSpew(JitSpew_Codegen, "# Emitting invalidator");
-        invalidator_ = generateInvalidator(cx);
-        if (!invalidator_)
-            return false;
+        generateInvalidator(masm, &bailoutTail);
     }
 
     // The arguments rectifier has to use the same frame layout as the function
     // frames it rectifies.
     static_assert(mozilla::IsBaseOf<JitFrameLayout, RectifierFrameLayout>::value,
                   "a rectifier frame can be used with jit frame");
     static_assert(mozilla::IsBaseOf<JitFrameLayout, WasmToJSJitFrameLayout>::value,
                   "wasm frames simply are jit frames");
     static_assert(sizeof(JitFrameLayout) == sizeof(WasmToJSJitFrameLayout),
                   "thus a rectifier frame can be used with a wasm frame");
 
     JitSpew(JitSpew_Codegen, "# Emitting sequential arguments rectifier");
-    argumentsRectifier_ = generateArgumentsRectifier(cx, &argumentsRectifierReturnAddr_.writeRef());
-    if (!argumentsRectifier_)
-        return false;
+    generateArgumentsRectifier(masm);
 
     JitSpew(JitSpew_Codegen, "# Emitting EnterJIT sequence");
-    enterJIT_ = generateEnterJIT(cx);
-    if (!enterJIT_)
-        return false;
+    generateEnterJIT(cx, masm);
 
     JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Value");
-    valuePreBarrier_ = generatePreBarrier(cx, MIRType::Value);
-    if (!valuePreBarrier_)
-        return false;
+    valuePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Value);
 
     JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for String");
-    stringPreBarrier_ = generatePreBarrier(cx, MIRType::String);
-    if (!stringPreBarrier_)
-        return false;
+    stringPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::String);
 
     JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Object");
-    objectPreBarrier_ = generatePreBarrier(cx, MIRType::Object);
-    if (!objectPreBarrier_)
-        return false;
+    objectPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Object);
 
     JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Shape");
-    shapePreBarrier_ = generatePreBarrier(cx, MIRType::Shape);
-    if (!shapePreBarrier_)
-        return false;
+    shapePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Shape);
 
     JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for ObjectGroup");
-    objectGroupPreBarrier_ = generatePreBarrier(cx, MIRType::ObjectGroup);
-    if (!objectGroupPreBarrier_)
-        return false;
+    objectGroupPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::ObjectGroup);
 
     JitSpew(JitSpew_Codegen, "# Emitting malloc stub");
-    mallocStub_ = generateMallocStub(cx);
-    if (!mallocStub_)
-        return false;
+    generateMallocStub(masm);
 
     JitSpew(JitSpew_Codegen, "# Emitting free stub");
-    freeStub_ = generateFreeStub(cx);
-    if (!freeStub_)
+    generateFreeStub(masm);
+
+    JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
+    generateLazyLinkStub(masm);
+
+    JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
+    for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
+        if (functionWrappers_->has(fun)) {
+            // Duplicate VMFunction definition. See VMFunction::hash.
+            continue;
+        }
+        JitSpew(JitSpew_Codegen, "# VM function wrapper");
+        if (!generateVMWrapper(cx, masm, *fun))
+            return false;
+    }
+
+    JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
+    Label profilerExitTail;
+    generateProfilerExitFrameTailStub(masm, &profilerExitTail);
+
+    JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
+    void* handler = JS_FUNC_TO_DATA_PTR(void*, jit::HandleException);
+    generateExceptionTailStub(masm, handler, &profilerExitTail);
+
+    Linker linker(masm);
+    AutoFlushICache afc("Trampolines");
+    trampolineCode_ = linker.newCode<NoGC>(cx, OTHER_CODE);
+    if (!trampolineCode_)
         return false;
 
-    {
-        JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
-        MacroAssembler masm;
-        for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
-            if (functionWrappers_->has(fun)) {
-                // Duplicate VMFunction definition. See VMFunction::hash.
-                continue;
-            }
-            JitSpew(JitSpew_Codegen, "# VM function wrapper");
-            if (!generateVMWrapper(cx, masm, *fun))
-                return false;
-        }
-
-        Linker linker(masm);
-        AutoFlushICache afc("VMWrappers");
-        functionWrapperCode_ = linker.newCode<NoGC>(cx, OTHER_CODE);
-        if (!functionWrapperCode_)
-            return false;
-
 #ifdef JS_ION_PERF
-        writePerfSpewerJitCodeProfile(functionWrapperCode_, "VMWrappers");
+    writePerfSpewerJitCodeProfile(trampolineCode_, "Trampolines");
 #endif
 #ifdef MOZ_VTUNE
-        vtune::MarkStub(functionWrapperCode_, "VMWrappers");
+    vtune::MarkStub(trampolineCode_, "Trampolines");
 #endif
-    }
-
-    JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
-    lazyLinkStub_ = generateLazyLinkStub(cx);
-    if (!lazyLinkStub_)
-        return false;
 
     jitcodeGlobalTable_ = cx->new_<JitcodeGlobalTable>();
     if (!jitcodeGlobalTable_)
         return false;
 
     return true;
 }
 
@@ -728,37 +703,40 @@ JitZone::addSizeOfIncludingThis(mozilla:
     *jitZone += mallocSizeOf(this);
     *jitZone += baselineCacheIRStubCodes_.sizeOfExcludingThis(mallocSizeOf);
     *jitZone += ionCacheIRStubInfoSet_.sizeOfExcludingThis(mallocSizeOf);
 
     *baselineStubsOptimized += optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
     *cachedCFG += cfgSpace_.sizeOfExcludingThis(mallocSizeOf);
 }
 
-JitCode*
+TrampolinePtr
 JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
 {
     MOZ_ASSERT(frameClass != FrameSizeClass::None());
-    return bailoutTables_.ref()[frameClass.classId()];
+    return trampolineCode(bailoutTables_.ref()[frameClass.classId()].startOffset);
 }
 
-uint8_t*
+uint32_t
+JitRuntime::getBailoutTableSize(const FrameSizeClass& frameClass) const
+{
+    MOZ_ASSERT(frameClass != FrameSizeClass::None());
+    return bailoutTables_.ref()[frameClass.classId()].size;
+}
+
+TrampolinePtr
 JitRuntime::getVMWrapper(const VMFunction& f) const
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
-    MOZ_ASSERT(functionWrapperCode_);
+    MOZ_ASSERT(trampolineCode_);
 
     JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
     MOZ_ASSERT(p);
-
-    uint32_t offset = p->value();
-    MOZ_ASSERT(offset < functionWrapperCode_->instructionsSize());
-
-    return functionWrapperCode_->raw() + offset;
+    return trampolineCode(p->value());
 }
 
 template <AllowGC allowGC>
 JitCode*
 JitCode::New(JSContext* cx, uint8_t* code, uint32_t bufferSize, uint32_t headerSize,
              ExecutablePool* pool, CodeKind kind)
 {
     JitCode* codeObj = Allocate<JitCode, allowGC>(cx);
@@ -858,17 +836,16 @@ JitCode::finalize(FreeOp* fop)
     if (!PerfEnabled())
         pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
 
     pool_ = nullptr;
 }
 
 IonScript::IonScript()
   : method_(nullptr),
-    deoptTable_(nullptr),
     osrPc_(nullptr),
     osrEntryOffset_(0),
     skipArgCheckEntryOffset_(0),
     invalidateEpilogueOffset_(0),
     invalidateEpilogueDataOffset_(0),
     numBailouts_(0),
     hasProfilingInstrumentation_(false),
     recompiling_(false),
@@ -1017,19 +994,16 @@ IonScript::adoptFallbackStubs(FallbackIC
 }
 
 void
 IonScript::trace(JSTracer* trc)
 {
     if (method_)
         TraceEdge(trc, &method_, "method");
 
-    if (deoptTable_)
-        TraceEdge(trc, &deoptTable_, "deoptimizationTable");
-
     for (size_t i = 0; i < numConstants(); i++)
         TraceEdge(trc, &getConstant(i), "constant");
 
     // Mark all IC stub codes hanging off the IC stub entries.
     for (size_t i = 0; i < numSharedStubs(); i++) {
         IonICEntry& ent = sharedStubList()[i];
         ent.trace(trc);
     }
@@ -2816,20 +2790,21 @@ InvalidateActivation(FreeOp* fop, const 
             JitSpew(JitSpew_IonInvalidate, "#%zu wasm frames @ %p", frameno, frame.fp());
             break;
         }
 #endif // JS_JITSPEW
 
         if (!frame.isIonScripted())
             continue;
 
+        JitRuntime* jrt = fop->runtime()->jitRuntime();
+
         bool calledFromLinkStub = false;
-        JitCode* lazyLinkStub = fop->runtime()->jitRuntime()->lazyLinkStub();
-        if (frame.returnAddressToFp() >= lazyLinkStub->raw() &&
-            frame.returnAddressToFp() < lazyLinkStub->rawEnd())
+        if (frame.returnAddressToFp() >= jrt->lazyLinkStub().value &&
+            frame.returnAddressToFp() < jrt->lazyLinkStubEnd().value)
         {
             calledFromLinkStub = true;
         }
 
         // See if the frame has already been invalidated.
         if (!calledFromLinkStub && frame.checkInvalidation())
             continue;
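
The new lazyLinkStubEndOffset_ exists because the lazy link stub no longer has its own JitCode, so there is no rawEnd() to bound it; a minimal sketch of the range check it enables, mirroring the InvalidateActivation change above:

    // Sketch: does this frame's return address point into the lazy link stub?
    // `jrt` and `frame` are the values used in InvalidateActivation above.
    uint8_t* returnAddr = frame.returnAddressToFp();
    bool calledFromLinkStub = returnAddr >= jrt->lazyLinkStub().value &&
                              returnAddr < jrt->lazyLinkStubEnd().value;
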
 
--- a/js/src/jit/IonCacheIRCompiler.cpp
+++ b/js/src/jit/IonCacheIRCompiler.cpp
@@ -353,23 +353,23 @@ IonCacheIRCompiler::prepareVMCall(MacroA
 #endif
 }
 
 bool
 IonCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
 {
     MOZ_ASSERT(calledPrepareVMCall_);
 
-    uint8_t* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
 
     uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*);
     uint32_t descriptor = MakeFrameDescriptor(frameSize, JitFrame_IonICCall,
                                               ExitFrameLayout::Size());
     masm.Push(Imm32(descriptor));
-    masm.callJit(ImmPtr(code));
+    masm.callJit(code);
 
     // Remove rest of the frame left on the stack. We remove the return address
     // which is implicitly poped when returning.
     int framePop = sizeof(ExitFrameLayout) - sizeof(void*);
 
     // Pop arguments from framePushed.
     masm.implicitPop(frameSize + framePop);
     masm.freeStack(IonICCallFrameLayout::Size());
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -163,19 +163,16 @@ struct PatchableBackedgeInfo;
 
 // An IonScript attaches Ion-generated information to a JSScript.
 struct IonScript
 {
   private:
     // Code pointer containing the actual method.
     PreBarrieredJitCode method_;
 
-    // Deoptimization table used by this method.
-    PreBarrieredJitCode deoptTable_;
-
     // Entrypoint for OSR, or nullptr.
     jsbytecode* osrPc_;
 
     // Offset to OSR entrypoint from method_->raw(), or 0.
     uint32_t osrEntryOffset_;
 
     // Offset to entrypoint skipping type arg check from method_->raw().
     uint32_t skipArgCheckEntryOffset_;
@@ -358,19 +355,16 @@ struct IonScript
   public:
     JitCode* method() const {
         return method_;
     }
     void setMethod(JitCode* code) {
         MOZ_ASSERT(!invalidated());
         method_ = code;
     }
-    void setDeoptTable(JitCode* code) {
-        deoptTable_ = code;
-    }
     void setOsrPc(jsbytecode* osrPc) {
         osrPc_ = osrPc;
     }
     jsbytecode* osrPc() const {
         return osrPc_;
     }
     void setOsrEntryOffset(uint32_t offset) {
         MOZ_ASSERT(!osrEntryOffset_);
--- a/js/src/jit/Jit.cpp
+++ b/js/src/jit/Jit.cpp
@@ -58,17 +58,17 @@ EnterJit(JSContext* cx, RunState& state,
         constructing = state.asInvoke()->constructing();
         maxArgc = args.length() + 1;
         maxArgv = args.array() - 1; // -1 to include |this|
         envChain = nullptr;
         calleeToken = CalleeToToken(&args.callee().as<JSFunction>(), constructing);
 
         unsigned numFormals = script->functionNonDelazifying()->nargs();
         if (numFormals > numActualArgs)
-            code = cx->runtime()->jitRuntime()->getArgumentsRectifier()->raw();
+            code = cx->runtime()->jitRuntime()->getArgumentsRectifier().value;
     } else {
         numActualArgs = 0;
         constructing = false;
         if (script->isDirectEvalInFunction()) {
             if (state.asExecute()->newTarget().isNull()) {
                 ScriptFrameIter iter(cx);
                 state.asExecute()->setNewTarget(iter.newTarget());
             }
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -83,104 +83,120 @@ class JitRuntime
     // Executable allocator for all code except wasm code and Ion code with
     // patchable backedges (see below).
     ActiveThreadData<ExecutableAllocator> execAlloc_;
 
     // Executable allocator for Ion scripts with patchable backedges.
     ActiveThreadData<ExecutableAllocator> backedgeExecAlloc_;
 
     // Shared exception-handler tail.
-    ExclusiveAccessLockWriteOnceData<JitCode*> exceptionTail_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> exceptionTailOffset_;
 
     // Shared post-bailout-handler tail.
-    ExclusiveAccessLockWriteOnceData<JitCode*> bailoutTail_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> bailoutTailOffset_;
 
     // Shared profiler exit frame tail.
-    ExclusiveAccessLockWriteOnceData<JitCode*> profilerExitFrameTail_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> profilerExitFrameTailOffset_;
 
     // Trampoline for entering JIT code.
-    ExclusiveAccessLockWriteOnceData<JitCode*> enterJIT_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> enterJITOffset_;
 
     // Vector mapping frame class sizes to bailout tables.
-    typedef Vector<JitCode*, 4, SystemAllocPolicy> BailoutTableVector;
+    struct BailoutTable {
+        uint32_t startOffset;
+        uint32_t size;
+        BailoutTable(uint32_t startOffset, uint32_t size)
+          : startOffset(startOffset), size(size)
+        {}
+    };
+    typedef Vector<BailoutTable, 4, SystemAllocPolicy> BailoutTableVector;
     ExclusiveAccessLockWriteOnceData<BailoutTableVector> bailoutTables_;
 
     // Generic bailout table; used if the bailout table overflows.
-    ExclusiveAccessLockWriteOnceData<JitCode*> bailoutHandler_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> bailoutHandlerOffset_;
 
     // Argument-rectifying thunk, in the case of insufficient arguments passed
     // to a function call site.
-    ExclusiveAccessLockWriteOnceData<JitCode*> argumentsRectifier_;
-    ExclusiveAccessLockWriteOnceData<void*> argumentsRectifierReturnAddr_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> argumentsRectifierOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> argumentsRectifierReturnOffset_;
 
     // Thunk that invalides an (Ion compiled) caller on the Ion stack.
-    ExclusiveAccessLockWriteOnceData<JitCode*> invalidator_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> invalidatorOffset_;
 
     // Thunk that calls the GC pre barrier.
-    ExclusiveAccessLockWriteOnceData<JitCode*> valuePreBarrier_;
-    ExclusiveAccessLockWriteOnceData<JitCode*> stringPreBarrier_;
-    ExclusiveAccessLockWriteOnceData<JitCode*> objectPreBarrier_;
-    ExclusiveAccessLockWriteOnceData<JitCode*> shapePreBarrier_;
-    ExclusiveAccessLockWriteOnceData<JitCode*> objectGroupPreBarrier_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> valuePreBarrierOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> stringPreBarrierOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> objectPreBarrierOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> shapePreBarrierOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> objectGroupPreBarrierOffset_;
 
     // Thunk to call malloc/free.
-    ExclusiveAccessLockWriteOnceData<JitCode*> mallocStub_;
-    ExclusiveAccessLockWriteOnceData<JitCode*> freeStub_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> mallocStubOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> freeStubOffset_;
 
     // Thunk called to finish compilation of an IonScript.
-    ExclusiveAccessLockWriteOnceData<JitCode*> lazyLinkStub_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> lazyLinkStubOffset_;
+    ExclusiveAccessLockWriteOnceData<uint32_t> lazyLinkStubEndOffset_;
 
     // Thunk used by the debugger for breakpoint and step mode.
     ExclusiveAccessLockWriteOnceData<JitCode*> debugTrapHandler_;
 
     // Thunk used to fix up on-stack recompile of baseline scripts.
     ExclusiveAccessLockWriteOnceData<JitCode*> baselineDebugModeOSRHandler_;
     ExclusiveAccessLockWriteOnceData<void*> baselineDebugModeOSRHandlerNoFrameRegPopAddr_;
 
-    // Code for all VMFunction wrappers.
-    ExclusiveAccessLockWriteOnceData<JitCode*> functionWrapperCode_;
+    // Code for trampolines and VMFunction wrappers.
+    ExclusiveAccessLockWriteOnceData<JitCode*> trampolineCode_;
 
     // Map VMFunction addresses to the offset of the wrapper in
-    // functionWrapperCode_.
+    // trampolineCode_.
     using VMWrapperMap = HashMap<const VMFunction*, uint32_t, VMFunction>;
     ExclusiveAccessLockWriteOnceData<VMWrapperMap*> functionWrappers_;
 
     // If true, the signal handler to interrupt Ion code should not attempt to
     // patch backedges, as some thread is busy modifying data structures.
     mozilla::Atomic<bool> preventBackedgePatching_;
 
     // Global table of jitcode native address => bytecode address mappings.
     UnprotectedData<JitcodeGlobalTable*> jitcodeGlobalTable_;
 
   private:
-    JitCode* generateLazyLinkStub(JSContext* cx);
-    JitCode* generateProfilerExitFrameTailStub(JSContext* cx);
-    JitCode* generateExceptionTailStub(JSContext* cx, void* handler);
-    JitCode* generateBailoutTailStub(JSContext* cx);
-    JitCode* generateEnterJIT(JSContext* cx);
-    JitCode* generateArgumentsRectifier(JSContext* cx, void** returnAddrOut);
-    JitCode* generateBailoutTable(JSContext* cx, uint32_t frameClass);
-    JitCode* generateBailoutHandler(JSContext* cx);
-    JitCode* generateInvalidator(JSContext* cx);
-    JitCode* generatePreBarrier(JSContext* cx, MIRType type);
-    JitCode* generateMallocStub(JSContext* cx);
-    JitCode* generateFreeStub(JSContext* cx);
+    void generateLazyLinkStub(MacroAssembler& masm);
+    void generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail);
+    void generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail);
+    void generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail);
+    void generateEnterJIT(JSContext* cx, MacroAssembler& masm);
+    void generateArgumentsRectifier(MacroAssembler& masm);
+    BailoutTable generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass);
+    void generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail);
+    void generateInvalidator(MacroAssembler& masm, Label* bailoutTail);
+    uint32_t generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type);
+    void generateMallocStub(MacroAssembler& masm);
+    void generateFreeStub(MacroAssembler& masm);
     JitCode* generateDebugTrapHandler(JSContext* cx);
     JitCode* generateBaselineDebugModeOSRHandler(JSContext* cx, uint32_t* noFrameRegPopOffsetOut);
     bool generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f);
 
     bool generateTLEventVM(JSContext* cx, MacroAssembler& masm, const VMFunction& f, bool enter);
 
     inline bool generateTLEnterVM(JSContext* cx, MacroAssembler& masm, const VMFunction& f) {
         return generateTLEventVM(cx, masm, f, /* enter = */ true);
     }
     inline bool generateTLExitVM(JSContext* cx, MacroAssembler& masm, const VMFunction& f) {
         return generateTLEventVM(cx, masm, f, /* enter = */ false);
     }
 
+    uint32_t startTrampolineCode(MacroAssembler& masm);
+
+    TrampolinePtr trampolineCode(uint32_t offset) const {
+        MOZ_ASSERT(offset > 0);
+        MOZ_ASSERT(offset < trampolineCode_->instructionsSize());
+        return TrampolinePtr(trampolineCode_->raw() + offset);
+    }
+
   public:
     explicit JitRuntime(JSRuntime* rt);
     ~JitRuntime();
     MOZ_MUST_USE bool initialize(JSContext* cx, js::AutoLockForExclusiveAccess& lock);
 
     static void Trace(JSTracer* trc, js::AutoLockForExclusiveAccess& lock);
     static void TraceJitcodeGlobalTableForMinorGC(JSTracer* trc);
     static MOZ_MUST_USE bool MarkJitcodeGlobalTableIteratively(GCMarker* marker);
@@ -223,76 +239,85 @@ class JitRuntime
             }
         }
     };
 
     bool preventBackedgePatching() const {
         return preventBackedgePatching_;
     }
 
-    uint8_t* getVMWrapper(const VMFunction& f) const;
+    TrampolinePtr getVMWrapper(const VMFunction& f) const;
     JitCode* debugTrapHandler(JSContext* cx);
     JitCode* getBaselineDebugModeOSRHandler(JSContext* cx);
     void* getBaselineDebugModeOSRHandlerAddress(JSContext* cx, bool popFrameReg);
 
-    JitCode* getGenericBailoutHandler() const {
-        return bailoutHandler_;
+    TrampolinePtr getGenericBailoutHandler() const {
+        return trampolineCode(bailoutHandlerOffset_);
     }
 
-    JitCode* getExceptionTail() const {
-        return exceptionTail_;
+    TrampolinePtr getExceptionTail() const {
+        return trampolineCode(exceptionTailOffset_);
     }
 
-    JitCode* getBailoutTail() const {
-        return bailoutTail_;
+    TrampolinePtr getBailoutTail() const {
+        return trampolineCode(bailoutTailOffset_);
     }
 
-    JitCode* getProfilerExitFrameTail() const {
-        return profilerExitFrameTail_;
+    TrampolinePtr getProfilerExitFrameTail() const {
+        return trampolineCode(profilerExitFrameTailOffset_);
     }
 
-    JitCode* getBailoutTable(const FrameSizeClass& frameClass) const;
+    TrampolinePtr getBailoutTable(const FrameSizeClass& frameClass) const;
+    uint32_t getBailoutTableSize(const FrameSizeClass& frameClass) const;
 
-    JitCode* getArgumentsRectifier() const {
-        return argumentsRectifier_;
+    TrampolinePtr getArgumentsRectifier() const {
+        return trampolineCode(argumentsRectifierOffset_);
     }
 
-    void* getArgumentsRectifierReturnAddr() const {
-        return argumentsRectifierReturnAddr_;
+    TrampolinePtr getArgumentsRectifierReturnAddr() const {
+        return trampolineCode(argumentsRectifierReturnOffset_);
     }
 
-    JitCode* getInvalidationThunk() const {
-        return invalidator_;
+    TrampolinePtr getInvalidationThunk() const {
+        return trampolineCode(invalidatorOffset_);
     }
 
     EnterJitCode enterJit() const {
-        return enterJIT_->as<EnterJitCode>();
+        return JS_DATA_TO_FUNC_PTR(EnterJitCode, trampolineCode(enterJITOffset_).value);
     }
 
-    JitCode* preBarrier(MIRType type) const {
+    TrampolinePtr preBarrier(MIRType type) const {
         switch (type) {
-          case MIRType::Value: return valuePreBarrier_;
-          case MIRType::String: return stringPreBarrier_;
-          case MIRType::Object: return objectPreBarrier_;
-          case MIRType::Shape: return shapePreBarrier_;
-          case MIRType::ObjectGroup: return objectGroupPreBarrier_;
+          case MIRType::Value:
+            return trampolineCode(valuePreBarrierOffset_);
+          case MIRType::String:
+            return trampolineCode(stringPreBarrierOffset_);
+          case MIRType::Object:
+            return trampolineCode(objectPreBarrierOffset_);
+          case MIRType::Shape:
+            return trampolineCode(shapePreBarrierOffset_);
+          case MIRType::ObjectGroup:
+            return trampolineCode(objectGroupPreBarrierOffset_);
           default: MOZ_CRASH();
         }
     }
 
-    JitCode* mallocStub() const {
-        return mallocStub_;
+    TrampolinePtr mallocStub() const {
+        return trampolineCode(mallocStubOffset_);
     }
 
-    JitCode* freeStub() const {
-        return freeStub_;
+    TrampolinePtr freeStub() const {
+        return trampolineCode(freeStubOffset_);
     }
 
-    JitCode* lazyLinkStub() const {
-        return lazyLinkStub_;
+    TrampolinePtr lazyLinkStub() const {
+        return trampolineCode(lazyLinkStubOffset_);
+    }
+    TrampolinePtr lazyLinkStubEnd() const {
+        return trampolineCode(lazyLinkStubEndOffset_);
     }
 
     bool hasJitcodeGlobalTable() const {
         return jitcodeGlobalTable_ != nullptr;
     }
 
     JitcodeGlobalTable* getJitcodeGlobalTable() {
         MOZ_ASSERT(hasJitcodeGlobalTable());
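
With the getters returning TrampolinePtr, MacroAssembler grows matching overloads (call, callJit, movePtr; see the MacroAssembler changes below). A short usage sketch, assuming cx, fun, and a register objreg are in scope:

    // Usage sketch only; cx, fun and objreg are assumed to be in scope.
    TrampolinePtr wrapper = cx->runtime()->jitRuntime()->getVMWrapper(fun);
    masm.callJit(wrapper);              // forwards to call(ImmPtr(wrapper.value))

    TrampolinePtr rectifier = cx->runtime()->jitRuntime()->getArgumentsRectifier();
    masm.movePtr(rectifier, objreg);    // no ImmGCPtr / offsetOfCode() load
    masm.callJit(objreg);
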
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1147,17 +1147,16 @@ TraceJitExitFrame(JSTracer* trc, const J
         }
         return;
     }
 
     if (frame.isExitFrameLayout<LazyLinkExitFrameLayout>()) {
         LazyLinkExitFrameLayout* ll = frame.exitFrame()->as<LazyLinkExitFrameLayout>();
         JitFrameLayout* layout = ll->jsFrame();
 
-        TraceRoot(trc, ll->stubCode(), "lazy-link-code");
         layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
         TraceThisAndArguments(trc, frame);
         return;
     }
 
     if (frame.isBareExit()) {
         // Nothing to trace. Fake exit frame pushed for VM functions with
         // nothing to trace on the stack.
--- a/js/src/jit/JitFrames.h
+++ b/js/src/jit/JitFrames.h
@@ -825,30 +825,26 @@ struct IonDOMMethodExitFrameLayoutTraits
         offsetof(IonDOMMethodExitFrameLayout, argv_);
 };
 
 // Cannot inherit implementation since we need to extend the top of
 // ExitFrameLayout.
 class LazyLinkExitFrameLayout
 {
   protected: // silence clang warning about unused private fields
-    JitCode* stubCode_;
     ExitFooterFrame footer_;
     JitFrameLayout exit_;
 
   public:
     static ExitFrameType Type() { return ExitFrameType::LazyLink; }
 
     static inline size_t Size() {
         return sizeof(LazyLinkExitFrameLayout);
     }
 
-    inline JitCode** stubCode() {
-        return &stubCode_;
-    }
     inline JitFrameLayout* jsFrame() {
         return &exit_;
     }
     static size_t offsetOfExitFrame() {
         return offsetof(LazyLinkExitFrameLayout, exit_);
     }
 };
 
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -78,16 +78,22 @@ MacroAssembler::PushWithPatch(ImmPtr imm
 {
     return PushWithPatch(ImmWord(uintptr_t(imm.value)));
 }
 
 // ===============================================================
 // Simple call functions.
 
 void
+MacroAssembler::call(TrampolinePtr code)
+{
+    call(ImmPtr(code.value));
+}
+
+void
 MacroAssembler::call(const wasm::CallSiteDesc& desc, const Register reg)
 {
     CodeOffset l = call(reg);
     append(desc, l);
 }
 
 void
 MacroAssembler::call(const wasm::CallSiteDesc& desc, uint32_t funcIndex)
@@ -229,17 +235,17 @@ uint32_t
 MacroAssembler::callJit(JitCode* callee)
 {
     AutoProfilerCallInstrumentation profiler(*this);
     call(callee);
     return currentOffset();
 }
 
 uint32_t
-MacroAssembler::callJit(ImmPtr code)
+MacroAssembler::callJit(TrampolinePtr code)
 {
     AutoProfilerCallInstrumentation profiler(*this);
     call(code);
     return currentOffset();
 }
 
 void
 MacroAssembler::makeFrameDescriptor(Register frameSizeReg, FrameType type, uint32_t headerSize)
@@ -300,24 +306,16 @@ MacroAssembler::buildFakeExitFrame(Regis
     MOZ_ASSERT(framePushed() == initialDepth + ExitFrameLayout::Size());
     return retAddr;
 }
 
 // ===============================================================
 // Exit frame footer.
 
 void
-MacroAssembler::PushStubCode()
-{
-    // Make sure that we do not erase an existing self-reference.
-    MOZ_ASSERT(!hasSelfReference());
-    selfReferencePatch_ = PushWithPatch(ImmWord(-1));
-}
-
-void
 MacroAssembler::enterExitFrame(Register cxreg, Register scratch, const VMFunction* f)
 {
     MOZ_ASSERT(f);
     linkExitFrame(cxreg, scratch);
     // Push VMFunction pointer, to mark arguments.
     Push(ImmPtr(f));
 }
 
@@ -336,22 +334,16 @@ MacroAssembler::enterFakeExitFrameForNat
 }
 
 void
 MacroAssembler::leaveExitFrame(size_t extraFrame)
 {
     freeStack(ExitFooterFrame::Size() + extraFrame);
 }
 
-bool
-MacroAssembler::hasSelfReference() const
-{
-    return selfReferencePatch_.bound();
-}
-
 // ===============================================================
 // Move instructions
 
 void
 MacroAssembler::moveValue(const ConstantOrRegister& src, const ValueOperand& dest)
 {
     if (src.constant()) {
         moveValue(src.value(), dest);
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -1695,17 +1695,17 @@ MacroAssembler::loadBaselineFramePtr(Reg
     subPtr(Imm32(BaselineFrame::Size()), dest);
 }
 
 void
 MacroAssembler::handleFailure()
 {
     // Re-entry code is irrelevant because the exception will leave the
     // running function and never come back
-    JitCode* excTail = GetJitContext()->runtime->jitRuntime()->getExceptionTail();
+    TrampolinePtr excTail = GetJitContext()->runtime->jitRuntime()->getExceptionTail();
     jump(excTail);
 }
 
 #ifdef DEBUG
 static void
 AssumeUnreachable_(const char* output) {
     MOZ_ReportAssertionFailure(output, __FILE__, __LINE__);
 }
@@ -2356,17 +2356,16 @@ MacroAssembler::finish()
 
     MacroAssemblerSpecific::finish();
 }
 
 void
 MacroAssembler::link(JitCode* code)
 {
     MOZ_ASSERT(!oom());
-    linkSelfReference(code);
     linkProfilerCallSites(code);
 }
 
 MacroAssembler::AutoProfilerCallInstrumentation::AutoProfilerCallInstrumentation(
     MacroAssembler& masm
     MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
@@ -2876,29 +2875,16 @@ MacroAssembler::callWithABI(wasm::Byteco
 
 void
 MacroAssembler::linkExitFrame(Register cxreg, Register scratch)
 {
     loadPtr(Address(cxreg, JSContext::offsetOfActivation()), scratch);
     storeStackPtr(Address(scratch, JitActivation::offsetOfPackedExitFP()));
 }
 
-void
-MacroAssembler::linkSelfReference(JitCode* code)
-{
-    // If this code can transition to C++ code and witness a GC, then we need to store
-    // the JitCode onto the stack in order to GC it correctly.  exitCodePatch should
-    // be unset if the code never needed to push its JitCode*.
-    if (hasSelfReference()) {
-        PatchDataWithValueCheck(CodeLocationLabel(code, selfReferencePatch_),
-                                ImmPtr(code),
-                                ImmPtr((void*)-1));
-    }
-}
-
 // ===============================================================
 // Branch functions
 
 void
 MacroAssembler::branchIfNotInterpretedConstructor(Register fun, Register scratch, Label* label)
 {
     // 16-bit loads are slow and unaligned 32-bit loads may be too so
     // perform an aligned 32-bit load and adjust the bitmask accordingly.
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -507,16 +507,18 @@ class MacroAssembler : public MacroAssem
     // Call a target native function, which is neither traceable nor movable.
     void call(ImmPtr imm) PER_SHARED_ARCH;
     void call(wasm::SymbolicAddress imm) PER_SHARED_ARCH;
     inline void call(const wasm::CallSiteDesc& desc, wasm::SymbolicAddress imm);
 
     // Call a target JitCode, which must be traceable, and may be movable.
     void call(JitCode* c) PER_SHARED_ARCH;
 
+    inline void call(TrampolinePtr code);
+
     inline void call(const wasm::CallSiteDesc& desc, const Register reg);
     inline void call(const wasm::CallSiteDesc& desc, uint32_t funcDefIndex);
     inline void call(const wasm::CallSiteDesc& desc, wasm::Trap trap);
 
     CodeOffset callWithPatch() PER_SHARED_ARCH;
     void patchCall(uint32_t callerOffset, uint32_t calleeOffset) PER_SHARED_ARCH;
 
     // Push the return address and make a call. On platforms where this function
@@ -646,17 +648,17 @@ class MacroAssembler : public MacroAssem
     // push the return address).
     //
     // These functions return the offset of the return address, in order to use
     // the return address to index the safepoints, which are used to list all
     // live registers.
     inline uint32_t callJitNoProfiler(Register callee);
     inline uint32_t callJit(Register callee);
     inline uint32_t callJit(JitCode* code);
-    inline uint32_t callJit(ImmPtr code);
+    inline uint32_t callJit(TrampolinePtr code);
 
     // The frame descriptor is the second field of all Jit frames, pushed before
     // calling the Jit function.  It is a composite value defined in JitFrames.h
     inline void makeFrameDescriptor(Register frameSizeReg, FrameType type, uint32_t headerSize);
 
     // Push the frame descriptor, based on the statically known framePushed.
     inline void pushStaticFrameDescriptor(FrameType type, uint32_t headerSize);
 
@@ -695,27 +697,16 @@ class MacroAssembler : public MacroAssem
     // Exit frame footer.
     //
     // When calling outside the Jit we push an exit frame. To mark the stack
     // correctly, we have to push additional information, called the Exit frame
     // footer, which is used to identify how the stack is marked.
     //
     // See JitFrames.h, and MarkJitExitFrame in JitFrames.cpp.
 
-    // If the current piece of code might be garbage collected, then the exit
-    // frame footer must contain a pointer to the current JitCode, such that the
-    // garbage collector can keep the code alive as long this code is on the
-    // stack. This function pushes a placeholder which is replaced when the code
-    // is linked.
-    inline void PushStubCode();
-
-    // Return true if the code contains a self-reference which needs to be
-    // patched when the code is linked.
-    inline bool hasSelfReference() const;
-
     // Push stub code and the VMFunction pointer.
     inline void enterExitFrame(Register cxreg, Register scratch, const VMFunction* f);
 
     // Push an exit frame token to identify which fake exit frame this footer
     // corresponds to.
     inline void enterFakeExitFrame(Register cxreg, Register scratch, ExitFrameType type);
 
     // Push an exit frame token for a native call.
@@ -724,24 +715,16 @@ class MacroAssembler : public MacroAssem
     // Pop ExitFrame footer in addition to the extra frame.
     inline void leaveExitFrame(size_t extraFrame = 0);
 
   private:
     // Save the top of the stack into JitActivation::packedExitFP of the
     // current thread, which should be the location of the latest exit frame.
     void linkExitFrame(Register cxreg, Register scratch);
 
-    // Patch the value of PushStubCode with the pointer to the finalized code.
-    void linkSelfReference(JitCode* code);
-
-    // If the JitCode that created this assembler needs to transition into the VM,
-    // we want to store the JitCode on the stack in order to mark it during a GC.
-    // This is a reference to a patch location where the JitCode* will be written.
-    CodeOffset selfReferencePatch_;
-
   public:
     // ===============================================================
     // Move instructions
 
     inline void move64(Imm64 imm, Register64 dest) PER_ARCH;
     inline void move64(Register64 src, Register64 dest) PER_ARCH;
 
     inline void moveFloat32ToGPR(FloatRegister src, Register dest) PER_SHARED_ARCH;
@@ -1694,17 +1677,17 @@ class MacroAssembler : public MacroAssem
             branchTestGCThing(Assembler::NotEqual, address, &done);
         else if (type == MIRType::Object || type == MIRType::String)
             branchPtr(Assembler::Equal, address, ImmWord(0), &done);
 
         Push(PreBarrierReg);
         computeEffectiveAddress(address, PreBarrierReg);
 
         const JitRuntime* rt = GetJitContext()->runtime->jitRuntime();
-        JitCode* preBarrier = rt->preBarrier(type);
+        TrampolinePtr preBarrier = rt->preBarrier(type);
 
         call(preBarrier);
         Pop(PreBarrierReg);
 
         bind(&done);
     }
 
     template<typename T>
@@ -1944,16 +1927,22 @@ class MacroAssembler : public MacroAssem
         push(scratch);
     }
 
     void PushBaselineFramePtr(Register framePtr, Register scratch) {
         loadBaselineFramePtr(framePtr, scratch);
         Push(scratch);
     }
 
+    using MacroAssemblerSpecific::movePtr;
+
+    void movePtr(TrampolinePtr ptr, Register dest) {
+        movePtr(ImmPtr(ptr.value), dest);
+    }
+
   private:
     void handleFailure();
 
   public:
     Label* exceptionLabel() {
         // Exceptions are currently handled the same way as sequential failures.
         return &failureLabel_;
     }
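
The overloads above rely only on a single raw entry point, so TrampolinePtr can stay a trivial value type. A minimal sketch of the wrapper these call sites assume follows; the real definition lives elsewhere in this patch, and everything beyond the `value` field actually used above is illustrative.

    // Sketch only: a non-nullable pointer into the JitRuntime's single
    // trampoline JitCode. Call sites in this patch only rely on `.value`.
    struct TrampolinePtr
    {
        uint8_t* value;

        TrampolinePtr() : value(nullptr) {}
        explicit TrampolinePtr(uint8_t* valueArg) : value(valueArg) {
            MOZ_ASSERT(value);
        }
    };

With that shape, movePtr(TrampolinePtr, Register) and call(TrampolinePtr) can lower to a plain immediate, as the movePtr overload added above already shows.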
--- a/js/src/jit/SharedIC.cpp
+++ b/js/src/jit/SharedIC.cpp
@@ -549,34 +549,34 @@ ICStubCompiler::getStubCode()
 #endif
 
     return newStubCode;
 }
 
 bool
 ICStubCompiler::tailCallVM(const VMFunction& fun, MacroAssembler& masm)
 {
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
     MOZ_ASSERT(fun.expectTailCall == TailCall);
     uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
     if (engine_ == Engine::Baseline) {
         EmitBaselineTailCallVM(code, masm, argSize);
     } else {
         uint32_t stackSize = argSize + fun.extraValuesToPop * sizeof(Value);
         EmitIonTailCallVM(code, masm, stackSize);
     }
     return true;
 }
 
 bool
 ICStubCompiler::callVM(const VMFunction& fun, MacroAssembler& masm)
 {
     MOZ_ASSERT(inStubFrame_);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
     MOZ_ASSERT(fun.expectTailCall == NonTailCall);
     MOZ_ASSERT(engine_ == Engine::Baseline);
 
     EmitBaselineCallVM(code, masm);
     return true;
 }
 
 void
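
For reference, a hedged sketch of how getVMWrapper can now return a TrampolinePtr instead of a JitCode*: generateVMWrapper (further down in this patch) records only a per-VMFunction code offset, which the runtime resolves against its single trampoline JitCode. The map type and the trampolineCode() helper are assumed names.

    TrampolinePtr
    JitRuntime::getVMWrapper(const VMFunction& f) const
    {
        MOZ_ASSERT(functionWrappers_);

        // Look up the offset recorded by generateVMWrapper and turn it into
        // a pointer into the runtime's trampoline code.
        JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
        MOZ_ASSERT(p);
        return trampolineCode(p->value());
    }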
--- a/js/src/jit/arm/Bailouts-arm.cpp
+++ b/js/src/jit/arm/Bailouts-arm.cpp
@@ -85,22 +85,25 @@ BailoutFrameInfo::BailoutFrameInfo(const
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
     JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
-    JitCode* code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+    TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+#ifdef DEBUG
+    uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
+#endif
     uintptr_t tableOffset = bailout->tableOffset();
-    uintptr_t tableStart = reinterpret_cast<uintptr_t>(Assembler::BailoutTableStart(code->raw()));
+    uintptr_t tableStart = reinterpret_cast<uintptr_t>(Assembler::BailoutTableStart(code.value));
 
     MOZ_ASSERT(tableOffset >= tableStart &&
-               tableOffset < tableStart + code->instructionsSize());
+               tableOffset < tableStart + tableSize);
     MOZ_ASSERT((tableOffset - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
 
     uint32_t bailoutId = ((tableOffset - tableStart) / BAILOUT_TABLE_ENTRY_SIZE) - 1;
     MOZ_ASSERT(bailoutId < BAILOUT_TABLE_SIZE);
 
     snapshotOffset_ = topIonScript_->bailoutToSnapshot(bailoutId);
 }
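
The new getBailoutTableSize() call exists only to keep this DEBUG bound check working now that a bailout table is no longer its own JitCode with an instructionsSize(). A hedged sketch of the accessors it assumes, built on the (offset, size) BailoutTable records returned by generateBailoutTable further down; member names are illustrative.

    TrampolinePtr
    JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
    {
        MOZ_ASSERT(frameClass != FrameSizeClass::None());
        return trampolineCode(bailoutTables_[frameClass.classId()].startOffset);
    }

    uint32_t
    JitRuntime::getBailoutTableSize(const FrameSizeClass& frameClass) const
    {
        MOZ_ASSERT(frameClass != FrameSizeClass::None());
        return bailoutTables_[frameClass.classId()].size;
    }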
 
--- a/js/src/jit/arm/CodeGenerator-arm.cpp
+++ b/js/src/jit/arm/CodeGenerator-arm.cpp
@@ -133,18 +133,18 @@ CodeGeneratorARM::generateOutOfLineCode(
 
     if (deoptLabel_.used()) {
         // All non-table-based bailouts will go here.
         masm.bind(&deoptLabel_);
 
         // Push the frame size, so the handler can recover the IonScript.
         masm.ma_mov(Imm32(frameSize()), lr);
 
-        JitCode* handler = gen->jitRuntime()->getGenericBailoutHandler();
-        masm.branch(handler);
+        TrampolinePtr handler = gen->jitRuntime()->getGenericBailoutHandler();
+        masm.jump(handler);
     }
 
     return !masm.oom();
 }
 
 void
 CodeGeneratorARM::bailoutIf(Assembler::Condition condition, LSnapshot* snapshot)
 {
@@ -152,17 +152,17 @@ CodeGeneratorARM::bailoutIf(Assembler::C
 
     // Though the assembler doesn't track all frame pushes, at least make sure
     // the known value makes sense. We can't use bailout tables if the stack
     // isn't properly aligned to the static frame size.
     MOZ_ASSERT_IF(frameClass_ != FrameSizeClass::None(),
                   frameClass_.frameSize() == masm.framePushed());
 
     if (assignBailoutId(snapshot)) {
-        uint8_t* bailoutTable = Assembler::BailoutTableStart(deoptTable_->raw());
+        uint8_t* bailoutTable = Assembler::BailoutTableStart(deoptTable_->value);
         uint8_t* code = bailoutTable + snapshot->bailoutId() * BAILOUT_TABLE_ENTRY_SIZE;
         masm.ma_b(code, condition);
         return;
     }
 
     // We could not use a jump table, either because all bailout IDs were
     // reserved, or a jump table is not optimal for this frame size or
     // platform. Whatever, we will generate a lazy bailout.
@@ -1781,19 +1781,19 @@ CodeGeneratorARM::generateInvalidateEpil
 
     masm.bind(&invalidate_);
 
     // Push the return address of the point that we bailed out at onto the stack.
     masm.Push(lr);
 
     // Push the Ion script onto the stack (when we determine what that pointer is).
     invalidateEpilogueData_ = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
-    JitCode* thunk = gen->jitRuntime()->getInvalidationThunk();
-
-    masm.branch(thunk);
+
+    TrampolinePtr thunk = gen->jitRuntime()->getInvalidationThunk();
+    masm.jump(thunk);
 
     // We should never reach this point in JIT code -- the invalidation thunk
     // should pop the invalidated JS frame and return directly to its caller.
     masm.assumeUnreachable("Should have returned directly to its caller instead of here.");
 }
 
 void
 CodeGeneratorARM::visitLoadTypedArrayElementStatic(LLoadTypedArrayElementStatic* ins)
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -3581,17 +3581,17 @@ MacroAssemblerARMCompat::breakpoint(Cond
 
 void
 MacroAssemblerARMCompat::checkStackAlignment()
 {
     asMasm().assertStackAlignment(ABIStackAlignment);
 }
 
 void
-MacroAssemblerARMCompat::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerARMCompat::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     int size = (sizeof(ResumeFromException) + 7) & ~7;
 
     Imm8 size8(size);
     as_sub(sp, sp, size8);
     ma_mov(sp, r0);
 
@@ -3678,17 +3678,17 @@ MacroAssemblerARMCompat::handleFailureWi
     // If profiling is enabled, then update the lastProfilingFrame to refer to caller
     // frame before returning.
     {
         Label skipProfilingInstrumentation;
         // Test if profiler enabled.
         AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->geckoProfiler().addressOfEnabled());
         asMasm().branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                           &skipProfilingInstrumentation);
-        profilerExitFrame();
+        jump(profilerExitTail);
         bind(&skipProfilingInstrumentation);
     }
 
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to the
     // bailout tail stub.
     bind(&bailout);
@@ -4743,17 +4743,17 @@ MacroAssemblerARMCompat::profilerEnterFr
     loadPtr(Address(scratch, offsetof(JSContext, profilingActivation_)), scratch);
     storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
     storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
 }
 
 void
 MacroAssemblerARMCompat::profilerExitFrame()
 {
-    branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+    jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
 }
 
 MacroAssembler&
 MacroAssemblerARM::asMasm()
 {
     return *static_cast<MacroAssembler*>(this);
 }
 
--- a/js/src/jit/arm/MacroAssembler-arm.h
+++ b/js/src/jit/arm/MacroAssembler-arm.h
@@ -659,19 +659,19 @@ class MacroAssemblerARMCompat : public M
     }
 
     void jump(Label* label) {
         as_b(label);
     }
     void jump(JitCode* code) {
         branch(code);
     }
-    void jump(ImmPtr code) {
+    void jump(TrampolinePtr code) {
         ScratchRegisterScope scratch(asMasm());
-        movePtr(code, scratch);
+        movePtr(ImmPtr(code.value), scratch);
         ma_bx(scratch);
     }
     void jump(Register reg) {
         ma_bx(reg);
     }
     void jump(const Address& addr) {
         ScratchRegisterScope scratch(asMasm());
         SecondScratchRegisterScope scratch2(asMasm());
@@ -975,17 +975,17 @@ class MacroAssemblerARMCompat : public M
 
     void storePayload(const Value& val, const Address& dest);
     void storePayload(Register src, const Address& dest);
     void storePayload(const Value& val, const BaseIndex& dest);
     void storePayload(Register src, const BaseIndex& dest);
     void storeTypeTag(ImmTag tag, const Address& dest);
     void storeTypeTag(ImmTag tag, const BaseIndex& dest);
 
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     /////////////////////////////////////////////////////////////////
     // Common interface.
     /////////////////////////////////////////////////////////////////
   public:
     void not32(Register reg);
 
     void move32(Imm32 imm, Register dest);
--- a/js/src/jit/arm/SharedICHelpers-arm.h
+++ b/js/src/jit/arm/SharedICHelpers-arm.h
@@ -74,17 +74,17 @@ EmitReturnFromIC(MacroAssembler& masm)
 
 inline void
 EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
 {
     masm.ma_mov(reg, lr);
 }
 
 inline void
-EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
+EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t argSize)
 {
     // We assume during this that R0 and R1 have been pushed, and that R2 is
     // unused.
     MOZ_ASSERT(R2 == ValueOperand(r1, r0));
 
     // Compute frame size.
     masm.movePtr(BaselineFrameReg, r0);
     masm.as_add(r0, r0, Imm8(BaselineFrame::FramePointerOffset));
@@ -100,21 +100,21 @@ EmitBaselineTailCallVM(uint8_t* target, 
     // Push frame descriptor and perform the tail call.
     // ICTailCallReg (lr) already contains the return address (as we keep
     // it there through the stub calls), but the VMWrapper code being called
     // expects the return address to also be pushed on the stack.
     MOZ_ASSERT(ICTailCallReg == lr);
     masm.makeFrameDescriptor(r0, JitFrame_BaselineJS, ExitFrameLayout::Size());
     masm.push(r0);
     masm.push(lr);
-    masm.jump(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
-EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
+EmitIonTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t stackSize)
 {
     // We assume during this that R0 and R1 have been pushed, and that R2 is
     // unused.
     MOZ_ASSERT(R2 == ValueOperand(r1, r0));
 
     masm.loadPtr(Address(sp, stackSize), r0);
     masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), r0);
     masm.add32(Imm32(stackSize + JitStubFrameLayout::Size() - sizeof(intptr_t)), r0);
@@ -122,37 +122,37 @@ EmitIonTailCallVM(uint8_t* target, Macro
     // Push frame descriptor and perform the tail call.
     // ICTailCallReg (lr) already contains the return address (as we keep
     // it there through the stub calls), but the VMWrapper code being called
     // expects the return address to also be pushed on the stack.
     MOZ_ASSERT(ICTailCallReg == lr);
     masm.makeFrameDescriptor(r0, JitFrame_IonJS, ExitFrameLayout::Size());
     masm.push(r0);
     masm.push(lr);
-    masm.jump(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
 EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32_t headerSize)
 {
     // Compute stub frame size. We have to add two pointers: the stub reg and
     // previous frame pointer pushed by EmitEnterStubFrame.
     masm.mov(BaselineFrameReg, reg);
     masm.as_add(reg, reg, Imm8(sizeof(void*) * 2));
     masm.ma_sub(BaselineStackReg, reg);
 
     masm.makeFrameDescriptor(reg, JitFrame_BaselineStub, headerSize);
 }
 
 inline void
-EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
+EmitBaselineCallVM(TrampolinePtr target, MacroAssembler& masm)
 {
     EmitBaselineCreateStubFrameDescriptor(masm, r0, ExitFrameLayout::Size());
     masm.push(r0);
-    masm.call(ImmPtr(target));
+    masm.call(target);
 }
 
 // Size of values pushed by EmitEnterStubFrame.
 static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void*);
 static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void*);
 
 inline void
 EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch)
--- a/js/src/jit/arm/Trampoline-arm.cpp
+++ b/js/src/jit/arm/Trampoline-arm.cpp
@@ -99,25 +99,26 @@ struct EnterJITStack
 
 /*
  * This method generates a trampoline for a c++ function with the following
  * signature:
  *   void enter(void* code, int argc, Value* argv, InterpreterFrame* fp, CalleeToken
  *              calleeToken, JSObject* scopeChain, Value* vp)
  *   ...using standard EABI calling convention
  */
-JitCode*
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
+    enterJITOffset_ = startTrampolineCode(masm);
+
     const Address slot_token(sp, offsetof(EnterJITStack, token));
     const Address slot_vp(sp, offsetof(EnterJITStack, vp));
 
     MOZ_ASSERT(OsrFrameReg == r3);
 
-    MacroAssembler masm(cx);
     Assembler* aasm = &masm;
 
     // Save non-volatile registers. These must be saved by the trampoline,
     // rather than the JIT'd code, because they are scanned by the conservative
     // scanner.
     masm.startDataTransferM(IsStore, sp, DB, WriteBack);
     masm.transferReg(r4); // [sp,0]
     masm.transferReg(r5); // [sp,4]
@@ -371,33 +372,25 @@ JitRuntime::generateEnterJIT(JSContext* 
     // We're using a load-double here. In order for that to work, the data needs
     // to be stored in two consecutive registers, make sure this is the case
     //   MOZ_ASSERT(JSReturnReg_Type.code() == JSReturnReg_Data.code()+1);
     //   aasm->as_extdtr(IsStore, 64, true, Offset,
     //                   JSReturnReg_Data, EDtrAddr(r5, EDtrOffImm(0)));
 
     // Restore non-volatile registers and return.
     GenerateReturn(masm, true);
-
-    Linker linker(masm);
-    AutoFlushICache afc("EnterJIT");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
     // See large comment in x86's JitRuntime::generateInvalidator.
-    MacroAssembler masm(cx);
+
+    invalidatorOffset_ = startTrampolineCode(masm);
+
     // At this point, one of two things has happened:
     // 1) Execution has just returned from C code, which left the stack aligned
     // 2) Execution has just returned from Ion code, which left the stack unaligned.
     // The old return address should not matter, but we still want the stack to
     // be aligned, and there is no good reason to automatically align it with a
     // call to setupUnalignedABICall.
     masm.as_bic(sp, sp, Imm8(7));
     masm.startDataTransferM(IsStore, sp, DB, WriteBack);
@@ -447,35 +440,23 @@ JitRuntime::generateInvalidator(JSContex
         ScratchRegisterScope scratch(masm);
         masm.ma_add(sp, Imm32(sizeof(InvalidationBailoutStack) + sizeOfRetval + sizeOfBailoutInfo), sp, scratch);
     }
     // Remove the space that this frame was using before the bailout (computed
     // by InvalidationBailout)
     masm.ma_add(sp, r1, sp);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
-
-    Linker linker(masm);
-    AutoFlushICache afc("Invalidator");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "Invalidator");
-#endif
-
-    return code;
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
     masm.pushReturnAddress();
 
     // Copy number of actual arguments into r0 and r8.
     masm.ma_ldr(DTRAddr(sp, DtrOffImm(RectifierFrameLayout::offsetOfNumActualArgs())), r0);
     masm.mov(r0, r8);
 
     // Load the number of |undefined|s to push into r6.
     masm.ma_ldr(DTRAddr(sp, DtrOffImm(RectifierFrameLayout::offsetOfCalleeToken())), r1);
@@ -545,17 +526,17 @@ JitRuntime::generateArgumentsRectifier(J
     masm.ma_push(r1); // callee token
     masm.ma_push(r6); // frame descriptor.
 
     // Call the target function.
     // Note that this code assumes the function is JITted.
     masm.andPtr(Imm32(CalleeTokenMask), r1);
     masm.ma_ldr(DTRAddr(r1, DtrOffImm(JSFunction::offsetOfNativeOrScript())), r3);
     masm.loadBaselineOrIonRaw(r3, r3, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(r3);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(r3);
 
     // arg1
     //  ...
     // argN
     // num actual args
     // callee token
     // sizeDescriptor     <- sp now
     // return address
@@ -573,28 +554,16 @@ JitRuntime::generateArgumentsRectifier(J
     // callee token
     // sizeDescriptor
     // return address
 
     // Discard pushed arguments.
     masm.ma_alu(sp, lsr(r4, FRAMESIZE_SHIFT), sp, OpAdd);
 
     masm.ret();
-    Linker linker(masm);
-    AutoFlushICache afc("ArgumentsRectifier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
-#endif
-
-    return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
 {
     // the stack should look like:
     // [IonFrame]
     // bailoutFrame.registersnapshot
@@ -647,17 +616,17 @@ PushBailoutFrame(MacroAssembler& masm, u
     // stack.
     masm.transferReg(lr);
     masm.finishDataTransfer();
 
     masm.ma_mov(sp, spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, frameClass, r0);
 
     // SP % 8 == 4
     // STEP 1c: Call the bailout function, giving a pointer to the
     //          structure we just blitted onto the stack.
     const int sizeOfBailoutInfo = sizeof(void*)*2;
     masm.reserveStack(sizeOfBailoutInfo);
@@ -708,72 +677,53 @@ GenerateBailoutThunk(JSContext* cx, Macr
                           // the stack.
                           + sizeof(void*)
                           // Everything else that was pushed on the stack.
                           + bailoutFrameSize)
                     , sp, scratch);
     }
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     {
         // Emit the table without any pools being inserted.
         Label bailout;
         AutoForbidPools afp(&masm, BAILOUT_TABLE_SIZE);
         for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++)
             masm.ma_bl(&bailout);
         masm.bind(&bailout);
     }
 
-    GenerateBailoutThunk(cx, masm, frameClass);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTable");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+    GenerateBailoutThunk(masm, frameClass, bailoutTail);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTable");
-#endif
-
-    return code;
+    return BailoutTable(offset, masm.currentOffset() - offset);
 }
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutHandler");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                   "Wrapper register set must be a superset of Volatile register set.");
 
     // The context is the first argument; r0 is the first argument register.
     Register cxreg = r0;
@@ -933,20 +883,20 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
                     f.explicitStackSlots() * sizeof(void*) +
                     f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet save;
     if (cx->runtime()->jitSupportsFloatingPoint) {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                  FloatRegisterSet(FloatRegisters::VolatileDoubleMask));
     } else {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                  FloatRegisterSet());
@@ -960,25 +910,17 @@ JitRuntime::generatePreBarrier(JSContext
     masm.setupUnalignedABICall(r2);
     masm.passABIArg(r0);
     masm.passABIArg(r1);
     masm.callWithABI(IonMarkFunction(type));
     save.take(AnyRegister(lr));
     save.add(pc);
     masm.PopRegsInMask(save);
 
-    Linker linker(masm);
-    AutoFlushICache afc("PreBarrier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "PreBarrier");
-#endif
-
-    return code;
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -993,17 +935,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure the
     // stub frame has a nullptr ICStub pointer, since this pointer is marked
     // during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch2);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
     masm.push(lr);
     masm.push(scratch1);
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
 
     // If the stub returns |true|, we have to perform a forced return (return
     // from the JS frame). If the stub returns |false|, just return from the
@@ -1036,56 +978,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm;
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("ExceptionTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(r1, r2);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = r5;
     Register scratch2 = r6;
     Register scratch3 = r7;
     Register scratch4 = r8;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1407,19 +1332,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
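
All the generators in this file now follow the same pattern: record a start offset with startTrampolineCode(), emit into the shared MacroAssembler, and let the runtime translate stored offsets into pointers later. A hedged sketch of the two helpers that pattern assumes (names such as trampolineCode_ are illustrative):

    uint32_t
    JitRuntime::startTrampolineCode(MacroAssembler& masm)
    {
        // Guard against accidental fall-through from the previous trampoline.
        masm.assumeUnreachable("Shouldn't get here");
        masm.flushBuffer();
        return masm.currentOffset();
    }

    TrampolinePtr
    JitRuntime::trampolineCode(uint32_t offset) const
    {
        MOZ_ASSERT(offset > 0);
        MOZ_ASSERT(offset < trampolineCode_->instructionsSize());
        return TrampolinePtr(trampolineCode_->raw() + offset);
    }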
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -127,17 +127,17 @@ MacroAssemblerCompat::movePatchablePtr(I
 void
 MacroAssemblerCompat::loadPrivate(const Address& src, Register dest)
 {
     loadPtr(src, dest);
     asMasm().lshiftPtr(Imm32(1), dest);
 }
 
 void
-MacroAssemblerCompat::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerCompat::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     int64_t size = (sizeof(ResumeFromException) + 7) & ~7;
     Sub(GetStackPointer64(), GetStackPointer64(), Operand(size));
     if (!GetStackPointer64().Is(sp))
         Mov(sp, GetStackPointer64());
 
     Mov(x0, GetStackPointer64());
--- a/js/src/jit/arm64/MacroAssembler-arm64.h
+++ b/js/src/jit/arm64/MacroAssembler-arm64.h
@@ -653,20 +653,20 @@ class MacroAssemblerCompat : public vixl
     }
 
     void jump(Label* label) {
         B(label);
     }
     void jump(JitCode* code) {
         branch(code);
     }
-    void jump(ImmPtr code) {
+    void jump(TrampolinePtr code) {
         syncStackPtr();
         BufferOffset loc = b(-1); // The jump target will be patched by executableCopy().
-        addPendingJump(loc, code, Relocation::HARDCODED);
+        addPendingJump(loc, ImmPtr(code.value), Relocation::HARDCODED);
     }
     void jump(RepatchLabel* label) {
         MOZ_CRASH("jump (repatchlabel)");
     }
     void jump(Register reg) {
         Br(ARMRegister(reg, 64));
     }
     void jump(const Address& addr) {
@@ -1856,21 +1856,21 @@ class MacroAssemblerCompat : public vixl
             Add(dest64, dest64, Operand(address.offset));
     }
 
   public:
     CodeOffset labelForPatch() {
         return CodeOffset(nextOffset().getOffset());
     }
 
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     void profilerEnterFrame(Register framePtr, Register scratch);
     void profilerExitFrame() {
-        branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+        jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
     }
     Address ToPayload(Address value) {
         return value;
     }
     Address ToType(Address value) {
         return value;
     }
 
--- a/js/src/jit/arm64/SharedICHelpers-arm64.h
+++ b/js/src/jit/arm64/SharedICHelpers-arm64.h
@@ -74,17 +74,17 @@ EmitReturnFromIC(MacroAssembler& masm)
 
 inline void
 EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
 {
     masm.movePtr(reg, lr);
 }
 
 inline void
-EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
+EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t argSize)
 {
     // We assume that R0 has been pushed, and R2 is unused.
     MOZ_ASSERT(R2 == ValueOperand(r0));
 
     // Compute frame size into w0. Used below in makeFrameDescriptor().
     masm.Sub(x0, BaselineFrameReg64, masm.GetStackPointer64());
     masm.Add(w0, w0, Operand(BaselineFrame::FramePointerOffset));
 
@@ -103,21 +103,21 @@ EmitBaselineTailCallVM(uint8_t* target, 
     masm.makeFrameDescriptor(r0, JitFrame_BaselineJS, ExitFrameLayout::Size());
     masm.push(r0);
 
     // The return address will be pushed by the VM wrapper, for compatibility
     // with direct calls. Refer to the top of generateVMWrapper().
     // ICTailCallReg (lr) already contains the return address (as we keep
     // it there through the stub calls).
 
-    masm.jump(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
-EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
+EmitIonTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t stackSize)
 {
     MOZ_CRASH("Not implemented yet.");
 }
 
 inline void
 EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32_t headerSize)
 {
     ARMRegister reg64(reg, 64);
@@ -125,21 +125,21 @@ EmitBaselineCreateStubFrameDescriptor(Ma
     // Compute stub frame size.
     masm.Sub(reg64, masm.GetStackPointer64(), Operand(sizeof(void*) * 2));
     masm.Sub(reg64, BaselineFrameReg64, reg64);
 
     masm.makeFrameDescriptor(reg, JitFrame_BaselineStub, headerSize);
 }
 
 inline void
-EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
+EmitBaselineCallVM(TrampolinePtr target, MacroAssembler& masm)
 {
     EmitBaselineCreateStubFrameDescriptor(masm, r0, ExitFrameLayout::Size());
     masm.push(r0);
-    masm.call(ImmPtr(target));
+    masm.call(target);
 }
 
 // Size of values pushed by EmitEnterStubFrame.
 static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void*);
 static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void*);
 
 inline void
 EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch)
--- a/js/src/jit/arm64/Trampoline-arm64.cpp
+++ b/js/src/jit/arm64/Trampoline-arm64.cpp
@@ -25,20 +25,20 @@ static const LiveRegisterSet AllRegs =
     LiveRegisterSet(GeneralRegisterSet(Registers::AllMask & ~(1 << 31 | 1 << 30 | 1 << 29| 1 << 28)),
                 FloatRegisterSet(FloatRegisters::AllMask));
 
 /* This method generates a trampoline on ARM64 for a c++ function with
  * the following signature:
  *   bool blah(void* code, int argc, Value* argv, JSObject* scopeChain, Value* vp)
  *   ...using standard AArch64 calling convention
  */
-JitCode*
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    enterJITOffset_ = startTrampolineCode(masm);
 
     const Register reg_code      = IntArgReg0; // EnterJitData::jitcode.
     const Register reg_argc      = IntArgReg1; // EnterJitData::maxArgc.
     const Register reg_argv      = IntArgReg2; // EnterJitData::maxArgv.
     const Register reg_osrFrame  = IntArgReg3; // EnterJitData::osrFrame.
     const Register reg_callee    = IntArgReg4; // EnterJitData::calleeToken.
     const Register reg_scope     = IntArgReg5; // EnterJitData::scopeChain.
     const Register reg_osrNStack = IntArgReg6; // EnterJitData::osrNumStackValues.
@@ -267,31 +267,24 @@ JitRuntime::generateEnterJIT(JSContext* 
     masm.storeValue(JSReturnOperand, Address(reg_vp, 0));
 
     // Restore old frame pointer.
     masm.pop(r30, r29);
 
     // Return using the value popped into x30.
     masm.abiret();
 
-    Linker linker(masm);
-    AutoFlushICache afc("EnterJIT");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-
-    return code;
+    // Reset stack pointer.
+    masm.SetStackPointer64(PseudoStackPointer64);
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    invalidatorOffset_ = startTrampolineCode(masm);
 
     masm.push(r0, r1, r2, r3);
 
     masm.PushRegsInMask(AllRegs);
     masm.moveStackPtrTo(r0);
 
     masm.Sub(x1, masm.GetStackPointer64(), Operand(sizeof(size_t)));
     masm.Sub(x2, masm.GetStackPointer64(), Operand(sizeof(size_t) + sizeof(void*)));
@@ -307,28 +300,23 @@ JitRuntime::generateInvalidator(JSContex
 
     masm.pop(r2, r1);
 
     masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(), x1);
     masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(),
              Operand(sizeof(InvalidationBailoutStack)));
     masm.syncStackPtr();
 
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
-
-    Linker linker(masm);
-    AutoFlushICache afc("Invalidator");
-    return linker.newCode<NoGC>(cx, OTHER_CODE);
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
-    MacroAssembler masm;
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
 
     // Save the return address for later.
     masm.push(lr);
 
     // Load the information that the rectifier needs from the stack.
     masm.Ldr(w0, MemOperand(masm.GetStackPointer64(), RectifierFrameLayout::offsetOfNumActualArgs()));
     masm.Ldr(x1, MemOperand(masm.GetStackPointer64(), RectifierFrameLayout::offsetOfCalleeToken()));
 
@@ -406,38 +394,29 @@ JitRuntime::generateArgumentsRectifier(J
 
     masm.push(r0,  // Number of actual arguments.
               r1,  // Callee token.
               r6); // Frame descriptor.
 
     // Load the address of the code that is getting called.
     masm.Ldr(x3, MemOperand(x5, JSFunction::offsetOfNativeOrScript()));
     masm.loadBaselineOrIonRaw(r3, r3, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(r3);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(r3);
 
     // Clean up!
     // Get the size of the stack frame, and clean up the later fixed frame.
     masm.Ldr(x4, MemOperand(masm.GetStackPointer64(), 24, vixl::PostIndex));
 
     // Now that the size of the stack frame sans the fixed frame has been loaded,
     // add that onto the stack pointer.
     masm.Add(masm.GetStackPointer64(), masm.GetStackPointer64(),
              Operand(x4, vixl::LSR, FRAMESIZE_SHIFT));
 
     // Pop the return address from earlier and branch.
     masm.ret();
-
-    Linker linker(masm);
-    AutoFlushICache afc("ArgumentsRectifier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
-
-    return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
 {
     // the stack should look like:
     // [IonFrame]
     // bailoutFrame.registersnapshot
@@ -482,17 +461,17 @@ PushBailoutFrame(MacroAssembler& masm, u
     // the end of the current stack. Sadly, the ABI says that we need to always
     // point to the lowest place that has been written. The OS is free to do
     // whatever it wants below sp.
     masm.push(r30, r9);
     masm.moveStackPtrTo(spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, frameClass, r0);
 
     // SP % 8 == 4
     // STEP 1c: Call the bailout function, giving a pointer to the
     //          structure we just blitted onto the stack.
     // Make space for the BaselineBailoutInfo* outparam.
     const int sizeOfBailoutInfo = sizeof(void*) * 2;
@@ -519,56 +498,40 @@ GenerateBailoutThunk(JSContext* cx, Macr
         masm.addToStackPtr(Imm32(BailoutDataSize + 32));
         masm.addToStackPtr(scratch64.asUnsized());
     } else {
         uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
         masm.addToStackPtr(Imm32(frameSize + BailoutDataSize + sizeof(void*)));
     }
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
-}
-
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
-{
-    // FIXME: Implement.
-    MacroAssembler masm;
-    masm.breakpoint();
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTable");
-    return linker.newCode<NoGC>(cx, OTHER_CODE);
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
-    MacroAssembler masm(cx);
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
+    MOZ_CRASH("arm64 does not use bailout tables");
+}
 
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutHandler");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
+{
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     // Avoid conflicts with argument registers while discarding the result after
     // the function call.
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                   "Wrapper register set must be a superset of the Volatile register set.");
 
@@ -742,20 +705,20 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
               f.explicitStackSlots() * sizeof(void*) +
               f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet regs = LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                            FloatRegisterSet(FloatRegisters::VolatileMask));
 
     // Also preserve the return address.
     regs.add(lr);
 
     masm.PushRegsInMask(regs);
@@ -768,19 +731,17 @@ JitRuntime::generatePreBarrier(JSContext
     masm.passABIArg(PreBarrierReg);
     masm.callWithABI(IonMarkFunction(type));
 
     // Pop the volatile regs and restore LR.
     masm.PopRegsInMask(regs);
 
     masm.abiret();
 
-    Linker linker(masm);
-    AutoFlushICache afc("PreBarrier");
-    return linker.newCode<NoGC>(cx, OTHER_CODE);
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -799,17 +760,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.Sub(ARMRegister(scratch1, 64), BaselineFrameReg64, Operand(BaselineFrame::Size()));
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure the
     // stub frame has a nullptr ICStub pointer, since this pointer is marked
     // during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch2);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
     masm.asVIXL().Push(vixl::lr, ARMRegister(scratch1, 64));
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
 
     // If the stub returns |true|, we have to perform a forced return (return
     // from the JS frame). If the stub returns |false|, just return from the
     // trap stub so that execution continues at the current pc.
@@ -832,55 +793,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm(cx);
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("ExceptionTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(r1, r2);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+}
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-
-    return code;
-}
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = r8;
     Register scratch2 = r9;
     Register scratch3 = r10;
     Register scratch4 = r11;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1203,19 +1148,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
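
Taken together, the new signatures imply a single driver on the runtime side: one MacroAssembler, shared Labels for the bailout and profiler tails, and one Linker call at the end. A hedged sketch under those assumptions follows; the real driver is not part of this excerpt, and generateTrampolines is a hypothetical name.

    bool
    JitRuntime::generateTrampolines(JSContext* cx)
    {
        MacroAssembler masm(cx);

        // Tail stubs first, so the shared Labels exist for the other generators.
        Label bailoutTail;
        generateBailoutTailStub(masm, &bailoutTail);

        Label profilerExitTail;
        generateProfilerExitFrameTailStub(masm, &profilerExitTail);

        void* handler = JS_FUNC_TO_DATA_PTR(void*, jit::HandleException);
        generateExceptionTailStub(masm, handler, &profilerExitTail);

        generateEnterJIT(cx, masm);
        generateArgumentsRectifier(masm);
        generateInvalidator(masm, &bailoutTail);
        generateBailoutHandler(masm, &bailoutTail);

        // Link everything into the one JitCode the accessors index into.
        Linker linker(masm);
        AutoFlushICache afc("Trampolines");
        trampolineCode_ = linker.newCode<NoGC>(cx, OTHER_CODE);
        return !!trampolineCode_;
    }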
--- a/js/src/jit/mips-shared/CodeGenerator-mips-shared.cpp
+++ b/js/src/jit/mips-shared/CodeGenerator-mips-shared.cpp
@@ -202,19 +202,18 @@ CodeGeneratorMIPSShared::generateOutOfLi
         masm.bind(&deoptLabel_);
 
         // Push the frame size, so the handler can recover the IonScript.
         // Frame size is stored in 'ra' and pushed by GenerateBailoutThunk
         // We have to use 'ra' because generateBailoutTable will implicitly do
         // the same.
         masm.move32(Imm32(frameSize()), ra);
 
-        JitCode* handler = gen->jitRuntime()->getGenericBailoutHandler();
-
-        masm.branch(handler);
+        TrampolinePtr handler = gen->jitRuntime()->getGenericBailoutHandler();
+        masm.jump(handler);
     }
 
     return !masm.oom();
 }
 
 void
 CodeGeneratorMIPSShared::bailoutFrom(Label* label, LSnapshot* snapshot)
 {
@@ -1838,19 +1837,19 @@ CodeGeneratorMIPSShared::generateInvalid
     masm.bind(&invalidate_);
 
     // Push the return address of the point that we bailed out at to the stack
     masm.Push(ra);
 
     // Push the Ion script onto the stack (when we determine what that
     // pointer is).
     invalidateEpilogueData_ = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
-    JitCode* thunk = gen->jitRuntime()->getInvalidationThunk();
-
-    masm.branch(thunk);
+    TrampolinePtr thunk = gen->jitRuntime()->getInvalidationThunk();
+
+    masm.jump(thunk);
 
     // We should never reach this point in JIT code -- the invalidation thunk
     // should pop the invalidated JS frame and return directly to its caller.
     masm.assumeUnreachable("Should have returned directly to its caller instead of here.");
 }
 
 void
 CodeGeneratorMIPSShared::visitLoadTypedArrayElementStatic(LLoadTypedArrayElementStatic* ins)
--- a/js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
+++ b/js/src/jit/mips-shared/SharedICHelpers-mips-shared.h
@@ -73,17 +73,17 @@ EmitReturnFromIC(MacroAssembler& masm)
 
 inline void
 EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
 {
     masm.movePtr(reg, ra);
 }
 
 inline void
-EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
+EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t argSize)
 {
     Register scratch = R2.scratchReg();
 
     // Compute frame size.
     masm.movePtr(BaselineFrameReg, scratch);
     masm.addPtr(Imm32(BaselineFrame::FramePointerOffset), scratch);
     masm.subPtr(BaselineStackReg, scratch);
 
@@ -97,50 +97,50 @@ EmitBaselineTailCallVM(uint8_t* target, 
     // keep it there through the stub calls), but the VMWrapper code being
     // called expects the return address to also be pushed on the stack.
     MOZ_ASSERT(ICTailCallReg == ra);
     masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS, ExitFrameLayout::Size());
     masm.subPtr(Imm32(sizeof(CommonFrameLayout)), StackPointer);
     masm.storePtr(scratch, Address(StackPointer, CommonFrameLayout::offsetOfDescriptor()));
     masm.storePtr(ra, Address(StackPointer, CommonFrameLayout::offsetOfReturnAddress()));
 
-    masm.branch(target);
+    masm.jump(target);
 }
 
 inline void
-EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
+EmitIonTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t stackSize)
 {
     Register scratch = R2.scratchReg();
 
     masm.loadPtr(Address(sp, stackSize), scratch);
     masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch);
     masm.addPtr(Imm32(stackSize + JitStubFrameLayout::Size() - sizeof(intptr_t)), scratch);
 
     // Push frame descriptor and perform the tail call.
     MOZ_ASSERT(ICTailCallReg == ra);
     masm.makeFrameDescriptor(scratch, JitFrame_IonJS, ExitFrameLayout::Size());
     masm.push(scratch);
     masm.push(ICTailCallReg);
-    masm.branch(target);
+    masm.jump(target);
 }
 
 inline void
 EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32_t headerSize)
 {
     // Compute stub frame size. We have to add two pointers: the stub reg and
     // previous frame pointer pushed by EmitEnterStubFrame.
     masm.movePtr(BaselineFrameReg, reg);
     masm.addPtr(Imm32(sizeof(intptr_t) * 2), reg);
     masm.subPtr(BaselineStackReg, reg);
 
     masm.makeFrameDescriptor(reg, JitFrame_BaselineStub, headerSize);
 }
 
 inline void
-EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
+EmitBaselineCallVM(TrampolinePtr target, MacroAssembler& masm)
 {
     Register scratch = R2.scratchReg();
     EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());
     masm.push(scratch);
     masm.call(target);
 }
 
 struct BaselineStubFrame {
--- a/js/src/jit/mips32/Bailouts-mips32.cpp
+++ b/js/src/jit/mips32/Bailouts-mips32.cpp
@@ -28,21 +28,24 @@ BailoutFrameInfo::BailoutFrameInfo(const
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
     JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
-    JitCode* code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+    TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+#ifdef DEBUG
+    uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
+#endif
     uintptr_t tableOffset = bailout->tableOffset();
-    uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());
+    uintptr_t tableStart = reinterpret_cast<uintptr_t>(code.value);
 
     MOZ_ASSERT(tableOffset >= tableStart &&
-               tableOffset < tableStart + code->instructionsSize());
+               tableOffset < tableStart + tableSize);
     MOZ_ASSERT((tableOffset - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
 
     uint32_t bailoutId = ((tableOffset - tableStart) / BAILOUT_TABLE_ENTRY_SIZE) - 1;
     MOZ_ASSERT(bailoutId < BAILOUT_TABLE_SIZE);
 
     snapshotOffset_ = topIonScript_->bailoutToSnapshot(bailoutId);
 }
--- a/js/src/jit/mips32/MacroAssembler-mips32.cpp
+++ b/js/src/jit/mips32/MacroAssembler-mips32.cpp
@@ -1840,17 +1840,17 @@ MacroAssembler::alignFrameForICArguments
 void
 MacroAssembler::restoreFrameAlignmentForICArguments(AfterICSaveLive& aic)
 {
     if (aic.alignmentPadding != 0)
         freeStack(aic.alignmentPadding);
 }
 
 void
-MacroAssemblerMIPSCompat::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerMIPSCompat::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     int size = (sizeof(ResumeFromException) + ABIStackAlignment) & ~(ABIStackAlignment - 1);
     asMasm().subPtr(Imm32(size), StackPointer);
     ma_move(a0, StackPointer); // Use a0 since it is a first function argument
 
     // Call the handler.
     asMasm().setupUnalignedABICall(a1);
@@ -1924,17 +1924,17 @@ MacroAssemblerMIPSCompat::handleFailureW
     // If profiling is enabled, then update the lastProfilingFrame to refer to caller
     // frame before returning.
     {
         Label skipProfilingInstrumentation;
         // Test if profiler enabled.
         AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->geckoProfiler().addressOfEnabled());
         asMasm().branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                           &skipProfilingInstrumentation);
-        profilerExitFrame();
+        jump(profilerExitTail);
         bind(&skipProfilingInstrumentation);
     }
 
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
@@ -2078,17 +2078,17 @@ MacroAssemblerMIPSCompat::profilerEnterF
     loadPtr(Address(scratch, offsetof(JSContext, profilingActivation_)), scratch);
     storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
     storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
 }
 
 void
 MacroAssemblerMIPSCompat::profilerExitFrame()
 {
-    branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+    jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
 }
 
 void
 MacroAssembler::subFromStackPtr(Imm32 imm32)
 {
     if (imm32.value)
         asMasm().subPtr(imm32, StackPointer);
 }
--- a/js/src/jit/mips32/MacroAssembler-mips32.h
+++ b/js/src/jit/mips32/MacroAssembler-mips32.h
@@ -500,17 +500,17 @@ class MacroAssemblerMIPSCompat : public 
 
     void storePayload(const Value& val, Address dest);
     void storePayload(Register src, Address dest);
     void storePayload(const Value& val, const BaseIndex& dest);
     void storePayload(Register src, const BaseIndex& dest);
     void storeTypeTag(ImmTag tag, Address dest);
     void storeTypeTag(ImmTag tag, const BaseIndex& dest);
 
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     /////////////////////////////////////////////////////////////////
     // Common interface.
     /////////////////////////////////////////////////////////////////
   public:
     // The following functions are exposed for use in platform-shared code.
 
     template<typename T>
--- a/js/src/jit/mips32/Trampoline-mips32.cpp
+++ b/js/src/jit/mips32/Trampoline-mips32.cpp
@@ -123,27 +123,28 @@ GeneratePrologue(MacroAssembler& masm)
 
 /*
  * This method generates a trampoline for a c++ function with the following
  * signature:
  *   void enter(void* code, int argc, Value* argv, InterpreterFrame* fp,
  *              CalleeToken calleeToken, JSObject* scopeChain, Value* vp)
  *   ...using standard EABI calling convention
  */
-JitCode*
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
+    enterJITOffset_ = startTrampolineCode(masm);
+
     const Register reg_code = a0;
     const Register reg_argc = a1;
     const Register reg_argv = a2;
     const mozilla::DebugOnly<Register> reg_frame = a3;
 
     MOZ_ASSERT(OsrFrameReg == reg_frame);
 
-    MacroAssembler masm(cx);
     GeneratePrologue(masm);
 
     const Address slotToken(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, calleeToken));
     const Address slotVp(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, vp));
 
     // Save stack pointer into s4
     masm.movePtr(StackPointer, s4);
 
@@ -331,32 +332,22 @@ JitRuntime::generateEnterJIT(JSContext* 
     masm.addPtr(s0, StackPointer);
 
     // Store the returned value into the slotVp
     masm.loadPtr(slotVp, s1);
     masm.storeValue(JSReturnOperand, Address(s1, 0));
 
     // Restore non-volatile registers and return.
     GenerateReturn(masm, ShortJump);
-
-    Linker linker(masm);
-    AutoFlushICache afc("GenerateEnterJIT");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
+    invalidatorOffset_ = startTrampolineCode(masm);
 
     // NOTE: Members ionScript_ and osiPointReturnAddress_ of
     // InvalidationBailoutStack are already on the stack.
     static const uint32_t STACK_DATA_SIZE = sizeof(InvalidationBailoutStack) -
                                             2 * sizeof(uintptr_t);
 
     // Stack has to be aligned here. If not, we will have to fix it.
     masm.checkStackAlignment();
@@ -400,35 +391,23 @@ JitRuntime::generateInvalidator(JSContex
     // (InvalidationBailoutStack) and the space that was allocated for the
     // return value.
     masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
     // remove the space that this frame was using before the bailout
     // (computed by InvalidationBailout)
     masm.addPtr(a1, StackPointer);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
-
-    Linker linker(masm);
-    AutoFlushICache afc("Invalidator");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "Invalidator");
-#endif
-
-    return code;
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
     masm.pushReturnAddress();
 
     Register numActArgsReg = t6;
     Register calleeTokenReg = t7;
     Register numArgsReg = t5;
 
     // Load the number of actual arguments into numActArgsReg
     masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfNumActualArgs()),
@@ -524,17 +503,17 @@ JitRuntime::generateArgumentsRectifier(J
     // Push frame descriptor.
     masm.storePtr(t0, Address(StackPointer, 0));
 
     // Call the target function.
     // Note that this code assumes the function is JITted.
     masm.andPtr(Imm32(CalleeTokenMask), calleeTokenReg);
     masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
     masm.loadBaselineOrIonRaw(t1, t1, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(t1);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(t1);
 
     // arg1
     //  ...
     // argN
     // num actual args
     // callee token
     // sizeDescriptor     <- sp now
     // return address
@@ -554,28 +533,16 @@ JitRuntime::generateArgumentsRectifier(J
     // callee token
     // sizeDescriptor
     // return address
 
     // Discard pushed arguments.
     masm.addPtr(t0, StackPointer);
 
     masm.ret();
-    Linker linker(masm);
-    AutoFlushICache afc("ArgumentsRectifier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
-#endif
-
-    return code;
 }
 
 // NOTE: Members snapshotOffset_ and padding_ of BailoutStack
 // are not stored in PushBailoutFrame().
 static const uint32_t bailoutDataSize = sizeof(BailoutStack) - 2 * sizeof(uintptr_t);
 static const uint32_t bailoutInfoOutParamSize = 2 * sizeof(uintptr_t);
 
 /* There are two different stack layouts when doing bailout. They are
@@ -624,17 +591,17 @@ PushBailoutFrame(MacroAssembler& masm, u
     // Put the frame class on the stack
     masm.storePtr(ImmWord(frameClass), Address(StackPointer, BailoutStack::offsetOfFrameClass()));
 
     // Put pointer to BailoutStack as first argument to the Bailout()
     masm.movePtr(StackPointer, spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, frameClass, a0);
 
     // Put pointer to BailoutInfo
     masm.subPtr(Imm32(bailoutInfoOutParamSize), StackPointer);
     masm.storePtr(ImmPtr(nullptr), Address(StackPointer, 0));
     masm.movePtr(StackPointer, a1);
 
@@ -659,74 +626,55 @@ GenerateBailoutThunk(JSContext* cx, Macr
         masm.addPtr(a1, StackPointer);
     } else {
         uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
         // Remove the data this function added and frame size.
         masm.addPtr(Imm32(bailoutDataSize + bailoutInfoOutParamSize + frameSize), StackPointer);
     }
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     Label bailout;
     for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++) {
         // Calculate offset to the end of table
         int32_t offset = (BAILOUT_TABLE_SIZE - i) * BAILOUT_TABLE_ENTRY_SIZE;
 
         // We use 'ra' as the table offset later in GenerateBailoutThunk
         masm.as_bal(BOffImm16(offset));
         masm.nop();
     }
     masm.bind(&bailout);
 
-    GenerateBailoutThunk(cx, masm, frameClass);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTable");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+    GenerateBailoutThunk(masm, frameClass, bailoutTail);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTable");
-#endif
-
-    return code;
+    return BailoutTable(offset, masm.currentOffset() - offset);
 }
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutHandler");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                   "Wrapper register set should be a superset of Volatile register set.");
 
     // The context is the first argument; a0 is the first argument register.
     Register cxreg = a0;
@@ -911,20 +859,20 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
                     f.explicitStackSlots() * sizeof(uintptr_t) +
                     f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet save;
     if (cx->runtime()->jitSupportsFloatingPoint) {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                            FloatRegisterSet(FloatRegisters::VolatileMask));
     } else {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                            FloatRegisterSet());
@@ -939,25 +887,17 @@ JitRuntime::generatePreBarrier(JSContext
     masm.passABIArg(a0);
     masm.passABIArg(a1);
     masm.callWithABI(IonMarkFunction(type));
 
     save.take(AnyRegister(ra));
     masm.PopRegsInMask(save);
     masm.ret();
 
-    Linker linker(masm);
-    AutoFlushICache afc("PreBarrier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "PreBarrier");
-#endif
-
-    return code;
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -972,17 +912,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
     // the stub frame has a nullptr ICStub pointer, since this pointer is
     // marked during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch2);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
 
     masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
     masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
     masm.storePtr(scratch1, Address(StackPointer, 0));
 
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
@@ -1021,56 +961,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
 
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm;
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("ExceptionTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(a1, a2);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = t0;
     Register scratch2 = t1;
     Register scratch3 = t2;
     Register scratch4 = t3;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1389,19 +1312,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
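
generateBailoutTable now returns a BailoutTable value, a (start offset, size) pair measured inside the shared trampoline buffer, instead of its own JitCode. The getBailoutTableSize call in the Bailouts-mips32.cpp hunk above presumably reads that size back when validating a table offset. A hedged sketch of the bookkeeping, with assumed field and method names:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Hypothetical bookkeeping; names are assumptions, not the real JitRuntime API.
    struct BailoutTable {
        uint32_t startOffset;  // offset of the table inside the shared trampoline code
        uint32_t size;         // bytes from the first entry through the shared thunk
        BailoutTable(uint32_t startOffset, uint32_t size)
          : startOffset(startOffset), size(size) {}
    };

    struct FakeJitRuntime {
        std::vector<BailoutTable> bailoutTables_;  // indexed by frame class
        uint32_t getBailoutTableSize(uint32_t frameClass) const {
            return bailoutTables_.at(frameClass).size;
        }
    };

    int main()
    {
        FakeJitRuntime rt;
        rt.bailoutTables_.push_back(BailoutTable(0x40, 0x90));  // frame class 0
        assert(rt.getBailoutTableSize(0) == 0x90);
        return 0;
    }
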
--- a/js/src/jit/mips64/MacroAssembler-mips64.cpp
+++ b/js/src/jit/mips64/MacroAssembler-mips64.cpp
@@ -2010,17 +2010,17 @@ MacroAssembler::alignFrameForICArguments
 void
 MacroAssembler::restoreFrameAlignmentForICArguments(AfterICSaveLive& aic)
 {
     if (aic.alignmentPadding != 0)
         freeStack(aic.alignmentPadding);
 }
 
 void
-MacroAssemblerMIPS64Compat::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerMIPS64Compat::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     int size = (sizeof(ResumeFromException) + ABIStackAlignment) & ~(ABIStackAlignment - 1);
     asMasm().subPtr(Imm32(size), StackPointer);
     ma_move(a0, StackPointer); // Use a0 since it is the first function argument
 
     // Call the handler.
     asMasm().setupUnalignedABICall(a1);
@@ -2094,17 +2094,17 @@ MacroAssemblerMIPS64Compat::handleFailur
     // If profiling is enabled, then update the lastProfilingFrame to refer to the caller
     // frame before returning.
     {
         Label skipProfilingInstrumentation;
         // Test if profiler enabled.
         AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->geckoProfiler().addressOfEnabled());
         asMasm().branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                           &skipProfilingInstrumentation);
-        profilerExitFrame();
+        jump(profilerExitTail);
         bind(&skipProfilingInstrumentation);
     }
 
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
@@ -2248,17 +2248,17 @@ MacroAssemblerMIPS64Compat::profilerEnte
     loadPtr(Address(scratch, offsetof(JSContext, profilingActivation_)), scratch);
     storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
     storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
 }
 
 void
 MacroAssemblerMIPS64Compat::profilerExitFrame()
 {
-    branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+    jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
 }
 
 void
 MacroAssembler::subFromStackPtr(Imm32 imm32)
 {
     if (imm32.value)
         asMasm().subPtr(imm32, StackPointer);
 }
--- a/js/src/jit/mips64/MacroAssembler-mips64.h
+++ b/js/src/jit/mips64/MacroAssembler-mips64.h
@@ -506,17 +506,17 @@ class MacroAssemblerMIPS64Compat : publi
         }
     }
     void pushValue(JSValueType type, Register reg) {
         boxValue(type, reg, ScratchRegister);
         push(ScratchRegister);
     }
     void pushValue(const Address& addr);
 
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     /////////////////////////////////////////////////////////////////
     // Common interface.
     /////////////////////////////////////////////////////////////////
   public:
     // The following functions are exposed for use in platform-shared code.
 
     template<typename T>
--- a/js/src/jit/mips64/Trampoline-mips64.cpp
+++ b/js/src/jit/mips64/Trampoline-mips64.cpp
@@ -150,28 +150,29 @@ GeneratePrologue(MacroAssembler& masm)
     masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30));
     masm.as_sd(f31, StackPointer, offsetof(EnterJITRegs, f31));
 }
 
 
 // Generates a trampoline for calling Jit compiled code from a C++ function.
 // The trampoline uses the EnterJitCode signature, with the standard x64 fastcall
 // calling convention.
-JitCode *
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
+    enterJITOffset_ = startTrampolineCode(masm);
+
     const Register reg_code = IntArgReg0;
     const Register reg_argc = IntArgReg1;
     const Register reg_argv = IntArgReg2;
     const mozilla::DebugOnly<Register> reg_frame = IntArgReg3;
     const Register reg_token = IntArgReg4;
     const Register reg_chain = IntArgReg5;
     const Register reg_values = IntArgReg6;
     const Register reg_vp = IntArgReg7;
-    MacroAssembler masm(cx);
 
     MOZ_ASSERT(OsrFrameReg == reg_frame);
 
     GeneratePrologue(masm);
 
     // Save stack pointer into s4
     masm.movePtr(StackPointer, s4);
 
@@ -354,32 +355,22 @@ JitRuntime::generateEnterJIT(JSContext* 
     masm.addPtr(s0, StackPointer);
 
     // Store the returned value into the vp
     masm.as_ld(reg_vp, StackPointer, offsetof(EnterJITRegs, a7));
     masm.storeValue(JSReturnOperand, Address(reg_vp, 0));
 
     // Restore non-volatile registers and return.
     GenerateReturn(masm, ShortJump);
-
-    Linker linker(masm);
-    AutoFlushICache afc("GenerateEnterJIT");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
+    invalidatorOffset_ = startTrampolineCode(masm);
 
     // Stack has to be aligned here. If not, we will have to fix it.
     masm.checkStackAlignment();
 
     // Push registers such that we can access them from [base + code].
     masm.PushRegsInMask(AllRegs);
 
     // Pass pointer to InvalidationBailoutStack structure.
@@ -405,37 +396,25 @@ JitRuntime::generateInvalidator(JSContex
     // (InvalidationBailoutStack) and the space that was allocated for the
     // return value.
     masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
     // remove the space that this frame was using before the bailout
     // (computed by InvalidationBailout)
     masm.addPtr(a1, StackPointer);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
-
-    Linker linker(masm);
-    AutoFlushICache afc("Invalidator");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "Invalidator");
-#endif
-
-    return code;
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
     // Do not erase the frame pointer in this function.
 
-    MacroAssembler masm(cx);
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
     masm.pushReturnAddress();
     // Caller:
     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- sp
 
     // Add |this| to the count of known arguments.
     masm.loadPtr(Address(StackPointer, RectifierFrameLayout::offsetOfNumActualArgs()), s3);
     masm.addPtr(Imm32(1), s3);
 
@@ -569,42 +548,30 @@ JitRuntime::generateArgumentsRectifier(J
     // Push frame descriptor.
     masm.storePtr(t2, Address(StackPointer, 0));
 
     // Call the target function.
     // Note that this code assumes the function is JITted.
     masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), calleeTokenReg);
     masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
     masm.loadBaselineOrIonRaw(t1, t1, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(t1);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(t1);
 
     // Remove the rectifier frame.
     // t2 <- descriptor with FrameType.
     masm.loadPtr(Address(StackPointer, 0), t2);
     masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t2); // t2 <- descriptor.
 
     // Discard descriptor, calleeToken and number of actual arguments.
     masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
 
     // Discard pushed arguments.
     masm.addPtr(t2, StackPointer);
 
     masm.ret();
-    Linker linker(masm);
-    AutoFlushICache afc("ArgumentsRectifier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
-#endif
-
-    return code;
 }
 
 /* - When bailout is done via out of line code (lazy bailout).
  * Frame size is stored in $ra (look at
  * CodeGeneratorMIPS64::generateOutOfLineCode()) and thunk code should save it
  * on stack. Other difference is that members snapshotOffset_ and padding_ are
  * pushed to the stack by CodeGeneratorMIPS64::visitOutOfLineBailout(). Field
  * frameClassId_ is forced to be NO_FRAME_SIZE_CLASS_ID
@@ -620,17 +587,17 @@ PushBailoutFrame(MacroAssembler& masm, R
     // Push registers such that we can access them from [base + code].
     masm.PushRegsInMask(AllRegs);
 
     // Put pointer to BailoutStack as first argument to the Bailout()
     masm.movePtr(StackPointer, spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, a0);
 
     // Put pointer to BailoutInfo
     static const uint32_t sizeOfBailoutInfo = sizeof(uintptr_t) * 2;
     masm.subPtr(Imm32(sizeOfBailoutInfo), StackPointer);
     masm.movePtr(StackPointer, a1);
 
@@ -655,51 +622,40 @@ GenerateBailoutThunk(JSContext* cx, Macr
     masm.loadPtr(Address(StackPointer,
                          sizeOfBailoutInfo + BailoutStack::offsetOfFrameSize()), a1);
     // Remove complete BailoutStack class and data after it
     masm.addPtr(Imm32(sizeof(BailoutStack) + sizeOfBailoutInfo), StackPointer);
     // Remove frame size from stack
     masm.addPtr(a1, StackPointer);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
-    masm.branch(bailoutTail);
+    masm.jump(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
     MOZ_CRASH("MIPS64 does not use bailout tables");
 }
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm(cx);
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutHandler");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                   "Wrapper register set should be a superset of Volatile register set.");
 
     // The context is the first argument; a0 is the first argument register.
     Register cxreg = a0;
@@ -859,53 +815,45 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
                     f.explicitStackSlots() * sizeof(void*) +
                     f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm(cx);
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet save;
     if (cx->runtime()->jitSupportsFloatingPoint) {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
-                           FloatRegisterSet(FloatRegisters::VolatileMask));
+                                 FloatRegisterSet(FloatRegisters::VolatileMask));
     } else {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
-                           FloatRegisterSet());
+                                 FloatRegisterSet());
     }
     save.add(ra);
     masm.PushRegsInMask(save);
 
     MOZ_ASSERT(PreBarrierReg == a1);
     masm.movePtr(ImmPtr(cx->runtime()), a0);
 
     masm.setupUnalignedABICall(a2);
     masm.passABIArg(a0);
     masm.passABIArg(a1);
     masm.callWithABI(IonMarkFunction(type));
 
     save.take(AnyRegister(ra));
     masm.PopRegsInMask(save);
     masm.ret();
 
-    Linker linker(masm);
-    AutoFlushICache afc("PreBarrier");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "PreBarrier");
-#endif
-
-    return code;
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -920,17 +868,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
     // the stub frame has a nullptr ICStub pointer, since this pointer is
     // marked during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch2);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
 
     masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
     masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
     masm.storePtr(scratch1, Address(StackPointer, 0));
 
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
@@ -968,57 +916,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
-
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm;
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    AutoFlushICache afc("ExceptionTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(a1, a2);
-
-    Linker linker(masm);
-    AutoFlushICache afc("BailoutTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = t0;
     Register scratch2 = t1;
     Register scratch3 = t2;
     Register scratch4 = t3;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1337,19 +1267,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
--- a/js/src/jit/none/SharedICHelpers-none.h
+++ b/js/src/jit/none/SharedICHelpers-none.h
@@ -15,20 +15,20 @@ static const uint32_t STUB_FRAME_SIZE = 
 static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = 0;
 
 inline void EmitRestoreTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitRepushTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitCallIC(CodeOffset*, MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitEnterTypeMonitorIC(MacroAssembler&, size_t v = 0) { MOZ_CRASH(); }
 inline void EmitReturnFromIC(MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitChangeICReturnAddress(MacroAssembler&, Register) { MOZ_CRASH(); }
-inline void EmitBaselineTailCallVM(JitCode*, MacroAssembler&, uint32_t) { MOZ_CRASH(); }
-inline void EmitIonTailCallVM(JitCode*, MacroAssembler&, uint32_t) { MOZ_CRASH(); }
+inline void EmitBaselineTailCallVM(TrampolinePtr, MacroAssembler&, uint32_t) { MOZ_CRASH(); }
+inline void EmitIonTailCallVM(TrampolinePtr, MacroAssembler&, uint32_t) { MOZ_CRASH(); }
 inline void EmitBaselineCreateStubFrameDescriptor(MacroAssembler&, Register, uint32_t) { MOZ_CRASH(); }
-inline void EmitBaselineCallVM(JitCode*, MacroAssembler&) { MOZ_CRASH(); }
+inline void EmitBaselineCallVM(TrampolinePtr, MacroAssembler&) { MOZ_CRASH(); }
 inline void EmitBaselineEnterStubFrame(MacroAssembler&, Register) { MOZ_CRASH(); }
 inline void EmitBaselineLeaveStubFrame(MacroAssembler&, bool v = false) { MOZ_CRASH(); }
 inline void EmitStowICValues(MacroAssembler&, int) { MOZ_CRASH(); }
 inline void EmitUnstowICValues(MacroAssembler&, int, bool v = false) { MOZ_CRASH(); }
 inline void EmitStubGuardFailure(MacroAssembler&) { MOZ_CRASH(); }
 
 template <typename T> inline void EmitPreBarrier(MacroAssembler&, T, MIRType) { MOZ_CRASH(); }
 
--- a/js/src/jit/none/Trampoline-none.cpp
+++ b/js/src/jit/none/Trampoline-none.cpp
@@ -11,25 +11,26 @@
 
 using namespace js;
 using namespace js::jit;
 
 // This file includes stubs for generating the JIT trampolines when there is no
 // JIT backend, and also includes implementations for assorted random things
 // which can't be implemented in headers.
 
-JitCode* JitRuntime::generateEnterJIT(JSContext*) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateInvalidator(JSContext*) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateArgumentsRectifier(JSContext*, void**) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateBailoutTable(JSContext*, uint32_t) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateBailoutHandler(JSContext*) { MOZ_CRASH(); }
-JitCode* JitRuntime::generatePreBarrier(JSContext*, MIRType) { MOZ_CRASH(); }
+void JitRuntime::generateEnterJIT(JSContext*, MacroAssembler&) { MOZ_CRASH(); }
+void JitRuntime::generateInvalidator(MacroAssembler&, Label*) { MOZ_CRASH(); }
+void JitRuntime::generateArgumentsRectifier(MacroAssembler&) { MOZ_CRASH(); }
+JitRuntime::BailoutTable JitRuntime::generateBailoutTable(MacroAssembler&, Label*, uint32_t) { MOZ_CRASH(); }
+void JitRuntime::generateBailoutHandler(MacroAssembler&, Label*) { MOZ_CRASH(); }
+uint32_t JitRuntime::generatePreBarrier(JSContext*, MacroAssembler&, MIRType) { MOZ_CRASH(); }
 JitCode* JitRuntime::generateDebugTrapHandler(JSContext*) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateExceptionTailStub(JSContext*, void*) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateBailoutTailStub(JSContext*) { MOZ_CRASH(); }
+void JitRuntime::generateExceptionTailStub(MacroAssembler&, void*, Label*) { MOZ_CRASH(); }
+void JitRuntime::generateBailoutTailStub(MacroAssembler&, Label*) { MOZ_CRASH(); }
+void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler&, Label*) { MOZ_CRASH(); }
 
 bool JitRuntime::generateVMWrapper(JSContext*, MacroAssembler&, const VMFunction&) { MOZ_CRASH(); }
 
 FrameSizeClass FrameSizeClass::FromDepth(uint32_t) { MOZ_CRASH(); }
 FrameSizeClass FrameSizeClass::ClassLimit() { MOZ_CRASH(); }
 uint32_t FrameSizeClass::frameSize() const { MOZ_CRASH(); }
 
 BailoutFrameInfo::BailoutFrameInfo(const JitActivationIterator& iter, BailoutStack* bailout)
@@ -41,9 +42,8 @@ BailoutFrameInfo::BailoutFrameInfo(const
 {
     MOZ_CRASH();
 }
 
 bool ICCompare_Int32::Compiler::generateStubCode(MacroAssembler&) { MOZ_CRASH(); }
 bool ICCompare_Double::Compiler::generateStubCode(MacroAssembler&) { MOZ_CRASH(); }
 bool ICBinaryArith_Int32::Compiler::generateStubCode(MacroAssembler&) { MOZ_CRASH(); }
 bool ICUnaryArith_Int32::Compiler::generateStubCode(MacroAssembler&) { MOZ_CRASH(); }
-JitCode* JitRuntime::generateProfilerExitFrameTailStub(JSContext*) { MOZ_CRASH(); }
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -248,16 +248,32 @@ class ImmGCPtr
         // wasm shouldn't be creating GC things
         MOZ_ASSERT(!IsCompilingWasm());
     }
 
   private:
     ImmGCPtr() : value(0) {}
 };
 
+// Pointer to trampoline code. Trampoline code is kept alive until the runtime
+// is destroyed, so does not need to be traced.
+struct TrampolinePtr
+{
+    uint8_t* value;
+
+    TrampolinePtr()
+      : value(nullptr)
+    { }
+    explicit TrampolinePtr(uint8_t* value)
+      : value(value)
+    {
+        MOZ_ASSERT(value);
+    }
+};
+
 // Pointer to be embedded as an immediate that is loaded/stored from by an
 // instruction.
 struct AbsoluteAddress
 {
     void* addr;
 
     explicit AbsoluteAddress(const void* addr)
       : addr(const_cast<void*>(addr))
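
TrampolinePtr is the value type the rest of the patch threads through SharedICHelpers, callVM and the macro assemblers. The standalone sketch below (hypothetical names, not the patch itself) shows the pattern it enables: the runtime keeps a single code blob for all trampolines plus one recorded offset per trampoline, and each getter rebuilds a TrampolinePtr on demand.

    #include <cassert>
    #include <cstdint>

    // Mirrors the TrampolinePtr added above.
    struct TrampolinePtr {
        uint8_t* value;
        explicit TrampolinePtr(uint8_t* v) : value(v) { assert(v); }
    };

    // Hypothetical runtime side: one executable blob, many recorded offsets.
    struct FakeJitRuntime {
        uint8_t* trampolineBase = nullptr;  // raw() of the single trampoline JitCode
        uint32_t bailoutTailOffset = 0;
        uint32_t profilerExitFrameTailOffset = 0;

        TrampolinePtr trampolineCode(uint32_t offset) const {
            return TrampolinePtr(trampolineBase + offset);
        }
        TrampolinePtr getBailoutTail() const {
            return trampolineCode(bailoutTailOffset);
        }
        TrampolinePtr getProfilerExitFrameTail() const {
            return trampolineCode(profilerExitFrameTailOffset);
        }
    };

    int main()
    {
        static uint8_t blob[256];
        FakeJitRuntime rt;
        rt.trampolineBase = blob;
        rt.bailoutTailOffset = 0x20;
        assert(rt.getBailoutTail().value == blob + 0x20);
        return 0;
    }
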
--- a/js/src/jit/shared/BaselineCompiler-shared.cpp
+++ b/js/src/jit/shared/BaselineCompiler-shared.cpp
@@ -53,17 +53,17 @@ BaselineCompilerShared::prepareVMCall()
 
     // Save the frame pointer.
     masm.Push(BaselineFrameReg);
 }
 
 bool
 BaselineCompilerShared::callVM(const VMFunction& fun, CallVMPhase phase)
 {
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
 
 #ifdef DEBUG
     // Assert prepareVMCall() has been called.
     MOZ_ASSERT(inCall_);
     inCall_ = false;
 
     // Assert the frame does not have an override pc when we're executing JIT code.
     {
@@ -118,17 +118,17 @@ BaselineCompilerShared::callVM(const VMF
         masm.bind(&afterWrite);
         masm.store32(ICTailCallReg, frameSizeAddress);
         masm.add32(Imm32(argSize), ICTailCallReg);
         masm.makeFrameDescriptor(ICTailCallReg, JitFrame_BaselineJS, ExitFrameLayout::Size());
         masm.push(ICTailCallReg);
     }
     MOZ_ASSERT(fun.expectTailCall == NonTailCall);
     // Perform the call.
-    masm.call(ImmPtr(code));
+    masm.call(code);
     uint32_t callOffset = masm.currentOffset();
     masm.pop(BaselineFrameReg);
 
 #ifdef DEBUG
     // Assert the frame does not have an override pc when we're executing JIT code.
     {
         Label ok;
         masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
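
BaselineCompilerShared::callVM now receives a TrampolinePtr from getVMWrapper and calls it directly, and the generateVMWrapper hunks store only each wrapper's start offset in functionWrappers_. A hedged sketch of what the lookup side could look like, with invented names standing in for the real map and VMFunction types:

    #include <cassert>
    #include <cstdint>
    #include <unordered_map>

    struct TrampolinePtr {
        uint8_t* value;
        explicit TrampolinePtr(uint8_t* v) : value(v) { assert(v); }
    };

    // Hypothetical lookup: map each VMFunction (here just an opaque key) to the
    // offset generateVMWrapper recorded, then rebase against the shared blob.
    struct FakeWrapperTable {
        uint8_t* trampolineBase = nullptr;
        std::unordered_map<const void*, uint32_t> wrapperOffsets;

        TrampolinePtr getVMWrapper(const void* fun) const {
            auto it = wrapperOffsets.find(fun);
            assert(it != wrapperOffsets.end());  // wrappers are generated up front
            return TrampolinePtr(trampolineBase + it->second);
        }
    };

    int main()
    {
        static uint8_t blob[512];
        static int someVMFunction;  // stand-in for a VMFunction instance
        FakeWrapperTable table;
        table.trampolineBase = blob;
        table.wrapperOffsets[&someVMFunction] = 0x80;
        assert(table.getVMWrapper(&someVMFunction).value == blob + 0x80);
        return 0;
    }
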
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -42,17 +42,17 @@ CodeGeneratorShared::ensureMasm(MacroAss
 CodeGeneratorShared::CodeGeneratorShared(MIRGenerator* gen, LIRGraph* graph, MacroAssembler* masmArg)
   : maybeMasm_(),
     masm(ensureMasm(masmArg)),
     gen(gen),
     graph(*graph),
     current(nullptr),
     snapshots_(),
     recovers_(),
-    deoptTable_(nullptr),
+    deoptTable_(),
 #ifdef DEBUG
     pushedArgs_(0),
 #endif
     lastOsiPointOffset_(0),
     safepoints_(graph->totalSlotCount(), (gen->info().nargs() + 1) * sizeof(Value)),
     returnLabel_(),
     stubSpace_(),
     nativeToBytecodeMap_(nullptr),
@@ -1361,17 +1361,17 @@ CodeGeneratorShared::callVM(const VMFunc
     //    ... frame ...
     //    [args]
 #ifdef DEBUG
     MOZ_ASSERT(pushedArgs_ == fun.explicitArgs);
     pushedArgs_ = 0;
 #endif
 
     // Get the wrapper of the VM function.
-    uint8_t* wrapper = gen->jitRuntime()->getVMWrapper(fun);
+    TrampolinePtr wrapper = gen->jitRuntime()->getVMWrapper(fun);
 
 #ifdef CHECK_OSIPOINT_REGISTERS
     if (shouldVerifyOsiPointRegs(ins->safepoint()))
         StoreAllLiveRegs(masm, ins->safepoint()->liveRegs());
 #endif
 
     // Push an exit frame descriptor. If |dynStack| is a valid pointer to a
     // register, then its value is added to the value of the |framePushed()| to
@@ -1383,17 +1383,17 @@ CodeGeneratorShared::callVM(const VMFunc
     } else {
         masm.pushStaticFrameDescriptor(JitFrame_IonJS, ExitFrameLayout::Size());
     }
 
     // Call the wrapper function.  The wrapper is in charge to unwind the stack
     // when returning from the call.  Failures are handled with exceptions based
     // on the return value of the C functions.  To guard the outcome of the
     // returned value, use another LIR instruction.
-    uint32_t callOffset = masm.callJit(ImmPtr(wrapper));
+    uint32_t callOffset = masm.callJit(wrapper);
     markSafepointAt(callOffset, ins);
 
     // Remove the rest of the frame left on the stack. We remove the return address
     // which is implicitly popped when returning.
     int framePop = sizeof(ExitFrameLayout) - sizeof(void*);
 
     // Pop arguments from framePushed.
     masm.implicitPop(fun.explicitStackSlots() * sizeof(void*) + framePop);
--- a/js/src/jit/shared/CodeGenerator-shared.h
+++ b/js/src/jit/shared/CodeGenerator-shared.h
@@ -73,17 +73,17 @@ class CodeGeneratorShared : public LElem
     MacroAssembler& masm;
 
   protected:
     MIRGenerator* gen;
     LIRGraph& graph;
     LBlock* current;
     SnapshotWriter snapshots_;
     RecoverWriter recovers_;
-    JitCode* deoptTable_;
+    mozilla::Maybe<TrampolinePtr> deoptTable_;
 #ifdef DEBUG
     uint32_t pushedArgs_;
 #endif
     uint32_t lastOsiPointOffset_;
     SafepointWriter safepoints_;
     Label invalidate_;
     CodeOffset invalidateEpilogueData_;
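
deoptTable_ changes from a JitCode* to a mozilla::Maybe<TrampolinePtr>, so an unset table is an empty Maybe rather than a null pointer and the engaged value is a bare code pointer that needs no tracing. The sketch below uses std::optional to mirror that usage; the surrounding method names are invented for illustration:

    #include <cassert>
    #include <cstdint>
    #include <optional>

    struct TrampolinePtr {
        uint8_t* value;
        explicit TrampolinePtr(uint8_t* v) : value(v) { assert(v); }
    };

    // std::optional stands in for mozilla::Maybe in this sketch.
    struct FakeCodeGen {
        std::optional<TrampolinePtr> deoptTable_;

        void ensureDeoptTable(TrampolinePtr table) {
            if (!deoptTable_)
                deoptTable_.emplace(table);
        }
        uint8_t* deoptTableBase() const {
            assert(deoptTable_);  // only valid once a bailout table was requested
            return deoptTable_->value;
        }
    };

    int main()
    {
        static uint8_t blob[64];
        FakeCodeGen cg;
        cg.ensureDeoptTable(TrampolinePtr(blob + 8));
        assert(cg.deoptTableBase() == blob + 8);
        return 0;
    }
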
 
--- a/js/src/jit/x64/MacroAssembler-x64.cpp
+++ b/js/src/jit/x64/MacroAssembler-x64.cpp
@@ -293,17 +293,17 @@ MacroAssemblerX64::boxValue(JSValueType 
         bind(&upper32BitsZeroed);
     }
 #endif
     mov(ImmShiftedTag(tag), dest);
     orq(src, dest);
 }
 
 void
-MacroAssemblerX64::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerX64::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     subq(Imm32(sizeof(ResumeFromException)), rsp);
     movq(rsp, rax);
 
     // Call the handler.
     asMasm().setupUnalignedABICall(rcx);
     asMasm().passABIArg(rax);
@@ -365,17 +365,17 @@ MacroAssemblerX64::handleFailureWithHand
     pop(rbp);
 
     // If profiling is enabled, then update the lastProfilingFrame to refer to the caller
     // frame before returning.
     {
         Label skipProfilingInstrumentation;
         AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->geckoProfiler().addressOfEnabled());
         asMasm().branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        profilerExitFrame();
+        jump(profilerExitTail);
         bind(&skipProfilingInstrumentation);
     }
 
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
@@ -399,17 +399,17 @@ MacroAssemblerX64::profilerEnterFrame(Re
     loadPtr(Address(scratch, offsetof(JSContext, profilingActivation_)), scratch);
     storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
     storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
 }
 
 void
 MacroAssemblerX64::profilerExitFrame()
 {
-    jmp(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+    jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
 }
 
 MacroAssembler&
 MacroAssemblerX64::asMasm()
 {
     return *static_cast<MacroAssembler*>(this);
 }
 
--- a/js/src/jit/x64/MacroAssembler-x64.h
+++ b/js/src/jit/x64/MacroAssembler-x64.h
@@ -942,17 +942,17 @@ class MacroAssemblerX64 : public MacroAs
         vcvtsq2ss(src, dest, dest);
     }
 
     inline void incrementInt32Value(const Address& addr);
 
     inline void ensureDouble(const ValueOperand& source, FloatRegister dest, Label* failure);
 
   public:
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     // Instrumentation for entering and leaving the profiler.
     void profilerEnterFrame(Register framePtr, Register scratch);
     void profilerExitFrame();
 };
 
 typedef MacroAssemblerX64 MacroAssemblerSpecific;
 
--- a/js/src/jit/x64/SharedICHelpers-x64.h
+++ b/js/src/jit/x64/SharedICHelpers-x64.h
@@ -65,17 +65,17 @@ EmitReturnFromIC(MacroAssembler& masm)
 
 inline void
 EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
 {
     masm.storePtr(reg, Address(StackPointer, 0));
 }
 
 inline void
-EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
+EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t argSize)
 {
     ScratchRegisterScope scratch(masm);
 
     // We can assume here that R0 and R1 have been pushed.
     masm.movq(BaselineFrameReg, scratch);
     masm.addq(Imm32(BaselineFrame::FramePointerOffset), scratch);
     masm.subq(BaselineStackReg, scratch);
 
@@ -83,59 +83,59 @@ EmitBaselineTailCallVM(uint8_t* target, 
     masm.movq(scratch, rdx);
     masm.subq(Imm32(argSize), rdx);
     masm.store32(rdx, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));
 
     // Push frame descriptor and perform the tail call.
     masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS, ExitFrameLayout::Size());
     masm.push(scratch);
     masm.push(ICTailCallReg);
-    masm.jmp(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
-EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
+EmitIonTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t stackSize)
 {
     // For tail calls, find the already pushed JitFrame_IonJS signifying the
     // end of the Ion frame. Retrieve the length of the frame and repush
     // JitFrame_IonJS with the extra stacksize, rendering the original
     // JitFrame_IonJS obsolete.
 
     ScratchRegisterScope scratch(masm);
 
     masm.loadPtr(Address(esp, stackSize), scratch);
     masm.shrq(Imm32(FRAMESIZE_SHIFT), scratch);
     masm.addq(Imm32(stackSize + JitStubFrameLayout::Size() - sizeof(intptr_t)), scratch);
 
     // Push frame descriptor and perform the tail call.
     masm.makeFrameDescriptor(scratch, JitFrame_IonJS, ExitFrameLayout::Size());
     masm.push(scratch);
     masm.push(ICTailCallReg);
-    masm.jmp(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
 EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32_t headerSize)
 {
     // Compute stub frame size. We have to add two pointers: the stub reg and previous
     // frame pointer pushed by EmitEnterStubFrame.
     masm.movq(BaselineFrameReg, reg);
     masm.addq(Imm32(sizeof(void*) * 2), reg);
     masm.subq(BaselineStackReg, reg);
 
     masm.makeFrameDescriptor(reg, JitFrame_BaselineStub, headerSize);
 }
 
 inline void
-EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
+EmitBaselineCallVM(TrampolinePtr target, MacroAssembler& masm)
 {
     ScratchRegisterScope scratch(masm);
     EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());
     masm.push(scratch);
-    masm.call(ImmPtr(target));
+    masm.call(target);
 }
 
 // Size of values pushed by EmitEnterStubFrame.
 static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void*);
 static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void*);
 
 inline void
 EmitBaselineEnterStubFrame(MacroAssembler& masm, Register)
--- a/js/src/jit/x64/Trampoline-x64.cpp
+++ b/js/src/jit/x64/Trampoline-x64.cpp
@@ -26,20 +26,21 @@ using mozilla::IsPowerOfTwo;
 // use the ability to reference register values on the stack by index.
 static const LiveRegisterSet AllRegs =
     LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                          FloatRegisterSet(FloatRegisters::AllMask));
 
 // Generates a trampoline for calling Jit compiled code from a C++ function.
 // The trampoline uses the EnterJitCode signature, with the standard x64 fastcall
 // calling convention.
-JitCode*
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    enterJITOffset_ = startTrampolineCode(masm);
+
     masm.assertStackAlignment(ABIStackAlignment, -int32_t(sizeof(uintptr_t)) /* return address */);
 
     const Register reg_code  = IntArgReg0;
     const Register reg_argc  = IntArgReg1;
     const Register reg_argv  = IntArgReg2;
     MOZ_ASSERT(OsrFrameReg == IntArgReg3);
 
 #if defined(_WIN64)
@@ -331,37 +332,24 @@ JitRuntime::generateEnterJIT(JSContext* 
     masm.pop(r14);
     masm.pop(r13);
     masm.pop(r12);
     masm.pop(rbx);
 
     // Restore frame pointer and return.
     masm.pop(rbp);
     masm.ret();
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "EnterJIT");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
-    AutoJitContextAlloc ajca(cx);
-    MacroAssembler masm(cx);
+    // See explanatory comment in x86's JitRuntime::generateInvalidator.
 
-    // See explanatory comment in x86's JitRuntime::generateInvalidator.
+    invalidatorOffset_ = startTrampolineCode(masm);
 
     masm.addq(Imm32(sizeof(uintptr_t)), rsp);
 
     // Push registers such that we can access them from [base + code].
     masm.PushRegsInMask(AllRegs);
 
     masm.movq(rsp, rax); // Argument to jit::InvalidationBailout.
 
@@ -382,38 +370,26 @@ JitRuntime::generateInvalidator(JSContex
 
     masm.pop(r9); // Get the bailoutInfo outparam.
     masm.pop(rbx); // Get the frameSize outparam.
 
     // Pop the machine state and the dead frame.
     masm.lea(Operand(rsp, rbx, TimesOne, sizeof(InvalidationBailoutStack)), rsp);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
     masm.jmp(bailoutTail);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "Invalidator");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "Invalidator");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
     // Do not erase the frame pointer in this function.
 
-    MacroAssembler masm(cx);
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
+
     // Caller:
     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- rsp
 
     // Add |this| to the count of known arguments.
     masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfNumActualArgs()), r8);
     masm.addl(Imm32(1), r8);
 
     // Load |nformals| into %rcx.
@@ -533,40 +509,26 @@ JitRuntime::generateArgumentsRectifier(J
     masm.push(rax); // callee token
     masm.push(r9); // descriptor
 
     // Call the target function.
     // Note that this code assumes the function is JITted.
     masm.andq(Imm32(uint32_t(CalleeTokenMask)), rax);
     masm.loadPtr(Address(rax, JSFunction::offsetOfNativeOrScript()), rax);
     masm.loadBaselineOrIonRaw(rax, rax, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(rax);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(rax);
 
     // Remove the rectifier frame.
     masm.pop(r9);             // r9 <- descriptor with FrameType.
     masm.shrq(Imm32(FRAMESIZE_SHIFT), r9);
     masm.pop(r11);            // Discard calleeToken.
     masm.pop(r11);            // Discard numActualArgs.
     masm.addq(r9, rsp);       // Discard pushed arguments.
 
     masm.ret();
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ArgumentsRectifier");
-#endif
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*)(code->raw() + returnOffset);
-    return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, Register spArg)
 {
     // Push registers such that we can access them from [base + code].
     if (JitSupportsSimd()) {
         masm.PushRegsInMask(AllRegs);
@@ -587,17 +549,17 @@ PushBailoutFrame(MacroAssembler& masm, R
         }
     }
 
     // Get the stack pointer into a register, pre-alignment.
     masm.movq(rsp, spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, r8);
 
     // Make space for Bailout's bailoutInfo outparam.
     masm.reserveStack(sizeof(void*));
     masm.movq(rsp, r9);
 
     // Call the bailout function.
@@ -617,53 +579,40 @@ GenerateBailoutThunk(JSContext* cx, Macr
     //
     // Remove both the bailout frame and the topmost Ion frame's stack.
     static const uint32_t BailoutDataSize = sizeof(RegisterDump);
     masm.addq(Imm32(BailoutDataSize), rsp);
     masm.pop(rcx);
     masm.lea(Operand(rsp, rcx, TimesOne, sizeof(void*)), rsp);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
     masm.jmp(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
     MOZ_CRASH("x64 does not use bailout tables");
 }
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     // Avoid conflicts with argument registers while discarding the result after
     // the function call.
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                    "Wrapper register set must be a superset of Volatile register set");
 
@@ -821,48 +770,38 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
                     f.explicitStackSlots() * sizeof(void*) +
                     f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm;
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet regs =
         LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
-                             FloatRegisterSet(FloatRegisters::VolatileMask));
+                        FloatRegisterSet(FloatRegisters::VolatileMask));
     masm.PushRegsInMask(regs);
 
     MOZ_ASSERT(PreBarrierReg == rdx);
     masm.mov(ImmPtr(cx->runtime()), rcx);
 
     masm.setupUnalignedABICall(rax);
     masm.passABIArg(rcx);
     masm.passABIArg(rdx);
     masm.callWithABI(IonMarkFunction(type));
 
     masm.PopRegsInMask(regs);
     masm.ret();
 
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "PreBarrier");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "PreBarrier");
-#endif
-
-    return code;
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -886,17 +825,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
     // the stub frame has a nullptr ICStub pointer, since this pointer is marked
     // during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch3);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
     masm.push(scratch1);
     masm.push(scratch2);
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
 
     // If the stub returns |true|, we have to perform a forced return
     // (return from the JS frame). If the stub returns |false|, just return
@@ -931,60 +870,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 #endif
 #ifdef MOZ_VTUNE
     vtune::MarkStub(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm;
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(rdx, r9);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "BailoutTailStub");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = r8;
     Register scratch2 = r9;
     Register scratch3 = r10;
     Register scratch4 = r11;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1297,21 +1215,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
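
All the Linker/newCode/perf-spewer boilerplate deleted from the generators above has to live somewhere once per runtime. For orientation only, here is a rough sketch of how the new generator signatures could be driven — not the changeset's actual code; the function name, the trampolineCode_ field, and the generation order are guesses, and error handling is omitted.

    bool
    JitRuntime::generateTrampolines(JSContext* cx)
    {
        // One MacroAssembler and a pair of shared labels feed every generator;
        // a single Linker pass then produces the one JitCode backing them all.
        MacroAssembler masm;
        Label bailoutTail;
        Label profilerExitTail;

        generateProfilerExitFrameTailStub(masm, &profilerExitTail);
        generateBailoutTailStub(masm, &bailoutTail);
        generateEnterJIT(cx, masm);
        generateInvalidator(masm, &bailoutTail);
        generateArgumentsRectifier(masm);
        generateBailoutHandler(masm, &bailoutTail);
        // ... bailout tables, pre-barriers, VM wrappers, exception tail, etc.

        Linker linker(masm);
        trampolineCode_ = linker.newCode<NoGC>(cx, OTHER_CODE);  // field assumed
        return !!trampolineCode_;
    }

Accessors such as getGenericBailoutHandler() can then return a TrampolinePtr built from trampolineCode_->raw() plus the recorded offset.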
--- a/js/src/jit/x86-shared/CodeGenerator-x86-shared.cpp
+++ b/js/src/jit/x86-shared/CodeGenerator-x86-shared.cpp
@@ -481,18 +481,18 @@ CodeGeneratorX86Shared::generateOutOfLin
 
     if (deoptLabel_.used()) {
         // All non-table-based bailouts will go here.
         masm.bind(&deoptLabel_);
 
         // Push the frame size, so the handler can recover the IonScript.
         masm.push(Imm32(frameSize()));
 
-        JitCode* handler = gen->jitRuntime()->getGenericBailoutHandler();
-        masm.jmp(ImmPtr(handler->raw()), Relocation::JITCODE);
+        TrampolinePtr handler = gen->jitRuntime()->getGenericBailoutHandler();
+        masm.jump(handler);
     }
 
     return !masm.oom();
 }
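
TrampolinePtr itself is defined elsewhere in the patch (jit/shared/Assembler-shared.h, judging from the include added to WasmInstance.h further down). Conceptually it is just a non-GC handle to an entry point inside the runtime's single trampoline JitCode, which is why call sites now read its value member where they previously called JitCode::raw(). A minimal sketch, with the exact member set assumed:

    // Sketch only: a thin, non-GC-managed pointer into the single trampoline
    // JitCode owned by the JitRuntime.
    struct TrampolinePtr
    {
        uint8_t* value;

        TrampolinePtr() : value(nullptr) {}
        explicit TrampolinePtr(uint8_t* value) : value(value) {}
    };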
 
 class BailoutJump {
     Assembler::Condition cond_;
 
@@ -536,17 +536,17 @@ CodeGeneratorX86Shared::bailout(const T&
     MOZ_ASSERT_IF(frameClass_ != FrameSizeClass::None() && deoptTable_,
                   frameClass_.frameSize() == masm.framePushed());
 
 #ifdef JS_CODEGEN_X86
     // On x64, bailout tables are pointless, because 16 extra bytes are
     // reserved per external jump, whereas it takes only 10 bytes to encode
     // a non-table based bailout.
     if (assignBailoutId(snapshot)) {
-        binder(masm, deoptTable_->raw() + snapshot->bailoutId() * BAILOUT_TABLE_ENTRY_SIZE);
+        binder(masm, deoptTable_->value + snapshot->bailoutId() * BAILOUT_TABLE_ENTRY_SIZE);
         return;
     }
 #endif
 
     // We could not use a jump table, either because all bailout IDs were
     // reserved, or a jump table is not optimal for this frame size or
     // platform. Whatever, we will generate a lazy bailout.
     //
@@ -2406,18 +2406,18 @@ CodeGeneratorX86Shared::generateInvalida
     // epilogue.
     for (size_t i = 0; i < sizeof(void*); i += Assembler::NopSize())
         masm.nop();
 
     masm.bind(&invalidate_);
 
     // Push the Ion script onto the stack (when we determine what that pointer is).
     invalidateEpilogueData_ = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
-    JitCode* thunk = gen->jitRuntime()->getInvalidationThunk();
-
+
+    TrampolinePtr thunk = gen->jitRuntime()->getInvalidationThunk();
     masm.call(thunk);
 
     // We should never reach this point in JIT code -- the invalidation thunk should
     // pop the invalidated JS frame and return directly to its caller.
     masm.assumeUnreachable("Should have returned directly to its caller instead of here.");
 }
 
 void
--- a/js/src/jit/x86-shared/MacroAssembler-x86-shared.h
+++ b/js/src/jit/x86-shared/MacroAssembler-x86-shared.h
@@ -536,18 +536,18 @@ class MacroAssemblerX86Shared : public A
     }
 
     void jump(Label* label) {
         jmp(label);
     }
     void jump(JitCode* code) {
         jmp(code);
     }
-    void jump(ImmPtr code) {
-        jmp(code);
+    void jump(TrampolinePtr code) {
+        jmp(ImmPtr(code.value));
     }
     void jump(RepatchLabel* label) {
         jmp(label);
     }
     void jump(Register reg) {
         jmp(Operand(reg));
     }
     void jump(const Address& addr) {
--- a/js/src/jit/x86/Bailouts-x86.cpp
+++ b/js/src/jit/x86/Bailouts-x86.cpp
@@ -81,22 +81,25 @@ BailoutFrameInfo::BailoutFrameInfo(const
 
     if (bailout->frameClass() == FrameSizeClass::None()) {
         snapshotOffset_ = bailout->snapshotOffset();
         return;
     }
 
     // Compute the snapshot offset from the bailout ID.
     JSRuntime* rt = activation->compartment()->runtimeFromActiveCooperatingThread();
-    JitCode* code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+    TrampolinePtr code = rt->jitRuntime()->getBailoutTable(bailout->frameClass());
+#ifdef DEBUG
+    uint32_t tableSize = rt->jitRuntime()->getBailoutTableSize(bailout->frameClass());
+#endif
     uintptr_t tableOffset = bailout->tableOffset();
-    uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());
+    uintptr_t tableStart = reinterpret_cast<uintptr_t>(code.value);
 
     MOZ_ASSERT(tableOffset >= tableStart &&
-               tableOffset < tableStart + code->instructionsSize());
+               tableOffset < tableStart + tableSize);
     MOZ_ASSERT((tableOffset - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
 
     uint32_t bailoutId = ((tableOffset - tableStart) / BAILOUT_TABLE_ENTRY_SIZE) - 1;
     MOZ_ASSERT(bailoutId < BAILOUT_TABLE_SIZE);
 
     snapshotOffset_ = topIonScript_->bailoutToSnapshot(bailoutId);
 }
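
The DEBUG-only getBailoutTableSize() lookup replaces the old code->instructionsSize() bound, since the table is now just a range inside the shared trampoline code rather than its own JitCode. The ID computation itself is unchanged and relies on the table layout: BAILOUT_TABLE_SIZE consecutive call instructions of BAILOUT_TABLE_ENTRY_SIZE bytes each, followed by the shared thunk (see the x86 generateBailoutTable later in this patch). The call at entry i pushes the address of entry i + 1, hence the trailing -1. A hypothetical helper spelling that arithmetic out:

    // Illustrative only; mirrors the computation in the constructor above.
    static uint32_t
    BailoutIdFromTableReturnAddress(uintptr_t tableStart, uintptr_t retAddr)
    {
        MOZ_ASSERT(retAddr > tableStart);
        MOZ_ASSERT((retAddr - tableStart) % BAILOUT_TABLE_ENTRY_SIZE == 0);
        // The call at entry i pushes the address of entry i + 1, so undo that.
        return uint32_t((retAddr - tableStart) / BAILOUT_TABLE_ENTRY_SIZE) - 1;
    }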
 
--- a/js/src/jit/x86/MacroAssembler-x86.cpp
+++ b/js/src/jit/x86/MacroAssembler-x86.cpp
@@ -192,17 +192,17 @@ MacroAssemblerX86::finish()
             addCodeLabel(CodeLabel(use, cst));
         masm.simd128Constant(v.value.bytes());
         if (!enoughMemory_)
             return;
     }
 }
 
 void
-MacroAssemblerX86::handleFailureWithHandlerTail(void* handler)
+MacroAssemblerX86::handleFailureWithHandlerTail(void* handler, Label* profilerExitTail)
 {
     // Reserve space for exception information.
     subl(Imm32(sizeof(ResumeFromException)), esp);
     movl(esp, eax);
 
     // Call the handler.
     asMasm().setupUnalignedABICall(ecx);
     asMasm().passABIArg(eax);
@@ -268,17 +268,17 @@ MacroAssemblerX86::handleFailureWithHand
     // If profiling is enabled, then update the lastProfilingFrame to refer to caller
     // frame before returning.
     {
         Label skipProfilingInstrumentation;
         // Test if profiler enabled.
         AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->geckoProfiler().addressOfEnabled());
         asMasm().branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                           &skipProfilingInstrumentation);
-        profilerExitFrame();
+        jump(profilerExitTail);
         bind(&skipProfilingInstrumentation);
     }
 
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
@@ -302,17 +302,17 @@ MacroAssemblerX86::profilerEnterFrame(Re
     loadPtr(Address(scratch, offsetof(JSContext, profilingActivation_)), scratch);
     storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
     storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
 }
 
 void
 MacroAssemblerX86::profilerExitFrame()
 {
-    jmp(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
+    jump(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
 }
 
 MacroAssembler&
 MacroAssemblerX86::asMasm()
 {
     return *static_cast<MacroAssembler*>(this);
 }
 
--- a/js/src/jit/x86/MacroAssembler-x86.h
+++ b/js/src/jit/x86/MacroAssembler-x86.h
@@ -822,17 +822,17 @@ class MacroAssemblerX86 : public MacroAs
         loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalArea) + globalDataOffset), dest);
     }
     void loadWasmPinnedRegsFromTls() {
         // x86 doesn't have any pinned registers.
     }
 
   public:
     // Used from within an Exit frame to handle a pending exception.
-    void handleFailureWithHandlerTail(void* handler);
+    void handleFailureWithHandlerTail(void* handler, Label* profilerExitTail);
 
     // Instrumentation for entering and leaving the profiler.
     void profilerEnterFrame(Register framePtr, Register scratch);
     void profilerExitFrame();
 };
 
 typedef MacroAssemblerX86 MacroAssemblerSpecific;
 
--- a/js/src/jit/x86/SharedICHelpers-x86.h
+++ b/js/src/jit/x86/SharedICHelpers-x86.h
@@ -66,17 +66,17 @@ EmitReturnFromIC(MacroAssembler& masm)
 
 inline void
 EmitChangeICReturnAddress(MacroAssembler& masm, Register reg)
 {
     masm.storePtr(reg, Address(StackPointer, 0));
 }
 
 inline void
-EmitBaselineTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t argSize)
+EmitBaselineTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t argSize)
 {
     // We assume during this that R0 and R1 have been pushed.
 
     // Compute frame size.
     masm.movl(BaselineFrameReg, eax);
     masm.addl(Imm32(BaselineFrame::FramePointerOffset), eax);
     masm.subl(BaselineStackReg, eax);
 
@@ -84,56 +84,56 @@ EmitBaselineTailCallVM(uint8_t* target, 
     masm.movl(eax, ebx);
     masm.subl(Imm32(argSize), ebx);
     masm.store32(ebx, Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFrameSize()));
 
     // Push frame descriptor and perform the tail call.
     masm.makeFrameDescriptor(eax, JitFrame_BaselineJS, ExitFrameLayout::Size());
     masm.push(eax);
     masm.push(ICTailCallReg);
-    masm.jmp(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
-EmitIonTailCallVM(uint8_t* target, MacroAssembler& masm, uint32_t stackSize)
+EmitIonTailCallVM(TrampolinePtr target, MacroAssembler& masm, uint32_t stackSize)
 {
     // For tail calls, find the already pushed JitFrame_IonJS signifying the
     // end of the Ion frame. Retrieve the length of the frame and repush
     // JitFrame_IonJS with the extra stacksize, rendering the original
     // JitFrame_IonJS obsolete.
 
     masm.loadPtr(Address(esp, stackSize), eax);
     masm.shrl(Imm32(FRAMESIZE_SHIFT), eax);
     masm.addl(Imm32(stackSize + JitStubFrameLayout::Size() - sizeof(intptr_t)), eax);
 
     // Push frame descriptor and perform the tail call.
     masm.makeFrameDescriptor(eax, JitFrame_IonJS, ExitFrameLayout::Size());
     masm.push(eax);
     masm.push(ICTailCallReg);
-    masm.jmp(ImmPtr(target));
+    masm.jump(target);
 }
 
 inline void
 EmitBaselineCreateStubFrameDescriptor(MacroAssembler& masm, Register reg, uint32_t headerSize)
 {
     // Compute stub frame size. We have to add two pointers: the stub reg and previous
     // frame pointer pushed by EmitEnterStubFrame.
     masm.movl(BaselineFrameReg, reg);
     masm.addl(Imm32(sizeof(void*) * 2), reg);
     masm.subl(BaselineStackReg, reg);
 
     masm.makeFrameDescriptor(reg, JitFrame_BaselineStub, headerSize);
 }
 
 inline void
-EmitBaselineCallVM(uint8_t* target, MacroAssembler& masm)
+EmitBaselineCallVM(TrampolinePtr target, MacroAssembler& masm)
 {
     EmitBaselineCreateStubFrameDescriptor(masm, eax, ExitFrameLayout::Size());
     masm.push(eax);
-    masm.call(ImmPtr(target));
+    masm.call(target);
 }
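
EmitBaselineCallVM now hands a TrampolinePtr straight to masm.call(). The matching MacroAssembler overload is added elsewhere in the patch; by analogy with the jump(TrampolinePtr) overload shown in MacroAssembler-x86-shared.h above, it presumably just unwraps the raw pointer:

    // Assumed shape of the overload; call(ImmPtr) is what the removed
    // call(ImmPtr(target)) line already relied on.
    void call(TrampolinePtr target) {
        call(ImmPtr(target.value));
    }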
 
 // Size of values pushed by EmitEnterStubFrame.
 static const uint32_t STUB_FRAME_SIZE = 4 * sizeof(void*);
 static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = sizeof(void*);
 
 inline void
 EmitBaselineEnterStubFrame(MacroAssembler& masm, Register scratch)
--- a/js/src/jit/x86/Trampoline-x86.cpp
+++ b/js/src/jit/x86/Trampoline-x86.cpp
@@ -46,20 +46,21 @@ enum EnterJitEbpArgumentOffset {
     ARG_STACKVALUES     = 8 * sizeof(void*),
     ARG_RESULT          = 9 * sizeof(void*)
 };
 
 
 // Generates a trampoline for calling Jit compiled code from a C++ function.
 // The trampoline uses the EnterJitCode signature, with the standard cdecl
 // calling convention.
-JitCode*
-JitRuntime::generateEnterJIT(JSContext* cx)
+void
+JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    enterJITOffset_ = startTrampolineCode(masm);
+
     masm.assertStackAlignment(ABIStackAlignment, -int32_t(sizeof(uintptr_t)) /* return address */);
 
     // Save old stack frame pointer, set new stack frame pointer.
     masm.push(ebp);
     masm.movl(esp, ebp);
 
     // Save non-volatile registers. These must be saved by the trampoline,
     // rather than the JIT'd code, because they are scanned by the conservative
@@ -318,32 +319,22 @@ JitRuntime::generateEnterJIT(JSContext* 
     // Restore non-volatile registers
     masm.pop(edi);
     masm.pop(esi);
     masm.pop(ebx);
 
     // Restore old stack frame pointer
     masm.pop(ebp);
     masm.ret();
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "EnterJIT");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateInvalidator(JSContext* cx)
+void
+JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail)
 {
-    AutoJitContextAlloc ajca(cx);
-    MacroAssembler masm(cx);
+    invalidatorOffset_ = startTrampolineCode(masm);
 
     // We do the minimum amount of work in assembly and shunt the rest
     // off to InvalidationBailout. Assembly does:
     //
     // - Pop the return address from the invalidation epilogue call.
     // - Push the machine state onto the stack.
     // - Call the InvalidationBailout routine with the stack pointer.
     // - Now that the frame has been bailed out, convert the invalidated
@@ -374,37 +365,24 @@ JitRuntime::generateInvalidator(JSContex
 
     masm.pop(ecx); // Get bailoutInfo outparam.
     masm.pop(ebx); // Get the frameSize outparam.
 
     // Pop the machine state and the dead frame.
     masm.lea(Operand(esp, ebx, TimesOne, sizeof(InvalidationBailoutStack)), esp);
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
     masm.jmp(bailoutTail);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-    JitSpew(JitSpew_IonInvalidate, "   invalidation thunk created at %p", (void*) code->raw());
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "Invalidator");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "Invalidator");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
+void
+JitRuntime::generateArgumentsRectifier(MacroAssembler& masm)
 {
-    MacroAssembler masm(cx);
+    argumentsRectifierOffset_ = startTrampolineCode(masm);
+
     // Caller:
     // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- esp
 
     // Load argc.
     masm.loadPtr(Address(esp, RectifierFrameLayout::offsetOfNumActualArgs()), esi);
 
     // Load the number of |undefined|s to push into %ecx.
     masm.loadPtr(Address(esp, RectifierFrameLayout::offsetOfCalleeToken()), eax);
@@ -525,44 +503,30 @@ JitRuntime::generateArgumentsRectifier(J
     masm.push(eax); // callee token
     masm.push(ebx); // descriptor
 
     // Call the target function.
     // Note that this assumes the function is JITted.
     masm.andl(Imm32(CalleeTokenMask), eax);
     masm.loadPtr(Address(eax, JSFunction::offsetOfNativeOrScript()), eax);
     masm.loadBaselineOrIonRaw(eax, eax, nullptr);
-    uint32_t returnOffset = masm.callJitNoProfiler(eax);
+    argumentsRectifierReturnOffset_ = masm.callJitNoProfiler(eax);
 
     // Remove the rectifier frame.
     masm.pop(ebx);            // ebx <- descriptor with FrameType.
     masm.shrl(Imm32(FRAMESIZE_SHIFT), ebx); // ebx <- descriptor.
     masm.pop(edi);            // Discard calleeToken.
     masm.pop(edi);            // Discard number of actual arguments.
 
     // Discard pushed arguments, but not the pushed frame pointer.
     BaseIndex unwind = BaseIndex(esp, ebx, TimesOne, -int32_t(sizeof(void*)));
     masm.lea(Operand(unwind), esp);
 
     masm.pop(FramePointer);
     masm.ret();
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ArgumentsRectifier");
-#endif
-
-    if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnOffset);
-    return code;
 }
 
 static void
 PushBailoutFrame(MacroAssembler& masm, uint32_t frameClass, Register spArg)
 {
     // Push registers such that we can access them from [base + code].
     if (JitSupportsSimd()) {
         masm.PushRegsInMask(AllRegs);
@@ -586,17 +550,17 @@ PushBailoutFrame(MacroAssembler& masm, u
     // Push the bailout table number.
     masm.push(Imm32(frameClass));
 
     // The current stack pointer is the first argument to jit::Bailout.
     masm.movl(esp, spArg);
 }
 
 static void
-GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
+GenerateBailoutThunk(MacroAssembler& masm, uint32_t frameClass, Label* bailoutTail)
 {
     PushBailoutFrame(masm, frameClass, eax);
 
     // Make space for Bailout's bailoutInfo outparam.
     masm.reserveStack(sizeof(void*));
     masm.movl(esp, ebx);
 
     // Call the bailout function. This will correct the size of the bailout.
@@ -629,72 +593,49 @@ GenerateBailoutThunk(JSContext* cx, Macr
         //    ... frame ...
         //    bailoutId
         //    ... bailoutFrame ...
         uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
         masm.addl(Imm32(BailoutDataSize + sizeof(void*) + frameSize), esp);
     }
 
     // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
-    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
     masm.jmp(bailoutTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
+JitRuntime::BailoutTable
+JitRuntime::generateBailoutTable(MacroAssembler& masm, Label* bailoutTail, uint32_t frameClass)
 {
-    MacroAssembler masm;
+    uint32_t offset = startTrampolineCode(masm);
 
     Label bailout;
     for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++)
         masm.call(&bailout);
     masm.bind(&bailout);
 
-    GenerateBailoutThunk(cx, masm, frameClass);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+    GenerateBailoutThunk(masm, frameClass, bailoutTail);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTable");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "BailoutTable");
-#endif
-
-    return code;
+    return BailoutTable(offset, masm.currentOffset() - offset);
 }
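
generateBailoutTable now reports both the table's start offset and its size so the runtime can answer getBailoutTable() and getBailoutTableSize() (used in Bailouts-x86.cpp above) without keeping a JitCode per table. A sketch of the assumed shape of the return type:

    // Assumed: records where a bailout table starts inside the shared
    // trampoline code and how many bytes it spans.
    struct BailoutTable
    {
        uint32_t startOffset;
        uint32_t size;

        BailoutTable(uint32_t startOffset, uint32_t size)
          : startOffset(startOffset), size(size)
        {}
    };

getBailoutTable(frameClass) would then return a TrampolinePtr at startOffset, and getBailoutTableSize(frameClass) would return size.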
 
-JitCode*
-JitRuntime::generateBailoutHandler(JSContext* cx)
+void
+JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
-    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
+    bailoutHandlerOffset_ = startTrampolineCode(masm);
 
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "BailoutHandler");
-#endif
-
-    return code;
+    GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail);
 }
 
 bool
 JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, const VMFunction& f)
 {
     MOZ_ASSERT(functionWrappers_);
     MOZ_ASSERT(functionWrappers_->initialized());
 
-    masm.flushBuffer();
-    uint32_t wrapperOffset = masm.currentOffset();
+    uint32_t wrapperOffset = startTrampolineCode(masm);
 
     // Avoid conflicts with argument registers while discarding the result after
     // the function call.
     AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
 
     static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                    "Wrapper register set must be a superset of Volatile register set.");
 
@@ -847,20 +788,20 @@ JitRuntime::generateVMWrapper(JSContext*
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(ExitFrameLayout) +
                     f.explicitStackSlots() * sizeof(void*) +
                     f.extraValuesToPop * sizeof(Value)));
 
     return functionWrappers_->putNew(&f, wrapperOffset);
 }
 
-JitCode*
-JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
+uint32_t
+JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type)
 {
-    MacroAssembler masm;
+    uint32_t offset = startTrampolineCode(masm);
 
     LiveRegisterSet save;
     if (cx->runtime()->jitSupportsFloatingPoint) {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                  FloatRegisterSet(FloatRegisters::VolatileMask));
     } else {
         save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                  FloatRegisterSet());
@@ -873,27 +814,17 @@ JitRuntime::generatePreBarrier(JSContext
     masm.setupUnalignedABICall(eax);
     masm.passABIArg(ecx);
     masm.passABIArg(edx);
     masm.callWithABI(IonMarkFunction(type));
 
     masm.PopRegsInMask(save);
     masm.ret();
 
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "PreBarrier");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "PreBarrier");
-#endif
-
-    return code;
+    return offset;
 }
 
 typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
 static const VMFunction HandleDebugTrapInfo =
     FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
 
 JitCode*
 JitRuntime::generateDebugTrapHandler(JSContext* cx)
@@ -917,17 +848,17 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);
 
     // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
     // the stub frame has a nullptr ICStub pointer, since this pointer is
     // marked during GC.
     masm.movePtr(ImmPtr(nullptr), ICStubReg);
     EmitBaselineEnterStubFrame(masm, scratch3);
 
-    uint8_t* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
+    TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
     masm.push(scratch1);
     masm.push(scratch2);
     EmitBaselineCallVM(code, masm);
 
     EmitBaselineLeaveStubFrame(masm);
 
     // If the stub returns |true|, we have to perform a forced return
     // (return from the JS frame). If the stub returns |false|, just return
@@ -962,60 +893,39 @@ JitRuntime::generateDebugTrapHandler(JSC
 #endif
 #ifdef MOZ_VTUNE
     vtune::MarkStub(codeDbg, "DebugTrapHandler");
 #endif
 
     return codeDbg;
 }
 
-JitCode*
-JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
+void
+JitRuntime::generateExceptionTailStub(MacroAssembler& masm, void* handler, Label* profilerExitTail)
 {
-    MacroAssembler masm;
-
-    masm.handleFailureWithHandlerTail(handler);
+    exceptionTailOffset_ = startTrampolineCode(masm);
 
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ExceptionTailStub");
-#endif
-
-    return code;
+    masm.bind(masm.failureLabel());
+    masm.handleFailureWithHandlerTail(handler, profilerExitTail);
 }
 
-JitCode*
-JitRuntime::generateBailoutTailStub(JSContext* cx)
+void
+JitRuntime::generateBailoutTailStub(MacroAssembler& masm, Label* bailoutTail)
 {
-    MacroAssembler masm;
+    bailoutTailOffset_ = startTrampolineCode(masm);
+    masm.bind(bailoutTail);
 
     masm.generateBailoutTail(edx, ecx);
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "BailoutTailStub");
-#endif
-
-    return code;
 }
 
-JitCode*
-JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
+void
+JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm, Label* profilerExitTail)
 {
-    MacroAssembler masm;
+    profilerExitFrameTailOffset_ = startTrampolineCode(masm);
+    masm.bind(profilerExitTail);
 
     Register scratch1 = eax;
     Register scratch2 = ebx;
     Register scratch3 = esi;
     Register scratch4 = edi;
 
     //
     // The code generated below expects that the current stack pointer points
@@ -1331,21 +1241,9 @@ JitRuntime::generateProfilerExitFrameTai
     //
     masm.bind(&handle_Entry);
     {
         masm.movePtr(ImmPtr(nullptr), scratch1);
         masm.storePtr(scratch1, lastProfilingCallSite);
         masm.storePtr(scratch1, lastProfilingFrame);
         masm.ret();
     }
-
-    Linker linker(masm);
-    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-#ifdef MOZ_VTUNE
-    vtune::MarkStub(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
 }
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -4480,18 +4480,18 @@ LazyScript::hasUncompiledEnclosingScript
 }
 
 void
 JSScript::updateBaselineOrIonRaw(JSRuntime* maybeRuntime)
 {
     if (hasBaselineScript() && baseline->hasPendingIonBuilder()) {
         MOZ_ASSERT(maybeRuntime);
         MOZ_ASSERT(!isIonCompilingOffThread());
-        baselineOrIonRaw = maybeRuntime->jitRuntime()->lazyLinkStub()->raw();
-        baselineOrIonSkipArgCheck = maybeRuntime->jitRuntime()->lazyLinkStub()->raw();
+        baselineOrIonRaw = maybeRuntime->jitRuntime()->lazyLinkStub().value;
+        baselineOrIonSkipArgCheck = maybeRuntime->jitRuntime()->lazyLinkStub().value;
     } else if (hasIonScript()) {
         baselineOrIonRaw = ion->method()->raw();
         baselineOrIonSkipArgCheck = ion->method()->raw() + ion->getSkipArgCheckEntryOffset();
     } else if (hasBaselineScript()) {
         baselineOrIonRaw = baseline->method()->raw();
         baselineOrIonSkipArgCheck = baseline->method()->raw();
     } else {
         baselineOrIonRaw = nullptr;
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -315,16 +315,17 @@ Instance::Instance(JSContext* cx,
                    UniqueDebugState debug,
                    UniqueGlobalSegment globals,
                    HandleWasmMemoryObject memory,
                    SharedTableVector&& tables,
                    Handle<FunctionVector> funcImports,
                    const ValVector& globalImports)
   : compartment_(cx->compartment()),
     object_(object),
+    jsJitArgsRectifier_(),
     code_(code),
     debug_(Move(debug)),
     globals_(Move(globals)),
     memory_(memory),
     tables_(Move(tables)),
     enterFrameTrapsEnabled_(false)
 {
 #ifdef DEBUG
@@ -496,18 +497,16 @@ Instance::tracePrivate(JSTracer* trc)
 {
     // This method is only called from WasmInstanceObject so the only reason why
     // TraceEdge is called is so that the pointer can be updated during a moving
     // GC. TraceWeakEdge may sound better, but it is less efficient given that
     // we know object_ is already marked.
     MOZ_ASSERT(!gc::IsAboutToBeFinalized(&object_));
     TraceEdge(trc, &object_, "wasm instance object");
 
-    TraceNullableEdge(trc, &jsJitArgsRectifier_, "wasm jit args rectifier");
-
     // OK to just do one tier here; though the tiers have different funcImports
     // tables, they share the tls object.
     for (const FuncImport& fi : metadata(code().stableTier()).funcImports)
         TraceNullableEdge(trc, &funcImportTls(fi).obj, "wasm import");
 
     for (const SharedTable& table : tables_)
         table->trace(trc);
 
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -15,16 +15,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 #ifndef wasm_instance_h
 #define wasm_instance_h
 
 #include "gc/Barrier.h"
+#include "jit/shared/Assembler-shared.h"
 #include "vm/SharedMem.h"
 #include "wasm/WasmCode.h"
 #include "wasm/WasmDebug.h"
 #include "wasm/WasmProcess.h"
 #include "wasm/WasmTable.h"
 
 namespace js {
 namespace wasm {
@@ -68,17 +69,17 @@ typedef UniquePtr<GlobalSegment> UniqueG
 // The instance's code may be shared among multiple instances provided none of
 // those instances are being debugged. Instances that are being debugged own
 // their code.
 
 class Instance
 {
     JSCompartment* const            compartment_;
     ReadBarrieredWasmInstanceObject object_;
-    GCPtrJitCode                    jsJitArgsRectifier_;
+    jit::TrampolinePtr              jsJitArgsRectifier_;
     const SharedCode                code_;
     const UniqueDebugState          debug_;
     const UniqueGlobalSegment       globals_;
     GCPtrWasmMemoryObject           memory_;
     SharedTableVector               tables_;
     bool                            enterFrameTrapsEnabled_;
 
     // Internal helpers:
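
Because jsJitArgsRectifier_ is now a jit::TrampolinePtr rather than a GC-managed JitCode pointer, the field holds the rectifier's raw entry point directly: tracePrivate() no longer traces it, and the import exit in WasmStubs.cpp below can use the loaded value as a code address without the extra JitCode::offsetOfCode() indirection. The accessor referenced by the stub presumably reduces to an offsetof; a sketch of its assumed shape:

    // Assumed shape of the accessor used from WasmStubs.cpp. TrampolinePtr
    // wraps a single raw pointer, so loading this field yields the entry point.
    static size_t offsetOfJSJitArgsRectifier() {
        return offsetof(Instance, jsJitArgsRectifier_);
    }
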
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -839,17 +839,16 @@ GenerateImportJitExit(MacroAssembler& ma
 
     GenerateJitExitEpilogue(masm, masm.framePushed(), offsets);
 
     {
         // Call the arguments rectifier.
         masm.bind(&rectify);
         masm.loadPtr(Address(WasmTlsReg, offsetof(TlsData, instance)), callee);
         masm.loadPtr(Address(callee, Instance::offsetOfJSJitArgsRectifier()), callee);
-        masm.loadPtr(Address(callee, JitCode::offsetOfCode()), callee);
         masm.jump(&rejoinBeforeCall);
     }
 
     if (oolConvert.used()) {
         masm.bind(&oolConvert);
         masm.setFramePushed(nativeFramePushed);
 
         // Coercion calls use the following stack layout (sp grows to the left):