Bug 1171945: IonMonkey - Part 2: Add platform in ionmonkey for sharedcaches, r=jandem
author Hannes Verschore <hv1989@gmail.com>
Wed, 19 Aug 2015 15:15:46 +0200
changeset 291177 5484f536cd1aab2c85839ecdc52d2bd82e5e725b
parent 291176 e96e3039b44b45ed2eda7c5d9b439fef5a701a6c
child 291178 a8ecb6c04941001659c5e0545535c2b82d8d57a5
push id 5245
push user raliiev@mozilla.com
push date Thu, 29 Oct 2015 11:30:51 +0000
treeherder mozilla-beta@dac831dc1bd0
reviewers jandem
bugs 1171945
milestone 43.0a1
Bug 1171945: IonMonkey - Part 2: Add platform in ionmonkey for sharedcaches, r=jandem
js/src/jit/BaselineJIT.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CodeGenerator.h
js/src/jit/ICStubSpace.h
js/src/jit/Ion.cpp
js/src/jit/IonCode.h
js/src/jit/JitCompartment.h
js/src/jit/LIR.h
js/src/jit/Lowering.cpp
js/src/jit/Lowering.h
js/src/jit/MIR.h
js/src/jit/MOpcodes.h
js/src/jit/SharedIC.cpp
js/src/jit/SharedIC.h
js/src/jit/TypePolicy.cpp
js/src/jit/arm/Lowering-arm.cpp
js/src/jit/arm/Lowering-arm.h
js/src/jit/arm64/Lowering-arm64.cpp
js/src/jit/arm64/Lowering-arm64.h
js/src/jit/mips32/Lowering-mips32.cpp
js/src/jit/mips32/Lowering-mips32.h
js/src/jit/none/Lowering-none.h
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/shared/CodeGenerator-shared.h
js/src/jit/shared/LIR-shared.h
js/src/jit/shared/LOpcodes-shared.h
js/src/jit/shared/Lowering-shared-inl.h
js/src/jit/shared/Lowering-shared.h
js/src/jit/x64/Lowering-x64.cpp
js/src/jit/x64/Lowering-x64.h
js/src/jit/x86/Lowering-x86.cpp
js/src/jit/x86/Lowering-x86.h
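
For orientation before the per-file hunks: the heart of this patch is the shared-stub call-site machinery in CodeGenerator. Condensed from CodeGenerator::emitSharedStub and the link-time code below into a sketch (OOM propagation shown; safepoint marking and the post-call stack fixup omitted):

    // Codegen, per shared-stub LIR instruction (emitSharedStub):
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS);
    masm.Push(Imm32(descriptor));                     // mark the end of the Ion frame
    CodeOffsetLabel patchOffset;
    IonICEntry entry(script->pcToOffset(pc), ICEntry::Kind_Op, script);
    EmitCallIC(&patchOffset, masm);                   // patchable load of the ICEntry, then a call into the stub code
    entry.setReturnOffset(CodeOffsetLabel(masm.currentOffset()));
    masm.propagateOOM(sharedStubs_.append(SharedStub(kind, entry, patchOffset)));

    // Link time (linkSharedStubs + link): allocate one fallback stub per recorded
    // entry and install it with entry.setFirstStub(stub); copy each entry into
    // ionScript->sharedStubList()[i]; patch the IC load to point at that entry;
    // finally ionScript->adoptFallbackStubs(&stubSpace_) hands the stub memory
    // over to the script.
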
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -436,20 +436,17 @@ BaselineScript::trace(JSTracer* trc)
 {
     TraceEdge(trc, &method_, "baseline-method");
     if (templateScope_)
         TraceEdge(trc, &templateScope_, "baseline-template-scope");
 
     // Mark all IC stub codes hanging off the IC stub entries.
     for (size_t i = 0; i < numICEntries(); i++) {
         ICEntry& ent = icEntry(i);
-        if (!ent.hasStub())
-            continue;
-        for (ICStub* stub = ent.firstStub(); stub; stub = stub->next())
-            stub->trace(trc);
+        ent.trace(trc);
     }
 }
 
 /* static */
 void
 BaselineScript::writeBarrierPre(Zone* zone, BaselineScript* script)
 {
     if (zone->needsIncrementalBarrier())
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -1677,16 +1677,61 @@ CodeGenerator::visitStringReplace(LStrin
     if (lir->string()->isConstant())
         pushArg(ImmGCPtr(lir->string()->toConstant()->toString()));
     else
         pushArg(ToRegister(lir->string()));
 
     callVM(StringReplaceInfo, lir);
 }
 
+void
+CodeGenerator::emitSharedStub(ICStub::Kind kind, LInstruction* lir)
+{
+    JSScript* script = lir->mirRaw()->block()->info().script();
+    jsbytecode* pc = lir->mirRaw()->toInstruction()->resumePoint()->pc();
+
+    // Create descriptor signifying end of Ion frame.
+    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS);
+    masm.Push(Imm32(descriptor));
+
+    // Call into the stubcode.
+    CodeOffsetLabel patchOffset;
+    IonICEntry entry(script->pcToOffset(pc), ICEntry::Kind_Op, script);
+    EmitCallIC(&patchOffset, masm);
+    entry.setReturnOffset(CodeOffsetLabel(masm.currentOffset()));
+
+    SharedStub sharedStub(kind, entry, patchOffset);
+    masm.propagateOOM(sharedStubs_.append(sharedStub));
+
+    // Fix up upon return.
+    uint32_t callOffset = masm.currentOffset();
+    masm.freeStack(sizeof(intptr_t));
+    markSafepointAt(callOffset, lir);
+}
+
+void
+CodeGenerator::visitBinarySharedStub(LBinarySharedStub* lir)
+{
+    JSOp jsop = JSOp(*lir->mir()->resumePoint()->pc());
+    switch (jsop) {
+      default:
+        MOZ_CRASH("Unsupported jsop in shared stubs.");
+    }
+}
+
+void
+CodeGenerator::visitUnarySharedStub(LUnarySharedStub* lir)
+{
+    JSOp jsop = JSOp(*lir->mir()->resumePoint()->pc());
+    switch (jsop) {
+      default:
+        MOZ_CRASH("Unsupported jsop in shared stubs.");
+    }
+}
+
 typedef JSObject* (*LambdaFn)(JSContext*, HandleFunction, HandleObject);
 static const VMFunction LambdaInfo = FunctionInfo<LambdaFn>(js::Lambda);
 
 void
 CodeGenerator::visitLambdaForSingleton(LLambdaForSingleton* lir)
 {
     pushArg(ToRegister(lir->scopeChain()));
     pushArg(ImmGCPtr(lir->mir()->info().fun));
@@ -7801,16 +7846,35 @@ struct AutoDiscardIonCode
     }
 
     void keepIonCode() {
         keep = true;
     }
 };
 
 bool
+CodeGenerator::linkSharedStubs(JSContext* cx)
+{
+    for (uint32_t i = 0; i < sharedStubs_.length(); i++) {
+        ICStub* stub = nullptr;
+
+        switch (sharedStubs_[i].kind) {
+          default:
+            MOZ_CRASH("Unsupported shared stub.");
+        }
+
+        if (!stub)
+            return false;
+
+        sharedStubs_[i].entry.setFirstStub(stub);
+    }
+    return true;
+}
+
+bool
 CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
 {
     RootedScript script(cx, gen->info().script());
     OptimizationLevel optimizationLevel = gen->optimizationInfo().level();
 
     // Capture the SIMD template objects which are used during the
     // compilation. This iterates over the template objects, using read-barriers
     // to let the GC know that the generated code relies on these template
@@ -7825,16 +7889,19 @@ CodeGenerator::link(JSContext* cx, Compi
         // since that will cancel this compilation too.
         if (!Invalidate(cx, script, /* resetUses */ false, /* cancelOffThread*/ false))
             return false;
     }
 
     if (scriptCounts_ && !script->hasScriptCounts() && !script->initScriptCounts(cx))
         return false;
 
+    if (!linkSharedStubs(cx))
+        return false;
+
     // Check to make sure we didn't have a mid-build invalidation. If so, we
     // will trickle to jit::Compile() and return Method_Skipped.
     uint32_t warmUpCount = script->getWarmUpCount();
     RecompileInfo recompileInfo;
     if (!FinishCompilation(cx, script, constraints, &recompileInfo))
         return true;
 
     // IonMonkey could have inferred better type information during
@@ -7856,17 +7923,18 @@ CodeGenerator::link(JSContext* cx, Compi
 
     IonScript* ionScript =
       IonScript::New(cx, recompileInfo,
                      graph.totalSlotCount(), argumentSlots, scriptFrameSize,
                      snapshots_.listSize(), snapshots_.RVATableSize(),
                      recovers_.size(), bailouts_.length(), graph.numConstants(),
                      safepointIndices_.length(), osiIndices_.length(),
                      cacheList_.length(), runtimeData_.length(),
-                     safepoints_.size(), patchableBackedges_.length(), optimizationLevel);
+                     safepoints_.size(), patchableBackedges_.length(),
+                     sharedStubs_.length(), optimizationLevel);
     if (!ionScript)
         return false;
     discardIonCode.ionScript = ionScript;
 
     // Also, note that creating the code here during an incremental GC will
     // trace the code and mark all GC things it refers to. This captures any
     // read barriers which were skipped while compiling the script off thread.
     Linker linker(masm);
@@ -7950,16 +8018,19 @@ CodeGenerator::link(JSContext* cx, Compi
     ionScript->setSkipArgCheckEntryOffset(getSkipArgCheckEntryOffset());
 
     // If SPS is enabled, mark IonScript as having been instrumented with SPS
     if (isProfilerInstrumentationEnabled())
         ionScript->setHasProfilingInstrumentation();
 
     script->setIonScript(cx, ionScript);
 
+    // Adopt fallback shared stubs from the compiler into the ion script.
+    ionScript->adoptFallbackStubs(&stubSpace_);
+
     {
         AutoWritableJitCode awjc(code);
         invalidateEpilogueData_.fixup(&masm);
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
                                            ImmPtr(ionScript),
                                            ImmPtr((void*)-1));
 
         for (size_t i = 0; i < ionScriptLabels_.length(); i++) {
@@ -7986,16 +8057,33 @@ CodeGenerator::link(JSContext* cx, Compi
             for (uint32_t i = 0; i < patchableTLScripts_.length(); i++) {
                 patchableTLScripts_[i].fixup(&masm);
                 Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTLScripts_[i]),
                                                    ImmPtr((void*) uintptr_t(textId)),
                                                    ImmPtr((void*)0));
             }
         }
 #endif
+        // Patch shared stub IC loads using IC entries
+        for (size_t i = 0; i < sharedStubs_.length(); i++) {
+            CodeOffsetLabel label = sharedStubs_[i].label;
+            label.fixup(&masm);
+
+            IonICEntry& entry = ionScript->sharedStubList()[i];
+            entry = sharedStubs_[i].entry;
+            entry.fixupReturnOffset(masm);
+            Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
+                                               ImmPtr(&entry),
+                                               ImmPtr((void*)-1));
+
+            MOZ_ASSERT(entry.hasStub());
+            MOZ_ASSERT(entry.firstStub()->isFallback());
+
+            entry.firstStub()->toFallbackStub()->fixupICEntry(&entry);
+        }
     }
 
     JitSpew(JitSpew_Codegen, "Created IonScript %p (raw %p)",
             (void*) ionScript, (void*) code->raw());
 
     ionScript->setInvalidationEpilogueDataOffset(invalidateEpilogueData_.offset());
     ionScript->setOsrPc(gen->info().osrPc());
     ionScript->setOsrEntryOffset(getOsrEntryOffset());
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -54,16 +54,17 @@ class CodeGenerator : public CodeGenerat
   public:
     CodeGenerator(MIRGenerator* gen, LIRGraph* graph, MacroAssembler* masm = nullptr);
     ~CodeGenerator();
 
   public:
     bool generate();
     bool generateAsmJS(AsmJSFunctionLabels* labels);
     bool link(JSContext* cx, CompilerConstraintList* constraints);
+    bool linkSharedStubs(JSContext* cx);
 
     void visitOsiPoint(LOsiPoint* lir);
     void visitGoto(LGoto* lir);
     void visitTableSwitch(LTableSwitch* ins);
     void visitTableSwitchV(LTableSwitchV* ins);
     void visitCloneLiteral(LCloneLiteral* lir);
     void visitParameter(LParameter* lir);
     void visitCallee(LCallee* lir);
@@ -101,16 +102,19 @@ class CodeGenerator : public CodeGenerat
     void visitInteger(LInteger* lir);
     void visitRegExp(LRegExp* lir);
     void visitRegExpExec(LRegExpExec* lir);
     void visitOutOfLineRegExpExec(OutOfLineRegExpExec* ool);
     void visitRegExpTest(LRegExpTest* lir);
     void visitOutOfLineRegExpTest(OutOfLineRegExpTest* ool);
     void visitRegExpReplace(LRegExpReplace* lir);
     void visitStringReplace(LStringReplace* lir);
+    void emitSharedStub(ICStub::Kind kind, LInstruction* lir);
+    void visitBinarySharedStub(LBinarySharedStub* lir);
+    void visitUnarySharedStub(LUnarySharedStub* lir);
     void visitLambda(LLambda* lir);
     void visitOutOfLineLambdaArrow(OutOfLineLambdaArrow* ool);
     void visitLambdaArrow(LLambdaArrow* lir);
     void visitLambdaForSingleton(LLambdaForSingleton* lir);
     void visitPointer(LPointer* lir);
     void visitKeepAliveObject(LKeepAliveObject* lir);
     void visitSlots(LSlots* lir);
     void visitLoadSlotT(LLoadSlotT* lir);
@@ -474,16 +478,28 @@ class CodeGenerator : public CodeGenerat
     void emitStoreHoleCheck(Register elements, const LAllocation* index, int32_t offsetAdjustment,
                             LSnapshot* snapshot);
 
     void emitAssertRangeI(const Range* r, Register input);
     void emitAssertRangeD(const Range* r, FloatRegister input, FloatRegister temp);
 
     Vector<CodeOffsetLabel, 0, JitAllocPolicy> ionScriptLabels_;
 
+    struct SharedStub {
+        ICStub::Kind kind;
+        IonICEntry entry;
+        CodeOffsetLabel label;
+
+        SharedStub(ICStub::Kind kind, IonICEntry entry, CodeOffsetLabel label)
+          : kind(kind), entry(entry), label(label)
+        {}
+    };
+
+    Vector<SharedStub, 0, SystemAllocPolicy> sharedStubs_;
+
     void branchIfInvalidated(Register temp, Label* invalidated);
 
 #ifdef DEBUG
     void emitDebugResultChecks(LInstruction* ins);
     void emitObjectOrStringResultChecks(LInstruction* lir, MDefinition* mir);
     void emitValueResultChecks(LInstruction* lir, MDefinition* mir);
 #endif
 
new file mode 100644
--- /dev/null
+++ b/js/src/jit/ICStubSpace.h
@@ -0,0 +1,78 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jit_ICStubSpace_h
+#define jit_ICStubSpace_h
+
+#include "ds/LifoAlloc.h"
+
+namespace js {
+namespace jit {
+
+// ICStubSpace is an abstraction for allocation policy and storage for stub data.
+// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
+// also includes stubs that can make non-tail calls that can GC).
+//
+// Optimized stubs are allocated per-compartment and are always purged when
+// JIT-code is discarded. Fallback stubs are allocated per BaselineScript or
+// IonScript and are only destroyed when the owning script is destroyed.
+class ICStubSpace
+{
+  protected:
+    LifoAlloc allocator_;
+
+    explicit ICStubSpace(size_t chunkSize)
+      : allocator_(chunkSize)
+    {}
+
+  public:
+    inline void* alloc(size_t size) {
+        return allocator_.alloc(size);
+    }
+
+    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
+
+    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
+        return allocator_.sizeOfExcludingThis(mallocSizeOf);
+    }
+};
+
+// Space for optimized stubs. Every JitCompartment has a single
+// OptimizedICStubSpace.
+struct OptimizedICStubSpace : public ICStubSpace
+{
+    static const size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;
+
+  public:
+    OptimizedICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    void free() {
+        allocator_.freeAll();
+    }
+};
+
+// Space for fallback stubs. Every BaselineScript has a
+// FallbackICStubSpace.
+struct FallbackICStubSpace : public ICStubSpace
+{
+    static const size_t STUB_DEFAULT_CHUNK_SIZE = 256;
+
+  public:
+    FallbackICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    inline void adoptFrom(FallbackICStubSpace* other) {
+        allocator_.steal(&(other->allocator_));
+    }
+};
+
+} // namespace jit
+} // namespace js
+
+#endif /* jit_ICStubSpace_h */
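
Both spaces are thin wrappers around LifoAlloc; what this patch relies on is the ownership hand-off: the code generator allocates fallback stubs into its own FallbackICStubSpace while compiling, and at link time the IonScript steals those allocations so the stubs live as long as the script. A minimal sketch of the hand-off, using only names introduced in this patch (the raw allocation stands in for the stub creation done by linkSharedStubs):

    FallbackICStubSpace compilerSpace;                       // cf. CodeGeneratorShared::stubSpace_
    void* stubMemory = compilerSpace.alloc(sizeof(ICStub));  // fallback stubs are placement-allocated here
    // ... linkSharedStubs() builds the fallback stubs ...

    FallbackICStubSpace scriptSpace;                         // cf. IonScript::fallbackStubSpace_
    scriptSpace.adoptFrom(&compilerSpace);                   // LifoAlloc::steal: the chunks move over,
                                                             // existing stub pointers stay valid

After adoptFrom() the compiler-side space is empty and the stubs are owned by the IonScript.
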
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -892,29 +892,31 @@ IonScript::IonScript()
     snapshotsListSize_(0),
     snapshotsRVATableSize_(0),
     constantTable_(0),
     constantEntries_(0),
     backedgeList_(0),
     backedgeEntries_(0),
     invalidationCount_(0),
     recompileInfo_(),
-    osrPcMismatchCounter_(0)
+    osrPcMismatchCounter_(0),
+    fallbackStubSpace_()
 {
 }
 
 IonScript*
 IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
                uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
                size_t snapshotsListSize, size_t snapshotsRVATableSize,
                size_t recoversSize, size_t bailoutEntries,
                size_t constants, size_t safepointIndices,
                size_t osiIndices, size_t cacheEntries,
                size_t runtimeSize,  size_t safepointsSize,
-               size_t backedgeEntries, OptimizationLevel optimizationLevel)
+               size_t backedgeEntries, size_t sharedStubEntries,
+               OptimizationLevel optimizationLevel)
 {
     static const int DataAlignment = sizeof(void*);
 
     if (snapshotsListSize >= MAX_BUFFER_SIZE ||
         (bailoutEntries >= MAX_BUFFER_SIZE / sizeof(uint32_t)))
     {
         ReportOutOfMemory(cx);
         return nullptr;
@@ -928,26 +930,29 @@ IonScript::New(JSContext* cx, RecompileI
     size_t paddedBailoutSize = AlignBytes(bailoutEntries * sizeof(uint32_t), DataAlignment);
     size_t paddedConstantsSize = AlignBytes(constants * sizeof(Value), DataAlignment);
     size_t paddedSafepointIndicesSize = AlignBytes(safepointIndices * sizeof(SafepointIndex), DataAlignment);
     size_t paddedOsiIndicesSize = AlignBytes(osiIndices * sizeof(OsiIndex), DataAlignment);
     size_t paddedCacheEntriesSize = AlignBytes(cacheEntries * sizeof(uint32_t), DataAlignment);
     size_t paddedRuntimeSize = AlignBytes(runtimeSize, DataAlignment);
     size_t paddedSafepointSize = AlignBytes(safepointsSize, DataAlignment);
     size_t paddedBackedgeSize = AlignBytes(backedgeEntries * sizeof(PatchableBackedge), DataAlignment);
+    size_t paddedSharedStubSize = AlignBytes(sharedStubEntries * sizeof(IonICEntry), DataAlignment);
+
     size_t bytes = paddedSnapshotsSize +
                    paddedRecoversSize +
                    paddedBailoutSize +
                    paddedConstantsSize +
                    paddedSafepointIndicesSize +
                    paddedOsiIndicesSize +
                    paddedCacheEntriesSize +
                    paddedRuntimeSize +
                    paddedSafepointSize +
-                   paddedBackedgeSize;
+                   paddedBackedgeSize +
+                   paddedSharedStubSize;
     IonScript* script = cx->zone()->pod_malloc_with_extra<IonScript, uint8_t>(bytes);
     if (!script)
         return nullptr;
     new (script) IonScript();
 
     uint32_t offsetCursor = sizeof(IonScript);
 
     script->runtimeData_ = offsetCursor;
@@ -986,38 +991,55 @@ IonScript::New(JSContext* cx, RecompileI
     script->constantTable_ = offsetCursor;
     script->constantEntries_ = constants;
     offsetCursor += paddedConstantsSize;
 
     script->backedgeList_ = offsetCursor;
     script->backedgeEntries_ = backedgeEntries;
     offsetCursor += paddedBackedgeSize;
 
+    script->sharedStubList_ = offsetCursor;
+    script->sharedStubEntries_ = sharedStubEntries;
+    offsetCursor += paddedSharedStubSize;
+
     script->frameSlots_ = frameSlots;
     script->argumentSlots_ = argumentSlots;
 
     script->frameSize_ = frameSize;
 
     script->recompileInfo_ = recompileInfo;
     script->optimizationLevel_ = optimizationLevel;
 
     return script;
 }
 
 void
+IonScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
+
+{
+    fallbackStubSpace()->adoptFrom(stubSpace);
+}
+
+void
 IonScript::trace(JSTracer* trc)
 {
     if (method_)
         TraceEdge(trc, &method_, "method");
 
     if (deoptTable_)
         TraceEdge(trc, &deoptTable_, "deoptimizationTable");
 
     for (size_t i = 0; i < numConstants(); i++)
         TraceEdge(trc, &getConstant(i), "constant");
+
+    // Mark all IC stub codes hanging off the IC stub entries.
+    for (size_t i = 0; i < numSharedStubs(); i++) {
+        ICEntry& ent = sharedStubList()[i];
+        ent.trace(trc);
+    }
 }
 
 /* static */ void
 IonScript::writeBarrierPre(Zone* zone, IonScript* ionScript)
 {
     if (zone->needsIncrementalBarrier())
         ionScript->trace(zone->barrierTracer());
 }
@@ -1223,16 +1245,74 @@ IonScript::Destroy(FreeOp* fop, IonScrip
 
 void
 IonScript::toggleBarriers(bool enabled)
 {
     method()->togglePreBarriers(enabled);
 }
 
 void
+IonScript::purgeOptimizedStubs(Zone* zone)
+{
+    for (size_t i = 0; i < numSharedStubs(); i++) {
+        ICEntry& entry = sharedStubList()[i];
+        if (!entry.hasStub())
+            continue;
+
+        ICStub* lastStub = entry.firstStub();
+        while (lastStub->next())
+            lastStub = lastStub->next();
+
+        if (lastStub->isFallback()) {
+            // Unlink all stubs allocated in the optimized space.
+            ICStub* stub = entry.firstStub();
+            ICStub* prev = nullptr;
+
+            while (stub->next()) {
+                if (!stub->allocatedInFallbackSpace()) {
+                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
+                    stub = stub->next();
+                    continue;
+                }
+
+                prev = stub;
+                stub = stub->next();
+            }
+
+            if (lastStub->isMonitoredFallback()) {
+                // Monitor stubs can't make calls, so are always in the
+                // optimized stub space.
+                ICTypeMonitor_Fallback* lastMonStub =
+                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
+                lastMonStub->resetMonitorStubChain(zone);
+            }
+        } else if (lastStub->isTypeMonitor_Fallback()) {
+            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
+        } else {
+            MOZ_ASSERT(lastStub->isTableSwitch());
+        }
+    }
+
+#ifdef DEBUG
+    // All remaining stubs must be allocated in the fallback space.
+    for (size_t i = 0; i < numSharedStubs(); i++) {
+        ICEntry& entry = sharedStubList()[i];
+        if (!entry.hasStub())
+            continue;
+
+        ICStub* stub = entry.firstStub();
+        while (stub->next()) {
+            MOZ_ASSERT(stub->allocatedInFallbackSpace());
+            stub = stub->next();
+        }
+    }
+#endif
+}
+
+void
 IonScript::purgeCaches()
 {
     // Don't reset any ICs if we're invalidated, otherwise, repointing the
     // inline jump could overwrite an invalidation marker. These ICs can
     // no longer run, however, the IC slow paths may be active on the stack.
     // ICs therefore are required to check for invalidation before patching,
     // to ensure the same invariant.
     if (invalidated())
@@ -2670,16 +2750,17 @@ InvalidateActivation(FreeOp* fop, const 
             continue;
 
         IonScript* ionScript = script->ionScript();
 
         // Purge ICs before we mark this script as invalidated. This will
         // prevent lastJump_ from appearing to be a bogus pointer, just
         // in case anyone tries to read it.
         ionScript->purgeCaches();
+        ionScript->purgeOptimizedStubs(script->zone());
 
         // Clean up any pointers from elsewhere in the runtime to this IonScript
         // which is about to become disconnected from its JSScript.
         ionScript->unlinkFromRuntime(fop);
 
         // This frame needs to be invalidated. We do the following:
         //
         // 1. Increment the reference counter to keep the ionScript alive
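
For context on purgeOptimizedStubs above: each shared-stub chain normally ends in its fallback stub (type-monitor and table-switch chains are the exceptions handled in the else branches), and the purge unlinks everything that was not allocated in the script's own fallback space. In sketch form:

    // Chain layout per IonICEntry before the purge:
    //   entry.firstStub() -> optimized stub -> ... -> optimized stub -> fallback stub (last)
    //
    // Optimized stubs live in the per-compartment OptimizedICStubSpace and do not
    // survive the purge; only stubs allocated in the IonScript's FallbackICStubSpace
    // are kept, which is exactly what the DEBUG loop re-checks afterwards.
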
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -10,28 +10,30 @@
 #include "mozilla/Atomics.h"
 #include "mozilla/MemoryReporting.h"
 #include "mozilla/PodOperations.h"
 
 #include "jstypes.h"
 
 #include "gc/Heap.h"
 #include "jit/ExecutableAllocator.h"
+#include "jit/ICStubSpace.h"
 #include "jit/IonOptimizationLevels.h"
 #include "jit/IonTypes.h"
 #include "js/UbiNode.h"
 #include "vm/TraceLogging.h"
 #include "vm/TypeInference.h"
 
 namespace js {
 namespace jit {
 
 class MacroAssembler;
 class PatchableBackedge;
 class IonBuilder;
+class IonICEntry;
 
 typedef Vector<JSObject*, 4, JitAllocPolicy> ObjectVector;
 
 class JitCode : public gc::TenuredCell
 {
   protected:
     uint8_t* code_;
     ExecutablePool* pool_;
@@ -254,29 +256,36 @@ struct IonScript
     // Constant table for constants stored in snapshots.
     uint32_t constantTable_;
     uint32_t constantEntries_;
 
     // List of patchable backedges which are threaded into the runtime's list.
     uint32_t backedgeList_;
     uint32_t backedgeEntries_;
 
+    // List of entries to the shared stub.
+    uint32_t sharedStubList_;
+    uint32_t sharedStubEntries_;
+
     // Number of references from invalidation records.
     uint32_t invalidationCount_;
 
     // Identifier of the compilation which produced this code.
     RecompileInfo recompileInfo_;
 
     // The optimization level this script was compiled in.
     OptimizationLevel optimizationLevel_;
 
     // Number of times we tried to enter this script via OSR but failed due to
     // a LOOPENTRY pc other than osrPc_.
     uint32_t osrPcMismatchCounter_;
 
+    // Allocated space for fallback stubs.
+    FallbackICStubSpace fallbackStubSpace_;
+
     // The tracelogger event used to log the start/stop of this IonScript.
     TraceLoggerEvent traceLoggerScriptEvent_;
 
   private:
     inline uint8_t* bottomBuffer() {
         return reinterpret_cast<uint8_t*>(this);
     }
     inline const uint8_t* bottomBuffer() const {
@@ -322,17 +331,18 @@ struct IonScript
 
     static IonScript* New(JSContext* cx, RecompileInfo recompileInfo,
                           uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
                           size_t snapshotsListSize, size_t snapshotsRVATableSize,
                           size_t recoversSize, size_t bailoutEntries,
                           size_t constants, size_t safepointIndexEntries,
                           size_t osiIndexEntries, size_t cacheEntries,
                           size_t runtimeSize, size_t safepointsSize,
-                          size_t backedgeEntries, OptimizationLevel optimizationLevel);
+                          size_t backedgeEntries, size_t sharedStubEntries,
+                          OptimizationLevel optimizationLevel);
     static void Trace(JSTracer* trc, IonScript* script);
     static void Destroy(FreeOp* fop, IonScript* script);
 
     static inline size_t offsetOfMethod() {
         return offsetof(IonScript, method_);
     }
     static inline size_t offsetOfOsrEntryOffset() {
         return offsetof(IonScript, osrEntryOffset_);
@@ -481,16 +491,22 @@ struct IonScript
     }
     inline IonCache& getCache(uint32_t offset) {
         MOZ_ASSERT(offset < runtimeSize_);
         return *(IonCache*) &runtimeData()[offset];
     }
     size_t numCaches() const {
         return cacheEntries_;
     }
+    IonICEntry* sharedStubList() {
+        return (IonICEntry*) &bottomBuffer()[sharedStubList_];
+    }
+    size_t numSharedStubs() const {
+        return sharedStubEntries_;
+    }
     size_t runtimeSize() const {
         return runtimeSize_;
     }
     CacheLocation* getCacheLocs(uint32_t locIndex) {
         MOZ_ASSERT(locIndex < runtimeSize_);
         return (CacheLocation*) &runtimeData()[locIndex];
     }
     void toggleBarriers(bool enabled);
@@ -551,16 +567,22 @@ struct IonScript
     bool isRecompiling() const {
         return recompiling_;
     }
 
     void clearRecompiling() {
         recompiling_ = false;
     }
 
+    FallbackICStubSpace* fallbackStubSpace() {
+        return &fallbackStubSpace_;
+    }
+    void adoptFallbackStubs(FallbackICStubSpace* stubSpace);
+    void purgeOptimizedStubs(Zone* zone);
+
     enum ShouldIncreaseAge {
         IncreaseAge = true,
         KeepAge = false
     };
 
     static void writeBarrierPre(Zone* zone, IonScript* ionScript);
 };
 
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -9,16 +9,17 @@
 
 #include "mozilla/Array.h"
 #include "mozilla/MemoryReporting.h"
 
 #include "jsweakcache.h"
 
 #include "builtin/TypedObject.h"
 #include "jit/CompileInfo.h"
+#include "jit/ICStubSpace.h"
 #include "jit/IonCode.h"
 #include "jit/JitFrames.h"
 #include "jit/shared/Assembler-shared.h"
 #include "js/Value.h"
 #include "vm/Stack.h"
 
 namespace js {
 namespace jit {
@@ -54,76 +55,16 @@ struct EnterJitData
 };
 
 typedef void (*EnterJitCode)(void* code, unsigned argc, Value* argv, InterpreterFrame* fp,
                              CalleeToken calleeToken, JSObject* scopeChain,
                              size_t numStackValues, Value* vp);
 
 class JitcodeGlobalTable;
 
-// ICStubSpace is an abstraction for allocation policy and storage for stub data.
-// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
-// also includes stubs that can make non-tail calls that can GC).
-//
-// Optimized stubs are allocated per-compartment and are always purged when
-// JIT-code is discarded. Fallback stubs are allocated per BaselineScript and
-// are only destroyed when the BaselineScript is destroyed.
-class ICStubSpace
-{
-  protected:
-    LifoAlloc allocator_;
-
-    explicit ICStubSpace(size_t chunkSize)
-      : allocator_(chunkSize)
-    {}
-
-  public:
-    inline void* alloc(size_t size) {
-        return allocator_.alloc(size);
-    }
-
-    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
-
-    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return allocator_.sizeOfExcludingThis(mallocSizeOf);
-    }
-};
-
-// Space for optimized stubs. Every JitCompartment has a single
-// OptimizedICStubSpace.
-struct OptimizedICStubSpace : public ICStubSpace
-{
-    static const size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;
-
-  public:
-    OptimizedICStubSpace()
-      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
-    {}
-
-    void free() {
-        allocator_.freeAll();
-    }
-};
-
-// Space for fallback stubs. Every BaselineScript has a
-// FallbackICStubSpace.
-struct FallbackICStubSpace : public ICStubSpace
-{
-    static const size_t STUB_DEFAULT_CHUNK_SIZE = 256;
-
-  public:
-    FallbackICStubSpace()
-      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
-    {}
-
-    inline void adoptFrom(FallbackICStubSpace* other) {
-        allocator_.steal(&(other->allocator_));
-    }
-};
-
 // Information about a loop backedge in the runtime, which can be set to
 // point to either the loop header or to an OOL interrupt checking stub,
 // if signal handlers are being used to implement interrupts.
 class PatchableBackedge : public InlineListNode<PatchableBackedge>
 {
     friend class JitRuntime;
 
     CodeLocationJump backedge;
--- a/js/src/jit/LIR.h
+++ b/js/src/jit/LIR.h
@@ -247,22 +247,22 @@ class LUse : public LAllocation
         set(policy, 0, usedAtStart);
     }
     explicit LUse(Register reg, bool usedAtStart = false) {
         set(FIXED, reg.code(), usedAtStart);
     }
     explicit LUse(FloatRegister reg, bool usedAtStart = false) {
         set(FIXED, reg.code(), usedAtStart);
     }
-    LUse(Register reg, uint32_t virtualRegister) {
-        set(FIXED, reg.code(), false);
+    LUse(Register reg, uint32_t virtualRegister, bool usedAtStart = false) {
+        set(FIXED, reg.code(), usedAtStart);
         setVirtualRegister(virtualRegister);
     }
-    LUse(FloatRegister reg, uint32_t virtualRegister) {
-        set(FIXED, reg.code(), false);
+    LUse(FloatRegister reg, uint32_t virtualRegister, bool usedAtStart = false) {
+        set(FIXED, reg.code(), usedAtStart);
         setVirtualRegister(virtualRegister);
     }
 
     void setVirtualRegister(uint32_t index) {
         MOZ_ASSERT(index < VREG_MASK);
 
         uint32_t old = data() & ~(VREG_MASK << VREG_SHIFT);
         setData(old | (index << VREG_SHIFT));
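
The extra usedAtStart parameter threaded through LUse here (and through the per-platform useBoxFixed overloads below) lets the shared-stub calls pin their boxed operands to fixed registers while still telling the register allocator that those registers are only needed at the start of the instruction, so they may be reused for the call's definitions and temps. Condensed from the NUNBOX32 useBoxFixed bodies that follow:

    // Fixed, at-start box use: |reg1| holds the type tag, |reg2| the payload.
    lir->setOperand(n, LUse(reg1, mir->virtualRegister(), /* usedAtStart = */ true));
    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), /* usedAtStart = */ true));
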
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -26,16 +26,26 @@ using JS::GenericNaN;
 
 void
 LIRGenerator::useBoxAtStart(LInstruction* lir, size_t n, MDefinition* mir, LUse::Policy policy)
 {
     return useBox(lir, n, mir, policy, true);
 }
 
 void
+LIRGenerator::useBoxFixedAtStart(LInstruction* lir, size_t n, MDefinition* mir, ValueOperand op)
+{
+#if defined(JS_NUNBOX32)
+    return useBoxFixed(lir, n, mir, op.typeReg(), op.payloadReg(), true);
+#elif defined(JS_PUNBOX64)
+    return useBoxFixed(lir, n, mir, op.valueReg(), op.scratchReg(), true);
+#endif
+}
+
+void
 LIRGenerator::visitCloneLiteral(MCloneLiteral* ins)
 {
     MOZ_ASSERT(ins->type() == MIRType_Object);
     MOZ_ASSERT(ins->input()->type() == MIRType_Object);
 
     LCloneLiteral* lir = new(alloc()) LCloneLiteral(useRegisterAtStart(ins->input()));
     defineReturn(lir, ins);
     assignSafepoint(lir, ins);
@@ -2108,16 +2118,48 @@ LIRGenerator::visitStringReplace(MString
     LStringReplace* lir = new(alloc()) LStringReplace(useRegisterOrConstantAtStart(ins->string()),
                                                       useRegisterAtStart(ins->pattern()),
                                                       useRegisterOrConstantAtStart(ins->replacement()));
     defineReturn(lir, ins);
     assignSafepoint(lir, ins);
 }
 
 void
+LIRGenerator::visitBinarySharedStub(MBinarySharedStub* ins)
+{
+    MDefinition* lhs = ins->getOperand(0);
+    MDefinition* rhs = ins->getOperand(1);
+
+    MOZ_ASSERT(ins->type() == MIRType_Value);
+    MOZ_ASSERT(ins->type() == MIRType_Value);
+
+    LBinarySharedStub* lir = new(alloc()) LBinarySharedStub();
+
+    useBoxFixedAtStart(lir, LBinarySharedStub::LhsInput, lhs, R0);
+    useBoxFixedAtStart(lir, LBinarySharedStub::RhsInput, rhs, R1);
+
+    defineSharedStubReturn(lir, ins);
+    assignSafepoint(lir, ins);
+}
+
+void
+LIRGenerator::visitUnarySharedStub(MUnarySharedStub* ins)
+{
+    MDefinition* input = ins->getOperand(0);
+    MOZ_ASSERT(ins->type() == MIRType_Value);
+
+    LUnarySharedStub* lir = new(alloc()) LUnarySharedStub();
+
+    useBoxFixedAtStart(lir, LUnarySharedStub::Input, input, R0);
+
+    defineSharedStubReturn(lir, ins);
+    assignSafepoint(lir, ins);
+}
+
+void
 LIRGenerator::visitLambda(MLambda* ins)
 {
     if (ins->info().singletonType || ins->info().useSingletonForClone) {
         // If the function has a singleton type, this instruction will only be
         // executed once so we don't bother inlining it.
         //
         // If UseSingletonForClone is true, we will assign a singleton type to
         // the clone and we have to clone the script, we can't do that inline.
--- a/js/src/jit/Lowering.h
+++ b/js/src/jit/Lowering.h
@@ -45,16 +45,17 @@ class LIRGenerator : public LIRGenerator
     { }
 
     bool generate();
 
   private:
 
     void useBoxAtStart(LInstruction* lir, size_t n, MDefinition* mir,
                        LUse::Policy policy = LUse::REGISTER);
+    void useBoxFixedAtStart(LInstruction* lir, size_t n, MDefinition* mir, ValueOperand op);
 
     void lowerBitOp(JSOp op, MInstruction* ins);
     void lowerShiftOp(JSOp op, MShiftInstruction* ins);
     void lowerBinaryV(JSOp op, MBinaryInstruction* ins);
     void definePhis();
 
     void lowerCallArguments(MCall* call);
 
@@ -155,16 +156,18 @@ class LIRGenerator : public LIRGenerator
     void visitTruncateToInt32(MTruncateToInt32* truncate);
     void visitToString(MToString* convert);
     void visitToObjectOrNull(MToObjectOrNull* convert);
     void visitRegExp(MRegExp* ins);
     void visitRegExpExec(MRegExpExec* ins);
     void visitRegExpTest(MRegExpTest* ins);
     void visitRegExpReplace(MRegExpReplace* ins);
     void visitStringReplace(MStringReplace* ins);
+    void visitBinarySharedStub(MBinarySharedStub* ins);
+    void visitUnarySharedStub(MUnarySharedStub* ins);
     void visitLambda(MLambda* ins);
     void visitLambdaArrow(MLambdaArrow* ins);
     void visitKeepAliveObject(MKeepAliveObject* ins);
     void visitSlots(MSlots* ins);
     void visitElements(MElements* ins);
     void visitConstantElements(MConstantElements* ins);
     void visitConvertElementsToDoubles(MConvertElementsToDoubles* ins);
     void visitMaybeToDoubleElement(MMaybeToDoubleElement* ins);
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -7061,16 +7061,55 @@ class MOsrReturnValue
         return new(alloc) MOsrReturnValue(entry);
     }
 
     MOsrEntry* entry() {
         return getOperand(0)->toOsrEntry();
     }
 };
 
+class MBinarySharedStub
+  : public MBinaryInstruction,
+    public MixPolicy<BoxPolicy<0>, BoxPolicy<1> >::Data
+{
+    explicit MBinarySharedStub(MDefinition* left, MDefinition* right)
+      : MBinaryInstruction(left, right)
+    {
+        setResultType(MIRType_Value);
+    }
+
+  public:
+    INSTRUCTION_HEADER(BinarySharedStub)
+
+    static MBinarySharedStub* New(TempAllocator& alloc, MDefinition* left, MDefinition* right)
+    {
+        return new(alloc) MBinarySharedStub(left, right);
+    }
+
+};
+
+class MUnarySharedStub
+  : public MUnaryInstruction,
+    public BoxPolicy<0>::Data
+{
+    explicit MUnarySharedStub(MDefinition* input)
+      : MUnaryInstruction(input)
+    {
+        setResultType(MIRType_Value);
+    }
+
+  public:
+    INSTRUCTION_HEADER(UnarySharedStub)
+
+    static MUnarySharedStub* New(TempAllocator& alloc, MDefinition* input)
+    {
+        return new(alloc) MUnarySharedStub(input);
+    }
+};
+
 // Check the current frame for over-recursion past the global stack limit.
 class MCheckOverRecursed
   : public MNullaryInstruction
 {
   public:
     INSTRUCTION_HEADER(CheckOverRecursed)
 
     static MCheckOverRecursed* New(TempAllocator& alloc) {
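
Nothing in this patch creates the two new MIR nodes yet, which is why the visitBinarySharedStub/visitUnarySharedStub switches in CodeGenerator crash on every op. A hypothetical builder-side use, once a later part starts emitting them (the surrounding plumbing is the usual IonBuilder idiom, not code from this patch):

    MDefinition* right = current->pop();
    MDefinition* left = current->pop();
    MBinarySharedStub* ins = MBinarySharedStub::New(alloc(), left, right);
    current->add(ins);
    current->push(ins);
    if (!resumeAfter(ins))   // codegen reads resumePoint()->pc(), so a resume point is required
        return false;
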
--- a/js/src/jit/MOpcodes.h
+++ b/js/src/jit/MOpcodes.h
@@ -44,16 +44,18 @@ namespace jit {
     _(Compare)                                                              \
     _(Phi)                                                                  \
     _(Beta)                                                                 \
     _(OsrValue)                                                             \
     _(OsrScopeChain)                                                        \
     _(OsrReturnValue)                                                       \
     _(OsrArgumentsObject)                                                   \
     _(ReturnFromCtor)                                                       \
+    _(BinarySharedStub)                                                     \
+    _(UnarySharedStub)                                                      \
     _(CheckOverRecursed)                                                    \
     _(DefVar)                                                               \
     _(DefFun)                                                               \
     _(CreateThis)                                                           \
     _(CreateThisWithProto)                                                  \
     _(CreateThisWithTemplate)                                               \
     _(CreateArgumentsObject)                                                \
     _(GetArgumentsObjectArg)                                                \
--- a/js/src/jit/SharedIC.cpp
+++ b/js/src/jit/SharedIC.cpp
@@ -76,16 +76,24 @@ TypeFallbackICSpew(JSContext* cx, ICType
 #endif
 
 ICFallbackStub*
 ICEntry::fallbackStub() const
 {
     return firstStub()->getChainFallback();
 }
 
+void
+ICEntry::trace(JSTracer* trc)
+{
+    if (!hasStub())
+        return;
+    for (ICStub* stub = firstStub(); stub; stub = stub->next())
+        stub->trace(trc);
+}
 
 ICStubConstIterator&
 ICStubConstIterator::operator++()
 {
     MOZ_ASSERT(currentStub_ != nullptr);
     currentStub_ = currentStub_->next();
     return *this;
 }
@@ -137,17 +145,17 @@ ICStub::updateCode(JitCode* code)
     // Write barrier on the old code.
     JitCode::writeBarrierPre(jitCode());
     stubCode_ = code->raw();
 }
 
 /* static */ void
 ICStub::trace(JSTracer* trc)
 {
-    markCode(trc, "baseline-stub-jitcode");
+    markCode(trc, "shared-stub-jitcode");
 
     // If the stub is a monitored fallback stub, then mark the monitor ICs hanging
     // off of that stub.  We don't need to worry about the regular monitored stubs,
     // because the regular monitored stubs will always have a monitored fallback stub
     // that references the same stub chain.
     if (isMonitoredFallback()) {
         ICTypeMonitor_Fallback* lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
         for (ICStubConstIterator iter(lastMonStub->firstMonitorStub()); !iter.atEnd(); iter++) {
--- a/js/src/jit/SharedIC.h
+++ b/js/src/jit/SharedIC.h
@@ -201,22 +201,22 @@ class ICFallbackStub;
 void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...);
 void TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub, const char* fmt, ...);
 #else
 #define FallbackICSpew(...)
 #define TypeFallbackICSpew(...)
 #endif
 
 //
-// An entry in the Baseline IC descriptor table.
+// An entry in the JIT IC descriptor table.
 //
 class ICEntry
 {
   private:
-    // A pointer to the baseline IC stub for this instruction.
+    // A pointer to the shared IC stub for this instruction.
     ICStub* firstStub_;
 
     // Offset from the start of the JIT code where the IC
     // load and call instructions are.
     uint32_t returnOffset_;
 
     // The PC of this IC's bytecode op within the JSScript.
     uint32_t pcOffset_ : 28;
@@ -330,18 +330,37 @@ class ICEntry
 
     static inline size_t offsetOfFirstStub() {
         return offsetof(ICEntry, firstStub_);
     }
 
     inline ICStub** addressOfFirstStub() {
         return &firstStub_;
     }
+
+    void trace(JSTracer* trc);
 };
 
+class IonICEntry : public ICEntry
+{
+  JSScript* script_;
+
+  public:
+    IonICEntry(uint32_t pcOffset, Kind kind, JSScript* script)
+      : ICEntry(pcOffset, kind),
+        script_(script)
+    { }
+
+    JSScript* script() {
+        return script_;
+    }
+
+};
+
+
 class ICMonitoredStub;
 class ICMonitoredFallbackStub;
 class ICUpdatedStub;
 
 // Constant iterator that traverses arbitrary chains of ICStubs.
 // No requirements are made of the ICStub used to construct this
 // iterator, aside from that the stub be part of a nullptr-terminated
 // chain.
@@ -777,17 +796,17 @@ class ICFallbackStub : public ICStub
     }
 
     inline size_t numOptimizedStubs() const {
         return (size_t) numOptimizedStubs_;
     }
 
     // The icEntry and lastStubPtrAddr_ fields can't be initialized when the stub is
     // created since the stub is created at compile time, and we won't know the IC entry
-    // address until after compile when the BaselineScript is created.  This method
+    // address until after compile when the JitScript is created.  This method
     // allows these fields to be fixed up at that point.
     void fixupICEntry(ICEntry* icEntry) {
         MOZ_ASSERT(icEntry_ == nullptr);
         MOZ_ASSERT(lastStubPtrAddr_ == nullptr);
         icEntry_ = icEntry;
         lastStubPtrAddr_ = icEntry_->addressOfFirstStub();
     }
 
--- a/js/src/jit/TypePolicy.cpp
+++ b/js/src/jit/TypePolicy.cpp
@@ -1190,16 +1190,17 @@ FilterTypeSetPolicy::adjustInputs(TempAl
     _(MixPolicy<ObjectPolicy<0>, ObjectPolicy<1> >)                     \
     _(MixPolicy<ObjectPolicy<0>, StringPolicy<1> >)                     \
     _(MixPolicy<ObjectPolicy<0>, ConvertToStringPolicy<2> >)            \
     _(MixPolicy<ObjectPolicy<1>, ConvertToStringPolicy<0> >)            \
     _(MixPolicy<SimdSameAsReturnedTypePolicy<0>, SimdSameAsReturnedTypePolicy<1> >) \
     _(MixPolicy<SimdSameAsReturnedTypePolicy<0>, SimdScalarPolicy<1> >) \
     _(MixPolicy<StringPolicy<0>, IntPolicy<1> >)                        \
     _(MixPolicy<StringPolicy<0>, StringPolicy<1> >)                     \
+    _(MixPolicy<BoxPolicy<0>, BoxPolicy<1> >)                           \
     _(NoFloatPolicy<0>)                                                 \
     _(NoFloatPolicyAfter<1>)                                            \
     _(NoFloatPolicyAfter<2>)                                            \
     _(ObjectPolicy<0>)                                                  \
     _(ObjectPolicy<1>)                                                  \
     _(ObjectPolicy<3>)                                                  \
     _(SimdPolicy<0>)                                                    \
     _(SimdSameAsReturnedTypePolicy<0>)                                  \
--- a/js/src/jit/arm/Lowering-arm.cpp
+++ b/js/src/jit/arm/Lowering-arm.cpp
@@ -14,24 +14,24 @@
 
 using namespace js;
 using namespace js::jit;
 
 using mozilla::FloorLog2;
 
 void
 LIRGeneratorARM::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
-                             Register reg2)
+                             Register reg2, bool useAtStart)
 {
     MOZ_ASSERT(mir->type() == MIRType_Value);
     MOZ_ASSERT(reg1 != reg2);
 
     ensureDefined(mir);
-    lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
-    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
+    lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
+    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
 }
 
 LAllocation
 LIRGeneratorARM::useByteOpRegister(MDefinition* mir)
 {
     return useRegister(mir);
 }
 
--- a/js/src/jit/arm/Lowering-arm.h
+++ b/js/src/jit/arm/Lowering-arm.h
@@ -17,17 +17,18 @@ class LIRGeneratorARM : public LIRGenera
   public:
     LIRGeneratorARM(MIRGenerator* gen, MIRGraph& graph, LIRGraph& lirGraph)
       : LIRGeneratorShared(gen, graph, lirGraph)
     { }
 
   protected:
     // Adds a box input to an instruction, setting operand |n| to the type and
     // |n+1| to the payload.
-    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
+    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
+                     bool useAtStart = false);
 
     // x86 has constraints on what registers can be formatted for 1-byte
     // stores and loads; on ARM all registers are okay.
     LAllocation useByteOpRegister(MDefinition* mir);
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);
     LDefinition tempByteOpRegister();
 
     inline LDefinition tempToUnbox() {
--- a/js/src/jit/arm64/Lowering-arm64.cpp
+++ b/js/src/jit/arm64/Lowering-arm64.cpp
@@ -13,17 +13,17 @@
 #include "jit/shared/Lowering-shared-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 using mozilla::FloorLog2;
 
 void
-LIRGeneratorARM64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register)
+LIRGeneratorARM64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart)
 {
     MOZ_CRASH("useBoxFixed");
 }
 
 LAllocation
 LIRGeneratorARM64::useByteOpRegister(MDefinition* mir)
 {
     MOZ_CRASH("useByteOpRegister");
--- a/js/src/jit/arm64/Lowering-arm64.h
+++ b/js/src/jit/arm64/Lowering-arm64.h
@@ -17,17 +17,18 @@ class LIRGeneratorARM64 : public LIRGene
   public:
     LIRGeneratorARM64(MIRGenerator* gen, MIRGraph& graph, LIRGraph& lirGraph)
       : LIRGeneratorShared(gen, graph, lirGraph)
     { }
 
   protected:
     // Adds a box input to an instruction, setting operand |n| to the type and
     // |n+1| to the payload.
-    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
+    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
+                     bool useAtStart = false);
 
     LAllocation useByteOpRegister(MDefinition* mir);
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);
 
     inline LDefinition tempToUnbox() {
         return temp();
     }
 
--- a/js/src/jit/mips32/Lowering-mips32.cpp
+++ b/js/src/jit/mips32/Lowering-mips32.cpp
@@ -15,24 +15,24 @@
 
 using namespace js;
 using namespace js::jit;
 
 using mozilla::FloorLog2;
 
 void
 LIRGeneratorMIPS::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
-                              Register reg2)
+                              Register reg2, bool useAtStart)
 {
     MOZ_ASSERT(mir->type() == MIRType_Value);
     MOZ_ASSERT(reg1 != reg2);
 
     ensureDefined(mir);
-    lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
-    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
+    lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
+    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
 }
 
 LAllocation
 LIRGeneratorMIPS::useByteOpRegister(MDefinition* mir)
 {
     return useRegister(mir);
 }
 
--- a/js/src/jit/mips32/Lowering-mips32.h
+++ b/js/src/jit/mips32/Lowering-mips32.h
@@ -17,17 +17,18 @@ class LIRGeneratorMIPS : public LIRGener
   protected:
     LIRGeneratorMIPS(MIRGenerator* gen, MIRGraph& graph, LIRGraph& lirGraph)
       : LIRGeneratorShared(gen, graph, lirGraph)
     { }
 
   protected:
     // Adds a box input to an instruction, setting operand |n| to the type and
     // |n+1| to the payload.
-    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
+    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
+                     bool useAtStart = false);
 
     // x86 has constraints on what registers can be formatted for 1-byte
     // stores and loads; on MIPS all registers are okay.
     LAllocation useByteOpRegister(MDefinition* mir);
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);
     LDefinition tempByteOpRegister();
 
     inline LDefinition tempToUnbox() {
--- a/js/src/jit/none/Lowering-none.h
+++ b/js/src/jit/none/Lowering-none.h
@@ -16,17 +16,17 @@ class LIRGeneratorNone : public LIRGener
 {
   public:
     LIRGeneratorNone(MIRGenerator* gen, MIRGraph& graph, LIRGraph& lirGraph)
       : LIRGeneratorShared(gen, graph, lirGraph)
     {
         MOZ_CRASH();
     }
 
-    void useBoxFixed(LInstruction*, size_t, MDefinition*, Register, Register) { MOZ_CRASH(); }
+    void useBoxFixed(LInstruction*, size_t, MDefinition*, Register, Register, bool useAtStart = false) { MOZ_CRASH(); }
 
     LAllocation useByteOpRegister(MDefinition*) { MOZ_CRASH(); }
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition*) { MOZ_CRASH(); }
     LDefinition tempByteOpRegister() { MOZ_CRASH(); }
     LDefinition tempToUnbox() { MOZ_CRASH(); }
     bool needTempForPostBarrier() { MOZ_CRASH(); }
     void lowerUntypedPhiInput(MPhi*, uint32_t, LBlock*, size_t) { MOZ_CRASH(); }
     void defineUntypedPhi(MPhi*, size_t) { MOZ_CRASH(); }
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -50,16 +50,17 @@ CodeGeneratorShared::CodeGeneratorShared
     recovers_(),
     deoptTable_(nullptr),
 #ifdef DEBUG
     pushedArgs_(0),
 #endif
     lastOsiPointOffset_(0),
     safepoints_(graph->totalSlotCount(), (gen->info().nargs() + 1) * sizeof(Value)),
     returnLabel_(),
+    stubSpace_(),
     nativeToBytecodeMap_(nullptr),
     nativeToBytecodeMapSize_(0),
     nativeToBytecodeTableOffset_(0),
     nativeToBytecodeNumRegions_(0),
     nativeToBytecodeScriptList_(nullptr),
     nativeToBytecodeScriptListLength_(0),
     trackedOptimizationsMap_(nullptr),
     trackedOptimizationsMapSize_(0),
--- a/js/src/jit/shared/CodeGenerator-shared.h
+++ b/js/src/jit/shared/CodeGenerator-shared.h
@@ -80,16 +80,21 @@ class CodeGeneratorShared : public LElem
 #ifdef DEBUG
     uint32_t pushedArgs_;
 #endif
     uint32_t lastOsiPointOffset_;
     SafepointWriter safepoints_;
     Label invalidate_;
     CodeOffsetLabel invalidateEpilogueData_;
 
+    // Label for the common return path.
+    NonAssertingLabel returnLabel_;
+
+    FallbackICStubSpace stubSpace_;
+
     js::Vector<SafepointIndex, 0, SystemAllocPolicy> safepointIndices_;
     js::Vector<OsiIndex, 0, SystemAllocPolicy> osiIndices_;
 
     // Mapping from bailout table ID to an offset in the snapshot buffer.
     js::Vector<SnapshotOffset, 0, SystemAllocPolicy> bailouts_;
 
     // Allocated data space needed at runtime.
     js::Vector<uint8_t, 0, SystemAllocPolicy> runtimeData_;
@@ -100,19 +105,16 @@ class CodeGeneratorShared : public LElem
     // Patchable backedges generated for loops.
     Vector<PatchableBackedgeInfo, 0, SystemAllocPolicy> patchableBackedges_;
 
 #ifdef JS_TRACE_LOGGING
     js::Vector<CodeOffsetLabel, 0, SystemAllocPolicy> patchableTraceLoggers_;
     js::Vector<CodeOffsetLabel, 0, SystemAllocPolicy> patchableTLScripts_;
 #endif
 
-    // Label for the common return path.
-    NonAssertingLabel returnLabel_;
-
   public:
     struct NativeToBytecode {
         CodeOffsetLabel nativeOffset;
         InlineScriptTree* tree;
         jsbytecode* pc;
     };
 
   protected:
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -3915,16 +3915,41 @@ class LStringReplace: public LStrReplace
     {
     }
 
     const MStringReplace* mir() const {
         return mir_->toStringReplace();
     }
 };
 
+class LBinarySharedStub : public LCallInstructionHelper<BOX_PIECES, 2 * BOX_PIECES, 0>
+{
+  public:
+    LIR_HEADER(BinarySharedStub)
+
+    const MBinarySharedStub* mir() const {
+        return mir_->toBinarySharedStub();
+    }
+
+    static const size_t LhsInput = 0;
+    static const size_t RhsInput = BOX_PIECES;
+};
+
+class LUnarySharedStub : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES, 0>
+{
+  public:
+    LIR_HEADER(UnarySharedStub)
+
+    const MUnarySharedStub* mir() const {
+        return mir_->toUnarySharedStub();
+    }
+
+    static const size_t Input = 0;
+};
+
 class LLambdaForSingleton : public LCallInstructionHelper<1, 1, 0>
 {
   public:
     LIR_HEADER(LambdaForSingleton)
 
     explicit LLambdaForSingleton(const LAllocation& scopeChain)
     {
         setOperand(0, scopeChain);
--- a/js/src/jit/shared/LOpcodes-shared.h
+++ b/js/src/jit/shared/LOpcodes-shared.h
@@ -188,16 +188,18 @@
     _(OsrReturnValue)               \
     _(OsrArgumentsObject)           \
     _(RegExp)                       \
     _(RegExpExec)                   \
     _(RegExpTest)                   \
     _(RegExpReplace)                \
     _(StringReplace)                \
     _(Substr)                       \
+    _(BinarySharedStub)             \
+    _(UnarySharedStub)              \
     _(Lambda)                       \
     _(LambdaArrow)                  \
     _(LambdaForSingleton)           \
     _(KeepAliveObject)              \
     _(Slots)                        \
     _(Elements)                     \
     _(ConvertElementsToDoubles)     \
     _(MaybeToDoubleElement)         \
--- a/js/src/jit/shared/Lowering-shared-inl.h
+++ b/js/src/jit/shared/Lowering-shared-inl.h
@@ -104,16 +104,40 @@ LIRGeneratorShared::defineBox(LInstructi
 #endif
     lir->setMir(mir);
 
     mir->setVirtualRegister(vreg);
     add(lir);
 }
 
 void
+LIRGeneratorShared::defineSharedStubReturn(LInstruction* lir, MDefinition* mir)
+{
+    lir->setMir(mir);
+
+    MOZ_ASSERT(lir->isBinarySharedStub() || lir->isUnarySharedStub());
+    MOZ_ASSERT(mir->type() == MIRType_Value);
+
+    uint32_t vreg = getVirtualRegister();
+
+#if defined(JS_NUNBOX32)
+    lir->setDef(TYPE_INDEX, LDefinition(vreg + VREG_TYPE_OFFSET, LDefinition::TYPE,
+                                        LGeneralReg(JSReturnReg_Type)));
+    lir->setDef(PAYLOAD_INDEX, LDefinition(vreg + VREG_DATA_OFFSET, LDefinition::PAYLOAD,
+                                           LGeneralReg(JSReturnReg_Data)));
+    getVirtualRegister();
+#elif defined(JS_PUNBOX64)
+    lir->setDef(0, LDefinition(vreg, LDefinition::BOX, LGeneralReg(JSReturnReg)));
+#endif
+
+    mir->setVirtualRegister(vreg);
+    add(lir);
+}
+
+void
 LIRGeneratorShared::defineReturn(LInstruction* lir, MDefinition* mir)
 {
     lir->setMir(mir);
 
     MOZ_ASSERT(lir->isCall());
 
     uint32_t vreg = getVirtualRegister();
 
--- a/js/src/jit/shared/Lowering-shared.h
+++ b/js/src/jit/shared/Lowering-shared.h
@@ -137,16 +137,17 @@ class LIRGeneratorShared : public MDefin
     template <size_t Ops, size_t Temps>
     inline void defineFixed(LInstructionHelper<1, Ops, Temps>* lir, MDefinition* mir,
                             const LAllocation& output);
 
     template <size_t Ops, size_t Temps>
     inline void defineBox(LInstructionHelper<BOX_PIECES, Ops, Temps>* lir, MDefinition* mir,
                           LDefinition::Policy policy = LDefinition::REGISTER);
 
+    inline void defineSharedStubReturn(LInstruction* lir, MDefinition* mir);
     inline void defineReturn(LInstruction* lir, MDefinition* mir);
 
     template <size_t X>
     inline void define(details::LInstructionFixedDefsTempsHelper<1, X>* lir, MDefinition* mir,
                        LDefinition::Policy policy = LDefinition::REGISTER);
     template <size_t X>
     inline void define(details::LInstructionFixedDefsTempsHelper<1, X>* lir, MDefinition* mir,
                        const LDefinition& def);
--- a/js/src/jit/x64/Lowering-x64.cpp
+++ b/js/src/jit/x64/Lowering-x64.cpp
@@ -10,22 +10,22 @@
 #include "jit/x64/Assembler-x64.h"
 
 #include "jit/shared/Lowering-shared-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 void
-LIRGeneratorX64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register)
+LIRGeneratorX64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart)
 {
     MOZ_ASSERT(mir->type() == MIRType_Value);
 
     ensureDefined(mir);
-    lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
+    lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
 }
 
 LAllocation
 LIRGeneratorX64::useByteOpRegister(MDefinition* mir)
 {
     return useRegister(mir);
 }
 
--- a/js/src/jit/x64/Lowering-x64.h
+++ b/js/src/jit/x64/Lowering-x64.h
@@ -19,17 +19,17 @@ class LIRGeneratorX64 : public LIRGenera
       : LIRGeneratorX86Shared(gen, graph, lirGraph)
     { }
 
   protected:
     void lowerUntypedPhiInput(MPhi* phi, uint32_t inputPosition, LBlock* block, size_t lirIndex);
     void defineUntypedPhi(MPhi* phi, size_t lirIndex);
 
     // Adds a use at operand |n| of a value-typed instruction.
-    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register);
+    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart = false);
 
     // x86 has constraints on what registers can be formatted for 1-byte
     // stores and loads; on x64 all registers are okay.
     LAllocation useByteOpRegister(MDefinition* mir);
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);
     LDefinition tempByteOpRegister();
 
     LDefinition tempToUnbox();
--- a/js/src/jit/x86/Lowering-x86.cpp
+++ b/js/src/jit/x86/Lowering-x86.cpp
@@ -11,24 +11,24 @@
 
 #include "jit/shared/Lowering-shared-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 void
 LIRGeneratorX86::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
-                             Register reg2)
+                             Register reg2, bool useAtStart)
 {
     MOZ_ASSERT(mir->type() == MIRType_Value);
     MOZ_ASSERT(reg1 != reg2);
 
     ensureDefined(mir);
-    lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
-    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
+    lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
+    lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
 }
 
 LAllocation
 LIRGeneratorX86::useByteOpRegister(MDefinition* mir)
 {
     return useFixed(mir, eax);
 }
 
--- a/js/src/jit/x86/Lowering-x86.h
+++ b/js/src/jit/x86/Lowering-x86.h
@@ -17,17 +17,18 @@ class LIRGeneratorX86 : public LIRGenera
   public:
     LIRGeneratorX86(MIRGenerator* gen, MIRGraph& graph, LIRGraph& lirGraph)
       : LIRGeneratorX86Shared(gen, graph, lirGraph)
     { }
 
   protected:
     // Adds a box input to an instruction, setting operand |n| to the type and
     // |n+1| to the payload.
-    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
+    void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
+                     bool useAtStart = false);
 
     // It's a trap! On x86, the 1-byte store can only use one of
     // {al,bl,cl,dl,ah,bh,ch,dh}. That means if the register allocator
     // gives us one of {edi,esi,ebp,esp}, we're out of luck. (The formatter
     // will assert on us.) Ideally, we'd just ask the register allocator to
     // give us one of {al,bl,cl,dl}. For now, just useFixed(al).
     LAllocation useByteOpRegister(MDefinition* mir);
     LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);