Bug 1277973 - Baldr: make the throw stub not be a JumpTarget (r=bbouvier)
author     Luke Wagner <luke@mozilla.com>
date       Thu, 13 Oct 2016 13:17:54 -0500
changeset  317898 e3f5d150e1baec0ad3289c15e73c7ef816f590cc
parent     317897 9126be480c4594a25352ce6e585fb8b6afe5ef6f
child      317899 9a51acfd009478c4120527415b2f5c15c59d14cb
push id    33170
push user  cbook@mozilla.com
push date  Fri, 14 Oct 2016 10:37:07 +0000
treeherder autoland@0d101ebfd95c
reviewers  bbouvier
bugs       1277973
milestone  52.0a1
Bug 1277973 - Baldr: make the throw stub not be a JumpTarget (r=bbouvier) MozReview-Commit-ID: 3rf0z6BFkBu
js/src/asmjs/WasmGenerator.cpp
js/src/asmjs/WasmInstance.h
js/src/asmjs/WasmStubs.cpp
js/src/asmjs/WasmStubs.h
js/src/asmjs/WasmTypes.h
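
The patch threads a single jit::Label (throwLabel) through every stub generator and binds it in the new GenerateThrowStub, so stubs branch to a label inside the same temporary MacroAssembler instead of jumping to a JumpTarget::Throw entry. The sketch below is a hypothetical toy illustration of that pattern only; the Label/Assembler types are stand-ins, not the real jit:: API.

    // Toy sketch of the shared-throwLabel pattern (not SpiderMonkey code).
    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Label {
        bool bound = false;
        uint32_t offset = 0;            // offset of the bound position
        std::vector<uint32_t> uses;     // branch slots waiting to be patched
    };

    struct Assembler {
        std::vector<int32_t> code;      // one "instruction" per slot; branch slots hold a target offset

        uint32_t currentOffset() const { return uint32_t(code.size()); }

        void branchToThrow(Label* throwLabel) {
            if (throwLabel->bound) {
                code.push_back(int32_t(throwLabel->offset));
            } else {
                throwLabel->uses.push_back(currentOffset());
                code.push_back(-1);     // placeholder, patched in bind()
            }
        }

        void bind(Label* label) {
            label->bound = true;
            label->offset = currentOffset();
            for (uint32_t use : label->uses)
                code[use] = int32_t(label->offset);
            label->uses.clear();
        }
    };

    // Mirrors the new signatures: each generator takes the shared throwLabel.
    static void GenerateTrapStubSketch(Assembler& masm, Label* throwLabel) {
        masm.code.push_back(0);         // pretend: call HandleTrap
        masm.branchToThrow(throwLabel); // was: masm.jump(JumpTarget::Throw)
    }

    static void GenerateThrowStubSketch(Assembler& masm, Label* throwLabel) {
        masm.bind(throwLabel);          // all earlier branches now resolve here
        masm.code.push_back(1);         // pretend: unwind and return false
    }

    int main() {
        Assembler masm;
        Label throwLabel;
        GenerateTrapStubSketch(masm, &throwLabel);
        GenerateThrowStubSketch(masm, &throwLabel);
        assert(masm.code[1] == int32_t(throwLabel.offset));
        return 0;
    }

Because every stub is emitted into the one assembler created in finishCodegen, the throw path never needs the cross-jump machinery that JumpTargets go through, which is what lets the Throw entry be dropped from the enum in WasmTypes.h below.
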
--- a/js/src/asmjs/WasmGenerator.cpp
+++ b/js/src/asmjs/WasmGenerator.cpp
@@ -437,39 +437,42 @@ ModuleGenerator::finishCodegen()
     // larger than the JumpImmediateRange, even local uses of Label will fail
     // due to the large absolute offsets temporarily stored by Label::bind().
 
     OffsetVector entries;
     ProfilingOffsetVector interpExits;
     ProfilingOffsetVector jitExits;
     EnumeratedArray<JumpTarget, JumpTarget::Limit, Offsets> jumpTargets;
     Offsets interruptExit;
+    Offsets throwStub;
 
     {
         TempAllocator alloc(&lifo_);
         MacroAssembler masm(MacroAssembler::AsmJSToken(), alloc);
+        Label throwLabel;
 
         if (!entries.resize(numFuncDefExports))
             return false;
         for (uint32_t i = 0; i < numFuncDefExports; i++)
             entries[i] = GenerateEntry(masm, metadata_->funcDefExports[i]);
 
         if (!interpExits.resize(numFuncImports()))
             return false;
         if (!jitExits.resize(numFuncImports()))
             return false;
         for (uint32_t i = 0; i < numFuncImports(); i++) {
-            interpExits[i] = GenerateInterpExit(masm, metadata_->funcImports[i], i);
-            jitExits[i] = GenerateJitExit(masm, metadata_->funcImports[i]);
+            interpExits[i] = GenerateInterpExit(masm, metadata_->funcImports[i], i, &throwLabel);
+            jitExits[i] = GenerateJitExit(masm, metadata_->funcImports[i], &throwLabel);
         }
 
         for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit))
-            jumpTargets[target] = GenerateJumpTarget(masm, target);
+            jumpTargets[target] = GenerateJumpTarget(masm, target, &throwLabel);
 
-        interruptExit = GenerateInterruptStub(masm);
+        interruptExit = GenerateInterruptStub(masm, &throwLabel);
+        throwStub = GenerateThrowStub(masm, &throwLabel);
 
         if (masm.oom() || !masm_.asmMergeWith(masm))
             return false;
     }
 
     // Adjust each of the resulting Offsets (to account for being merged into
     // masm_) and then create code ranges for all the stubs.
 
@@ -497,16 +500,20 @@ ModuleGenerator::finishCodegen()
         if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, jumpTargets[target]))
             return false;
     }
 
     interruptExit.offsetBy(offsetInWhole);
     if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, interruptExit))
         return false;
 
+    throwStub.offsetBy(offsetInWhole);
+    if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, throwStub))
+        return false;
+
     // Fill in LinkData with the offsets of these stubs.
 
     linkData_.interruptOffset = interruptExit.begin;
     linkData_.outOfBoundsOffset = jumpTargets[JumpTarget::OutOfBounds].begin;
     linkData_.unalignedAccessOffset = jumpTargets[JumpTarget::UnalignedAccess].begin;
 
     // Only call convertOutOfRangeBranchesToThunks after all other codegen that may
     // emit new jumps to JumpTargets has finished.
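
The throwStub Offsets recorded above are relative to the temporary MacroAssembler; after asmMergeWith they are rebased by offsetInWhole before being registered as a CodeRange. A simplified, hypothetical sketch of that bookkeeping, with plain byte vectors standing in for the assemblers:

    // Toy illustration of offsetBy() after merging code buffers (not SpiderMonkey code).
    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Offsets {
        uint32_t begin;
        uint32_t end;
        void offsetBy(uint32_t delta) { begin += delta; end += delta; }
    };

    int main() {
        std::vector<uint8_t> moduleCode(4096, 0);   // code emitted before the stubs
        std::vector<uint8_t> stubCode(128, 0);      // temporary assembler's output

        Offsets throwStub{16, 64};                  // offsets within stubCode

        // Merge (asmMergeWith in the real code) and rebase the stub offsets.
        uint32_t offsetInWhole = uint32_t(moduleCode.size());
        moduleCode.insert(moduleCode.end(), stubCode.begin(), stubCode.end());
        throwStub.offsetBy(offsetInWhole);

        assert(throwStub.begin == 4096 + 16 && throwStub.end == 4096 + 64);
        return 0;
    }
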
--- a/js/src/asmjs/WasmInstance.h
+++ b/js/src/asmjs/WasmInstance.h
@@ -19,36 +19,33 @@
 #ifndef wasm_instance_h
 #define wasm_instance_h
 
 #include "asmjs/WasmCode.h"
 #include "asmjs/WasmTable.h"
 #include "gc/Barrier.h"
 
 namespace js {
-
-class WasmActivation;
-
 namespace wasm {
 
 // Instance represents a wasm instance and provides all the support for runtime
 // execution of code in the instance. Instances share various immutable data
 // structures with the Module from which they were instantiated and other
 // instances instantiated from the same Module. However, an Instance has no
 // direct reference to its source Module which allows a Module to be destroyed
 // while it still has live Instances.
 
 class Instance
 {
-    JSCompartment* const                 compartment_;
-    ReadBarrieredWasmInstanceObject      object_;
-    const UniqueCode                     code_;
-    GCPtrWasmMemoryObject                memory_;
-    SharedTableVector                    tables_;
-    TlsData                              tlsData_;
+    JSCompartment* const            compartment_;
+    ReadBarrieredWasmInstanceObject object_;
+    const UniqueCode                code_;
+    GCPtrWasmMemoryObject           memory_;
+    SharedTableVector               tables_;
+    TlsData                         tlsData_;
 
     // Internal helpers:
     const void** addressOfSigId(const SigIdDesc& sigId) const;
     FuncImportTls& funcImportTls(const FuncImport& fi);
     TableTls& tableTls(const TableDesc& td) const;
 
     // Import call slow paths which are called directly from wasm code.
     friend void* AddressOf(SymbolicAddress, ExclusiveContext*);
--- a/js/src/asmjs/WasmStubs.cpp
+++ b/js/src/asmjs/WasmStubs.cpp
@@ -429,17 +429,18 @@ FillArgumentArray(MacroAssembler& masm, 
         }
     }
 }
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into an appropriate callImport C++
 // function, having boxed all the ABI arguments into a homogeneous Value array.
 ProfilingOffsets
-wasm::GenerateInterpExit(MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex)
+wasm::GenerateInterpExit(MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex,
+                         Label* throwLabel)
 {
     const Sig& sig = fi.sig();
 
     masm.setFramePushed(0);
 
     // Argument types for Module::callImport_*:
     static const MIRType typeArray[] = { MIRType::Pointer,   // Instance*
                                          MIRType::Pointer,   // funcImportIndex
@@ -503,37 +504,37 @@ wasm::GenerateInterpExit(MacroAssembler&
     i++;
     MOZ_ASSERT(i.done());
 
     // Make the call, test whether it succeeded, and extract the return value.
     AssertStackAlignment(masm, ABIStackAlignment);
     switch (sig.ret()) {
       case ExprType::Void:
         masm.call(SymbolicAddress::CallImport_Void);
-        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         break;
       case ExprType::I32:
         masm.call(SymbolicAddress::CallImport_I32);
-        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         masm.load32(argv, ReturnReg);
         break;
       case ExprType::I64:
         masm.call(SymbolicAddress::CallImport_I64);
-        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         masm.load64(argv, ReturnReg64);
         break;
       case ExprType::F32:
         masm.call(SymbolicAddress::CallImport_F64);
-        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         masm.loadDouble(argv, ReturnDoubleReg);
         masm.convertDoubleToFloat32(ReturnDoubleReg, ReturnFloat32Reg);
         break;
       case ExprType::F64:
         masm.call(SymbolicAddress::CallImport_F64);
-        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         masm.loadDouble(argv, ReturnDoubleReg);
         break;
       case ExprType::I8x16:
       case ExprType::I16x8:
       case ExprType::I32x4:
       case ExprType::F32x4:
       case ExprType::B8x16:
       case ExprType::B16x8:
@@ -563,17 +564,17 @@ wasm::GenerateInterpExit(MacroAssembler&
 }
 
 static const unsigned SavedTlsReg = sizeof(void*);
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into a compatible JIT function,
 // having boxed all the ABI arguments into the JIT stack frame layout.
 ProfilingOffsets
-wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi)
+wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi, Label* throwLabel)
 {
     const Sig& sig = fi.sig();
 
     masm.setFramePushed(0);
 
     // JIT calls use the following stack layout (sp grows to the left):
     //   | retaddr | descriptor | callee | argc | this | arg1..N |
     // After the JIT frame, the global register (if present) is saved since the
@@ -695,17 +696,17 @@ wasm::GenerateJitExit(MacroAssembler& ma
     //   (sp + sizeof(void*)) % JitStackAlignment == 0
     // But now we possibly want to call one of several different C++ functions,
     // so subtract the sizeof(void*) so that sp is aligned for an ABI call.
     static_assert(ABIStackAlignment <= JitStackAlignment, "subsumes");
     masm.reserveStack(sizeOfRetAddr);
     unsigned nativeFramePushed = masm.framePushed();
     AssertStackAlignment(masm, ABIStackAlignment);
 
-    masm.branchTestMagic(Assembler::Equal, JSReturnOperand, JumpTarget::Throw);
+    masm.branchTestMagic(Assembler::Equal, JSReturnOperand, throwLabel);
 
     Label oolConvert;
     switch (sig.ret()) {
       case ExprType::Void:
         break;
       case ExprType::I32:
         masm.convertValueToInt32(JSReturnOperand, ReturnDoubleReg, ReturnReg, &oolConvert,
                                  /* -0 check */ false);
@@ -771,27 +772,27 @@ wasm::GenerateJitExit(MacroAssembler& ma
         i++;
         MOZ_ASSERT(i.done());
 
         // Call coercion function
         AssertStackAlignment(masm, ABIStackAlignment);
         switch (sig.ret()) {
           case ExprType::I32:
             masm.call(SymbolicAddress::CoerceInPlace_ToInt32);
-            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
             masm.unboxInt32(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnReg);
             break;
           case ExprType::F64:
             masm.call(SymbolicAddress::CoerceInPlace_ToNumber);
-            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
             masm.loadDouble(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnDoubleReg);
             break;
           case ExprType::F32:
             masm.call(SymbolicAddress::CoerceInPlace_ToNumber);
-            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, JumpTarget::Throw);
+            masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
             masm.loadDouble(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnDoubleReg);
             masm.convertDoubleToFloat32(ReturnDoubleReg, ReturnFloat32Reg);
             break;
           default:
             MOZ_CRASH("Unsupported convert type");
         }
 
         masm.jump(&done);
@@ -803,17 +804,17 @@ wasm::GenerateJitExit(MacroAssembler& ma
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
 // Generate a stub that is called immediately after the prologue when there is a
 // stack overflow. This stub calls a C++ function to report the error and then
 // jumps to the throw stub to pop the activation.
 static Offsets
-GenerateStackOverflow(MacroAssembler& masm)
+GenerateStackOverflow(MacroAssembler& masm, Label* throwLabel)
 {
     masm.haltingAlign(CodeAlignment);
 
     Offsets offsets;
     offsets.begin = masm.currentOffset();
 
     // If we reach here via the non-profiling prologue, WasmActivation::fp has
     // not been updated. To enable stack unwinding from C++, store to it now. If
@@ -827,25 +828,25 @@ GenerateStackOverflow(MacroAssembler& ma
 
     // Prepare the stack for calling C++.
     if (uint32_t d = StackDecrementForCall(ABIStackAlignment, sizeof(AsmJSFrame), ShadowStackSpace))
         masm.subFromStackPtr(Imm32(d));
 
     // No need to restore the stack; the throw stub pops everything.
     masm.assertStackAlignment(ABIStackAlignment);
     masm.call(SymbolicAddress::ReportOverRecursed);
-    masm.jump(JumpTarget::Throw);
+    masm.jump(throwLabel);
 
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
 // Generate a stub that calls into HandleTrap with the right trap reason.
 static Offsets
-GenerateTrapStub(MacroAssembler& masm, Trap reason)
+GenerateTrapStub(MacroAssembler& masm, Trap reason, Label* throwLabel)
 {
     masm.haltingAlign(CodeAlignment);
 
     Offsets offsets;
     offsets.begin = masm.currentOffset();
 
     // sp can be anything at this point, so ensure it is aligned when calling
     // into C++.  We unconditionally jump to throw so don't worry about
@@ -864,73 +865,38 @@ GenerateTrapStub(MacroAssembler& masm, T
         masm.store32(Imm32(int32_t(reason)),
                      Address(masm.getStackPointer(), i->offsetFromArgBase()));
     }
 
     i++;
     MOZ_ASSERT(i.done());
 
     masm.call(SymbolicAddress::HandleTrap);
-    masm.jump(JumpTarget::Throw);
-
-    offsets.end = masm.currentOffset();
-    return offsets;
-}
-
-// If an exception is thrown, simply pop all frames (since asm.js does not
-// contain try/catch). To do this:
-//  1. Restore 'sp' to it's value right after the PushRegsInMask in GenerateEntry.
-//  2. PopRegsInMask to restore the caller's non-volatile registers.
-//  3. Return (to CallAsmJS).
-static Offsets
-GenerateThrow(MacroAssembler& masm)
-{
-    masm.haltingAlign(CodeAlignment);
-
-    Offsets offsets;
-    offsets.begin = masm.currentOffset();
-
-    // We are about to pop all frames in this WasmActivation. Set fp to null to
-    // maintain the invariant that fp is either null or pointing to a valid
-    // frame.
-    Register scratch = ABINonArgReturnReg0;
-    masm.loadWasmActivationFromSymbolicAddress(scratch);
-    masm.storePtr(ImmWord(0), Address(scratch, WasmActivation::offsetOfFP()));
-
-    masm.setFramePushed(FramePushedForEntrySP);
-    masm.loadStackPtr(Address(scratch, WasmActivation::offsetOfEntrySP()));
-    masm.Pop(scratch);
-    masm.PopRegsInMask(NonVolatileRegs);
-    MOZ_ASSERT(masm.framePushed() == 0);
-
-    masm.mov(ImmWord(0), ReturnReg);
-    masm.ret();
+    masm.jump(throwLabel);
 
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
 Offsets
-wasm::GenerateJumpTarget(MacroAssembler& masm, JumpTarget target)
+wasm::GenerateJumpTarget(MacroAssembler& masm, JumpTarget target, Label* throwLabel)
 {
     switch (target) {
       case JumpTarget::StackOverflow:
-        return GenerateStackOverflow(masm);
-      case JumpTarget::Throw:
-        return GenerateThrow(masm);
+        return GenerateStackOverflow(masm, throwLabel);
       case JumpTarget::IndirectCallToNull:
       case JumpTarget::IndirectCallBadSig:
       case JumpTarget::OutOfBounds:
       case JumpTarget::UnalignedAccess:
       case JumpTarget::Unreachable:
       case JumpTarget::IntegerOverflow:
       case JumpTarget::InvalidConversionToInteger:
       case JumpTarget::IntegerDivideByZero:
       case JumpTarget::ImpreciseSimdConversion:
-        return GenerateTrapStub(masm, Trap(target));
+        return GenerateTrapStub(masm, Trap(target), throwLabel);
       case JumpTarget::Limit:
         break;
     }
     MOZ_CRASH("bad JumpTarget");
 }
 
 static const LiveRegisterSet AllRegsExceptSP(
     GeneralRegisterSet(Registers::AllMask & ~(uint32_t(1) << Registers::StackPointer)),
@@ -940,17 +906,17 @@ static const LiveRegisterSet AllRegsExce
 // code. That means we must first save *all* registers and restore *all*
 // registers (except the stack pointer) when we resume. The address to resume to
 // (assuming that js::HandleExecutionInterrupt doesn't indicate that the
 // execution should be aborted) is stored in WasmActivation::resumePC_.
 // Unfortunately, loading this requires a scratch register which we don't have
 // after restoring all registers. To hack around this, push the resumePC on the
 // stack so that it can be popped directly into PC.
 Offsets
-wasm::GenerateInterruptStub(MacroAssembler& masm)
+wasm::GenerateInterruptStub(MacroAssembler& masm, Label* throwLabel)
 {
     masm.haltingAlign(CodeAlignment);
 
     Offsets offsets;
     offsets.begin = masm.currentOffset();
 
 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
     // Be very careful here not to perturb the machine state before saving it
@@ -973,17 +939,17 @@ wasm::GenerateInterruptStub(MacroAssembl
     masm.moveStackPtrTo(ABINonVolatileReg);
     masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
     if (ShadowStackSpace)
         masm.subFromStackPtr(Imm32(ShadowStackSpace));
 
     masm.assertStackAlignment(ABIStackAlignment);
     masm.call(SymbolicAddress::HandleExecutionInterrupt);
 
-    masm.branchIfFalseBool(ReturnReg, JumpTarget::Throw);
+    masm.branchIfFalseBool(ReturnReg, throwLabel);
 
     // Restore the StackPointer to its position before the call.
     masm.moveToStackPtr(ABINonVolatileReg);
 
     // Restore the machine state to before the interrupt.
     masm.PopRegsInMask(AllRegsExceptSP); // restore all GP/FP registers (except SP)
     masm.popFlags();              // after this, nothing that sets conditions
     masm.ret();                   // pop resumePC into PC
@@ -1015,17 +981,17 @@ wasm::GenerateInterruptStub(MacroAssembl
 
     masm.assertStackAlignment(ABIStackAlignment);
     masm.call(SymbolicAddress::HandleExecutionInterrupt);
 
 # ifdef USES_O32_ABI
     masm.addToStackPtr(Imm32(4 * sizeof(intptr_t)));
 # endif
 
-    masm.branchIfFalseBool(ReturnReg, JumpTarget::Throw);
+    masm.branchIfFalseBool(ReturnReg, throwLabel);
 
     // This will restore stack to the address before the call.
     masm.moveToStackPtr(s0);
     masm.PopRegsInMask(AllRegsExceptSP);
 
     // Pop resumePC into PC. Clobber HeapReg to make the jump and restore it
     // during jump delay slot.
     masm.loadPtr(Address(StackPointer, 0), HeapReg);
@@ -1057,17 +1023,17 @@ wasm::GenerateInterruptStub(MacroAssembl
     // Save all FP registers
     static_assert(!SupportsSimd, "high lanes of SIMD registers need to be saved too.");
     masm.PushRegsInMask(LiveRegisterSet(GeneralRegisterSet(0),
                                         FloatRegisterSet(FloatRegisters::AllDoubleMask)));
 
     masm.assertStackAlignment(ABIStackAlignment);
     masm.call(SymbolicAddress::HandleExecutionInterrupt);
 
-    masm.branchIfFalseBool(ReturnReg, JumpTarget::Throw);
+    masm.branchIfFalseBool(ReturnReg, throwLabel);
 
     // Restore the machine state to before the interrupt. this will set the pc!
 
     // Restore all FP registers
     masm.PopRegsInMask(LiveRegisterSet(GeneralRegisterSet(0),
                                        FloatRegisterSet(FloatRegisters::AllDoubleMask)));
     masm.mov(r6,sp);
     masm.as_vmsr(r5);
@@ -1096,8 +1062,43 @@ wasm::GenerateInterruptStub(MacroAssembl
     MOZ_CRASH();
 #else
 # error "Unknown architecture!"
 #endif
 
     offsets.end = masm.currentOffset();
     return offsets;
 }
+
+// Generate a stub that restores the stack pointer to what it was on entry to
+// the wasm activation, sets the return register to 'false' and then executes a
+// return which will return from this wasm activation to the caller. This stub
+// should only be called after the caller has reported an error (or, in the case
+// of the interrupt stub, intends to interrupt execution).
+Offsets
+wasm::GenerateThrowStub(MacroAssembler& masm, Label* throwLabel)
+{
+    masm.haltingAlign(CodeAlignment);
+
+    masm.bind(throwLabel);
+
+    Offsets offsets;
+    offsets.begin = masm.currentOffset();
+
+    // We are about to pop all frames in this WasmActivation. Set fp to null to
+    // maintain the invariant that fp is either null or pointing to a valid
+    // frame.
+    Register scratch = ABINonArgReturnReg0;
+    masm.loadWasmActivationFromSymbolicAddress(scratch);
+    masm.storePtr(ImmWord(0), Address(scratch, WasmActivation::offsetOfFP()));
+
+    masm.setFramePushed(FramePushedForEntrySP);
+    masm.loadStackPtr(Address(scratch, WasmActivation::offsetOfEntrySP()));
+    masm.Pop(scratch);
+    masm.PopRegsInMask(NonVolatileRegs);
+    MOZ_ASSERT(masm.framePushed() == 0);
+
+    masm.mov(ImmWord(0), ReturnReg);
+    masm.ret();
+
+    offsets.end = masm.currentOffset();
+    return offsets;
+}
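
For intuition only: the comment on GenerateThrowStub describes an unwinding protocol much like a setjmp/longjmp pair. The entry stub records the stack pointer and non-volatile registers (via the WasmActivation), and the throw stub restores them and makes the entry return false. A hedged analogy in plain C++, not SpiderMonkey code; the jmp_buf stands in for the saved entry state:

    // Analogy for the throw stub's unwinding, using setjmp/longjmp.
    #include <csetjmp>
    #include <cstdio>

    static std::jmp_buf entryState;     // stands in for entry SP + saved non-volatile regs

    static void throwStub() {
        // Real stub: null out WasmActivation::fp, reload the entry SP, pop the
        // saved non-volatile registers, move 0 into ReturnReg, ret.
        std::longjmp(entryState, 1);
    }

    static void wasmCode() {
        // Some trap or failed import call branches to throwLabel...
        throwStub();
    }

    static bool callWasmEntry() {
        if (setjmp(entryState) != 0)
            return false;               // the "mov 0, ReturnReg; ret" path
        wasmCode();
        return true;                    // normal return
    }

    int main() {
        std::printf("entry returned %s\n", callWasmEntry() ? "true" : "false");
        return 0;
    }
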
--- a/js/src/asmjs/WasmStubs.h
+++ b/js/src/asmjs/WasmStubs.h
@@ -18,34 +18,38 @@
 
 #ifndef wasm_stubs_h
 #define wasm_stubs_h
 
 #include "asmjs/WasmTypes.h"
 
 namespace js {
 
-namespace jit { class MacroAssembler; }
+namespace jit { class MacroAssembler; class Label; }
 
 namespace wasm {
 
 class FuncDefExport;
 class FuncImport;
 
 extern Offsets
 GenerateEntry(jit::MacroAssembler& masm, const FuncDefExport& func);
 
 extern ProfilingOffsets
-GenerateInterpExit(jit::MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex);
+GenerateInterpExit(jit::MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex,
+                   jit::Label* throwLabel);
 
 extern ProfilingOffsets
-GenerateJitExit(jit::MacroAssembler& masm, const FuncImport& fi);
+GenerateJitExit(jit::MacroAssembler& masm, const FuncImport& fi, jit::Label* throwLabel);
 
 extern Offsets
-GenerateJumpTarget(jit::MacroAssembler& masm, JumpTarget target);
+GenerateJumpTarget(jit::MacroAssembler& masm, JumpTarget target, jit::Label* throwLabel);
 
 extern Offsets
-GenerateInterruptStub(jit::MacroAssembler& masm);
+GenerateInterruptStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
+
+extern Offsets
+GenerateThrowStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_stubs_h
--- a/js/src/asmjs/WasmTypes.h
+++ b/js/src/asmjs/WasmTypes.h
@@ -890,17 +890,16 @@ enum class JumpTarget
     IntegerDivideByZero = unsigned(Trap::IntegerDivideByZero),
     OutOfBounds = unsigned(Trap::OutOfBounds),
     UnalignedAccess = unsigned(Trap::UnalignedAccess),
     IndirectCallToNull = unsigned(Trap::IndirectCallToNull),
     IndirectCallBadSig = unsigned(Trap::IndirectCallBadSig),
     ImpreciseSimdConversion = unsigned(Trap::ImpreciseSimdConversion),
     // Non-traps
     StackOverflow,
-    Throw,
     Limit
 };
 
 typedef EnumeratedArray<JumpTarget, JumpTarget::Limit, Uint32Vector> JumpSiteArray;
 
 // Assumptions captures ambient state that must be the same when compiling and
 // deserializing a module for the compiled code to be valid. If it's not, then
 // the module must be recompiled from scratch.