Bug 1186424: Make ModuleCompiler standalone; r=luke
☠☠ backed out by f8dd6bbde6ce ☠ ☠
author: Benjamin Bouvier <benj@benj.me>
Thu, 30 Jul 2015 12:58:05 +0200
changeset 257950 e41895c02c0bd19b62add8b3e58a97ecaa17e028
parent 257949 65b067184f104310be372c7ad634f05ae67f8fbf
child 257951 bdeb7135e5414be2b76b81bbc0c97b5a6231a3b8
push id: 29238
push user: ryanvm@gmail.com
push date: Mon, 17 Aug 2015 13:06:57 +0000
treeherder: mozilla-central@a6eeb28458fd [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: luke
bugs: 1186424
milestone: 43.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1186424: Make ModuleCompiler standalone; r=luke
js/src/asmjs/AsmJSModule.h
js/src/asmjs/AsmJSValidate.cpp
--- a/js/src/asmjs/AsmJSModule.h
+++ b/js/src/asmjs/AsmJSModule.h
@@ -437,16 +437,18 @@ class AsmJSModule
         ArgCoercionVector argCoercions_;
         struct Pod {
             bool isChangeHeap_;
             ReturnType returnType_;
             uint32_t codeOffset_;
             uint32_t startOffsetInModule_;  // Store module-start-relative offsets
             uint32_t endOffsetInModule_;    // so preserved by serialization.
         } pod;
+
+        // Used only during compilation => not needed during serialization
         uint32_t funcIndex_;
 
         friend class AsmJSModule;
 
         ExportedFunction(PropertyName* name,
                          uint32_t startOffsetInModule, uint32_t endOffsetInModule,
                          PropertyName* maybeFieldName,
                          ArgCoercionVector&& argCoercions,
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -1087,41 +1087,38 @@ class ModuleGlobals
     class FuncPtrTable
     {
         Signature sig_;
         uint32_t mask_;
         uint32_t globalDataOffset_;
         uint32_t tableIndex_;
         FuncPtrVector elems_;
 
+        FuncPtrTable(FuncPtrTable&& rhs) = delete;
+
       public:
         FuncPtrTable(ExclusiveContext* cx, Signature&& sig, uint32_t mask, uint32_t gdo,
                      uint32_t tableIndex)
           : sig_(Move(sig)), mask_(mask), globalDataOffset_(gdo), tableIndex_(tableIndex),
             elems_(cx)
         {}
 
-        FuncPtrTable(FuncPtrTable&& rhs)
-          : sig_(Move(rhs.sig_)), mask_(rhs.mask_), globalDataOffset_(rhs.globalDataOffset_),
-            elems_(Move(rhs.elems_))
-        {}
-
         Signature& sig() { return sig_; }
         const Signature& sig() const { return sig_; }
         unsigned mask() const { return mask_; }
         unsigned globalDataOffset() const { return globalDataOffset_; }
         unsigned tableIndex() const { return tableIndex_; }
 
         bool initialized() const { return !elems_.empty(); }
         void initElems(FuncPtrVector&& elems) { elems_ = Move(elems); MOZ_ASSERT(initialized()); }
         unsigned numElems() const { MOZ_ASSERT(initialized()); return elems_.length(); }
         const Func& elem(unsigned i) const { return *elems_[i]; }
     };
 
-    typedef Vector<FuncPtrTable> FuncPtrTableVector;
+    typedef Vector<FuncPtrTable*> FuncPtrTableVector;
     typedef Vector<Func*> FuncVector;
 
   private:
     FuncVector functions_;
     FuncPtrTableVector funcPtrTables_;
 
   public:
     ModuleGlobals(ExclusiveContext* cx)
@@ -1138,23 +1135,36 @@ class ModuleGlobals
     bool addFunction(Func* func) {
         return functions_.append(func);
     }
 
     unsigned numFuncPtrTables() const {
         return funcPtrTables_.length();
     }
     FuncPtrTable& funcPtrTable(unsigned i) {
-        return funcPtrTables_[i];
-    }
-    bool addFuncPtrTable(FuncPtrTable&& table) {
-        return funcPtrTables_.append(Move(table));
+        return *funcPtrTables_[i];
+    }
+    bool addFuncPtrTable(FuncPtrTable* table) {
+        return funcPtrTables_.append(table);
     }
 };
 
+// The ModuleValidator encapsulates the entire validation of an asm.js module.
+// Its lifetime spans from the validation of the top-level components of an
+// asm.js module (all the globals), through the emission of bytecode for all
+// the functions in the module, to the validation of the functions' pointer
+// tables.
+//
+// Rooting note: ModuleValidator is a stack class that contains unrooted
+// PropertyName (JSAtom) pointers.  This is safe because it cannot be
+// constructed without a TokenStream reference.  TokenStream is itself a stack
+// class that cannot be constructed without an AutoKeepAtoms being live on the
+// stack, which prevents collection of atoms.
+//
+// ModuleValidator is marked as rooted in the rooting analysis.  Don't add
+// non-JSAtom pointers, or this will break!
 class MOZ_STACK_CLASS ModuleValidator
 {
   public:
     class Global
     {
       public:
         enum Which {
             Variable,
@@ -1314,41 +1324,72 @@ class MOZ_STACK_CLASS ModuleValidator
         ArrayView(PropertyName* name, Scalar::Type type)
           : name(name), type(type)
         {}
 
         PropertyName* name;
         Scalar::Type type;
     };
 
+    class ExitDescriptor
+    {
+        PropertyName* name_;
+        Signature* sig_;
+
+      public:
+        ExitDescriptor(PropertyName* name, Signature* sig)
+          : name_(name), sig_(sig)
+        {}
+        PropertyName* name() const {
+            return name_;
+        }
+        const Signature& sig() const {
+            return *sig_;
+        }
+
+        // ExitDescriptor is a HashPolicy:
+        typedef ExitDescriptor Lookup;
+        static HashNumber hash(const ExitDescriptor& d) {
+            HashNumber hn = HashGeneric(d.name_, d.sig_->retType().which());
+            const VarTypeVector& args = d.sig_->args();
+            for (unsigned i = 0; i < args.length(); i++)
+                hn = AddToHash(hn, args[i].which());
+            return hn;
+        }
+        static bool match(const ExitDescriptor& lhs, const ExitDescriptor& rhs) {
+            return lhs.name_ == rhs.name_ && lhs.sig_ == rhs.sig_;
+        }
+    };
+
   private:
     typedef HashMap<PropertyName*, Global*> GlobalMap;
     typedef HashMap<PropertyName*, MathBuiltin> MathNameMap;
     typedef HashMap<PropertyName*, AsmJSAtomicsBuiltinFunction> AtomicsNameMap;
     typedef HashMap<PropertyName*, AsmJSSimdOperation> SimdOperationNameMap;
     typedef Vector<ArrayView> ArrayViewVector;
+    typedef HashMap<ExitDescriptor, unsigned, ExitDescriptor> ExitMap;
 
     ExclusiveContext* cx_;
     AsmJSParser&      parser_;
 
     ScopedJSDeletePtr<AsmJSModule> module_;
     LifoAlloc                      moduleLifo_;
     ModuleGlobals&                 moduleGlobals_;
 
     GlobalMap                      globals_;
     ArrayViewVector                arrayViews_;
+    ExitMap                        exits_;
 
     MathNameMap                    standardLibraryMathNames_;
     AtomicsNameMap                 standardLibraryAtomicsNames_;
     SimdOperationNameMap           standardLibrarySimdOpNames_;
 
     ParseNode*      moduleFunctionNode_;
     PropertyName*   moduleFunctionName_;
 
-    // XXX maybe factor out with MC
     UniquePtr<char[], JS::FreePolicy> errorString_;
     uint32_t                          errorOffset_;
     bool                              errorOverRecursed_;
 
     bool                           canValidateChangeHeap_;
     bool                           hasChangeHeap_;
 
     bool supportsSimd_;
@@ -1356,16 +1397,17 @@ class MOZ_STACK_CLASS ModuleValidator
   public:
     ModuleValidator(ExclusiveContext* cx, AsmJSParser& parser, ModuleGlobals& moduleGlobals)
       : cx_(cx),
         parser_(parser),
         moduleLifo_(LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
         moduleGlobals_(moduleGlobals),
         globals_(cx),
         arrayViews_(cx),
+        exits_(cx),
         standardLibraryMathNames_(cx),
         standardLibraryAtomicsNames_(cx),
         standardLibrarySimdOpNames_(cx),
         moduleFunctionNode_(parser.pc->maybeFunction),
         moduleFunctionName_(nullptr),
         errorString_(nullptr),
         errorOffset_(UINT32_MAX),
         errorOverRecursed_(false),
@@ -1415,17 +1457,17 @@ class MOZ_STACK_CLASS ModuleValidator
         if (!atom)
             return false;
         return standardLibrarySimdOpNames_.putNew(atom->asPropertyName(), op);
     }
 
   public:
 
     bool init() {
-        if (!globals_.init())
+        if (!globals_.init() || !exits_.init())
             return false;
 
         if (!standardLibraryMathNames_.init() ||
             !addStandardLibraryMathName("sin", AsmJSMathBuiltin_sin) ||
             !addStandardLibraryMathName("cos", AsmJSMathBuiltin_cos) ||
             !addStandardLibraryMathName("tan", AsmJSMathBuiltin_tan) ||
             !addStandardLibraryMathName("asin", AsmJSMathBuiltin_asin) ||
             !addStandardLibraryMathName("acos", AsmJSMathBuiltin_acos) ||
@@ -1653,54 +1695,63 @@ class MOZ_STACK_CLASS ModuleValidator
         AsmJSModule::ReturnType retType = func.sig().retType().toModuleReturnType();
         return module_->addExportedFunction(func.name(), func.srcBegin(), func.srcEnd(),
                                             maybeFieldName, Move(argCoercions), retType, func.funcIndex());
     }
     bool addExportedChangeHeap(PropertyName* name, const Global& g, PropertyName* maybeFieldName) {
         return module_->addExportedChangeHeap(name, g.changeHeapSrcBegin(), g.changeHeapSrcEnd(),
                                               maybeFieldName);
     }
-    bool addFunction(PropertyName* name, Signature&& sig, ModuleGlobals::Func** func, uint32_t* outFuncIndex) {
+    bool addFunction(PropertyName* name, Signature&& sig, ModuleGlobals::Func** func) {
         uint32_t funcIndex = moduleGlobals_.numFunctions();
-        if (outFuncIndex)
-            *outFuncIndex = funcIndex;
         Global* global = moduleLifo_.new_<Global>(Global::Function);
         if (!global)
             return false;
         global->u.funcIndex_ = funcIndex;
         if (!globals_.putNew(name, global))
             return false;
         Label* entry = moduleLifo_.new_<Label>();
         if (!entry)
             return false;
         *func = moduleLifo_.new_<ModuleGlobals::Func>(name, Move(sig), entry, funcIndex);
         if (!*func)
             return false;
         return moduleGlobals_.addFunction(*func);
     }
-    bool addFuncPtrTable(PropertyName* name, Signature&& sig, uint32_t mask, ModuleGlobals::FuncPtrTable** table,
-                         uint32_t* tableIndexOut)
+    bool addFuncPtrTable(PropertyName* name, Signature&& sig, uint32_t mask, ModuleGlobals::FuncPtrTable** table)
     {
         uint32_t tableIndex = moduleGlobals_.numFuncPtrTables();
-        if (tableIndexOut)
-            *tableIndexOut = tableIndex;
         Global* global = moduleLifo_.new_<Global>(Global::FuncPtrTable);
         if (!global)
             return false;
         global->u.funcPtrTableIndex_ = tableIndex;
         if (!globals_.putNew(name, global))
             return false;
         uint32_t globalDataOffset;
         if (!module_->addFuncPtrTable(/* numElems = */ mask + 1, &globalDataOffset))
             return false;
-        ModuleGlobals::FuncPtrTable tmpTable(cx_, Move(sig), mask, globalDataOffset, tableIndex);
-        if (!moduleGlobals_.addFuncPtrTable(Move(tmpTable)))
-            return false;
-        *table = &moduleGlobals_.funcPtrTable(tableIndex);
-        return true;
+        *table = moduleLifo_.new_<ModuleGlobals::FuncPtrTable>(cx_, Move(sig), mask, globalDataOffset, tableIndex);
+        return *table && moduleGlobals_.addFuncPtrTable(*table);
+    }
+    bool addExit(unsigned ffiIndex, PropertyName* name, Signature&& sig, unsigned* exitIndex,
+                 Signature** lifoSig)
+    {
+        Signature* signature = moduleLifo_.new_<Signature>(Move(sig));
+        if (!signature)
+            return false;
+        *lifoSig = signature;
+        ExitDescriptor exitDescriptor(name, signature);
+        ExitMap::AddPtr p = exits_.lookupForAdd(exitDescriptor);
+        if (p) {
+            *exitIndex = p->value();
+            return true;
+        }
+        if (!module_->addExit(ffiIndex, exitIndex))
+            return false;
+        return exits_.add(p, Move(exitDescriptor), *exitIndex);
     }
 
     bool tryOnceToValidateChangeHeap() {
         bool ret = canValidateChangeHeap_;
         canValidateChangeHeap_ = false;
         return ret;
     }
     bool hasChangeHeap() const {
@@ -1709,17 +1760,16 @@ class MOZ_STACK_CLASS ModuleValidator
     bool tryRequireHeapLengthToBeAtLeast(uint32_t len) {
         return module_->tryRequireHeapLengthToBeAtLeast(len);
     }
     uint32_t minHeapLength() const {
         return module_->minHeapLength();
     }
 
     // Error handling.
-    // XXX maybe factor out with ModuleCompiler
     bool failOffset(uint32_t offset, const char* str) {
         MOZ_ASSERT(!errorString_);
         MOZ_ASSERT(errorOffset_ == UINT32_MAX);
         MOZ_ASSERT(str);
         errorOffset_ = offset;
         errorString_ = DuplicateString(cx_, str);
         return false;
     }
@@ -1729,17 +1779,18 @@ class MOZ_STACK_CLASS ModuleValidator
             return failOffset(pn->pn_pos.begin, str);
 
         // The exact rooting static analysis does not perform dataflow analysis, so it believes
         // that unrooted things on the stack during compilation may still be accessed after this.
         // Since pn is typically only null under OOM, this suppression simply forces any GC to be
         // delayed until the compilation is off the stack and more memory can be freed.
         gc::AutoSuppressGC nogc(cx_);
         TokenPos pos;
-        if (!tokenStream().peekTokenPos(&pos))
+        TokenStream::Modifier modifier = tokenStream().hasLookahead() ? tokenStream().getLookaheadModifier() : TokenStream::None;
+        if (!tokenStream().peekTokenPos(&pos, modifier))
             return false;
         return failOffset(pos.begin, str);
     }
 
     bool failfVA(ParseNode* pn, const char* fmt, va_list ap) {
         MOZ_ASSERT(!errorString_);
         MOZ_ASSERT(errorOffset_ == UINT32_MAX);
         MOZ_ASSERT(fmt);
@@ -1769,29 +1820,32 @@ class MOZ_STACK_CLASS ModuleValidator
         errorOverRecursed_ = true;
         return false;
     }
 
     // Read-only interface
     ExclusiveContext* cx() const { return cx_; }
     ParseNode*    moduleFunctionNode() const { return moduleFunctionNode_; }
     PropertyName* moduleFunctionName() const { return moduleFunctionName_; }
-    ScopedJSDeletePtr<AsmJSModule>& modulePtr() { return module_; }// XXX me doesn't like that
+    ScopedJSDeletePtr<AsmJSModule>& modulePtr() { return module_; }
     const AsmJSModule& module() const { return *module_.get(); }
     AsmJSParser& parser() const { return parser_; }
     TokenStream& tokenStream() const { return parser_.tokenStream; }
     bool supportsSimd() const { return supportsSimd_; }
     LifoAlloc& lifo() { return moduleLifo_; }
 
     unsigned numArrayViews() const {
         return arrayViews_.length();
     }
     const ArrayView& arrayView(unsigned i) const {
         return arrayViews_[i];
     }
+    unsigned numFunctions() const {
+        return moduleGlobals_.numFunctions();
+    }
     ModuleGlobals::Func& function(unsigned i) const {
         return *moduleGlobals_.function(i);
     }
     unsigned numFuncPtrTables() const {
         return moduleGlobals_.numFuncPtrTables();
     }
     ModuleGlobals::FuncPtrTable& funcPtrTable(unsigned i) const {
         return moduleGlobals_.funcPtrTable(i);
@@ -1877,251 +1931,116 @@ class MOZ_STACK_CLASS ModuleValidator
 //      foo(1);      // Exit #1: (int) -> void
 //      foo(1.5);    // Exit #2: (double) -> void
 //      bar(1)|0;    // Exit #3: (int) -> int
 //      bar(2)|0;    // Exit #3: (int) -> int
 //    }
 //  }
 //
 // The ModuleCompiler maintains a hash table (ExitMap) which allows a call site
-// to add a new exit or reuse an existing one. The key is an ExitDescriptor
-// (which holds the exit pairing) and the value is an index into the
-// Vector<Exit> stored in the AsmJSModule.
+// to add a new exit or reuse an existing one. The key is an index into the
+// Vector<Exit> stored in the AsmJSModule and the value is the signature of
+// that exit's variant.
 //
-// Rooting note: ModuleCompiler is a stack class that contains unrooted
-// PropertyName (JSAtom) pointers.  This is safe because it cannot be
-// constructed without a TokenStream reference.  TokenStream is itself a stack
-// class that cannot be constructed without an AutoKeepAtoms being live on the
-// stack, which prevents collection of atoms.
-//
-// ModuleCompiler is marked as rooted in the rooting analysis.  Don't add
-// non-JSAtom pointers, or this will break!
+// The same rooting note in the top comment of ModuleValidator applies here as
+// well.
 class MOZ_STACK_CLASS ModuleCompiler
 {
   public:
-
-    class ExitDescriptor
-    {
-        PropertyName* name_;
-        Signature* sig_;
-
-      public:
-        ExitDescriptor(PropertyName* name, Signature* sig)
-          : name_(name), sig_(sig)
-        {}
-        PropertyName* name() const {
-            return name_;
-        }
-        const Signature& sig() const {
-            return *sig_;
-        }
-
-        // ExitDescriptor is a HashPolicy:
-        typedef ExitDescriptor Lookup;
-        static HashNumber hash(const ExitDescriptor& d) {
-            HashNumber hn = HashGeneric(d.name_, d.sig_->retType().which());
-            const VarTypeVector& args = d.sig_->args();
-            for (unsigned i = 0; i < args.length(); i++)
-                hn = AddToHash(hn, args[i].which());
-            return hn;
-        }
-        static bool match(const ExitDescriptor& lhs, const ExitDescriptor& rhs) {
-            return lhs.name_ == rhs.name_ && lhs.sig_ == rhs.sig_;
-        }
-    };
-
-    typedef HashMap<ExitDescriptor, unsigned, ExitDescriptor> ExitMap;
+    // Map exitIndex to the corresponding exit's Signature.
+    typedef HashMap<unsigned, const Signature*> ExitMap;
 
   private:
     struct SlowFunction
     {
         SlowFunction(PropertyName* name, unsigned ms, unsigned line, unsigned column)
          : name(name), ms(ms), line(line), column(column)
         {}
 
         PropertyName* name;
         unsigned ms;
         unsigned line;
         unsigned column;
     };
 
     typedef Vector<SlowFunction> SlowFunctionVector;
-    typedef Vector<Signature*> SignatureVector;
-
-    ExclusiveContext *             cx_;
-    AsmJSParser &                  parser_;
-
-    MacroAssembler                 masm_;
-
-    ScopedJSDeletePtr<AsmJSModule>& module_; // XXX me don't like that
-    LifoAlloc                      moduleLifo_;
-    ModuleGlobals &                moduleGlobals_;
+
+    ExclusiveContext *              cx_;
+    AsmJSParser &                   parser_;
+
+    MacroAssembler                  masm_;
+
+    ScopedJSDeletePtr<AsmJSModule>& module_;
+    ModuleGlobals &                 moduleGlobals_;
 
     ExitMap                        exits_;
-    SignatureVector                exitsSignatures_;
+
     NonAssertingLabel              stackOverflowLabel_;
     NonAssertingLabel              asyncInterruptLabel_;
     NonAssertingLabel              syncInterruptLabel_;
     NonAssertingLabel              onDetachedLabel_;
     NonAssertingLabel              onOutOfBoundsLabel_;
     NonAssertingLabel              onConversionErrorLabel_;
 
-    UniquePtr<char[], JS::FreePolicy> errorString_;
-    uint32_t                       errorOffset_;
-    bool                           errorOverRecursed_;
-
     int64_t                        usecBefore_;
     SlowFunctionVector             slowFunctions_;
 
     DebugOnly<bool>                finishedFunctionBodies_;
 
   public:
     ModuleCompiler(ExclusiveContext* cx, AsmJSParser& parser, ScopedJSDeletePtr<AsmJSModule>& module,
                    ModuleGlobals& moduleGlobals)
       : cx_(cx),
         parser_(parser),
         masm_(MacroAssembler::AsmJSToken()),
         module_(module),
-        moduleLifo_(LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
         moduleGlobals_(moduleGlobals),
         exits_(cx),
-        exitsSignatures_(cx),
-        errorString_(nullptr),
-        errorOffset_(UINT32_MAX),
-        errorOverRecursed_(false),
         usecBefore_(PRMJ_Now()),
         slowFunctions_(cx),
         finishedFunctionBodies_(false)
     {}
 
-    ~ModuleCompiler() {
-        if (errorString_) {
-            MOZ_ASSERT(errorOffset_ != UINT32_MAX);
-            tokenStream().reportAsmJSError(errorOffset_,
-                                           JSMSG_USE_ASM_TYPE_FAIL,
-                                           errorString_.get());
-        }
-        if (errorOverRecursed_)
-            ReportOverRecursed(cx_);
-    }
-
     bool init() {
         return exits_.init();
     }
 
-    bool failOffset(uint32_t offset, const char* str) {
-        MOZ_ASSERT(!errorString_);
-        MOZ_ASSERT(errorOffset_ == UINT32_MAX);
-        MOZ_ASSERT(str);
-        errorOffset_ = offset;
-        errorString_ = DuplicateString(cx_, str);
-        return false;
-    }
-
-    bool fail(ParseNode* pn, const char* str) {
-        if (pn)
-            return failOffset(pn->pn_pos.begin, str);
-
-        // The exact rooting static analysis does not perform dataflow analysis, so it believes
-        // that unrooted things on the stack during compilation may still be accessed after this.
-        // Since pn is typically only null under OOM, this suppression simply forces any GC to be
-        // delayed until the compilation is off the stack and more memory can be freed.
-        gc::AutoSuppressGC nogc(cx_);
-        TokenPos pos;
-        TokenStream::Modifier modifier = tokenStream().hasLookahead() ? tokenStream().getLookaheadModifier() : TokenStream::None;
-        if (!tokenStream().peekTokenPos(&pos, modifier))
-            return false;
-        return failOffset(pos.begin, str);
-    }
-
-    bool failfVA(ParseNode* pn, const char* fmt, va_list ap) {
-        MOZ_ASSERT(!errorString_);
-        MOZ_ASSERT(errorOffset_ == UINT32_MAX);
-        MOZ_ASSERT(fmt);
-        errorOffset_ = pn ? pn->pn_pos.begin : tokenStream().currentToken().pos.end;
-        errorString_.reset(JS_vsmprintf(fmt, ap));
-        return false;
-    }
-
-    bool failf(ParseNode* pn, const char* fmt, ...) {
-        va_list ap;
-        va_start(ap, fmt);
-        failfVA(pn, fmt, ap);
-        va_end(ap);
-        return false;
-    }
-
-    bool failName(ParseNode* pn, const char* fmt, PropertyName* name) {
-        // This function is invoked without the caller properly rooting its locals.
-        gc::AutoSuppressGC suppress(cx_);
-        JSAutoByteString bytes;
-        if (AtomToPrintableString(cx_, name, &bytes))
-            failf(pn, fmt, bytes.ptr());
-        return false;
-    }
-
-    bool failOverRecursed() {
-        errorOverRecursed_ = true;
-        return false;
-    }
-
     /*************************************************** Read-only interface */
 
     ExclusiveContext* cx() const { return cx_; }
     AsmJSParser& parser() const { return parser_; }
     TokenStream& tokenStream() const { return parser_.tokenStream; }
     MacroAssembler& masm() { return masm_; }
     Label& stackOverflowLabel() { return stackOverflowLabel_; }
     Label& asyncInterruptLabel() { return asyncInterruptLabel_; }
     Label& syncInterruptLabel() { return syncInterruptLabel_; }
     Label& onDetachedLabel() { return onDetachedLabel_; }
     Label& onOutOfBoundsLabel() { return onOutOfBoundsLabel_; }
     Label& onConversionErrorLabel() { return onConversionErrorLabel_; }
-    bool hasError() const { return errorString_ != nullptr; }
     const AsmJSModule& module() const { return *module_.get(); }
     bool usesSignalHandlersForInterrupt() const { return module_->usesSignalHandlersForInterrupt(); }
     bool usesSignalHandlersForOOB() const { return module_->usesSignalHandlersForOOB(); }
 
     uint32_t minHeapLength() const {
         return module_->minHeapLength();
     }
     ExitMap::Range allExits() const {
         return exits_.all();
     }
-    const Signature& exitSignature(unsigned i) const {
-        return *exitsSignatures_[i];
-    }
-    unsigned numFunctions() const {
-        return moduleGlobals_.numFunctions();
-    }
     ModuleGlobals::Func& function(unsigned i) const {
         return *moduleGlobals_.function(i);
     }
-    ModuleGlobals::FuncPtrTable& funcPtrTable(unsigned i) const {
-        return moduleGlobals_.funcPtrTable(i);
-    }
 
     /***************************************************** Mutable interface */
-    bool addExit(unsigned ffiIndex, PropertyName* name, Signature&& sig, unsigned* exitIndex) {
-        Signature* signature = moduleLifo_.new_<Signature>(Move(sig));
-        if (!signature)
-            return false;
-        ExitDescriptor exitDescriptor(name, signature);
-        ExitMap::AddPtr p = exits_.lookupForAdd(exitDescriptor);
-        if (p) {
-            *exitIndex = p->value();
+    bool addExit(unsigned exitIndex, const Signature* signature)
+    {
+        ExitMap::AddPtr p = exits_.lookupForAdd(exitIndex);
+        if (p)
             return true;
-        }
-        if (!module_->addExit(ffiIndex, exitIndex))
-            return false;
-        MOZ_ASSERT(exitsSignatures_.length() == *exitIndex,
-                   "exitsSignatures_ and exits_ must contain as many elements");
-        if (!exitsSignatures_.append(signature))
-            return false;
-        return exits_.add(p, Move(exitDescriptor), *exitIndex);
+        return exits_.add(p, exitIndex, signature);
     }
 
     bool finishGeneratingFunction(ModuleGlobals::Func& func, CodeGenerator& codegen,
                                   const AsmJSFunctionLabels& labels)
     {
         uint32_t line, column;
         tokenStream().srcCoords.lineNumAndColumnIndex(func.srcBegin(), &line, &column);
 
@@ -3001,16 +2920,17 @@ class AsmFunction
         return pos;
     }
 
     uint8_t  readU8 (size_t* pc) const { return readPrimitive<uint8_t>(pc); }
     int32_t  readI32(size_t* pc) const { return readPrimitive<int32_t>(pc); }
     float    readF32(size_t* pc) const { return readPrimitive<float>(pc); }
     uint32_t readU32(size_t* pc) const { return readPrimitive<uint32_t>(pc); }
     double   readF64(size_t* pc) const { return readPrimitive<double>(pc); }
+    uint8_t* readPtr(size_t* pc) const { return readPrimitive<uint8_t*>(pc); }
 
     SimdConstant readI32X4(size_t* pc) const {
         int32_t x = readI32(pc);
         int32_t y = readI32(pc);
         int32_t z = readI32(pc);
         int32_t w = readI32(pc);
         return SimdConstant::CreateX4(x, y, z, w);
     }
@@ -3039,16 +2959,21 @@ class AsmFunction
     template<class T>
     void patch32(size_t pc, T i) {
         static_assert(sizeof(T) == sizeof(uint32_t),
                       "patch32 must be used with 32-bits wide types");
         MOZ_ASSERT(pcIsPatchable(pc, sizeof(uint32_t)));
         memcpy(&bytecode_[pc], &i, sizeof(uint32_t));
     }
 
+    void patchPtr(size_t pc, uint8_t* ptr) {
+        MOZ_ASSERT(pcIsPatchable(pc, sizeof(uint8_t*)));
+        memcpy(&bytecode_[pc], &ptr, sizeof(uint8_t*));
+    }
+
     // Setters
     void setReturnedType(RetType retType) {
         MOZ_ASSERT(returnedType_ == RetType::Which(-1));
         returnedType_ = retType;
     }
     void setNumLocals(size_t numLocals) {
         MOZ_ASSERT(numLocals_ == size_t(-1));
         numLocals_ = numLocals;
@@ -3078,42 +3003,39 @@ class FunctionBuilder
         Local(VarType t, unsigned slot) : type(t), slot(slot) {}
     };
 
   private:
     typedef HashMap<PropertyName*, Local> LocalMap;
     typedef HashMap<PropertyName*, uint32_t> LabelMap;
 
     ModuleValidator &      m_;
-    ModuleCompiler &       mc_;
     ParseNode *            fn_;
 
     AsmFunction &          func_;
 
     LocalMap               locals_;
     LabelMap               labels_;
 
     unsigned               heapExpressionDepth_;
 
     bool                   hasAlreadyReturned_;
 
   public:
-    FunctionBuilder(ModuleValidator& m, ModuleCompiler& mc, AsmFunction& func, ParseNode* fn)
+    FunctionBuilder(ModuleValidator& m, AsmFunction& func, ParseNode* fn)
       : m_(m),
-        mc_(mc),
         fn_(fn),
         func_(func),
         locals_(m.cx()),
         labels_(m.cx()),
         heapExpressionDepth_(0),
         hasAlreadyReturned_(false)
     {}
 
     ModuleValidator& m() const    { return m_; }
-    ModuleCompiler& mc() const    { return mc_; }
     ExclusiveContext* cx() const  { return m_.cx(); }
     ParseNode* fn() const         { return fn_; }
 
     bool init()
     {
         return locals_.init() &&
                labels_.init();
     }
@@ -3286,32 +3208,47 @@ class FunctionBuilder
     void patchU8(size_t pos, uint8_t u8) {
         func_.patchU8(pos, u8);
     }
     template<class T>
     void patch32(size_t pos, T val) {
         static_assert(sizeof(T) == sizeof(uint32_t), "patch32 is used for 4-bytes long ops");
         func_.patch32(pos, val);
     }
+    void patchPtr(size_t pos, uint8_t* ptr) {
+        func_.patchPtr(pos, ptr);
+    }
 
     size_t tempU8() {
         return func_.writeU8(uint8_t(Stmt::Bad));
     }
     size_t tempOp() {
         return tempU8();
     }
     size_t temp32() {
         size_t ret = func_.writeU8(uint8_t(Stmt::Bad));
         for (size_t i = 1; i < 4; i++)
             func_.writeU8(uint8_t(Stmt::Bad));
         return ret;
     }
+    size_t tempPtr() {
+        size_t ret = func_.writeU8(uint8_t(Stmt::Bad));
+        for (size_t i = 1; i < sizeof(intptr_t); i++)
+            func_.writeU8(uint8_t(Stmt::Bad));
+        return ret;
+    }
     /************************************************** End of build helpers */
 };
 
+static bool
+NoExceptionPending(ExclusiveContext* cx)
+{
+    return !cx->isJSContext() || !cx->asJSContext()->isExceptionPending();
+}
+
 typedef Vector<size_t,1> LabelVector;
 typedef Vector<MBasicBlock*,8> BlockVector;
 
 // Encapsulates the compilation of a single function in an asm.js module. The
 // function compiler handles the creation and final backend compilation of the
 // MIR graph. Also see ModuleCompiler comment.
 class FunctionCompiler
 {
@@ -3980,38 +3917,38 @@ class FunctionCompiler
             return false;
 
         curBlock_->add(ins);
         *def = ins;
         return true;
     }
 
   public:
-    bool internalCall(const ModuleGlobals::Func& func, const Call& call, MDefinition** def)
-    {
-        MIRType returnType = func.sig().retType().toMIRType();
-        return callPrivate(MAsmJSCall::Callee(&func.entry()), call, returnType, def);
-    }
-
-    bool funcPtrCall(const ModuleGlobals::FuncPtrTable& table, MDefinition* index,
+    bool internalCall(const Signature& sig, Label* entry, const Call& call, MDefinition** def)
+    {
+        MIRType returnType = sig.retType().toMIRType();
+        return callPrivate(MAsmJSCall::Callee(entry), call, returnType, def);
+    }
+
+    bool funcPtrCall(const Signature& sig, uint32_t maskLit, uint32_t globalDataOffset, MDefinition* index,
                      const Call& call, MDefinition** def)
     {
         if (inDeadCode()) {
             *def = nullptr;
             return true;
         }
 
-        MConstant* mask = MConstant::New(alloc(), Int32Value(table.mask()));
+        MConstant* mask = MConstant::New(alloc(), Int32Value(maskLit));
         curBlock_->add(mask);
         MBitAnd* maskedIndex = MBitAnd::NewAsmJS(alloc(), index, mask);
         curBlock_->add(maskedIndex);
-        MAsmJSLoadFuncPtr* ptrFun = MAsmJSLoadFuncPtr::New(alloc(), table.globalDataOffset(), maskedIndex);
+        MAsmJSLoadFuncPtr* ptrFun = MAsmJSLoadFuncPtr::New(alloc(), globalDataOffset, maskedIndex);
         curBlock_->add(ptrFun);
 
-        MIRType returnType = table.sig().retType().toMIRType();
+        MIRType returnType = sig.retType().toMIRType();
         return callPrivate(MAsmJSCall::Callee(ptrFun), call, returnType, def);
     }
 
     bool ffiCall(unsigned exitIndex, const Call& call, MIRType returnType, MDefinition** def)
     {
         if (inDeadCode()) {
             *def = nullptr;
             return true;
@@ -4368,16 +4305,17 @@ class FunctionCompiler
 
     /************************************************************ DECODING ***/
 
     uint8_t  readU8()        { return func_.readU8(&pc_); }
     uint32_t readU32()       { return func_.readU32(&pc_); }
     int32_t  readI32()       { return func_.readI32(&pc_); }
     float    readF32()       { return func_.readF32(&pc_); }
     double   readF64()       { return func_.readF64(&pc_); }
+    uint8_t* readPtr()       { return func_.readPtr(&pc_); }
     SimdConstant readI32X4() { return func_.readI32X4(&pc_); }
     SimdConstant readF32X4() { return func_.readF32X4(&pc_); }
 
     Stmt readStmtOp()        { return Stmt(readU8()); }
 
     void assertDebugCheckPoint() {
 #ifdef DEBUG
         MOZ_ASSERT(Stmt(readU8()) == Stmt::DebugCheckPoint);
@@ -6451,31 +6389,29 @@ CheckSignatureAgainstExisting(ModuleVali
     }
 
     MOZ_ASSERT(sig == existing);
     return true;
 }
 
 static bool
 CheckFunctionSignature(ModuleValidator& m, ParseNode* usepn, Signature&& sig, PropertyName* name,
-                       ModuleGlobals::Func** func, uint32_t* funcIndex = nullptr)
+                       ModuleGlobals::Func** func)
 {
     ModuleGlobals::Func* existing = m.lookupFunction(name);
     if (!existing) {
         if (!CheckModuleLevelName(m, usepn, name))
             return false;
-        return m.addFunction(name, Move(sig), func, funcIndex);
+        return m.addFunction(name, Move(sig), func);
     }
 
     if (!CheckSignatureAgainstExisting(m, usepn, sig, existing->sig()))
         return false;
 
     *func = existing;
-    if (funcIndex)
-        *funcIndex = existing->funcIndex();
     return true;
 }
 
 static bool
 CheckIsVarType(FunctionBuilder& f, ParseNode* argNode, Type type)
 {
     if (!type.isVarType())
         return f.failf(argNode, "%s is not a subtype of int, float or double", type.toChars());
@@ -6495,81 +6431,78 @@ CheckInternalCall(FunctionBuilder& f, Pa
         case RetType::Void:      f.writeOp(Stmt::CallInternal);  break;
         case RetType::Signed:    f.writeOp(I32::CallInternal);   break;
         case RetType::Double:    f.writeOp(F64::CallInternal);   break;
         case RetType::Float:     f.writeOp(F32::CallInternal);   break;
         case RetType::Int32x4:   f.writeOp(I32X4::CallInternal); break;
         case RetType::Float32x4: f.writeOp(F32X4::CallInternal); break;
     }
 
-    // Signature's index in module
-    size_t indexAt = f.temp32();
+    // Function's entry in lifo
+    size_t entryAt = f.tempPtr();
+    // Function's signature in lifo
+    size_t signatureAt = f.tempPtr();
     // Call node position (asm.js specific)
     f.writeU32(callNode->pn_pos.begin);
 
     Signature signature(f.m().lifo(), retType);
     if (!CheckCallArgs(f, callNode, CheckIsVarType, signature))
         return false;
 
-    uint32_t funcIndex = -1;
     ModuleGlobals::Func* callee;
-    if (!CheckFunctionSignature(f.m(), callNode, Move(signature), calleeName, &callee, &funcIndex))
-        return false;
-
-    MOZ_ASSERT(funcIndex != uint32_t(-1));
-    f.patch32(indexAt, funcIndex);
+    if (!CheckFunctionSignature(f.m(), callNode, Move(signature), calleeName, &callee))
+        return false;
+
+    f.patchPtr(entryAt, (uint8_t*) &callee->entry());
+    f.patchPtr(signatureAt, (uint8_t*) &callee->sig());
     *type = retType.toType();
     return true;
 }
 
 static bool
 EmitInternalCall(FunctionCompiler& f, RetType retType, MDefinition** def)
 {
-    uint32_t sigIndex = f.readU32();
-
-    ModuleGlobals::Func& callee = f.m().function(sigIndex);
-    Signature& sig = callee.sig();
+    Label* entry = reinterpret_cast<Label*>(f.readPtr());
+    const Signature& sig = *reinterpret_cast<Signature*>(f.readPtr());
+
     MOZ_ASSERT_IF(sig.retType() != RetType::Void, sig.retType() == retType);
 
     uint32_t callNodePosition = f.readU32();
 
     FunctionCompiler::Call call(f, callNodePosition);
     if (!EmitCallArgs(f, sig, &call))
         return false;
 
-    return f.internalCall(callee, call, def);
+    return f.internalCall(sig, entry, call, def);
 }
 
 static bool
 CheckFuncPtrTableAgainstExisting(ModuleValidator& m, ParseNode* usepn,
                                  PropertyName* name, Signature&& sig, unsigned mask,
-                                 ModuleGlobals::FuncPtrTable** tableOut,
-                                 uint32_t* tableIndex = nullptr)
+                                 ModuleGlobals::FuncPtrTable** tableOut)
 {
     if (const ModuleValidator::Global* existing = m.lookupGlobal(name)) {
         if (existing->which() != ModuleValidator::Global::FuncPtrTable)
             return m.failName(usepn, "'%s' is not a function-pointer table", name);
 
         ModuleGlobals::FuncPtrTable& table = m.funcPtrTable(existing->funcPtrTableIndex());
         if (mask != table.mask())
             return m.failf(usepn, "mask does not match previous value (%u)", table.mask());
 
         if (!CheckSignatureAgainstExisting(m, usepn, sig, table.sig()))
             return false;
 
-        if (tableIndex)
-            *tableIndex = existing->funcPtrTableIndex();
         *tableOut = &table;
         return true;
     }
 
     if (!CheckModuleLevelName(m, usepn, name))
         return false;
 
-    return m.addFuncPtrTable(name, Move(sig), mask, tableOut, tableIndex);
+    return m.addFuncPtrTable(name, Move(sig), mask, tableOut);
 }
 
 static bool
 CheckFuncPtrCall(FunctionBuilder& f, ParseNode* callNode, RetType retType, Type* type)
 {
     if (!f.canCall()) {
         return f.fail(callNode, "function-pointer call expressions may not be nested inside heap "
                                 "expressions when the module contains a change-heap function");
@@ -6602,64 +6535,68 @@ CheckFuncPtrCall(FunctionBuilder& f, Par
     switch (retType.which()) {
         case RetType::Void:      f.writeOp(Stmt::CallIndirect);  break;
         case RetType::Signed:    f.writeOp(I32::CallIndirect);   break;
         case RetType::Double:    f.writeOp(F64::CallIndirect);   break;
         case RetType::Float:     f.writeOp(F32::CallIndirect);   break;
         case RetType::Int32x4:   f.writeOp(I32X4::CallIndirect); break;
         case RetType::Float32x4: f.writeOp(F32X4::CallIndirect); break;
     }
-    // Table's index in module
-    size_t tableIndexAt = f.temp32();
+
+    // Table's mask
+    f.writeU32(mask);
+    // Global data offset
+    size_t globalDataOffsetAt = f.temp32();
+    // Signature
+    size_t signatureAt = f.tempPtr();
     // Call node position (asm.js specific)
     f.writeU32(callNode->pn_pos.begin);
 
     Type indexType;
     if (!CheckExpr(f, indexNode, &indexType))
         return false;
 
     if (!indexType.isIntish())
         return f.failf(indexNode, "%s is not a subtype of intish", indexType.toChars());
 
     Signature sig(f.m().lifo(), retType);
     if (!CheckCallArgs(f, callNode, CheckIsVarType, sig))
         return false;
 
-    uint32_t tableIndex = -1;
     ModuleGlobals::FuncPtrTable* table;
-    if (!CheckFuncPtrTableAgainstExisting(f.m(), tableNode, name, Move(sig), mask, &table, &tableIndex))
-        return false;
-
-    MOZ_ASSERT(tableIndex != uint32_t(-1));
-    f.patch32(tableIndexAt, tableIndex);
+    if (!CheckFuncPtrTableAgainstExisting(f.m(), tableNode, name, Move(sig), mask, &table))
+        return false;
+
+    f.patch32(globalDataOffsetAt, table->globalDataOffset());
+    f.patchPtr(signatureAt, (uint8_t*) &table->sig());
 
     *type = retType.toType();
     return true;
 }
 
 static bool
 EmitFuncPtrCall(FunctionCompiler& f, RetType retType, MDefinition** def)
 {
-    uint32_t tableIndex = f.readU32();
-
-    ModuleGlobals::FuncPtrTable& table = f.m().funcPtrTable(tableIndex);
-    const Signature& sig = table.sig();
+    uint32_t mask = f.readU32();
+    uint32_t globalDataOffset = f.readU32();
+    const Signature& sig = *reinterpret_cast<Signature*>(f.readPtr());
+
     MOZ_ASSERT_IF(sig.retType() != RetType::Void, sig.retType() == retType);
 
     uint32_t callNodePosition = f.readU32();
 
     MDefinition *index;
     if (!EmitI32Expr(f, &index))
         return false;
 
     FunctionCompiler::Call call(f, callNodePosition);
     if (!EmitCallArgs(f, sig, &call))
         return false;
 
-    return f.funcPtrCall(table, index, call, def);
+    return f.funcPtrCall(sig, mask, globalDataOffset, index, call, def);
 }
 
 static bool
 CheckIsExternType(FunctionBuilder& f, ParseNode* argNode, Type type)
 {
     if (!type.isExtern())
         return f.failf(argNode, "%s is not a subtype of extern", type.toChars());
     return true;
@@ -6685,48 +6622,56 @@ CheckFFICall(FunctionBuilder& f, ParseNo
         case RetType::Void:      f.writeOp(Stmt::CallImport);  break;
         case RetType::Signed:    f.writeOp(I32::CallImport);   break;
         case RetType::Double:    f.writeOp(F64::CallImport);   break;
         case RetType::Float:     f.writeOp(F32::CallImport);   break;
         case RetType::Int32x4:   f.writeOp(I32X4::CallImport); break;
         case RetType::Float32x4: f.writeOp(F32X4::CallImport); break;
     }
 
-    // Index in the exit array
+    // Exit index
     size_t indexAt = f.temp32();
+    // Pointer to the exit's signature in the module's lifo
+    size_t sigAt = f.tempPtr();
     // Call node position (asm.js specific)
     f.writeU32(callNode->pn_pos.begin);
 
     Signature signature(f.m().lifo(), retType);
     if (!CheckCallArgs(f, callNode, CheckIsExternType, signature))
         return false;
 
-    unsigned exitIndex = -1;
-    if (!f.mc().addExit(ffiIndex, calleeName, Move(signature), &exitIndex))
-        return false;
-
-    MOZ_ASSERT(exitIndex != uint32_t(-1));
+    Signature* lifoSig = nullptr;
+    unsigned exitIndex;
+    if (!f.m().addExit(ffiIndex, calleeName, Move(signature), &exitIndex, &lifoSig))
+        return false;
+
+    MOZ_ASSERT(!!lifoSig);
     f.patch32(indexAt, exitIndex);
+    f.patchPtr(sigAt, (uint8_t*)lifoSig);
     *type = retType.toType();
     return true;
 }
 
 static bool
 EmitFFICall(FunctionCompiler& f, RetType retType, MDefinition** def)
 {
-    uint32_t exitIndex = f.readU32();
-
-    const Signature& sig = f.m().exitSignature(exitIndex);
+    unsigned exitIndex = f.readI32();
+    uint8_t* signaturePtr = f.readPtr();
+
+    const Signature& sig = *reinterpret_cast<Signature*>(signaturePtr);
     MOZ_ASSERT_IF(sig.retType() != RetType::Void, sig.retType() == retType);
 
     uint32_t callNodePosition = f.readU32();
     FunctionCompiler::Call call(f, callNodePosition);
     if (!EmitCallArgs(f, sig, &call))
         return false;
 
+    if (!f.m().addExit(exitIndex, &sig))
+        return false;
+
     return f.ffiCall(exitIndex, call, retType.toMIRType(), def);
 }
 
 static bool
 CheckFloatCoercionArg(FunctionBuilder& f, ParseNode* inputNode, Type inputType,
                       size_t opcodeAt)
 {
     if (inputType.isMaybeDouble()) {
@@ -10441,17 +10386,17 @@ CheckFunction(ModuleValidator& m, Module
             return false;
         if (validated) {
             *mir = nullptr;
             return true;
         }
     }
 
     AsmFunction function(m.cx());
-    FunctionBuilder f(m, mc, function, fn);
+    FunctionBuilder f(m, function, fn);
     if (!f.init())
         return false;
 
     ParseNode* stmtIter = ListHead(FunctionStatementList(fn));
 
     if (!CheckProcessingDirectives(m, &stmtIter))
         return false;
 
@@ -10524,17 +10469,17 @@ GenerateCode(ModuleCompiler& m, ModuleGl
     // Unlike regular IonMonkey, which links and generates a new JitCode for
     // every function, we accumulate all the functions in the module in a
     // single MacroAssembler and link at end. Linking asm.js doesn't require a
     // CodeGenerator so we can destroy it now (via ScopedJSDeletePtr).
     return true;
 }
 
 static bool
-CheckAllFunctionsDefined(ModuleCompiler& m)
+CheckAllFunctionsDefined(ModuleValidator& m)
 {
     for (unsigned i = 0; i < m.numFunctions(); i++) {
         if (!m.function(i).entry().bound())
             return m.failName(nullptr, "missing definition of function %s", m.function(i).name());
     }
 
     return true;
 }
@@ -10578,17 +10523,17 @@ CheckFunctionsSequential(ModuleValidator
             return m.failOffset(func->srcBegin(), "internal compiler failure (probably out of memory)");
 
         func->accumulateCompileTime((PRMJ_Now() - before) / PRMJ_USEC_PER_MSEC);
 
         if (!GenerateCode(mc, *func, *mir, *lir))
             return false;
     }
 
-    if (!CheckAllFunctionsDefined(mc))
+    if (!CheckAllFunctionsDefined(m))
         return false;
 
     return true;
 }
 
 // Currently, only one asm.js parallel compilation is allowed at a time.
 // This RAII class attempts to claim this parallel compilation using atomic ops
 // on the helper thread state's asmJSCompilationInProgress.
@@ -10743,21 +10688,21 @@ CheckFunctionsParallel(ModuleValidator& 
 
     // Block for all outstanding helpers to complete.
     while (group.outstandingJobs > 0) {
         AsmJSParallelTask* ignored = nullptr;
         if (!GetUsedTask(mc, group, &ignored))
             return false;
     }
 
-    if (!CheckAllFunctionsDefined(mc))
+    if (!CheckAllFunctionsDefined(m))
         return false;
 
     MOZ_ASSERT(group.outstandingJobs == 0);
-    MOZ_ASSERT(group.compiledJobs == mc.numFunctions());
+    MOZ_ASSERT(group.compiledJobs == m.numFunctions());
 #ifdef DEBUG
     {
         AutoLockHelperThreadState lock;
         MOZ_ASSERT(HelperThreadState().asmJSWorklist().empty());
         MOZ_ASSERT(HelperThreadState().asmJSFinishedList().empty());
     }
 #endif
     MOZ_ASSERT(!HelperThreadState().asmJSFailed());
@@ -11296,18 +11241,18 @@ GenerateCheckForHeapDetachment(ModuleCom
     masm.append(AsmJSGlobalAccess(label, AsmJSHeapGlobalDataOffset));
     masm.branchTestPtr(Assembler::Zero, scratch, scratch, &m.onDetachedLabel());
 #else
     masm.branchTestPtr(Assembler::Zero, HeapReg, HeapReg, &m.onDetachedLabel());
 #endif
 }
 
 static bool
-GenerateFFIInterpExit(ModuleCompiler& m, const ModuleCompiler::ExitDescriptor& exit,
-                      unsigned exitIndex, Label* throwLabel)
+GenerateFFIInterpExit(ModuleCompiler& m, const Signature* sig, unsigned exitIndex,
+                      Label* throwLabel)
 {
     MacroAssembler& masm = m.masm();
     MOZ_ASSERT(masm.framePushed() == 0);
 
     // Argument types for InvokeFromAsmJS_*:
     static const MIRType typeArray[] = { MIRType_Pointer,   // exitDatum
                                          MIRType_Int32,     // argc
                                          MIRType_Pointer }; // argv
@@ -11315,39 +11260,39 @@ GenerateFFIInterpExit(ModuleCompiler& m,
     if (!invokeArgTypes.append(typeArray, ArrayLength(typeArray)))
         return false;
 
     // At the point of the call, the stack layout shall be (sp grows to the left):
     //   | stack args | padding | Value argv[] | padding | retaddr | caller stack args |
     // The padding between stack args and argv ensures that argv is aligned. The
     // padding between argv and retaddr ensures that sp is aligned.
     unsigned offsetToArgv = AlignBytes(StackArgBytes(invokeArgTypes), sizeof(double));
-    unsigned argvBytes = Max<size_t>(1, exit.sig().args().length()) * sizeof(Value);
+    unsigned argvBytes = Max<size_t>(1, sig->args().length()) * sizeof(Value);
     unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, offsetToArgv + argvBytes);
 
     Label begin;
     GenerateAsmJSExitPrologue(masm, framePushed, AsmJSExit::SlowFFI, &begin);
 
     // Fill the argument array.
     unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed();
     Register scratch = ABIArgGenerator::NonArgReturnReg0;
-    FillArgumentArray(m, exit.sig().args(), offsetToArgv, offsetToCallerStackArgs, scratch);
+    FillArgumentArray(m, sig->args(), offsetToArgv, offsetToCallerStackArgs, scratch);
 
     // Prepare the arguments for the call to InvokeFromAsmJS_*.
     ABIArgMIRTypeIter i(invokeArgTypes);
 
     // argument 0: exitIndex
     if (i->kind() == ABIArg::GPR)
         masm.mov(ImmWord(exitIndex), i->gpr());
     else
         masm.store32(Imm32(exitIndex), Address(masm.getStackPointer(), i->offsetFromArgBase()));
     i++;
 
     // argument 1: argc
-    unsigned argc = exit.sig().args().length();
+    unsigned argc = sig->args().length();
     if (i->kind() == ABIArg::GPR)
         masm.mov(ImmWord(argc), i->gpr());
     else
         masm.store32(Imm32(argc), Address(masm.getStackPointer(), i->offsetFromArgBase()));
     i++;
 
     // argument 2: argv
     Address argv(masm.getStackPointer(), offsetToArgv);
@@ -11357,17 +11302,17 @@ GenerateFFIInterpExit(ModuleCompiler& m,
         masm.computeEffectiveAddress(argv, scratch);
         masm.storePtr(scratch, Address(masm.getStackPointer(), i->offsetFromArgBase()));
     }
     i++;
     MOZ_ASSERT(i.done());
 
     // Make the call, test whether it succeeded, and extract the return value.
     AssertStackAlignment(masm, ABIStackAlignment);
-    switch (exit.sig().retType().which()) {
+    switch (sig->retType().which()) {
       case RetType::Void:
         masm.call(AsmJSImmPtr(AsmJSImm_InvokeFromAsmJS_Ignore));
         masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         break;
       case RetType::Signed:
         masm.call(AsmJSImmPtr(AsmJSImm_InvokeFromAsmJS_ToInt32));
         masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
         masm.unboxInt32(argv, ReturnReg);
@@ -11396,30 +11341,30 @@ GenerateFFIInterpExit(ModuleCompiler& m,
 
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32)
 static const unsigned MaybeSavedGlobalReg = sizeof(void*);
 #else
 static const unsigned MaybeSavedGlobalReg = 0;
 #endif
 
 static bool
-GenerateFFIIonExit(ModuleCompiler& m, const ModuleCompiler::ExitDescriptor& exit,
-                   unsigned exitIndex, Label* throwLabel)
+GenerateFFIIonExit(ModuleCompiler& m, const Signature* sig, unsigned exitIndex,
+                   Label* throwLabel)
 {
     MacroAssembler& masm = m.masm();
     MOZ_ASSERT(masm.framePushed() == 0);
 
     // Ion calls use the following stack layout (sp grows to the left):
     //   | retaddr | descriptor | callee | argc | this | arg1..N |
     // After the Ion frame, the global register (if present) is saved since Ion
     // does not preserve non-volatile regs. Also, unlike most ABIs, Ion requires
     // that sp be JitStackAlignment-aligned *after* pushing the return address.
     static_assert(AsmJSStackAlignment >= JitStackAlignment, "subsumes");
     unsigned sizeOfRetAddr = sizeof(void*);
-    unsigned ionFrameBytes = 3 * sizeof(void*) + (1 + exit.sig().args().length()) * sizeof(Value);
+    unsigned ionFrameBytes = 3 * sizeof(void*) + (1 + sig->args().length()) * sizeof(Value);
     unsigned totalIonBytes = sizeOfRetAddr + ionFrameBytes + MaybeSavedGlobalReg;
     unsigned ionFramePushed = StackDecrementForCall(masm, JitStackAlignment, totalIonBytes) -
                               sizeOfRetAddr;
 
     Label begin;
     GenerateAsmJSExitPrologue(masm, ionFramePushed, AsmJSExit::JitFFI, &begin);
 
     // 1. Descriptor
@@ -11449,28 +11394,28 @@ GenerateFFIIonExit(ModuleCompiler& m, co
     masm.storePtr(callee, Address(masm.getStackPointer(), argOffset));
     argOffset += sizeof(size_t);
 
     // 2.4. Load callee executable entry point
     masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
     masm.loadBaselineOrIonNoArgCheck(callee, callee, nullptr);
 
     // 3. Argc
-    unsigned argc = exit.sig().args().length();
+    unsigned argc = sig->args().length();
     masm.storePtr(ImmWord(uintptr_t(argc)), Address(masm.getStackPointer(), argOffset));
     argOffset += sizeof(size_t);
 
     // 4. |this| value
     masm.storeValue(UndefinedValue(), Address(masm.getStackPointer(), argOffset));
     argOffset += sizeof(Value);
 
     // 5. Fill the arguments
     unsigned offsetToCallerStackArgs = ionFramePushed + sizeof(AsmJSFrame);
-    FillArgumentArray(m, exit.sig().args(), argOffset, offsetToCallerStackArgs, scratch);
-    argOffset += exit.sig().args().length() * sizeof(Value);
+    FillArgumentArray(m, sig->args(), argOffset, offsetToCallerStackArgs, scratch);
+    argOffset += sig->args().length() * sizeof(Value);
     MOZ_ASSERT(argOffset == ionFrameBytes);
 
     // 6. Jit code will clobber all registers, even non-volatiles. GlobalReg and
     //    HeapReg are removed from the general register set for asm.js code, so
     //    these will not have been saved by the caller like all other registers,
     //    so they must be explicitly preserved. Only save GlobalReg since
     //    HeapReg must be reloaded (from global data) after the call since the
     //    heap may change during the FFI call.
@@ -11602,17 +11547,17 @@ GenerateFFIIonExit(ModuleCompiler& m, co
     static_assert(ABIStackAlignment <= JitStackAlignment, "subsumes");
     masm.reserveStack(sizeOfRetAddr);
     unsigned nativeFramePushed = masm.framePushed();
     AssertStackAlignment(masm, ABIStackAlignment);
 
     masm.branchTestMagic(Assembler::Equal, JSReturnOperand, throwLabel);
 
     Label oolConvert;
-    switch (exit.sig().retType().which()) {
+    switch (sig->retType().which()) {
       case RetType::Void:
         break;
       case RetType::Signed:
         masm.convertValueToInt32(JSReturnOperand, ReturnDoubleReg, ReturnReg, &oolConvert,
                                  /* -0 check */ false);
         break;
       case RetType::Double:
         masm.convertValueToDouble(JSReturnOperand, ReturnDoubleReg, &oolConvert);
@@ -11659,17 +11604,17 @@ GenerateFFIIonExit(ModuleCompiler& m, co
             masm.computeEffectiveAddress(argv, scratch);
             masm.storePtr(scratch, Address(masm.getStackPointer(), i->offsetFromArgBase()));
         }
         i++;
         MOZ_ASSERT(i.done());
 
         // Call coercion function
         AssertStackAlignment(masm, ABIStackAlignment);
-        switch (exit.sig().retType().which()) {
+        switch (sig->retType().which()) {
           case RetType::Signed:
             masm.call(AsmJSImmPtr(AsmJSImm_CoerceInPlace_ToInt32));
             masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
             masm.unboxInt32(Address(masm.getStackPointer(), offsetToCoerceArgv), ReturnReg);
             break;
           case RetType::Double:
             masm.call(AsmJSImmPtr(AsmJSImm_CoerceInPlace_ToNumber));
             masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
@@ -11685,25 +11630,25 @@ GenerateFFIIonExit(ModuleCompiler& m, co
 
     MOZ_ASSERT(masm.framePushed() == 0);
 
     return !masm.oom() && m.finishGeneratingJitExit(exitIndex, &begin, &profilingReturn);
 }
 
 // See "asm.js FFI calls" comment above.
 static bool
-GenerateFFIExits(ModuleCompiler& m, const ModuleCompiler::ExitDescriptor& exit, unsigned exitIndex,
+GenerateFFIExits(ModuleCompiler& m, unsigned exitIndex, const Signature* signature,
                  Label* throwLabel)
 {
     // Generate the slow path through the interpreter
-    if (!GenerateFFIInterpExit(m, exit, exitIndex, throwLabel))
+    if (!GenerateFFIInterpExit(m, signature, exitIndex, throwLabel))
         return false;
 
     // Generate the fast path
-    if (!GenerateFFIIonExit(m, exit, exitIndex, throwLabel))
+    if (!GenerateFFIIonExit(m, signature, exitIndex, throwLabel))
         return false;
 
     return true;
 }
 
 // Generate a thunk that updates fp before calling the given builtin so that
 // both the builtin and the calling function show up in profiler stacks. (This
 // thunk is dynamically patched in when profiling is enabled.) Since the thunk
@@ -12236,22 +12181,16 @@ EstablishPreconditions(ExclusiveContext*
         return Warn(parser, JSMSG_USE_ASM_TYPE_FAIL, "Disabled by generator context");
 
     if (parser.pc->isArrowFunction())
         return Warn(parser, JSMSG_USE_ASM_TYPE_FAIL, "Disabled by arrow function context");
 
     return true;
 }
 
-static bool
-NoExceptionPending(ExclusiveContext* cx)
-{
-    return !cx->isJSContext() || !cx->asJSContext()->isExceptionPending();
-}
-
 bool
 js::ValidateAsmJS(ExclusiveContext* cx, AsmJSParser& parser, ParseNode* stmtList, bool* validated)
 {
     *validated = false;
 
     if (!EstablishPreconditions(cx, parser))
         return NoExceptionPending(cx);