Merge mozilla-inbound to mozilla-central a=merge
author: Razvan Maries <rmaries@mozilla.com>
date: Sat, 11 May 2019 12:28:35 +0300
changeset: 532329 2f94885dcf94f823f37c99231c79146b6b907e4c
parent: 532319 1be22e2d35d1e2a40451b2d555ced98a49d533af (current diff)
parent: 532328 131aa3a88866bc3886463b9c53710522e061b12d (diff)
child: 532332 c0a837d2a3778981498a2c6a1afd35eb86f0dc95
push id: 11265
push user: ffxbld-merge
push date: Mon, 13 May 2019 10:53:39 +0000
treeherder: mozilla-beta@77e0fe8dbdd3
reviewers: merge
milestone: 68.0a1
first release with: nightly 2f94885dcf94 / 68.0a1 / 20190511094814 (linux32, linux64, mac, win32, win64)
--- a/dom/base/Element.h
+++ b/dom/base/Element.h
@@ -1938,17 +1938,17 @@ class Element : public FragmentOrElement
   // Prevent people from doing pointless checks/casts on Element instances.
   void IsElement() = delete;
   void AsElement() = delete;
 
   // Data members
   EventStates mState;
   // Per-node data managed by Servo.
   //
-  // There should not be data on nodes that are in the flattened tree, or
+  // There should not be data on nodes that are not in the flattened tree, or
   // descendants of display: none elements.
   mozilla::RustCell<ServoNodeData*> mServoData;
 
  protected:
   // Array containing all attributes for this element
   AttrArray mAttrs;
 };
 
--- a/gfx/wr/webrender/src/prim_store/mod.rs
+++ b/gfx/wr/webrender/src/prim_store/mod.rs
@@ -2005,27 +2005,44 @@ impl PrimitiveStore {
                 };
 
                 let combined_local_clip_rect = if apply_local_clip_rect {
                     clip_chain.local_clip_rect
                 } else {
                     prim_instance.local_clip_rect
                 };
 
+                if combined_local_clip_rect.size.is_empty_or_negative() {
+                    debug_assert!(combined_local_clip_rect.size.width >= 0.0 &&
+                                  combined_local_clip_rect.size.height >= 0.0);
+                    if prim_instance.is_chased() {
+                        println!("\tculled for zero local clip rectangle");
+                    }
+                    prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
+                    continue;
+                }
+
                 // All pictures must snap to their primitive rect instead of the
                 // visible rect like most primitives. This is because the picture's
                 // visible rect includes the effect of the picture's clip rect,
                 // which was not considered by the picture's children. The primitive
                 // rect however is simply the union of the visible rect of the
                 // children, which they snapped to, which is precisely what we also
                 // need to snap to in order to be consistent.
                 let visible_rect = if snap_to_visible {
-                    combined_local_clip_rect
-                        .intersection(&prim_local_rect)
-                        .unwrap_or(LayoutRect::zero())
+                    match combined_local_clip_rect.intersection(&prim_local_rect) {
+                        Some(r) => r,
+                        None => {
+                            if prim_instance.is_chased() {
+                                println!("\tculled for zero visible rectangle");
+                            }
+                            prim_instance.visibility_info = PrimitiveVisibilityIndex::INVALID;
+                            continue;
+                        }
+                    }
                 } else {
                     prim_local_rect
                 };
 
                 // This is how primitives get snapped. In general, snapping a picture's
                 // visible rect here will have no effect, but if it is rasterized in its
                 // own space, or it has a blur or drop shadow effect applied, it may
                 // provide a snapping offset.
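The two early-outs added in this hunk mark a primitive invisible instead of carrying an empty rect forward into snapping. For readers tracing the logic, the same decision as a standalone C++ sketch (simplified rect type; the real Rust code uses WebRender's LayoutRect and sets PrimitiveVisibilityIndex::INVALID):

#include <algorithm>
#include <optional>

struct Rect { float x, y, w, h; };

// Mirrors LayoutRect::intersection(): an empty overlap yields no rect.
static std::optional<Rect> Intersect(const Rect& a, const Rect& b) {
  float x0 = std::max(a.x, b.x);
  float y0 = std::max(a.y, b.y);
  float x1 = std::min(a.x + a.w, b.x + b.w);
  float y1 = std::min(a.y + a.h, b.y + b.h);
  if (x1 <= x0 || y1 <= y0) {
    return std::nullopt;
  }
  return Rect{x0, y0, x1 - x0, y1 - y0};
}

// A primitive is culled when its combined local clip rect is empty, or when
// snapping to the visible rect finds no overlap between the clip rect and
// the primitive's local rect.
static bool ShouldCull(const Rect& combinedClip, const Rect& primLocal,
                       bool snapToVisible) {
  if (combinedClip.w <= 0.0f || combinedClip.h <= 0.0f) {
    return true;  // "culled for zero local clip rectangle"
  }
  return snapToVisible &&
         !Intersect(combinedClip, primLocal).has_value();  // "zero visible rectangle"
}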
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/funcref.js
@@ -0,0 +1,142 @@
+const {Module,Instance,Global,RuntimeError} = WebAssembly;
+
+const badWasmFunc = /can only pass WebAssembly exported functions to funcref/;
+const typeErr = /type mismatch/;
+
+
+// Validation:
+
+wasmEvalText(`(module (func (local anyref funcref) (local.set 0 (local.get 1))))`);
+wasmEvalText(`(module (func (local funcref funcref) (local.set 0 (local.get 1))))`);
+wasmEvalText(`(module (func (local funcref) (local.set 0 (ref.null))))`);
+wasmFailValidateText(`(module (func (local funcref anyref) (local.set 0 (local.get 1))))`, typeErr);
+wasmEvalText(`(module (global (mut funcref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
+wasmEvalText(`(module (global (mut anyref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
+wasmFailValidateText(`(module (global (mut funcref) (ref.null)) (func (param anyref) (global.set 0 (local.get 0))))`, typeErr);
+wasmEvalText(`(module (func (param funcref)) (func (param funcref) (call 0 (local.get 0))))`);
+wasmEvalText(`(module (func (param anyref)) (func (param funcref) (call 0 (local.get 0))))`);
+wasmFailValidateText(`(module (func (param funcref)) (func (param anyref) (call 0 (local.get 0))))`, typeErr);
+wasmEvalText(`(module (func (param funcref) (result funcref) (block funcref (local.get 0) (br 0))))`);
+wasmEvalText(`(module (func (param funcref) (result anyref) (block anyref (local.get 0) (br 0))))`);
+wasmFailValidateText(`(module (func (param anyref) (result anyref) (block funcref (local.get 0) (br 0))))`, typeErr);
+wasmEvalText(`(module (func (param funcref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
+wasmEvalText(`(module (func (param anyref funcref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
+wasmEvalText(`(module (func (param funcref anyref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
+wasmFailValidateText(`(module (func (param anyref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
+wasmFailValidateText(`(module (func (param funcref anyref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
+
+
+// Runtime:
+
+var m = new Module(wasmTextToBinary(`(module (func (export "wasmFun")))`));
+const wasmFun1 = new Instance(m).exports.wasmFun;
+const wasmFun2 = new Instance(m).exports.wasmFun;
+const wasmFun3 = new Instance(m).exports.wasmFun;
+
+var run = wasmEvalText(`(module
+    (global (mut funcref) (ref.null))
+    (func (param $x funcref) (param $test i32) (result funcref)
+      local.get $x
+      global.get 0
+      local.get $test
+      select
+    )
+    (func (export "run") (param $a funcref) (param $b funcref) (param $c funcref) (param $test1 i32) (param $test2 i32) (result funcref)
+      local.get $a
+      global.set 0
+      block funcref
+        local.get $b
+        local.get $test1
+        br_if 0
+        drop
+        local.get $c
+      end
+      local.get $test2
+      call 0
+    )
+)`).exports.run;
+assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, false), wasmFun1);
+assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, false), wasmFun1);
+assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, true), wasmFun2);
+assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, true), wasmFun3);
+
+var run = wasmEvalText(`(module
+  (type $t0 (func (param anyref) (result anyref)))
+  (type $t1 (func (param funcref) (result anyref)))
+  (type $t2 (func (param anyref) (result funcref)))
+  (type $t3 (func (param funcref funcref) (result funcref)))
+  (func $f0 (type $t0) ref.null)
+  (func $f1 (type $t1) ref.null)
+  (func $f2 (type $t2) ref.null)
+  (func $f3 (type $t3) ref.null)
+  (table funcref (elem $f0 $f1 $f2 $f3))
+  (func (export "run") (param i32 i32) (result anyref)
+    block $b3 block $b2 block $b1 block $b0
+      local.get 0
+      br_table $b0 $b1 $b2 $b3
+    end $b0
+      ref.null
+      local.get 1
+      call_indirect $t0
+      return
+    end $b1
+      ref.null
+      local.get 1
+      call_indirect $t1
+      return
+    end $b2
+      ref.null
+      local.get 1
+      call_indirect $t2
+      return
+    end $b3
+      ref.null
+      ref.null
+      local.get 1
+      call_indirect $t3
+      return
+  )
+)`).exports.run;
+
+for (var i = 0; i < 4; i++) {
+  for (var j = 0; j < 4; j++) {
+    if (i == j)
+      assertEq(run(i, j), null);
+    else
+      assertErrorMessage(() => run(i, j), RuntimeError, /indirect call signature mismatch/);
+  }
+}
+
+
+// JS API:
+
+const wasmFun = wasmEvalText(`(module (func (export "x")))`).exports.x;
+
+var run = wasmEvalText(`(module (func (export "run") (param funcref) (result funcref) (local.get 0)))`).exports.run;
+assertEq(run(wasmFun), wasmFun);
+assertEq(run(null), null);
+assertErrorMessage(() => run(() => {}), TypeError, badWasmFunc);
+
+var importReturnValue;
+var importFun = () => importReturnValue;
+var run = wasmEvalText(`(module (func (import "" "i") (result funcref)) (func (export "run") (result funcref) (call 0)))`, {'':{i:importFun}}).exports.run;
+importReturnValue = wasmFun;
+assertEq(run(), wasmFun);
+importReturnValue = null;
+assertEq(run(), null);
+importReturnValue = undefined;
+assertErrorMessage(() => run(), TypeError, badWasmFunc);
+importReturnValue = () => {};
+assertErrorMessage(() => run(), TypeError, badWasmFunc);
+
+var g = new Global({value:'funcref', mutable:true}, wasmFun);
+assertEq(g.value, wasmFun);
+g.value = null;
+assertEq(g.value, null);
+assertErrorMessage(() => g.value = () => {}, TypeError, badWasmFunc);
+var g = new Global({value:'funcref', mutable:true}, null);
+assertEq(g.value, null);
+g.value = wasmFun;
+assertEq(g.value, wasmFun);
+assertErrorMessage(() => new Global({value:'funcref'}, () => {}), TypeError, badWasmFunc);
--- a/js/src/jit-test/tests/wasm/tables.js
+++ b/js/src/jit-test/tests/wasm/tables.js
@@ -1,15 +1,17 @@
 const Module = WebAssembly.Module;
 const Instance = WebAssembly.Instance;
 const Table = WebAssembly.Table;
 const Memory = WebAssembly.Memory;
 const LinkError = WebAssembly.LinkError;
 const RuntimeError = WebAssembly.RuntimeError;
 
+const badFuncRefError = /can only pass WebAssembly exported functions to funcref/;
+
 var callee = i => `(func $f${i} (result i32) (i32.const ${i}))`;
 
 wasmFailValidateText(`(module (elem (i32.const 0) $f0) ${callee(0)})`, /elem segment requires a table section/);
 wasmFailValidateText(`(module (table 10 funcref) (elem (i32.const 0) 0))`, /table element out of range/);
 wasmFailValidateText(`(module (table 10 funcref) (func) (elem (i32.const 0) 0 1))`, /table element out of range/);
 wasmFailValidateText(`(module (table 10 funcref) (func) (elem (f32.const 0) 0) ${callee(0)})`, /type mismatch/);
 
 assertErrorMessage(() => wasmEvalText(`(module (table 10 funcref) (elem (i32.const 10) $f0) ${callee(0)})`), LinkError, /elem segment does not fit/);
@@ -117,18 +119,18 @@ tbl.set(1, e2.g);
 tbl.set(2, e3.h);
 var e4 = wasmEvalText(`(module (import "a" "b" (table 3 funcref)) ${caller})`, {a:{b:tbl}}).exports;
 assertEq(e4.call(0), 42);
 assertErrorMessage(() => e4.call(1), RuntimeError, /indirect call signature mismatch/);
 assertEq(e4.call(2), 13);
 
 var asmjsFun = (function() { "use asm"; function f() {} return f })();
 assertEq(isAsmJSFunction(asmjsFun), isAsmJSCompilationAvailable());
-assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, /can only assign WebAssembly exported functions/);
-assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, /bad initializer to funcref table/);
+assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, badFuncRefError);
+assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, badFuncRefError);
 
 var m = new Module(wasmTextToBinary(`(module
     (type $i2i (func (param i32) (result i32)))
     (import "a" "mem" (memory 1))
     (import "a" "tbl" (table 10 funcref))
     (import $imp "a" "imp" (result i32))
     (func $call (param $i i32) (result i32)
         (i32.add
--- a/js/src/jit/AliasAnalysis.cpp
+++ b/js/src/jit/AliasAnalysis.cpp
@@ -174,17 +174,16 @@ static inline const MDefinition* GetObje
     case MDefinition::Opcode::WasmStore:
     case MDefinition::Opcode::WasmCompareExchangeHeap:
     case MDefinition::Opcode::WasmAtomicBinopHeap:
     case MDefinition::Opcode::WasmAtomicExchangeHeap:
     case MDefinition::Opcode::WasmLoadGlobalVar:
     case MDefinition::Opcode::WasmLoadGlobalCell:
     case MDefinition::Opcode::WasmStoreGlobalVar:
     case MDefinition::Opcode::WasmStoreGlobalCell:
-    case MDefinition::Opcode::WasmLoadRef:
     case MDefinition::Opcode::WasmStoreRef:
     case MDefinition::Opcode::ArrayJoin:
     case MDefinition::Opcode::ArraySlice:
       return nullptr;
     default:
 #ifdef DEBUG
       // Crash when the default aliasSet is overridden but the opcode is not
       // added to the list above.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -7416,17 +7416,18 @@ void CodeGenerator::emitWasmCallBase(LWa
       break;
     case wasm::CalleeDesc::Builtin:
       masm.call(desc, callee.builtin());
       reloadRegs = false;
       switchRealm = false;
       break;
     case wasm::CalleeDesc::BuiltinInstanceMethod:
       masm.wasmCallBuiltinInstanceMethod(desc, mir->instanceArg(),
-                                         callee.builtin());
+                                         callee.builtin(),
+                                         mir->builtinMethodFailureMode());
       switchRealm = false;
       break;
   }
 
   // Note the assembler offset for the associated LSafePoint.
   markSafepointAt(masm.currentOffset(), lir);
 
   // Now that all the outbound in-memory args are on the stack, note the
@@ -7525,20 +7526,16 @@ void CodeGenerator::visitWasmStoreSlot(L
   }
 }
 
 void CodeGenerator::visitWasmDerivedPointer(LWasmDerivedPointer* ins) {
   masm.movePtr(ToRegister(ins->base()), ToRegister(ins->output()));
   masm.addPtr(Imm32(int32_t(ins->offset())), ToRegister(ins->output()));
 }
 
-void CodeGenerator::visitWasmLoadRef(LWasmLoadRef* lir) {
-  masm.loadPtr(Address(ToRegister(lir->ptr()), 0), ToRegister(lir->output()));
-}
-
 void CodeGenerator::visitWasmStoreRef(LWasmStoreRef* ins) {
   Register tls = ToRegister(ins->tls());
   Register valueAddr = ToRegister(ins->valueAddr());
   Register value = ToRegister(ins->value());
   Register temp = ToRegister(ins->temp());
 
   Label skipPreBarrier;
   wasm::EmitWasmPreBarrierGuard(masm, tls, temp, valueAddr, &skipPreBarrier);
@@ -13913,16 +13910,17 @@ void CodeGenerator::emitIonToWasmCallBas
       case wasm::ValType::I32:
       case wasm::ValType::F32:
       case wasm::ValType::F64:
         argMir = ToMIRType(sig.args()[i]);
         break;
       case wasm::ValType::I64:
       case wasm::ValType::Ref:
       case wasm::ValType::AnyRef:
+      case wasm::ValType::FuncRef:
         // Don't forget to trace GC type arguments in TraceJitExitFrames
         // when they're enabled.
         MOZ_CRASH("unexpected argument type when calling from ion to wasm");
       case wasm::ValType::NullRef:
         MOZ_CRASH("NullRef not expressible");
     }
 
     ABIArg arg = abi.next(argMir);
@@ -13971,16 +13969,17 @@ void CodeGenerator::emitIonToWasmCallBas
       MOZ_ASSERT(ToFloatRegister(lir->output()) == ReturnFloat32Reg);
       break;
     case wasm::ExprType::F64:
       MOZ_ASSERT(lir->mir()->type() == MIRType::Double);
       MOZ_ASSERT(ToFloatRegister(lir->output()) == ReturnDoubleReg);
       break;
     case wasm::ExprType::Ref:
     case wasm::ExprType::AnyRef:
+    case wasm::ExprType::FuncRef:
     case wasm::ExprType::I64:
       // Don't forget to trace GC type return value in TraceJitExitFrames
       // when they're enabled.
       MOZ_CRASH("unexpected return type when calling from ion to wasm");
     case wasm::ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case wasm::ExprType::Limit:
       MOZ_CRASH("Limit");
@@ -14011,21 +14010,16 @@ void CodeGenerator::visitIonToWasmCall(L
 void CodeGenerator::visitIonToWasmCallV(LIonToWasmCallV* lir) {
   emitIonToWasmCallBase(lir);
 }
 
 void CodeGenerator::visitWasmNullConstant(LWasmNullConstant* lir) {
   masm.xorPtr(ToRegister(lir->output()), ToRegister(lir->output()));
 }
 
-void CodeGenerator::visitIsNullPointer(LIsNullPointer* lir) {
-  masm.cmpPtrSet(Assembler::Equal, ToRegister(lir->value()), ImmWord(0),
-                 ToRegister(lir->output()));
-}
-
 void CodeGenerator::visitWasmCompareAndSelect(LWasmCompareAndSelect* ins) {
   bool cmpIs32bit = ins->compareType() == MCompare::Compare_Int32 ||
                     ins->compareType() == MCompare::Compare_UInt32;
   bool selIs32bit = ins->mir()->type() == MIRType::Int32;
 
   if (cmpIs32bit && selIs32bit) {
     Register out = ToRegister(ins->output());
     MOZ_ASSERT(ToRegister(ins->ifTrueExpr()) == out,
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -4345,21 +4345,16 @@ void LIRGenerator::visitWasmStoreGlobalC
   }
 }
 
 void LIRGenerator::visitWasmDerivedPointer(MWasmDerivedPointer* ins) {
   LAllocation base = useRegisterAtStart(ins->base());
   define(new (alloc()) LWasmDerivedPointer(base), ins);
 }
 
-void LIRGenerator::visitWasmLoadRef(MWasmLoadRef* ins) {
-  define(new (alloc()) LWasmLoadRef(useRegisterAtStart(ins->getOperand(0))),
-         ins);
-}
-
 void LIRGenerator::visitWasmStoreRef(MWasmStoreRef* ins) {
   LAllocation tls = useRegister(ins->tls());
   LAllocation valueAddr = useFixed(ins->valueAddr(), PreBarrierReg);
   LAllocation value = useRegister(ins->value());
   add(new (alloc()) LWasmStoreRef(tls, valueAddr, value, temp()), ins);
 }
 
 void LIRGenerator::visitWasmParameter(MWasmParameter* ins) {
@@ -4717,21 +4712,16 @@ void LIRGenerator::visitConstant(MConsta
       MOZ_CRASH("unexpected constant type");
   }
 }
 
 void LIRGenerator::visitWasmNullConstant(MWasmNullConstant* ins) {
   define(new (alloc()) LWasmNullConstant(), ins);
 }
 
-void LIRGenerator::visitIsNullPointer(MIsNullPointer* ins) {
-  define(new (alloc()) LIsNullPointer(useRegisterAtStart(ins->getOperand(0))),
-         ins);
-}
-
 void LIRGenerator::visitWasmFloatConstant(MWasmFloatConstant* ins) {
   switch (ins->type()) {
     case MIRType::Double:
       define(new (alloc()) LDouble(ins->toDouble()), ins);
       break;
     case MIRType::Float32:
       define(new (alloc()) LFloat32(ins->toFloat32()), ins);
       break;
--- a/js/src/jit/MCallOptimize.cpp
+++ b/js/src/jit/MCallOptimize.cpp
@@ -4267,16 +4267,17 @@ IonBuilder::InliningResult IonBuilder::i
       case wasm::ValType::F32:
         conversion = MToFloat32::New(alloc(), arg);
         break;
       case wasm::ValType::F64:
         conversion = MToDouble::New(alloc(), arg);
         break;
       case wasm::ValType::I64:
       case wasm::ValType::AnyRef:
+      case wasm::ValType::FuncRef:
       case wasm::ValType::Ref:
         MOZ_CRASH("impossible per above check");
       case wasm::ValType::NullRef:
         MOZ_CRASH("NullRef not expressible");
     }
 
     current->add(conversion);
     call->initArg(i, conversion);
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -5446,27 +5446,29 @@ MWasmCall* MWasmCall::New(TempAllocator&
     call->initOperand(call->argRegs_.length(), tableIndex);
   }
 
   return call;
 }
 
 MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall(
     TempAllocator& alloc, const wasm::CallSiteDesc& desc,
-    const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
-    const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned) {
+    const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
+    const ABIArg& instanceArg, const Args& args, MIRType resultType,
+    uint32_t stackArgAreaSizeUnaligned) {
   auto callee = wasm::CalleeDesc::builtinInstanceMethod(builtin);
   MWasmCall* call = MWasmCall::New(alloc, desc, callee, args, resultType,
                                    stackArgAreaSizeUnaligned, nullptr);
   if (!call) {
     return nullptr;
   }
 
   MOZ_ASSERT(instanceArg != ABIArg());
   call->instanceArg_ = instanceArg;
+  call->builtinMethodFailureMode_ = failureMode;
   return call;
 }
 
 void MSqrt::trySpecializeFloat32(TempAllocator& alloc) {
   if (!input()->canProduceFloat32() || !CheckUsesAreFloat32Consumers(this)) {
     if (input()->type() == MIRType::Float32) {
       ConvertDefinitionToDouble<0>(alloc, input(), this);
     }
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -1565,39 +1565,16 @@ class MWasmNullConstant : public MNullar
   bool congruentTo(const MDefinition* ins) const override {
     return ins->isWasmNullConstant();
   }
   AliasSet getAliasSet() const override { return AliasSet::None(); }
 
   ALLOW_CLONE(MWasmNullConstant)
 };
 
-class MIsNullPointer : public MUnaryInstruction, public NoTypePolicy::Data {
-  explicit MIsNullPointer(MDefinition* value)
-      : MUnaryInstruction(classOpcode, value) {
-    MOZ_ASSERT(value->type() == MIRType::Pointer);
-    setResultType(MIRType::Boolean);
-    setMovable();
-  }
-
- public:
-  INSTRUCTION_HEADER(IsNullPointer);
-
-  static MIsNullPointer* New(TempAllocator& alloc, MDefinition* value) {
-    return new (alloc) MIsNullPointer(value);
-  }
-
-  bool congruentTo(const MDefinition* ins) const override {
-    return congruentIfOperandsEqual(ins);
-  }
-  AliasSet getAliasSet() const override { return AliasSet::None(); }
-
-  ALLOW_CLONE(MIsNullPointer)
-};
-
 // Floating-point value as created by wasm. Just a constant value, used to
 // effectively inhibit all the MIR optimizations. This uses the same LIR nodes
 // as an MConstant of the same type would.
 class MWasmFloatConstant : public MNullaryInstruction {
   union {
     float f32_;
     double f64_;
     uint64_t bits_;
@@ -11793,41 +11770,16 @@ class MWasmDerivedPointer : public MUnar
 
   bool congruentTo(const MDefinition* ins) const override {
     return congruentIfOperandsEqual(ins);
   }
 
   ALLOW_CLONE(MWasmDerivedPointer)
 };
 
-class MWasmLoadRef : public MUnaryInstruction, public NoTypePolicy::Data {
-  AliasSet::Flag aliasSet_;
-
-  explicit MWasmLoadRef(MDefinition* valueAddr, AliasSet::Flag aliasSet,
-                        bool isMovable = true)
-      : MUnaryInstruction(classOpcode, valueAddr), aliasSet_(aliasSet) {
-    MOZ_ASSERT(valueAddr->type() == MIRType::Pointer);
-    setResultType(MIRType::RefOrNull);
-    if (isMovable) {
-      setMovable();
-    }
-  }
-
- public:
-  INSTRUCTION_HEADER(WasmLoadRef)
-  TRIVIAL_NEW_WRAPPERS
-
-  bool congruentTo(const MDefinition* ins) const override {
-    return congruentIfOperandsEqual(ins);
-  }
-  AliasSet getAliasSet() const override { return AliasSet::Load(aliasSet_); }
-
-  ALLOW_CLONE(MWasmLoadRef)
-};
-
 class MWasmStoreRef : public MAryInstruction<3>, public NoTypePolicy::Data {
   AliasSet::Flag aliasSet_;
 
   MWasmStoreRef(MDefinition* tls, MDefinition* valueAddr, MDefinition* value,
                 AliasSet::Flag aliasSet)
       : MAryInstruction<3>(classOpcode), aliasSet_(aliasSet) {
     MOZ_ASSERT(valueAddr->type() == MIRType::Pointer);
     MOZ_ASSERT(value->type() == MIRType::RefOrNull);
@@ -11892,25 +11844,27 @@ class MWasmStackArg : public MUnaryInstr
 
   uint32_t spOffset() const { return spOffset_; }
   void incrementOffset(uint32_t inc) { spOffset_ += inc; }
 };
 
 class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
   wasm::CallSiteDesc desc_;
   wasm::CalleeDesc callee_;
+  wasm::FailureMode builtinMethodFailureMode_;
   FixedList<AnyRegister> argRegs_;
   uint32_t stackArgAreaSizeUnaligned_;
   ABIArg instanceArg_;
 
   MWasmCall(const wasm::CallSiteDesc& desc, const wasm::CalleeDesc& callee,
             uint32_t stackArgAreaSizeUnaligned)
       : MVariadicInstruction(classOpcode),
         desc_(desc),
         callee_(callee),
+        builtinMethodFailureMode_(wasm::FailureMode::Infallible),
         stackArgAreaSizeUnaligned_(stackArgAreaSizeUnaligned) {}
 
  public:
   INSTRUCTION_HEADER(WasmCall)
 
   struct Arg {
     AnyRegister reg;
     MDefinition* def;
@@ -11920,26 +11874,31 @@ class MWasmCall final : public MVariadic
 
   static MWasmCall* New(TempAllocator& alloc, const wasm::CallSiteDesc& desc,
                         const wasm::CalleeDesc& callee, const Args& args,
                         MIRType resultType, uint32_t stackArgAreaSizeUnaligned,
                         MDefinition* tableIndex = nullptr);
 
   static MWasmCall* NewBuiltinInstanceMethodCall(
       TempAllocator& alloc, const wasm::CallSiteDesc& desc,
-      const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
-      const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned);
+      const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
+      const ABIArg& instanceArg, const Args& args, MIRType resultType,
+      uint32_t stackArgAreaSizeUnaligned);
 
   size_t numArgs() const { return argRegs_.length(); }
   AnyRegister registerForArg(size_t index) const {
     MOZ_ASSERT(index < numArgs());
     return argRegs_[index];
   }
   const wasm::CallSiteDesc& desc() const { return desc_; }
   const wasm::CalleeDesc& callee() const { return callee_; }
+  wasm::FailureMode builtinMethodFailureMode() const {
+    MOZ_ASSERT(callee_.which() == wasm::CalleeDesc::BuiltinInstanceMethod);
+    return builtinMethodFailureMode_;
+  }
   uint32_t stackArgAreaSizeUnaligned() const {
     return stackArgAreaSizeUnaligned_;
   }
 
   bool possiblyCalls() const override { return true; }
 
   const ABIArg& instanceArg() const { return instanceArg_; }
 };
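builtinMethodFailureMode_ defaults to Infallible and is asserted to be read only for BuiltinInstanceMethod callees. A sketch of the caller side, with argument values illustrative rather than taken from this patch (SASigTableGet and its failureMode field appear in the WasmBaselineCompile.cpp hunks below):

// Sketch: threading a builtin's declared failure mode into the call node.
MWasmCall* call = MWasmCall::NewBuiltinInstanceMethodCall(
    alloc, desc,
    SASigTableGet.identity,     // SymbolicAddress of the instance builtin
    SASigTableGet.failureMode,  // e.g. FailureMode::FailOnInvalidRef
    instanceArg, args, MIRType::RefOrNull, stackArgAreaSizeUnaligned);
if (!call) {
  return false;
}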
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -3186,33 +3186,57 @@ CodeOffset MacroAssembler::wasmCallImpor
                     WasmTlsReg);
   loadWasmPinnedRegsFromTls();
 
   return call(desc, ABINonArgReg0);
 }
 
 CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
     const wasm::CallSiteDesc& desc, const ABIArg& instanceArg,
-    wasm::SymbolicAddress builtin) {
+    wasm::SymbolicAddress builtin, wasm::FailureMode failureMode) {
   MOZ_ASSERT(instanceArg != ABIArg());
 
   if (instanceArg.kind() == ABIArg::GPR) {
     loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)),
             instanceArg.gpr());
   } else if (instanceArg.kind() == ABIArg::Stack) {
     // Safe to use ABINonArgReg0 since it's the last thing before the call.
     Register scratch = ABINonArgReg0;
     loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)), scratch);
     storePtr(scratch,
              Address(getStackPointer(), instanceArg.offsetFromArgBase()));
   } else {
     MOZ_CRASH("Unknown abi passing style for pointer");
   }
 
-  return call(desc, builtin);
+  CodeOffset ret = call(desc, builtin);
+
+  if (failureMode != wasm::FailureMode::Infallible) {
+    Label noTrap;
+    switch (failureMode) {
+      case wasm::FailureMode::Infallible:
+        MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE();
+      case wasm::FailureMode::FailOnNegI32:
+        branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
+        break;
+      case wasm::FailureMode::FailOnNullPtr:
+        branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
+        break;
+      case wasm::FailureMode::FailOnInvalidRef:
+        branchPtr(Assembler::NotEqual, ReturnReg,
+                  ImmWord(uintptr_t(wasm::AnyRef::invalid().forCompiledCode())),
+                  &noTrap);
+        break;
+    }
+    wasmTrap(wasm::Trap::ThrowReported,
+             wasm::BytecodeOffset(desc.lineOrBytecode()));
+    bind(&noTrap);
+  }
+
+  return ret;
 }
 
 CodeOffset MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc,
                                             const wasm::CalleeDesc& callee,
                                             bool needsBoundsCheck) {
   Register scratch = WasmTableCallScratchReg0;
   Register index = WasmTableCallIndexReg;
 
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -1922,17 +1922,18 @@ class MacroAssembler : public MacroAssem
                               const wasm::CalleeDesc& callee,
                               bool needsBoundsCheck);
 
   // This function takes care of loading the pointer to the current instance
   // as the implicit first argument. It preserves TLS and pinned registers.
   // (TLS & pinned regs are non-volatile registers in the system ABI).
   CodeOffset wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc,
                                            const ABIArg& instanceArg,
-                                           wasm::SymbolicAddress builtin);
+                                           wasm::SymbolicAddress builtin,
+                                           wasm::FailureMode failureMode);
 
   // As enterFakeExitFrame(), but using register conventions appropriate for
   // wasm stubs.
   void enterFakeExitFrameForWasm(Register cxreg, Register scratch,
                                  ExitFrameType type) PER_SHARED_ARCH;
 
  public:
   // ========================================================================
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -6677,27 +6677,16 @@ class LWasmDerivedPointer : public LInst
   explicit LWasmDerivedPointer(const LAllocation& base)
       : LInstructionHelper(classOpcode) {
     setOperand(0, base);
   }
   const LAllocation* base() { return getOperand(0); }
   size_t offset() { return mirRaw()->toWasmDerivedPointer()->offset(); }
 };
 
-class LWasmLoadRef : public LInstructionHelper<1, 1, 0> {
- public:
-  LIR_HEADER(WasmLoadRef);
-  explicit LWasmLoadRef(const LAllocation& ptr)
-      : LInstructionHelper(classOpcode) {
-    setOperand(0, ptr);
-  }
-  MWasmLoadRef* mir() const { return mirRaw()->toWasmLoadRef(); }
-  const LAllocation* ptr() { return getOperand(0); }
-};
-
 class LWasmStoreRef : public LInstructionHelper<0, 3, 1> {
  public:
   LIR_HEADER(WasmStoreRef);
   LWasmStoreRef(const LAllocation& tls, const LAllocation& valueAddr,
                 const LAllocation& value, const LDefinition& temp)
       : LInstructionHelper(classOpcode) {
     setOperand(0, tls);
     setOperand(1, valueAddr);
@@ -6777,27 +6766,16 @@ inline bool IsWasmCall(LNode::Opcode op)
 }
 
 class LWasmNullConstant : public LInstructionHelper<1, 0, 0> {
  public:
   LIR_HEADER(WasmNullConstant);
   explicit LWasmNullConstant() : LInstructionHelper(classOpcode) {}
 };
 
-class LIsNullPointer : public LInstructionHelper<1, 1, 0> {
- public:
-  LIR_HEADER(IsNullPointer);
-  explicit LIsNullPointer(const LAllocation& value)
-      : LInstructionHelper(classOpcode) {
-    setOperand(0, value);
-  }
-  MIsNullPointer* mir() const { return mirRaw()->toIsNullPointer(); }
-  const LAllocation* value() { return getOperand(0); }
-};
-
 template <size_t Defs>
 class LWasmCallBase : public LVariadicInstruction<Defs, 0> {
   using Base = LVariadicInstruction<Defs, 0>;
 
   bool needsBoundsCheck_;
 
  public:
   LWasmCallBase(LNode::Opcode opcode, uint32_t numOperands,
--- a/js/src/js.msg
+++ b/js/src/js.msg
@@ -396,28 +396,27 @@ MSG_DEF(JSMSG_WASM_INT_DIVIDE_BY_ZERO, 0
 MSG_DEF(JSMSG_WASM_OUT_OF_BOUNDS,      0, JSEXN_WASMRUNTIMEERROR, "index out of bounds")
 MSG_DEF(JSMSG_WASM_UNALIGNED_ACCESS,   0, JSEXN_WASMRUNTIMEERROR, "unaligned memory access")
 MSG_DEF(JSMSG_WASM_WAKE_OVERFLOW,      0, JSEXN_WASMRUNTIMEERROR, "too many woken agents")
 MSG_DEF(JSMSG_WASM_DROPPED_DATA_SEG,   0, JSEXN_WASMRUNTIMEERROR, "use of dropped data segment")
 MSG_DEF(JSMSG_WASM_DROPPED_ELEM_SEG,   0, JSEXN_WASMRUNTIMEERROR, "use of dropped element segment")
 MSG_DEF(JSMSG_WASM_DEREF_NULL,         0, JSEXN_WASMRUNTIMEERROR, "dereferencing null pointer")
 MSG_DEF(JSMSG_WASM_BAD_RANGE ,         2, JSEXN_RANGEERR,    "bad {0} {1}")
 MSG_DEF(JSMSG_WASM_BAD_GROW,           1, JSEXN_RANGEERR,    "failed to grow {0}")
-MSG_DEF(JSMSG_WASM_BAD_TBL_GROW_INIT,  1, JSEXN_TYPEERR,     "bad initializer to {0} table")
 MSG_DEF(JSMSG_WASM_TABLE_OUT_OF_BOUNDS, 0, JSEXN_RANGEERR,   "table index out of bounds")
 MSG_DEF(JSMSG_WASM_BAD_UINT32,         2, JSEXN_TYPEERR,     "bad {0} {1}")
 MSG_DEF(JSMSG_WASM_BAD_BUF_ARG,        0, JSEXN_TYPEERR,     "first argument must be an ArrayBuffer or typed array object")
 MSG_DEF(JSMSG_WASM_BAD_MOD_ARG,        0, JSEXN_TYPEERR,     "first argument must be a WebAssembly.Module")
 MSG_DEF(JSMSG_WASM_BAD_BUF_MOD_ARG,    0, JSEXN_TYPEERR,     "first argument must be a WebAssembly.Module, ArrayBuffer or typed array object")
 MSG_DEF(JSMSG_WASM_BAD_DESC_ARG,       1, JSEXN_TYPEERR,     "first argument must be a {0} descriptor")
 MSG_DEF(JSMSG_WASM_BAD_ELEMENT,        0, JSEXN_TYPEERR,     "\"element\" property of table descriptor must be \"funcref\"")
 MSG_DEF(JSMSG_WASM_BAD_ELEMENT_GENERALIZED, 0, JSEXN_TYPEERR, "\"element\" property of table descriptor must be \"funcref\" or \"anyref\"")
 MSG_DEF(JSMSG_WASM_BAD_IMPORT_ARG,     0, JSEXN_TYPEERR,     "second argument must be an object")
 MSG_DEF(JSMSG_WASM_BAD_IMPORT_FIELD,   1, JSEXN_TYPEERR,     "import object field '{0}' is not an Object")
-MSG_DEF(JSMSG_WASM_BAD_TABLE_VALUE,    0, JSEXN_TYPEERR,     "can only assign WebAssembly exported functions to Table")
+MSG_DEF(JSMSG_WASM_BAD_FUNCREF_VALUE,  0, JSEXN_TYPEERR,     "can only pass WebAssembly exported functions to funcref")
 MSG_DEF(JSMSG_WASM_BAD_I64_TYPE,       0, JSEXN_TYPEERR,     "cannot pass i64 to or from JS")
 MSG_DEF(JSMSG_WASM_BAD_GLOBAL_TYPE,    0, JSEXN_TYPEERR,     "bad type for a WebAssembly.Global")
 MSG_DEF(JSMSG_WASM_NO_TRANSFER,        0, JSEXN_TYPEERR,     "cannot transfer WebAssembly/asm.js ArrayBuffer")
 MSG_DEF(JSMSG_WASM_TEXT_FAIL,          1, JSEXN_SYNTAXERR,   "wasm text error: {0}")
 MSG_DEF(JSMSG_WASM_MISSING_MAXIMUM,    0, JSEXN_TYPEERR,     "'shared' is true but maximum is not specified")
 MSG_DEF(JSMSG_WASM_GLOBAL_IMMUTABLE,   0, JSEXN_TYPEERR,     "can't set value of immutable global")
 
 // Proxy
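The renamed JSMSG_WASM_BAD_FUNCREF_VALUE lets Table.prototype.set/grow, funcref globals, and funcref parameters all share one wording. A representative C++ call site raising it (illustrative, not part of this patch):

JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                         JSMSG_WASM_BAD_FUNCREF_VALUE);
return false;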
--- a/js/src/wasm/AsmJS.cpp
+++ b/js/src/wasm/AsmJS.cpp
@@ -2014,17 +2014,17 @@ class MOZ_STACK_CLASS JS_HAZ_ROOTED Modu
     }
 
     MOZ_ASSERT(sigIndex >= env_.asmJSSigToTableIndex.length());
     if (!env_.asmJSSigToTableIndex.resize(sigIndex + 1)) {
       return false;
     }
 
     env_.asmJSSigToTableIndex[sigIndex] = env_.tables.length();
-    if (!env_.tables.emplaceBack(TableKind::TypedFunction, Limits(mask + 1))) {
+    if (!env_.tables.emplaceBack(TableKind::AsmJS, Limits(mask + 1))) {
       return false;
     }
 
     Global* global = validationLifo_.new_<Global>(Global::Table);
     if (!global) {
       return false;
     }
 
@@ -6557,17 +6557,18 @@ static bool ValidateGlobalVariable(JSCon
           if (!ToNumber(cx, v, &d)) {
             return false;
           }
           val->emplace(d);
           return true;
         }
         case ValType::Ref:
         case ValType::NullRef:
-        case ValType::AnyRef: {
+        case ValType::AnyRef:
+        case ValType::FuncRef: {
           MOZ_CRASH("not available in asm.js");
         }
       }
     }
   }
 
   MOZ_CRASH("unreachable");
 }
--- a/js/src/wasm/WasmAST.h
+++ b/js/src/wasm/WasmAST.h
@@ -122,19 +122,21 @@ class AstValType {
       which_ = IsValType;
       type_ = ValType(ValType::Ref, ref.index());
     } else {
       which_ = IsAstRef;
       ref_ = ref;
     }
   }
 
-  bool isRefType() const {
+#ifdef ENABLE_WASM_GC
+  bool isNarrowType() const {
     return code() == ValType::AnyRef || code() == ValType::Ref;
   }
+#endif
 
   bool isValid() const { return !(which_ == IsValType && !type_.isValid()); }
 
   bool isResolved() const { return which_ == IsValType; }
 
   AstRef& asRef() { return ref_; }
 
   void resolve() {
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -1051,16 +1051,17 @@ void BaseLocalIter::settle() {
   MOZ_ASSERT(argsIter_.done());
   if (index_ < locals_.length()) {
     switch (locals_[index_].code()) {
       case ValType::I32:
       case ValType::I64:
       case ValType::F32:
       case ValType::F64:
       case ValType::Ref:
+      case ValType::FuncRef:
       case ValType::AnyRef:
         // TODO/AnyRef-boxing: With boxed immediates and strings, the
         // debugger must be made aware that AnyRef != Pointer.
         ASSERT_ANYREF_IS_JSOBJECT;
         mirType_ = ToMIRType(locals_[index_]);
         frameOffset_ = pushLocal(MIRTypeToSize(mirType_));
         break;
       case ValType::NullRef:
@@ -2778,16 +2779,17 @@ class BaseCompiler final : public BaseCo
   void maybeReserveJoinRegI(ExprType type) {
     switch (type.code()) {
       case ExprType::I32:
         needI32(joinRegI32_);
         break;
       case ExprType::I64:
         needI64(joinRegI64_);
         break;
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
       case ExprType::NullRef:
       case ExprType::Ref:
         needRef(joinRegPtr_);
         break;
       default:;
     }
   }
@@ -2795,16 +2797,17 @@ class BaseCompiler final : public BaseCo
   void maybeUnreserveJoinRegI(ExprType type) {
     switch (type.code()) {
       case ExprType::I32:
         freeI32(joinRegI32_);
         break;
       case ExprType::I64:
         freeI64(joinRegI64_);
         break;
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
       case ExprType::NullRef:
       case ExprType::Ref:
         freeRef(joinRegPtr_);
         break;
       default:;
     }
   }
@@ -2820,16 +2823,17 @@ class BaseCompiler final : public BaseCo
       case ExprType::F32:
         needF32(joinRegF32_);
         break;
       case ExprType::F64:
         needF64(joinRegF64_);
         break;
       case ExprType::Ref:
       case ExprType::NullRef:
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
         needRef(joinRegPtr_);
         break;
       default:
         break;
     }
   }
 
@@ -2844,16 +2848,17 @@ class BaseCompiler final : public BaseCo
       case ExprType::F32:
         freeF32(joinRegF32_);
         break;
       case ExprType::F64:
         freeF64(joinRegF64_);
         break;
       case ExprType::Ref:
       case ExprType::NullRef:
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
         freeRef(joinRegPtr_);
         break;
       default:
         break;
     }
   }
 
@@ -3773,16 +3778,17 @@ class BaseCompiler final : public BaseCo
       case ExprType::F32: {
         DebugOnly<Stk::Kind> k(stk_.back().kind());
         MOZ_ASSERT(k == Stk::RegisterF32 || k == Stk::ConstF32 ||
                    k == Stk::MemF32 || k == Stk::LocalF32);
         return Some(AnyReg(popF32(joinRegF32_)));
       }
       case ExprType::Ref:
       case ExprType::NullRef:
+      case ExprType::FuncRef:
       case ExprType::AnyRef: {
         DebugOnly<Stk::Kind> k(stk_.back().kind());
         MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef ||
                    k == Stk::MemRef || k == Stk::LocalRef);
         return Some(AnyReg(popRef(joinRegPtr_)));
       }
       default: {
         MOZ_CRASH("Compiler bug: unexpected expression type");
@@ -3811,16 +3817,17 @@ class BaseCompiler final : public BaseCo
         needF32(joinRegF32_);
         return Some(AnyReg(joinRegF32_));
       case ExprType::F64:
         MOZ_ASSERT(isAvailableF64(joinRegF64_));
         needF64(joinRegF64_);
         return Some(AnyReg(joinRegF64_));
       case ExprType::Ref:
       case ExprType::NullRef:
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
         MOZ_ASSERT(isAvailableRef(joinRegPtr_));
         needRef(joinRegPtr_);
         return Some(AnyReg(joinRegPtr_));
       case ExprType::Void:
         return Nothing();
       default:
         MOZ_CRASH("Compiler bug: unexpected type");
@@ -4234,16 +4241,17 @@ class BaseCompiler final : public BaseCo
         break;
       case ExprType::F64:
         masm.storeDouble(RegF64(ReturnDoubleReg), resultsAddress);
         break;
       case ExprType::F32:
         masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
         break;
       case ExprType::Ref:
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
         masm.storePtr(RegPtr(ReturnReg), resultsAddress);
         break;
       case ExprType::NullRef:
       default:
         MOZ_CRASH("Function return type");
     }
   }
@@ -4264,16 +4272,17 @@ class BaseCompiler final : public BaseCo
         break;
       case ExprType::F64:
         masm.loadDouble(resultsAddress, RegF64(ReturnDoubleReg));
         break;
       case ExprType::F32:
         masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
         break;
       case ExprType::Ref:
+      case ExprType::FuncRef:
       case ExprType::AnyRef:
         masm.loadPtr(resultsAddress, RegPtr(ReturnReg));
         break;
       case ExprType::NullRef:
       default:
         MOZ_CRASH("Function return type");
     }
   }
@@ -4576,16 +4585,17 @@ class BaseCompiler final : public BaseCo
           }
 #endif
           case ABIArg::Uninitialized:
             MOZ_CRASH("Uninitialized ABIArg kind");
         }
         break;
       }
       case ValType::Ref:
+      case ValType::FuncRef:
       case ValType::AnyRef: {
         ABIArg argLoc = call->abi.next(MIRType::RefOrNull);
         if (argLoc.kind() == ABIArg::Stack) {
           ScratchPtr scratch(*this);
           loadRef(arg, scratch);
           masm.storePtr(scratch, Address(masm.getStackPointer(),
                                          argLoc.offsetFromArgBase()));
         } else {
@@ -4633,24 +4643,25 @@ class BaseCompiler final : public BaseCo
     CalleeDesc callee = CalleeDesc::import(globalDataOffset);
     return masm.wasmCallImport(desc, callee);
   }
 
   CodeOffset builtinCall(SymbolicAddress builtin, const FunctionCall& call) {
     return callSymbolic(builtin, call);
   }
 
-  CodeOffset builtinInstanceMethodCall(SymbolicAddress builtin,
+  CodeOffset builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
                                        const ABIArg& instanceArg,
                                        const FunctionCall& call) {
     // Builtin method calls assume the TLS register has been set.
     masm.loadWasmTlsRegFromFrame();
 
     CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic);
-    return masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin);
+    return masm.wasmCallBuiltinInstanceMethod(
+        desc, instanceArg, builtin.identity, builtin.failureMode);
   }
 
   //////////////////////////////////////////////////////////////////////
   //
   // Sundry low-level code generators.
 
   // The compiler depends on moveImm32() clearing the high bits of a 64-bit
   // register on 64-bit systems except MIPS64 where high bits are sign extended
@@ -8548,16 +8559,17 @@ void BaseCompiler::doReturn(ExprType typ
     case ExprType::F32: {
       RegF32 rv = popF32(RegF32(ReturnFloat32Reg));
       returnCleanup(popStack);
       freeF32(rv);
       break;
     }
     case ExprType::Ref:
     case ExprType::NullRef:
+    case ExprType::FuncRef:
     case ExprType::AnyRef: {
       RegPtr rv = popRef(RegPtr(ReturnReg));
       returnCleanup(popStack);
       freeRef(rv);
       break;
     }
     default: {
       MOZ_CRASH("Function return type");
@@ -8990,16 +9002,17 @@ bool BaseCompiler::emitGetLocal() {
       break;
     case ValType::F64:
       pushLocalF64(slot);
       break;
     case ValType::F32:
       pushLocalF32(slot);
       break;
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef:
       pushLocalRef(slot);
       break;
     case ValType::NullRef:
     default:
       MOZ_CRASH("Local variable type");
   }
 
@@ -9054,16 +9067,17 @@ bool BaseCompiler::emitSetOrTeeLocal(uin
       if (isSetLocal) {
         freeF32(rv);
       } else {
         pushF32(rv);
       }
       break;
     }
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       RegPtr rv = popRef();
       syncLocal(slot);
       fr.storeLocalPtr(rv, localFromSlot(slot, MIRType::RefOrNull));
       if (isSetLocal) {
         freeRef(rv);
       } else {
         pushRef(rv);
@@ -9119,22 +9133,22 @@ bool BaseCompiler::emitGetGlobal() {
         break;
       case ValType::F32:
         pushF32(value.f32());
         break;
       case ValType::F64:
         pushF64(value.f64());
         break;
       case ValType::Ref:
+      case ValType::FuncRef:
+      case ValType::AnyRef:
+        pushRef(intptr_t(value.ref().forCompiledCode()));
+        break;
       case ValType::NullRef:
-        pushRef(intptr_t(value.ref()));
-        break;
-      case ValType::AnyRef:
-        pushRef(intptr_t(value.anyref().forCompiledCode()));
-        break;
+        MOZ_CRASH("NullRef not expressible");
       default:
         MOZ_CRASH("Global constant type");
     }
     return true;
   }
 
   switch (global.type().code()) {
     case ValType::I32: {
@@ -9161,16 +9175,17 @@ bool BaseCompiler::emitGetGlobal() {
     case ValType::F64: {
       RegF64 rv = needF64();
       ScratchI32 tmp(*this);
       masm.loadDouble(addressOfGlobalVar(global, tmp), rv);
       pushF64(rv);
       break;
     }
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       RegPtr rv = needRef();
       ScratchI32 tmp(*this);
       masm.loadPtr(addressOfGlobalVar(global, tmp), rv);
       pushRef(rv);
       break;
     }
     case ValType::NullRef:
@@ -9220,16 +9235,17 @@ bool BaseCompiler::emitSetGlobal() {
     case ValType::F64: {
       RegF64 rv = popF64();
       ScratchI32 tmp(*this);
       masm.storeDouble(rv, addressOfGlobalVar(global, tmp));
       freeF64(rv);
       break;
     }
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       RegPtr valueAddr(PreBarrierReg);
       needRef(valueAddr);
       {
         ScratchI32 tmp(*this);
         masm.computeEffectiveAddress(addressOfGlobalVar(global, tmp),
                                      valueAddr);
       }
@@ -9627,16 +9643,17 @@ bool BaseCompiler::emitSelect() {
       moveF64(rs, r);
       masm.bind(&done);
       freeF64(rs);
       pushF64(r);
       break;
     }
     case ValType::Ref:
     case ValType::NullRef:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       RegPtr r, rs;
       pop2xRef(&r, &rs);
       emitBranchPerform(&b);
       moveRef(rs, r);
       masm.bind(&done);
       freeRef(rs);
       pushRef(r);
@@ -9780,17 +9797,17 @@ bool BaseCompiler::emitInstanceCall(uint
         t = sizeof(void*) == 4 ? ValType::I32 : ValType::I64;
         break;
       default:
         MOZ_CRASH("Unexpected type");
     }
     passArg(t, peek(numNonInstanceArgs - i), &baselineCall);
   }
   CodeOffset raOffset =
-      builtinInstanceMethodCall(builtin.identity, instanceArg, baselineCall);
+      builtinInstanceMethodCall(builtin, instanceArg, baselineCall);
   if (!createStackMap("emitInstanceCall", raOffset)) {
     return false;
   }
 
   endCall(baselineCall, stackSpace);
 
   popValueStackBy(numNonInstanceArgs);
 
@@ -10142,64 +10159,48 @@ bool BaseCompiler::emitWait(ValType type
   if (!iter_.readWait(&addr, type, byteSize, &nothing, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // Returns -1 on trap, otherwise nonnegative result.
   switch (type.code()) {
     case ValType::I32:
       if (!emitInstanceCall(lineOrBytecode, SASigWaitI32)) {
         return false;
       }
       break;
     case ValType::I64:
       if (!emitInstanceCall(lineOrBytecode, SASigWaitI64)) {
         return false;
       }
       break;
     default:
       MOZ_CRASH();
   }
 
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
   return true;
 }
 
 bool BaseCompiler::emitWake() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   Nothing nothing;
   LinearMemoryAddress<Nothing> addr;
   if (!iter_.readWake(&addr, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // Returns -1 on trap, otherwise nonnegative result.
-  if (!emitInstanceCall(lineOrBytecode, SASigWake)) {
-    return false;
-  }
-
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
-  return true;
+  return emitInstanceCall(lineOrBytecode, SASigWake);
 }
 
 #ifdef ENABLE_WASM_BULKMEM_OPS
 bool BaseCompiler::emitMemOrTableCopy(bool isMem) {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   uint32_t dstMemOrTableIndex = 0;
   uint32_t srcMemOrTableIndex = 0;
@@ -10208,95 +10209,69 @@ bool BaseCompiler::emitMemOrTableCopy(bo
                                 &srcMemOrTableIndex, &nothing, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // Returns -1 on trap, otherwise 0.
   if (isMem) {
     MOZ_ASSERT(srcMemOrTableIndex == 0);
     MOZ_ASSERT(dstMemOrTableIndex == 0);
     if (!emitInstanceCall(lineOrBytecode, SASigMemCopy,
                           /*pushReturnedValue=*/false)) {
       return false;
     }
   } else {
     pushI32(dstMemOrTableIndex);
     pushI32(srcMemOrTableIndex);
     if (!emitInstanceCall(lineOrBytecode, SASigTableCopy,
                           /*pushReturnedValue=*/false)) {
       return false;
     }
   }
 
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
   return true;
 }
 
 bool BaseCompiler::emitDataOrElemDrop(bool isData) {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   uint32_t segIndex = 0;
   if (!iter_.readDataOrElemDrop(isData, &segIndex)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
   // Despite the cast to int32_t, the callee regards the value as unsigned.
-  //
-  // Returns -1 on trap, otherwise 0.
   pushI32(int32_t(segIndex));
-  const SymbolicAddressSignature& callee =
-      isData ? SASigDataDrop : SASigElemDrop;
-  if (!emitInstanceCall(lineOrBytecode, callee, /*pushReturnedValue=*/false)) {
-    return false;
-  }
-
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
-  return true;
+
+  return emitInstanceCall(lineOrBytecode,
+                          isData ? SASigDataDrop : SASigElemDrop,
+                          /*pushReturnedValue=*/false);
 }
 
 bool BaseCompiler::emitMemFill() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   Nothing nothing;
   if (!iter_.readMemFill(&nothing, &nothing, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // Returns -1 on trap, otherwise 0.
-  if (!emitInstanceCall(lineOrBytecode, SASigMemFill,
-                        /*pushReturnedValue=*/false)) {
-    return false;
-  }
-
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
-  return true;
+  return emitInstanceCall(lineOrBytecode, SASigMemFill,
+                          /*pushReturnedValue=*/false);
 }
 
 bool BaseCompiler::emitMemOrTableInit(bool isMem) {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   uint32_t segIndex = 0;
   uint32_t dstTableIndex = 0;
   Nothing nothing;
@@ -10304,36 +10279,30 @@ bool BaseCompiler::emitMemOrTableInit(bo
                                 &nothing, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
-  // Returns -1 on trap, otherwise 0.
   pushI32(int32_t(segIndex));
   if (isMem) {
     if (!emitInstanceCall(lineOrBytecode, SASigMemInit,
                           /*pushReturnedValue=*/false)) {
       return false;
     }
   } else {
     pushI32(dstTableIndex);
     if (!emitInstanceCall(lineOrBytecode, SASigTableInit,
                           /*pushReturnedValue=*/false)) {
       return false;
     }
   }
 
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
   return true;
 }
 #endif
 
 MOZ_MUST_USE
 bool BaseCompiler::emitTableFill() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
@@ -10343,58 +10312,38 @@ bool BaseCompiler::emitTableFill() {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
   // fill(start:u32, val:ref, len:u32, table:u32) -> u32
-  //
-  // Returns -1 on trap, otherwise 0.
   pushI32(tableIndex);
-  if (!emitInstanceCall(lineOrBytecode, SASigTableFill,
-                        /*pushReturnedValue=*/false)) {
-    return false;
-  }
-
-  Label ok;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
-  return true;
+  return emitInstanceCall(lineOrBytecode, SASigTableFill,
+                          /*pushReturnedValue=*/false);
 }
 
 MOZ_MUST_USE
 bool BaseCompiler::emitTableGet() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
   Nothing index;
   uint32_t tableIndex;
   if (!iter_.readTableGet(&tableIndex, &index)) {
     return false;
   }
   if (deadCode_) {
     return true;
   }
-  // get(index:u32, table:u32) -> void*
-  //
-  // Returns nullptr for error, otherwise a pointer to a nonmoveable memory
-  // location that holds the anyref value.
+  // get(index:u32, table:u32) -> uintptr_t(AnyRef)
   pushI32(tableIndex);
   if (!emitInstanceCall(lineOrBytecode, SASigTableGet,
                         /*pushReturnedValue=*/false)) {
     return false;
   }
-  Label noTrap;
-  masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
-  trap(Trap::ThrowReported);
-  masm.bind(&noTrap);
-
-  masm.loadPtr(Address(ReturnReg, 0), ReturnReg);
 
   // Push the resulting anyref back on the eval stack.  NOTE: needRef() must
   // not kill the value in the register.
   RegPtr r = RegPtr(ReturnReg);
   needRef(r);
   pushRef(r);
 
   return true;
@@ -10408,61 +10357,48 @@ bool BaseCompiler::emitTableGrow() {
   uint32_t tableIndex;
   if (!iter_.readTableGrow(&tableIndex, &initValue, &delta)) {
     return false;
   }
   if (deadCode_) {
     return true;
   }
   // grow(initValue:anyref, delta:u32, table:u32) -> u32
-  //
-  // infallible.
   pushI32(tableIndex);
   return emitInstanceCall(lineOrBytecode, SASigTableGrow);
 }
 
 MOZ_MUST_USE
 bool BaseCompiler::emitTableSet() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
   Nothing index, value;
   uint32_t tableIndex;
   if (!iter_.readTableSet(&tableIndex, &index, &value)) {
     return false;
   }
   if (deadCode_) {
     return true;
   }
   // set(index:u32, value:ref, table:u32) -> i32
-  //
-  // Returns -1 on range error, otherwise 0 (which is then ignored).
   pushI32(tableIndex);
-  if (!emitInstanceCall(lineOrBytecode, SASigTableSet,
-                        /*pushReturnedValue=*/false)) {
-    return false;
-  }
-  Label noTrap;
-  masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
-  trap(Trap::ThrowReported);
-  masm.bind(&noTrap);
-  return true;
+  return emitInstanceCall(lineOrBytecode, SASigTableSet,
+                          /*pushReturnedValue=*/false);
 }
 
 MOZ_MUST_USE
 bool BaseCompiler::emitTableSize() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
   uint32_t tableIndex;
   if (!iter_.readTableSize(&tableIndex)) {
     return false;
   }
   if (deadCode_) {
     return true;
   }
   // size(table:u32) -> u32
-  //
-  // infallible.
   pushI32(tableIndex);
   return emitInstanceCall(lineOrBytecode, SASigTableSize);
 }
 
 bool BaseCompiler::emitStructNew() {
   uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
 
   uint32_t typeIndex;
@@ -10482,23 +10418,16 @@ bool BaseCompiler::emitStructNew() {
 
   const StructType& structType = env_.types[typeIndex].structType();
 
   pushI32(structType.moduleIndex_);
   if (!emitInstanceCall(lineOrBytecode, SASigStructNew)) {
     return false;
   }
 
-  // Null pointer check.
-
-  Label ok;
-  masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &ok);
-  trap(Trap::ThrowReported);
-  masm.bind(&ok);
-
   // As many arguments as there are fields.
 
   MOZ_ASSERT(args.length() == structType.fields_.length());
 
   // Optimization opportunity: Iterate backward to pop arguments off the
   // stack.  This will generate more instructions than we want, since we
   // really only need to pop the stack once at the end, not for every element,
   // but to do better we need a bit more machinery to load elements off the
@@ -10540,16 +10469,17 @@ bool BaseCompiler::emitStructNew() {
       }
       case ValType::F64: {
         RegF64 r = popF64();
         masm.storeDouble(r, Address(rdata, offs));
         freeF64(r);
         break;
       }
       case ValType::Ref:
+      case ValType::FuncRef:
       case ValType::AnyRef: {
         RegPtr value = popRef();
         masm.storePtr(value, Address(rdata, offs));
 
         // A write barrier is needed here for the extremely unlikely case
         // that the object is allocated in the tenured area - a result of
         // a GC artifact.
 
@@ -10657,16 +10587,17 @@ bool BaseCompiler::emitStructGet() {
     }
     case ValType::F64: {
       RegF64 r = needF64();
       masm.loadDouble(Address(rp, offs), r);
       pushF64(r);
       break;
     }
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       RegPtr r = needRef();
       masm.loadPtr(Address(rp, offs), r);
       pushRef(r);
       break;
     }
     case ValType::NullRef: {
       MOZ_CRASH("NullRef not expressible");
@@ -10718,16 +10649,17 @@ bool BaseCompiler::emitStructSet() {
       break;
     case ValType::F32:
       rf = popF32();
       break;
     case ValType::F64:
       rd = popF64();
       break;
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef:
       rr = popRef();
       break;
     case ValType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     default:
       MOZ_CRASH("Unexpected field type");
   }
@@ -10761,16 +10693,17 @@ bool BaseCompiler::emitStructSet() {
       break;
     }
     case ValType::F64: {
       masm.storeDouble(rd, Address(rp, offs));
       freeF64(rd);
       break;
     }
     case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       masm.computeEffectiveAddress(Address(rp, offs), valueAddr);
       // emitBarrieredStore consumes valueAddr
       if (!emitBarrieredStore(Some(rp), valueAddr, rr,
                               structType.fields_[fieldIndex].type)) {
         return false;
       }
       freeRef(rr);
@@ -10797,31 +10730,33 @@ bool BaseCompiler::emitStructNarrow() {
   if (!iter_.readStructNarrow(&inputType, &outputType, &nothing)) {
     return false;
   }
 
   if (deadCode_) {
     return true;
   }
 
+  // FuncRef is currently not supported by struct.narrow validation.
+  MOZ_ASSERT(inputType != ValType::FuncRef);
+  MOZ_ASSERT(outputType != ValType::FuncRef);
+
   // AnyRef -> AnyRef is a no-op, just leave the value on the stack.
 
   if (inputType == ValType::AnyRef && outputType == ValType::AnyRef) {
     return true;
   }
 
   RegPtr rp = popRef();
 
   // AnyRef -> (ref T) must first unbox; leaves rp or null
 
   bool mustUnboxAnyref = inputType == ValType::AnyRef;
 
   // Dynamic downcast (ref T) -> (ref U), leaves rp or null
-  //
-  // Infallible.
   const StructType& outputStruct =
       env_.types[outputType.refTypeIndex()].structType();
 
   pushI32(mustUnboxAnyref);
   pushI32(outputStruct.moduleIndex_);
   pushRef(rp);
   return emitInstanceCall(lineOrBytecode, SASigStructNarrow);
 }
--- a/js/src/wasm/WasmBuiltins.cpp
+++ b/js/src/wasm/WasmBuiltins.cpp
@@ -51,116 +51,163 @@ static const unsigned BUILTIN_THUNK_LIFO
 #define _F64 MIRType::Double
 #define _F32 MIRType::Float32
 #define _I32 MIRType::Int32
 #define _I64 MIRType::Int64
 #define _PTR MIRType::Pointer
 #define _RoN MIRType::RefOrNull
 #define _VOID MIRType::None
 #define _END MIRType::None
+#define _Infallible FailureMode::Infallible
+#define _FailOnNegI32 FailureMode::FailOnNegI32
+#define _FailOnNullPtr FailureMode::FailOnNullPtr
+#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
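+
+// The failure mode records how a builtin signals failure to the builtin
+// thunk: by returning a negative i32, a null pointer, or the invalid AnyRef
+// encoding.  Infallible builtins need no check.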
 
 namespace js {
 namespace wasm {
 
 const SymbolicAddressSignature SASigSinD = {
-    SymbolicAddress::SinD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigCosD = {
-    SymbolicAddress::CosD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigTanD = {
-    SymbolicAddress::TanD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigASinD = {
-    SymbolicAddress::ASinD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigACosD = {
-    SymbolicAddress::ACosD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigATanD = {
-    SymbolicAddress::ATanD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigCeilD = {
-    SymbolicAddress::CeilD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigCeilF = {
-    SymbolicAddress::CeilF, _F32, 1, {_F32, _END}};
+    SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
 const SymbolicAddressSignature SASigFloorD = {
-    SymbolicAddress::FloorD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigFloorF = {
-    SymbolicAddress::FloorF, _F32, 1, {_F32, _END}};
+    SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
 const SymbolicAddressSignature SASigTruncD = {
-    SymbolicAddress::TruncD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigTruncF = {
-    SymbolicAddress::TruncF, _F32, 1, {_F32, _END}};
+    SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
 const SymbolicAddressSignature SASigNearbyIntD = {
-    SymbolicAddress::NearbyIntD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigNearbyIntF = {
-    SymbolicAddress::NearbyIntF, _F32, 1, {_F32, _END}};
+    SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
 const SymbolicAddressSignature SASigExpD = {
-    SymbolicAddress::ExpD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigLogD = {
-    SymbolicAddress::LogD, _F64, 1, {_F64, _END}};
+    SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
 const SymbolicAddressSignature SASigPowD = {
-    SymbolicAddress::PowD, _F64, 2, {_F64, _F64, _END}};
+    SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
 const SymbolicAddressSignature SASigATan2D = {
-    SymbolicAddress::ATan2D, _F64, 2, {_F64, _F64, _END}};
+    SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};
 const SymbolicAddressSignature SASigMemoryGrow = {
-    SymbolicAddress::MemoryGrow, _I32, 2, {_PTR, _I32, _END}};
+    SymbolicAddress::MemoryGrow, _I32, _Infallible, 2, {_PTR, _I32, _END}};
 const SymbolicAddressSignature SASigMemorySize = {
-    SymbolicAddress::MemorySize, _I32, 1, {_PTR, _END}};
-const SymbolicAddressSignature SASigWaitI32 = {
-    SymbolicAddress::WaitI32, _I32, 4, {_PTR, _I32, _I32, _I64, _END}};
-const SymbolicAddressSignature SASigWaitI64 = {
-    SymbolicAddress::WaitI64, _I32, 4, {_PTR, _I32, _I64, _I64, _END}};
+    SymbolicAddress::MemorySize, _I32, _Infallible, 1, {_PTR, _END}};
+const SymbolicAddressSignature SASigWaitI32 = {SymbolicAddress::WaitI32,
+                                               _I32,
+                                               _FailOnNegI32,
+                                               4,
+                                               {_PTR, _I32, _I32, _I64, _END}};
+const SymbolicAddressSignature SASigWaitI64 = {SymbolicAddress::WaitI64,
+                                               _I32,
+                                               _FailOnNegI32,
+                                               4,
+                                               {_PTR, _I32, _I64, _I64, _END}};
 const SymbolicAddressSignature SASigWake = {
-    SymbolicAddress::Wake, _I32, 3, {_PTR, _I32, _I32, _END}};
-const SymbolicAddressSignature SASigMemCopy = {
-    SymbolicAddress::MemCopy, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
+    SymbolicAddress::Wake, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
+const SymbolicAddressSignature SASigMemCopy = {SymbolicAddress::MemCopy,
+                                               _VOID,
+                                               _FailOnNegI32,
+                                               4,
+                                               {_PTR, _I32, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigDataDrop = {
-    SymbolicAddress::DataDrop, _I32, 2, {_PTR, _I32, _END}};
-const SymbolicAddressSignature SASigMemFill = {
-    SymbolicAddress::MemFill, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
+    SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
+const SymbolicAddressSignature SASigMemFill = {SymbolicAddress::MemFill,
+                                               _VOID,
+                                               _FailOnNegI32,
+                                               4,
+                                               {_PTR, _I32, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigMemInit = {
-    SymbolicAddress::MemInit, _I32, 5, {_PTR, _I32, _I32, _I32, _I32, _END}};
+    SymbolicAddress::MemInit,
+    _VOID,
+    _FailOnNegI32,
+    5,
+    {_PTR, _I32, _I32, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigTableCopy = {
     SymbolicAddress::TableCopy,
-    _I32,
+    _VOID,
+    _FailOnNegI32,
     6,
     {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigElemDrop = {
-    SymbolicAddress::ElemDrop, _I32, 2, {_PTR, _I32, _END}};
+    SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
 const SymbolicAddressSignature SASigTableFill = {
-    SymbolicAddress::TableFill, _I32, 5, {_PTR, _I32, _RoN, _I32, _I32, _END}};
-const SymbolicAddressSignature SASigTableGet = {
-    SymbolicAddress::TableGet, _PTR, 3, {_PTR, _I32, _I32, _END}};
+    SymbolicAddress::TableFill,
+    _VOID,
+    _FailOnNegI32,
+    5,
+    {_PTR, _I32, _RoN, _I32, _I32, _END}};
+const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet,
+                                                _RoN,
+                                                _FailOnInvalidRef,
+                                                3,
+                                                {_PTR, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigTableGrow = {
-    SymbolicAddress::TableGrow, _I32, 4, {_PTR, _RoN, _I32, _I32, _END}};
+    SymbolicAddress::TableGrow,
+    _I32,
+    _Infallible,
+    4,
+    {_PTR, _RoN, _I32, _I32, _END}};
 const SymbolicAddressSignature SASigTableInit = {
     SymbolicAddress::TableInit,
-    _I32,
+    _VOID,
+    _FailOnNegI32,
     6,
     {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
-const SymbolicAddressSignature SASigTableSet = {
-    SymbolicAddress::TableSet, _I32, 4, {_PTR, _I32, _RoN, _I32, _END}};
+const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet,
+                                                _VOID,
+                                                _FailOnNegI32,
+                                                4,
+                                                {_PTR, _I32, _RoN, _I32, _END}};
 const SymbolicAddressSignature SASigTableSize = {
-    SymbolicAddress::TableSize, _I32, 2, {_PTR, _I32, _END}};
+    SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
 const SymbolicAddressSignature SASigPostBarrier = {
-    SymbolicAddress::PostBarrier, _VOID, 2, {_PTR, _PTR, _END}};
+    SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
 const SymbolicAddressSignature SASigPostBarrierFiltering = {
-    SymbolicAddress::PostBarrierFiltering, _VOID, 2, {_PTR, _PTR, _END}};
+    SymbolicAddress::PostBarrierFiltering,
+    _VOID,
+    _Infallible,
+    2,
+    {_PTR, _PTR, _END}};
 const SymbolicAddressSignature SASigStructNew = {
-    SymbolicAddress::StructNew, _RoN, 2, {_PTR, _I32, _END}};
+    SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _I32, _END}};
 const SymbolicAddressSignature SASigStructNarrow = {
-    SymbolicAddress::StructNarrow, _RoN, 4, {_PTR, _I32, _I32, _RoN, _END}};
+    SymbolicAddress::StructNarrow,
+    _RoN,
+    _Infallible,
+    4,
+    {_PTR, _I32, _I32, _RoN, _END}};
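+
+// A minimal sketch (editorial; `Failed` is a hypothetical helper, not the
+// actual thunk code) of how a returned value can be tested against its
+// failure mode:
+//
+//   static bool Failed(FailureMode mode, uintptr_t result) {
+//     switch (mode) {
+//       case FailureMode::Infallible:
+//         return false;
+//       case FailureMode::FailOnNegI32:
+//         return int32_t(result) < 0;
+//       case FailureMode::FailOnNullPtr:
+//         return result == 0;
+//       case FailureMode::FailOnInvalidRef:
+//         return result == uintptr_t(AnyRef::invalid().forCompiledCode());
+//     }
+//     MOZ_CRASH("bad FailureMode");
+//   }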
 
 }  // namespace wasm
 }  // namespace js
 
 #undef _F64
 #undef _F32
 #undef _I32
 #undef _I64
 #undef _PTR
 #undef _RoN
 #undef _VOID
 #undef _END
+#undef _Infallible
+#undef _FailOnNegI32
+#undef _FailOnNullPtr
+#undef _FailOnInvalidRef
 
 // ============================================================================
 // WebAssembly builtin C++ functions called from wasm code to implement internal
 // wasm operations: implementations.
 
 #if defined(JS_CODEGEN_ARM)
 extern "C" {
 
@@ -614,16 +661,19 @@ void* wasm::AddressOf(SymbolicAddress im
       *abiType = Args_General4;
       return FuncCast(Instance::callImport_i32, *abiType);
     case SymbolicAddress::CallImport_I64:
       *abiType = Args_General4;
       return FuncCast(Instance::callImport_i64, *abiType);
     case SymbolicAddress::CallImport_F64:
       *abiType = Args_General4;
       return FuncCast(Instance::callImport_f64, *abiType);
+    case SymbolicAddress::CallImport_FuncRef:
+      *abiType = Args_General4;
+      return FuncCast(Instance::callImport_funcref, *abiType);
     case SymbolicAddress::CallImport_AnyRef:
       *abiType = Args_General4;
       return FuncCast(Instance::callImport_anyref, *abiType);
     case SymbolicAddress::CoerceInPlace_ToInt32:
       *abiType = Args_General1;
       return FuncCast(CoerceInPlace_ToInt32, *abiType);
     case SymbolicAddress::CoerceInPlace_ToNumber:
       *abiType = Args_General1;
@@ -832,16 +882,17 @@ bool wasm::NeedsBuiltinThunk(SymbolicAdd
   switch (sym) {
     case SymbolicAddress::HandleDebugTrap:  // GenerateDebugTrapStub
     case SymbolicAddress::HandleThrow:      // GenerateThrowStub
     case SymbolicAddress::HandleTrap:       // GenerateTrapExit
     case SymbolicAddress::CallImport_Void:  // GenerateImportInterpExit
     case SymbolicAddress::CallImport_I32:
     case SymbolicAddress::CallImport_I64:
     case SymbolicAddress::CallImport_F64:
+    case SymbolicAddress::CallImport_FuncRef:
     case SymbolicAddress::CallImport_AnyRef:
     case SymbolicAddress::CoerceInPlace_ToInt32:  // GenerateImportJitExit
     case SymbolicAddress::CoerceInPlace_ToNumber:
 #if defined(JS_CODEGEN_MIPS32)
     case SymbolicAddress::js_jit_gAtomic64Lock:
 #endif
 #ifdef WASM_CODEGEN_DEBUG
     case SymbolicAddress::PrintI32:
--- a/js/src/wasm/WasmConstants.h
+++ b/js/src/wasm/WasmConstants.h
@@ -44,17 +44,17 @@ enum class SectionId {
 
 enum class TypeCode {
   I32 = 0x7f,  // SLEB128(-0x01)
   I64 = 0x7e,  // SLEB128(-0x02)
   F32 = 0x7d,  // SLEB128(-0x03)
   F64 = 0x7c,  // SLEB128(-0x04)
 
   // A function pointer with any signature
-  AnyFunc = 0x70,  // SLEB128(-0x10)
+  FuncRef = 0x70,  // SLEB128(-0x10)
 
   // A reference to any type.
   AnyRef = 0x6f,
 
   // Type constructor for reference types.
   Ref = 0x6e,
 
   // Type constructor for function types
--- a/js/src/wasm/WasmCraneliftCompile.cpp
+++ b/js/src/wasm/WasmCraneliftCompile.cpp
@@ -431,19 +431,19 @@ TypeCode global_type(const GlobalDesc* g
 
 size_t global_tlsOffset(const GlobalDesc* global) {
   return globalToTlsOffset(global->offset());
 }
 
 // TableDesc
 
 size_t table_tlsOffset(const TableDesc* table) {
-  MOZ_RELEASE_ASSERT(table->kind == TableKind::AnyFunction ||
-                         table->kind == TableKind::TypedFunction,
-                     "cranelift doesn't support AnyRef tables yet.");
+  MOZ_RELEASE_ASSERT(
+      table->kind == TableKind::FuncRef || table->kind == TableKind::AsmJS,
+      "cranelift doesn't support AnyRef tables yet.");
   return globalToTlsOffset(table->globalDataOffset);
 }
 
 // Sig
 
 size_t funcType_numArgs(const FuncTypeWithId* funcType) {
   return funcType->args().length();
 }
--- a/js/src/wasm/WasmFrameIter.cpp
+++ b/js/src/wasm/WasmFrameIter.cpp
@@ -1256,16 +1256,17 @@ static const char* ThunkedNativeToDescri
   switch (func) {
     case SymbolicAddress::HandleDebugTrap:
     case SymbolicAddress::HandleThrow:
     case SymbolicAddress::HandleTrap:
     case SymbolicAddress::CallImport_Void:
     case SymbolicAddress::CallImport_I32:
     case SymbolicAddress::CallImport_I64:
     case SymbolicAddress::CallImport_F64:
+    case SymbolicAddress::CallImport_FuncRef:
     case SymbolicAddress::CallImport_AnyRef:
     case SymbolicAddress::CoerceInPlace_ToInt32:
     case SymbolicAddress::CoerceInPlace_ToNumber:
       MOZ_ASSERT(!NeedsBuiltinThunk(func),
                  "not in sync with NeedsBuiltinThunk");
       break;
     case SymbolicAddress::ToInt32:
       return "call to asm.js native ToInt32 coercion (in wasm)";
--- a/js/src/wasm/WasmGenerator.cpp
+++ b/js/src/wasm/WasmGenerator.cpp
@@ -362,28 +362,28 @@ bool ModuleGenerator::init(Metadata* may
     }
   }
 
   if (env_->startFuncIndex) {
     addOrMerge(ExportedFunc(*env_->startFuncIndex, true));
   }
 
   for (const ElemSegment* seg : env_->elemSegments) {
-    TableKind kind = !seg->active() ? TableKind::AnyFunction
+    TableKind kind = !seg->active() ? TableKind::FuncRef
                                     : env_->tables[seg->tableIndex].kind;
     switch (kind) {
-      case TableKind::AnyFunction:
+      case TableKind::FuncRef:
         for (uint32_t funcIndex : seg->elemFuncIndices) {
           if (funcIndex == NullFuncIndex) {
             continue;
           }
           addOrMerge(ExportedFunc(funcIndex, false));
         }
         break;
-      case TableKind::TypedFunction:
+      case TableKind::AsmJS:
         // asm.js functions are not exported.
         break;
       case TableKind::AnyRef:
         break;
     }
   }
 
   auto* newEnd =
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -126,16 +126,17 @@ bool Instance::callImport(JSContext* cx,
         args[i].set(Int32Value(*(int32_t*)&argv[i]));
         break;
       case ValType::F32:
         args[i].set(JS::CanonicalizedDoubleValue(*(float*)&argv[i]));
         break;
       case ValType::F64:
         args[i].set(JS::CanonicalizedDoubleValue(*(double*)&argv[i]));
         break;
+      case ValType::FuncRef:
       case ValType::AnyRef: {
         args[i].set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)&argv[i])));
         break;
       }
       case ValType::Ref:
         MOZ_CRASH("temporarily unsupported Ref type in callImport");
       case ValType::I64:
         MOZ_CRASH("unhandled type in callImport");
@@ -214,17 +215,17 @@ bool Instance::callImport(JSContext* cx,
         break;
       case ValType::F32:
         type = TypeSet::DoubleType();
         break;
       case ValType::F64:
         type = TypeSet::DoubleType();
         break;
       case ValType::Ref:
-        MOZ_CRASH("case guarded above");
+      case ValType::FuncRef:
       case ValType::AnyRef:
         MOZ_CRASH("case guarded above");
       case ValType::I64:
         MOZ_CRASH("NYI");
       case ValType::NullRef:
         MOZ_CRASH("NullRef not expressible");
     }
     if (!TypeScript::ArgTypes(script, i)->hasType(type)) {
@@ -300,38 +301,59 @@ Instance::callImport_anyref(Instance* in
   RootedValue rval(cx);
   if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
     return false;
   }
   RootedAnyRef result(cx, AnyRef::null());
   if (!BoxAnyRef(cx, rval, &result)) {
     return false;
   }
+  static_assert(sizeof(argv[0]) >= sizeof(void*), "fits");
   *(void**)argv = result.get().forCompiledCode();
   return true;
 }
 
-/* static */ uint32_t /* infallible */
-Instance::memoryGrow_i32(Instance* instance, uint32_t delta) {
+/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
+Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex,
+                             int32_t argc, uint64_t* argv) {
+  JSContext* cx = TlsContext.get();
+  RootedValue rval(cx);
+  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
+    return false;
+  }
+
+  RootedFunction fun(cx);
+  if (!CheckFuncRefValue(cx, rval, &fun)) {
+    return false;
+  }
+
+  *(void**)argv = fun;
+  return true;
+}
+
+/* static */ uint32_t Instance::memoryGrow_i32(Instance* instance,
+                                               uint32_t delta) {
+  MOZ_ASSERT(SASigMemoryGrow.failureMode == FailureMode::Infallible);
   MOZ_ASSERT(!instance->isAsmJS());
 
   JSContext* cx = TlsContext.get();
   RootedWasmMemoryObject memory(cx, instance->memory_);
 
   uint32_t ret = WasmMemoryObject::grow(memory, delta, cx);
 
   // If there has been a moving grow, this Instance should have been notified.
   MOZ_RELEASE_ASSERT(instance->tlsData()->memoryBase ==
                      instance->memory_->buffer().dataPointerEither());
 
   return ret;
 }
 
-/* static */ uint32_t /* infallible */
-Instance::memorySize_i32(Instance* instance) {
+/* static */ uint32_t Instance::memorySize_i32(Instance* instance) {
+  MOZ_ASSERT(SASigMemorySize.failureMode == FailureMode::Infallible);
+
   // This invariant must hold when running Wasm code. Assert it here so we can
   // write tests for cross-realm calls.
   MOZ_ASSERT(TlsContext.get()->realm() == instance->realm());
 
   uint32_t byteLength = instance->memory()->volatileMemoryLength();
   MOZ_ASSERT(byteLength % wasm::PageSize == 0);
   return byteLength / wasm::PageSize;
 }
@@ -369,30 +391,32 @@ static int32_t PerformWait(Instance* ins
       return 2;
     case FutexThread::WaitResult::Error:
       return -1;
     default:
       MOZ_CRASH();
   }
 }
 
-/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
-Instance::wait_i32(Instance* instance, uint32_t byteOffset, int32_t value,
-                   int64_t timeout_ns) {
+/* static */ int32_t Instance::wait_i32(Instance* instance, uint32_t byteOffset,
+                                        int32_t value, int64_t timeout_ns) {
+  MOZ_ASSERT(SASigWaitI32.failureMode == FailureMode::FailOnNegI32);
   return PerformWait<int32_t>(instance, byteOffset, value, timeout_ns);
 }
 
-/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
-Instance::wait_i64(Instance* instance, uint32_t byteOffset, int64_t value,
-                   int64_t timeout_ns) {
+/* static */ int32_t Instance::wait_i64(Instance* instance, uint32_t byteOffset,
+                                        int64_t value, int64_t timeout_ns) {
+  MOZ_ASSERT(SASigWaitI64.failureMode == FailureMode::FailOnNegI32);
   return PerformWait<int64_t>(instance, byteOffset, value, timeout_ns);
 }
 
-/* static */ int32_t /* -1 to signal trap; nonnegative for ok */
-Instance::wake(Instance* instance, uint32_t byteOffset, int32_t count) {
+/* static */ int32_t Instance::wake(Instance* instance, uint32_t byteOffset,
+                                    int32_t count) {
+  MOZ_ASSERT(SASigWake.failureMode == FailureMode::FailOnNegI32);
+
   JSContext* cx = TlsContext.get();
 
   // The alignment guard is not in the wasm spec as of 2017-11-02, but is
   // considered likely to appear, as 4-byte alignment is required for WAKE by
   // the spec's validation algorithm.
 
   if (byteOffset & 3) {
     JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
@@ -413,19 +437,21 @@ Instance::wake(Instance* instance, uint3
     JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                               JSMSG_WASM_WAKE_OVERFLOW);
     return -1;
   }
 
   return int32_t(woken);
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::memCopy(Instance* instance, uint32_t dstByteOffset,
-                  uint32_t srcByteOffset, uint32_t len) {
+/* static */ int32_t Instance::memCopy(Instance* instance,
+                                       uint32_t dstByteOffset,
+                                       uint32_t srcByteOffset, uint32_t len) {
+  MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);
+
   WasmMemoryObject* mem = instance->memory();
   uint32_t memLen = mem->volatileMemoryLength();
 
   if (len == 0) {
     // Even though the length is zero, we must check for a valid offset.  But
     // zero-length operations at the edge of the memory are allowed.
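+    // For example, with memLen == 65536 a zero-length copy at offset 65536
+    // succeeds, while one at offset 65537 falls through to the error path.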
     if (dstByteOffset <= memLen && srcByteOffset <= memLen) {
       return 0;
@@ -481,18 +507,19 @@ Instance::memCopy(Instance* instance, ui
   }
 
   JSContext* cx = TlsContext.get();
   JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                             JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::dataDrop(Instance* instance, uint32_t segIndex) {
+/* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) {
+  MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);
+
   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                      "ensured by validation");
 
   if (!instance->passiveDataSegments_[segIndex]) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_DROPPED_DATA_SEG);
     return -1;
   }
@@ -500,19 +527,20 @@ Instance::dataDrop(Instance* instance, u
   SharedDataSegment& segRefPtr = instance->passiveDataSegments_[segIndex];
   MOZ_RELEASE_ASSERT(!segRefPtr->active());
 
   // Drop this instance's reference to the DataSegment so it can be released.
   segRefPtr = nullptr;
   return 0;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value,
-                  uint32_t len) {
+/* static */ int32_t Instance::memFill(Instance* instance, uint32_t byteOffset,
+                                       uint32_t value, uint32_t len) {
+  MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);
+
   WasmMemoryObject* mem = instance->memory();
   uint32_t memLen = mem->volatileMemoryLength();
 
   if (len == 0) {
     // Even though the length is zero, we must check for a valid offset.  But
     // zero-length operations at the edge of the memory are allowed.
     if (byteOffset <= memLen) {
       return 0;
@@ -553,19 +581,21 @@ Instance::memFill(Instance* instance, ui
   }
 
   JSContext* cx = TlsContext.get();
   JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                             JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::memInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
-                  uint32_t len, uint32_t segIndex) {
+/* static */ int32_t Instance::memInit(Instance* instance, uint32_t dstOffset,
+                                       uint32_t srcOffset, uint32_t len,
+                                       uint32_t segIndex) {
+  MOZ_ASSERT(SASigMemInit.failureMode == FailureMode::FailOnNegI32);
+
   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                      "ensured by validation");
 
   if (!instance->passiveDataSegments_[segIndex]) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_DROPPED_DATA_SEG);
     return -1;
   }
@@ -631,20 +661,22 @@ Instance::memInit(Instance* instance, ui
     }
   }
 
   JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                             JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::tableCopy(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
-                    uint32_t len, uint32_t dstTableIndex,
-                    uint32_t srcTableIndex) {
+/* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset,
+                                         uint32_t srcOffset, uint32_t len,
+                                         uint32_t dstTableIndex,
+                                         uint32_t srcTableIndex) {
+  MOZ_ASSERT(SASigTableCopy.failureMode == FailureMode::FailOnNegI32);
+
   const SharedTable& srcTable = instance->tables()[srcTableIndex];
   uint32_t srcTableLen = srcTable->length();
 
   const SharedTable& dstTable = instance->tables()[dstTableIndex];
   uint32_t dstTableLen = dstTable->length();
 
   if (len == 0) {
     // Even though the number of items to copy is zero, we must check for valid
@@ -708,18 +740,19 @@ Instance::tableCopy(Instance* instance, 
     }
   }
 
   JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                             JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::elemDrop(Instance* instance, uint32_t segIndex) {
+/* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) {
+  MOZ_ASSERT(SASigElemDrop.failureMode == FailureMode::FailOnNegI32);
+
   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
                      "ensured by validation");
 
   if (!instance->passiveElemSegments_[segIndex]) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_DROPPED_ELEM_SEG);
     return -1;
   }
@@ -766,30 +799,33 @@ void Instance::initElems(uint32_t tableI
           WasmInstanceObject* calleeInstanceObj =
               ExportedFunctionToInstanceObject(fun);
           Instance& calleeInstance = calleeInstanceObj->instance();
           Tier calleeTier = calleeInstance.code().bestTier();
           const CodeRange& calleeCodeRange =
               calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier);
           void* code = calleeInstance.codeBase(calleeTier) +
                        calleeCodeRange.funcTableEntry();
-          table.setAnyFunc(dstOffset + i, code, &calleeInstance);
+          table.setFuncRef(dstOffset + i, code, &calleeInstance);
           continue;
         }
       }
       void* code = codeBaseTier +
                    codeRanges[funcToCodeRange[funcIndex]].funcTableEntry();
-      table.setAnyFunc(dstOffset + i, code, this);
+      table.setFuncRef(dstOffset + i, code, this);
     }
   }
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
-                    uint32_t len, uint32_t segIndex, uint32_t tableIndex) {
+/* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset,
+                                         uint32_t srcOffset, uint32_t len,
+                                         uint32_t segIndex,
+                                         uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32);
+
   MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
                      "ensured by validation");
 
   if (!instance->passiveElemSegments_[segIndex]) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_DROPPED_ELEM_SEG);
     return -1;
   }
@@ -798,17 +834,17 @@ Instance::tableInit(Instance* instance, 
   MOZ_RELEASE_ASSERT(!seg.active());
   const uint32_t segLen = seg.length();
 
   const Table& table = *instance->tables()[tableIndex];
   const uint32_t tableLen = table.length();
 
   // Element segments cannot currently contain arbitrary values, and anyref
   // tables cannot be initialized from segments.
-  MOZ_ASSERT(table.kind() == TableKind::AnyFunction);
+  MOZ_ASSERT(table.kind() == TableKind::FuncRef);
 
   // We are proposing to copy
   //
   //   seg[ srcOffset .. srcOffset + len - 1 ]
   // to
   //   tableBase[ dstOffset .. dstOffset + len - 1 ]
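+  //
+  // which is in bounds exactly when uint64_t(srcOffset) + len <= segLen and
+  // uint64_t(dstOffset) + len <= tableLen.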
 
   if (len == 0) {
@@ -846,19 +882,21 @@ Instance::tableInit(Instance* instance, 
     }
   }
 
   JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                             JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::tableFill(Instance* instance, uint32_t start, void* value,
-                    uint32_t len, uint32_t tableIndex) {
+/* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start,
+                                         void* value, uint32_t len,
+                                         uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32);
+
   Table& table = *instance->tables()[tableIndex];
   MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
 
   if (len == 0) {
     // Even though the length is zero, we must check for a valid offset.  But
     // zero-length operations at the edge of the table are allowed.
     if (start <= table.length()) {
       return 0;
@@ -892,105 +930,104 @@ Instance::tableFill(Instance* instance, 
   }
 
   JSContext* cx = TlsContext.get();
   JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                             JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
   return -1;
 }
 
-// The return convention for tableGet() is awkward but avoids a situation where
-// Ion code has to hold a value that may or may not be a pointer to GC'd
-// storage, or where Ion has to pass in a pointer to storage where a return
-// value can be written.
-//
-// Note carefully that the pointer that is returned may not be valid past
-// operations that change the size of the table or cause GC work; it is strictly
-// to be used to retrieve the return value.
-
-/* static */ void* /* nullptr to signal trap; pointer to table location
-                      otherwise */
-Instance::tableGet(Instance* instance, uint32_t index, uint32_t tableIndex) {
+/* static */ void* Instance::tableGet(Instance* instance, uint32_t index,
+                                      uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef);
   const Table& table = *instance->tables()[tableIndex];
   MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
   if (index >= table.length()) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
-    return nullptr;
+    return AnyRef::invalid().forCompiledCode();
   }
-  return const_cast<void*>(table.getShortlivedAnyRefLocForCompiledCode(index));
+  return table.getAnyRef(index).forCompiledCode();
 }
 
-/* static */ uint32_t /* infallible */
-Instance::tableGrow(Instance* instance, void* initValue, uint32_t delta,
-                    uint32_t tableIndex) {
+/* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue,
+                                          uint32_t delta, uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible);
+
   RootedAnyRef obj(TlsContext.get(), AnyRef::fromCompiledCode(initValue));
   Table& table = *instance->tables()[tableIndex];
   MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
 
   uint32_t oldSize = table.grow(delta, TlsContext.get());
   if (oldSize != uint32_t(-1) && initValue != nullptr) {
     for (uint32_t i = 0; i < delta; i++) {
       table.setAnyRef(oldSize + i, obj.get());
     }
   }
   return oldSize;
 }
 
-/* static */ int32_t /* -1 to signal trap; 0 for ok */
-Instance::tableSet(Instance* instance, uint32_t index, void* value,
-                   uint32_t tableIndex) {
+/* static */ int32_t Instance::tableSet(Instance* instance, uint32_t index,
+                                        void* value, uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32);
+
   Table& table = *instance->tables()[tableIndex];
   MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
   if (index >= table.length()) {
     JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
                               JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
     return -1;
   }
   table.setAnyRef(index, AnyRef::fromCompiledCode(value));
   return 0;
 }
 
-/* static */ uint32_t /* infallible */
-Instance::tableSize(Instance* instance, uint32_t tableIndex) {
+/* static */ uint32_t Instance::tableSize(Instance* instance,
+                                          uint32_t tableIndex) {
+  MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible);
   Table& table = *instance->tables()[tableIndex];
   return table.length();
 }
 
-/* static */ void /* infallible */
-Instance::postBarrier(Instance* instance, gc::Cell** location) {
+/* static */ void Instance::postBarrier(Instance* instance,
+                                        gc::Cell** location) {
+  MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
   MOZ_ASSERT(location);
   TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
 }
 
-/* static */ void /* infallible */
-Instance::postBarrierFiltering(Instance* instance, gc::Cell** location) {
+/* static */ void Instance::postBarrierFiltering(Instance* instance,
+                                                 gc::Cell** location) {
+  MOZ_ASSERT(SASigPostBarrierFiltering.failureMode == FailureMode::Infallible);
   MOZ_ASSERT(location);
   if (*location == nullptr || !gc::IsInsideNursery(*location)) {
     return;
   }
   TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
 }
 
 // The typeIndex is an index into the structTypeDescrs_ table in the instance.
 // That table holds TypeDescr objects.
 //
 // When we fail to allocate we return a nullptr; the builtin thunk checks
 // this (SASigStructNew is FailOnNullPtr) and propagates it as an error.
 
-/* static */ void* /* null on OOM, otherwise a pointer */
-Instance::structNew(Instance* instance, uint32_t typeIndex) {
+/* static */ void* Instance::structNew(Instance* instance, uint32_t typeIndex) {
+  MOZ_ASSERT(SASigStructNew.failureMode == FailureMode::FailOnNullPtr);
   JSContext* cx = TlsContext.get();
   Rooted<TypeDescr*> typeDescr(cx, instance->structTypeDescrs_[typeIndex]);
   return TypedObject::createZeroed(cx, typeDescr);
 }
 
-/* static */ void* /* infallible */
-Instance::structNarrow(Instance* instance, uint32_t mustUnboxAnyref,
-                       uint32_t outputTypeIndex, void* maybeNullPtr) {
+/* static */ void* Instance::structNarrow(Instance* instance,
+                                          uint32_t mustUnboxAnyref,
+                                          uint32_t outputTypeIndex,
+                                          void* maybeNullPtr) {
+  MOZ_ASSERT(SASigStructNarrow.failureMode == FailureMode::Infallible);
+
   JSContext* cx = TlsContext.get();
 
   Rooted<TypedObject*> obj(cx);
   Rooted<StructTypeDescr*> typeDescr(cx);
 
   if (maybeNullPtr == nullptr) {
     return maybeNullPtr;
   }
@@ -1054,17 +1091,17 @@ Instance::structNarrow(Instance* instanc
 // Note, dst must point into nonmoveable storage that is not in the nursery;
 // this matters for the write barriers.  Furthermore, for pointer types the
 // current value of *dst must be null so that only a post-barrier is required.
 //
 // Regarding the destination not being in the nursery, we have these cases.
 // Either the written location is in the global data section in the
 // WasmInstanceObject, or the Cell of a WasmGlobalObject:
 //
-// - WasmInstanceObjects are always tenured and u.ref_/anyref_ may point to a
+// - WasmInstanceObjects are always tenured and u.ref_ may point to a
 //   nursery object, so we need a post-barrier since the global data of an
 //   instance is effectively a field of the WasmInstanceObject.
 //
 // - WasmGlobalObjects are always tenured, and they have a Cell field, so a
 //   post-barrier may be needed for the same reason as above.
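+//
+// For example, after storing a nursery-allocated JSObject* into an
+// instance's global data, Instance::postBarrier above records the location
+// with putCell() so a minor GC can update the stored pointer.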
 
 void CopyValPostBarriered(uint8_t* dst, const Val& src) {
   switch (src.type().code()) {
@@ -1083,43 +1120,32 @@ void CopyValPostBarriered(uint8_t* dst, 
       memcpy(dst, &x, sizeof(x));
       break;
     }
     case ValType::F64: {
       double x = src.f64();
       memcpy(dst, &x, sizeof(x));
       break;
     }
+    case ValType::Ref:
+    case ValType::FuncRef:
     case ValType::AnyRef: {
       // TODO/AnyRef-boxing: With boxed immediates and strings, the write
       // barrier is going to have to be more complicated.
       ASSERT_ANYREF_IS_JSOBJECT;
       MOZ_ASSERT(*(void**)dst == nullptr,
                  "should be null so no need for a pre-barrier");
-      AnyRef x = src.anyref();
-      memcpy(dst, x.asJSObjectAddress(), sizeof(x));
+      AnyRef x = src.ref();
+      memcpy(dst, x.asJSObjectAddress(), sizeof(*x.asJSObjectAddress()));
       if (!x.isNull()) {
         JSObject::writeBarrierPost((JSObject**)dst, nullptr, x.asJSObject());
       }
       break;
     }
-    case ValType::Ref: {
-      MOZ_ASSERT(*(JSObject**)dst == nullptr,
-                 "should be null so no need for a pre-barrier");
-      JSObject* x = src.ref();
-      memcpy(dst, &x, sizeof(x));
-      if (x) {
-        JSObject::writeBarrierPost((JSObject**)dst, nullptr, x);
-      }
-      break;
-    }
     case ValType::NullRef: {
-      break;
-    }
-    default: {
       MOZ_CRASH("unexpected Val type");
     }
   }
 }
 
 Instance::Instance(JSContext* cx, Handle<WasmInstanceObject*> object,
                    SharedCode code, UniqueTlsData tlsDataIn,
                    HandleWasmMemoryObject memory, SharedTableVector&& tables,
@@ -1397,23 +1423,23 @@ void Instance::tracePrivate(JSTracer* tr
     TraceNullableEdge(trc, &funcImportTls(fi).fun, "wasm import");
   }
 
   for (const SharedTable& table : tables_) {
     table->trace(trc);
   }
 
   for (const GlobalDesc& global : code().metadata().globals) {
-    // Indirect anyref global get traced by the owning WebAssembly.Global.
+    // Indirect reference globals get traced by the owning WebAssembly.Global.
     if (!global.type().isReference() || global.isConstant() ||
         global.isIndirect()) {
       continue;
     }
     GCPtrObject* obj = (GCPtrObject*)(globalData() + global.offset());
-    TraceNullableEdge(trc, obj, "wasm ref/anyref global");
+    TraceNullableEdge(trc, obj, "wasm reference-typed global");
   }
 
   TraceNullableEdge(trc, &memory_, "wasm buffer");
   structTypeDescrs_.trace(trc);
 }
 
 void Instance::trace(JSTracer* trc) {
   // Technically, instead of having this method, the caller could use
@@ -1651,90 +1677,98 @@ bool Instance::callExport(JSContext* cx,
   // stored in the first element of the array (which, therefore, must have
   // length >= 1).
   Vector<ExportArg, 8> exportArgs(cx);
   if (!exportArgs.resize(Max<size_t>(1, funcType->args().length()))) {
     return false;
   }
 
   ASSERT_ANYREF_IS_JSOBJECT;
-  Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> anyrefs(cx);
+  Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> refs(cx);
 
   DebugCodegen(DebugChannel::Function, "wasm-function[%d]; arguments ",
                funcIndex);
   RootedValue v(cx);
   for (size_t i = 0; i < funcType->args().length(); ++i) {
     v = i < args.length() ? args[i] : UndefinedValue();
     switch (funcType->arg(i).code()) {
       case ValType::I32:
         if (!ToInt32(cx, v, (int32_t*)&exportArgs[i])) {
-          DebugCodegen(DebugChannel::Function, "call to ToInt32 failed!\n");
           return false;
         }
         DebugCodegen(DebugChannel::Function, "i32(%d) ",
                      *(int32_t*)&exportArgs[i]);
         break;
       case ValType::I64:
         MOZ_CRASH("unexpected i64 flowing into callExport");
       case ValType::F32:
         if (!RoundFloat32(cx, v, (float*)&exportArgs[i])) {
-          DebugCodegen(DebugChannel::Function,
-                       "call to RoundFloat32 failed!\n");
           return false;
         }
         DebugCodegen(DebugChannel::Function, "f32(%f) ",
                      *(float*)&exportArgs[i]);
         break;
       case ValType::F64:
         if (!ToNumber(cx, v, (double*)&exportArgs[i])) {
-          DebugCodegen(DebugChannel::Function, "call to ToNumber failed!\n");
           return false;
         }
         DebugCodegen(DebugChannel::Function, "f64(%lf) ",
                      *(double*)&exportArgs[i]);
         break;
       case ValType::Ref:
         MOZ_CRASH("temporarily unsupported Ref type in callExport");
+      case ValType::FuncRef: {
+        RootedFunction fun(cx);
+        if (!CheckFuncRefValue(cx, v, &fun)) {
+          return false;
+        }
+        // Store in rooted array until no more GC is possible.
+        ASSERT_ANYREF_IS_JSOBJECT;
+        if (!refs.emplaceBack(fun)) {
+          return false;
+        }
+        DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
+                     int(refs.length() - 1));
+        break;
+      }
       case ValType::AnyRef: {
         RootedAnyRef ar(cx, AnyRef::null());
         if (!BoxAnyRef(cx, v, &ar)) {
-          DebugCodegen(DebugChannel::Function, "call to BoxAnyRef failed!\n");
           return false;
         }
-        // We'll copy the value into the arguments array just before the call;
-        // for now tuck the value away in a rooted array.
+        // Store in rooted array until no more GC is possible.
         ASSERT_ANYREF_IS_JSOBJECT;
-        if (!anyrefs.emplaceBack(ar.get().asJSObject())) {
+        if (!refs.emplaceBack(ar.get().asJSObject())) {
           return false;
         }
         DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
-                     int(anyrefs.length() - 1));
+                     int(refs.length() - 1));
         break;
       }
       case ValType::NullRef: {
         MOZ_CRASH("NullRef not expressible");
       }
     }
   }
 
   DebugCodegen(DebugChannel::Function, "\n");
 
   // Copy over reference values from the rooted array, if any.
-  if (anyrefs.length() > 0) {
+  if (refs.length() > 0) {
     DebugCodegen(DebugChannel::Function, "; ");
     size_t nextRef = 0;
     for (size_t i = 0; i < funcType->args().length(); ++i) {
       if (funcType->arg(i).isReference()) {
         ASSERT_ANYREF_IS_JSOBJECT;
-        *(void**)&exportArgs[i] = (void*)anyrefs[nextRef++];
+        *(void**)&exportArgs[i] = (void*)refs[nextRef++];
         DebugCodegen(DebugChannel::Function, "ptr(#%d) = %p ", int(nextRef - 1),
                      *(void**)&exportArgs[i]);
       }
     }
-    anyrefs.clear();
+    refs.clear();
   }
 
   {
     JitActivation activation(cx);
 
     // Call the per-exported-function trampoline created by GenerateEntry.
     auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, interpEntry);
     if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), tlsData())) {
@@ -1778,16 +1812,17 @@ bool Instance::callExport(JSContext* cx,
       DebugCodegen(DebugChannel::Function, "f32(%f)", *(float*)retAddr);
       break;
     case ExprType::F64:
       args.rval().set(NumberValue(*(double*)retAddr));
       DebugCodegen(DebugChannel::Function, "f64(%lf)", *(double*)retAddr);
       break;
     case ExprType::Ref:
       MOZ_CRASH("temporarily unsupported Ref type in callExport");
+    case ExprType::FuncRef:
     case ExprType::AnyRef:
       args.rval().set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)retAddr)));
       DebugCodegen(DebugChannel::Function, "ptr(%p)", *(void**)retAddr);
       break;
     case ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case ExprType::Limit:
       MOZ_CRASH("Limit");
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -177,16 +177,17 @@ class Instance {
 
  public:
   // Functions to be called directly from wasm code.
   static int32_t callImport_void(Instance*, int32_t, int32_t, uint64_t*);
   static int32_t callImport_i32(Instance*, int32_t, int32_t, uint64_t*);
   static int32_t callImport_i64(Instance*, int32_t, int32_t, uint64_t*);
   static int32_t callImport_f64(Instance*, int32_t, int32_t, uint64_t*);
   static int32_t callImport_anyref(Instance*, int32_t, int32_t, uint64_t*);
+  static int32_t callImport_funcref(Instance*, int32_t, int32_t, uint64_t*);
   static uint32_t memoryGrow_i32(Instance* instance, uint32_t delta);
   static uint32_t memorySize_i32(Instance* instance);
   static int32_t wait_i32(Instance* instance, uint32_t byteOffset,
                           int32_t value, int64_t timeout);
   static int32_t wait_i64(Instance* instance, uint32_t byteOffset,
                           int64_t value, int64_t timeout);
   static int32_t wake(Instance* instance, uint32_t byteOffset, int32_t count);
   static int32_t memCopy(Instance* instance, uint32_t destByteOffset,
--- a/js/src/wasm/WasmIonCompile.cpp
+++ b/js/src/wasm/WasmIonCompile.cpp
@@ -178,16 +178,17 @@ class FunctionCompiler {
           break;
         case ValType::F32:
           ins = MConstant::New(alloc(), Float32Value(0.f), MIRType::Float32);
           break;
         case ValType::F64:
           ins = MConstant::New(alloc(), DoubleValue(0.0), MIRType::Double);
           break;
         case ValType::Ref:
+        case ValType::FuncRef:
         case ValType::AnyRef:
           ins = MWasmNullConstant::New(alloc());
           break;
         case ValType::NullRef:
           MOZ_CRASH("NullRef not expressible");
       }
 
       curBlock_->add(ins);
@@ -675,76 +676,16 @@ class FunctionCompiler {
     }
     auto* ins =
         MWasmAddOffset::New(alloc(), base, access->offset(), bytecodeOffset());
     curBlock_->add(ins);
     access->clearOffset();
     return ins;
   }
 
-  bool checkI32NegativeMeansFailedResult(MDefinition* value) {
-    if (inDeadCode()) {
-      return true;
-    }
-
-    auto* zero = constant(Int32Value(0), MIRType::Int32);
-    auto* cond = compare(value, zero, JSOP_LT, MCompare::Compare_Int32);
-
-    MBasicBlock* failBlock;
-    if (!newBlock(curBlock_, &failBlock)) {
-      return false;
-    }
-
-    MBasicBlock* okBlock;
-    if (!newBlock(curBlock_, &okBlock)) {
-      return false;
-    }
-
-    curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
-    failBlock->end(
-        MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
-    curBlock_ = okBlock;
-    return true;
-  }
-
-  bool checkPointerNullMeansFailedResult(MDefinition* value) {
-    if (inDeadCode()) {
-      return true;
-    }
-
-    auto* cond = MIsNullPointer::New(alloc(), value);
-    curBlock_->add(cond);
-
-    MBasicBlock* failBlock;
-    if (!newBlock(curBlock_, &failBlock)) {
-      return false;
-    }
-
-    MBasicBlock* okBlock;
-    if (!newBlock(curBlock_, &okBlock)) {
-      return false;
-    }
-
-    curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
-    failBlock->end(
-        MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
-    curBlock_ = okBlock;
-    return true;
-  }
-
-  MDefinition* derefTableElementPointer(MDefinition* base) {
-    // Table element storage may be moved by GC operations, so reads from that
-    // storage are not movable.
-    MWasmLoadRef* load =
-        MWasmLoadRef::New(alloc(), base, AliasSet::WasmTableElement,
-                          /*isMovable=*/false);
-    curBlock_->add(load);
-    return load;
-  }
-
   MDefinition* load(MDefinition* base, MemoryAccessDesc* access,
                     ValType result) {
     if (inDeadCode()) {
       return nullptr;
     }
 
     MWasmLoadTls* memoryBase = maybeLoadMemoryBase();
     MInstruction* load = nullptr;
@@ -1152,16 +1093,18 @@ class FunctionCompiler {
   bool builtinCall(const SymbolicAddressSignature& builtin,
                    uint32_t lineOrBytecode, const CallCompileState& call,
                    MDefinition** def) {
     if (inDeadCode()) {
       *def = nullptr;
       return true;
     }
 
+    MOZ_ASSERT(builtin.failureMode == FailureMode::Infallible);
+
     CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
     auto callee = CalleeDesc::builtin(builtin.identity);
     auto* ins =
         MWasmCall::New(alloc(), desc, callee, call.regArgs_, builtin.retType,
                        StackArgAreaSizeUnaligned(builtin));
     if (!ins) {
       return false;
     }
@@ -1169,32 +1112,37 @@ class FunctionCompiler {
     curBlock_->add(ins);
     *def = ins;
     return true;
   }
 
   bool builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
                                  uint32_t lineOrBytecode,
                                  const CallCompileState& call,
-                                 MDefinition** def) {
+                                 MDefinition** def = nullptr) {
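+    // `def` may be omitted only for builtins that do not return a value.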
+    MOZ_ASSERT_IF(!def, builtin.retType == MIRType::None);
     if (inDeadCode()) {
-      *def = nullptr;
+      if (def) {
+        *def = nullptr;
+      }
       return true;
     }
 
     CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
     auto* ins = MWasmCall::NewBuiltinInstanceMethodCall(
-        alloc(), desc, builtin.identity, call.instanceArg_, call.regArgs_,
-        builtin.retType, StackArgAreaSizeUnaligned(builtin));
+        alloc(), desc, builtin.identity, builtin.failureMode, call.instanceArg_,
+        call.regArgs_, builtin.retType, StackArgAreaSizeUnaligned(builtin));
     if (!ins) {
       return false;
     }
 
     curBlock_->add(ins);
-    *def = ins;
+    if (def) {
+      *def = ins;
+    }
     return true;
   }
 
   /*********************************************** Control flow generation */
 
   inline bool inDeadCode() const { return curBlock_ == nullptr; }
 
   void returnExpr(MDefinition* operand) {
@@ -2178,18 +2126,19 @@ static bool EmitGetGlobal(FunctionCompil
       result = f.constant(int64_t(value.i64()));
       break;
     case ValType::F32:
       result = f.constant(value.f32());
       break;
     case ValType::F64:
       result = f.constant(value.f64());
       break;
+    case ValType::FuncRef:
     case ValType::AnyRef:
-      MOZ_ASSERT(value.anyref().isNull());
+      MOZ_ASSERT(value.ref().isNull());
       result = f.nullRefConstant();
       break;
     default:
       MOZ_CRASH("unexpected type in EmitGetGlobal");
   }
 
   f.iter().setResult(result);
   return true;
@@ -2218,18 +2167,17 @@ static bool EmitSetGlobal(FunctionCompil
     CallCompileState args;
     if (!f.passInstance(callee.argTypes[0], &args)) {
       return false;
     }
     if (!f.passArg(barrierAddr, callee.argTypes[1], &args)) {
       return false;
     }
     f.finishCall(&args);
-    MDefinition* ret;
-    if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
+    if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args)) {
       return false;
     }
   }
 
   return true;
 }
 
 static bool EmitTeeGlobal(FunctionCompiler& f) {
@@ -2843,20 +2791,16 @@ static bool EmitWait(FunctionCompiler& f
     return false;
   }
 
   MDefinition* ret;
   if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
     return false;
   }
 
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
   f.iter().setResult(ret);
   return true;
 }
 
 static bool EmitWake(FunctionCompiler& f) {
   uint32_t lineOrBytecode = f.readCallSiteLineOrBytecode();
 
   const SymbolicAddressSignature& callee = SASigWake;
@@ -2890,20 +2834,16 @@ static bool EmitWake(FunctionCompiler& f
     return false;
   }
 
   MDefinition* ret;
   if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
     return false;
   }
 
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
   f.iter().setResult(ret);
   return true;
 }
 
 static bool EmitAtomicXchg(FunctionCompiler& f, ValType type,
                            Scalar::Type viewType) {
   LinearMemoryAddress<MDefinition*> addr;
   MDefinition* value;
@@ -2969,26 +2909,17 @@ static bool EmitMemOrTableCopy(FunctionC
     if (!f.passArg(sti, callee.argTypes[5], &args)) {
       return false;
     }
   }
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 
 static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) {
   uint32_t segIndexVal = 0;
   if (!f.iter().readDataOrElemDrop(isData, &segIndexVal)) {
     return false;
   }
 
@@ -3010,26 +2941,17 @@ static bool EmitDataOrElemDrop(FunctionC
   if (!f.passArg(segIndex, callee.argTypes[1], &args)) {
     return false;
   }
 
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 
 static bool EmitMemFill(FunctionCompiler& f) {
   MDefinition *start, *val, *len;
   if (!f.iter().readMemFill(&start, &val, &len)) {
     return false;
   }
 
@@ -3054,26 +2976,17 @@ static bool EmitMemFill(FunctionCompiler
   if (!f.passArg(len, callee.argTypes[3], &args)) {
     return false;
   }
 
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 
 static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) {
   uint32_t segIndexVal = 0, dstTableIndex = 0;
   MDefinition *dstOff, *srcOff, *len;
   if (!f.iter().readMemOrTableInit(isMem, &segIndexVal, &dstTableIndex, &dstOff,
                                    &srcOff, &len)) {
     return false;
@@ -3115,26 +3028,17 @@ static bool EmitMemOrTableInit(FunctionC
     if (!f.passArg(dti, callee.argTypes[5], &args)) {
       return false;
     }
   }
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 #endif  // ENABLE_WASM_BULKMEM_OPS
 
 #ifdef ENABLE_WASM_REFTYPES
 // Note, table.{get,grow,set} on table(funcref) are currently rejected by the
 // verifier.
 
 static bool EmitTableFill(FunctionCompiler& f) {
@@ -3174,26 +3078,17 @@ static bool EmitTableFill(FunctionCompil
   if (!f.passArg(tableIndexArg, callee.argTypes[4], &args)) {
     return false;
   }
 
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 
 static bool EmitTableGet(FunctionCompiler& f) {
   uint32_t tableIndex;
   MDefinition* index;
   if (!f.iter().readTableGet(&tableIndex, &index)) {
     return false;
   }
@@ -3224,26 +3119,18 @@ static bool EmitTableGet(FunctionCompile
   }
 
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  // The return value here is either null, denoting an error, or a short-lived
-  // pointer to a location containing a possibly-null ref.
+  // The builtin returns AnyRef::invalid() to denote an error (now checked
+  // inside builtinInstanceMethodCall itself), or else the possibly-null ref
+  // that was read from the table.
-  MDefinition* result;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &result)) {
-    return false;
-  }
-  if (!f.checkPointerNullMeansFailedResult(result)) {
-    return false;
-  }
-
-  MDefinition* ret = f.derefTableElementPointer(result);
-  if (!ret) {
+  MDefinition* ret;
+  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
     return false;
   }
 
   f.iter().setResult(ret);
   return true;
 }
 
 static bool EmitTableGrow(FunctionCompiler& f) {
@@ -3332,24 +3219,17 @@ static bool EmitTableSet(FunctionCompile
   if (!f.passArg(tableIndexArg, callee.argTypes[3], &args)) {
     return false;
   }
 
   if (!f.finishCall(&args)) {
     return false;
   }
 
-  MDefinition* ret;
-  if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
-    return false;
-  }
-  if (!f.checkI32NegativeMeansFailedResult(ret)) {
-    return false;
-  }
-  return true;
+  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
 }
 
 static bool EmitTableSize(FunctionCompiler& f) {
   uint32_t tableIndex;
   if (!f.iter().readTableSize(&tableIndex)) {
     return false;
   }
 
--- a/js/src/wasm/WasmJS.cpp
+++ b/js/src/wasm/WasmJS.cpp
@@ -173,22 +173,30 @@ static bool ToWebAssemblyValue(JSContext
     case ValType::F64: {
       double d;
       if (!ToNumber(cx, v, &d)) {
         return false;
       }
       val.set(Val(d));
       return true;
     }
+    case ValType::FuncRef: {
+      RootedFunction fun(cx);
+      if (!CheckFuncRefValue(cx, v, &fun)) {
+        return false;
+      }
+      val.set(Val(ValType::FuncRef, AnyRef::fromJSObject(fun)));
+      return true;
+    }
     case ValType::AnyRef: {
       RootedAnyRef tmp(cx, AnyRef::null());
       if (!BoxAnyRef(cx, v, &tmp)) {
         return false;
       }
-      val.set(Val(tmp));
+      val.set(Val(ValType::AnyRef, tmp));
       return true;
     }
     case ValType::Ref:
     case ValType::NullRef:
     case ValType::I64: {
       break;
     }
   }
@@ -198,18 +206,19 @@ static bool ToWebAssemblyValue(JSContext
 static Value ToJSValue(const Val& val) {
   switch (val.type().code()) {
     case ValType::I32:
       return Int32Value(val.i32());
     case ValType::F32:
       return DoubleValue(JS::CanonicalizeNaN(double(val.f32())));
     case ValType::F64:
       return DoubleValue(JS::CanonicalizeNaN(val.f64()));
+    case ValType::FuncRef:
     case ValType::AnyRef:
-      return UnboxAnyRef(val.anyref());
+      return UnboxAnyRef(val.ref());
     case ValType::Ref:
     case ValType::NullRef:
     case ValType::I64:
       break;
   }
   MOZ_CRASH("unexpected type when translating to a JS value");
 }
 
@@ -1531,16 +1540,39 @@ WasmFunctionScope* WasmInstanceObject::g
 
   return funcScope;
 }
 
 bool wasm::IsWasmExportedFunction(JSFunction* fun) {
   return fun->kind() == JSFunction::Wasm;
 }
 
+bool wasm::CheckFuncRefValue(JSContext* cx, HandleValue v,
+                             MutableHandleFunction fun) {
+  if (v.isNull()) {
+    MOZ_ASSERT(!fun);
+    return true;
+  }
+
+  if (v.isObject()) {
+    JSObject& obj = v.toObject();
+    if (obj.is<JSFunction>()) {
+      JSFunction* f = &obj.as<JSFunction>();
+      if (IsWasmExportedFunction(f)) {
+        fun.set(f);
+        return true;
+      }
+    }
+  }
+
+  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
+                           JSMSG_WASM_BAD_FUNCREF_VALUE);
+  return false;
+}
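
CheckFuncRefValue centralizes the funcref coercion that WasmTableObject's
setImpl and growImpl previously handled through the file-local
IsWasmExportedFunction overload (deleted below) plus per-site error
reporting: null passes (fun stays unset), an exported wasm function is
accepted, and anything else reports JSMSG_WASM_BAD_FUNCREF_VALUE. A
self-contained model of that contract, with the engine types replaced by
minimal stand-ins:

#include <cassert>

// Minimal stand-in for the JS value cases the real code distinguishes.
struct ValueModel {
  bool isNull = false;
  bool isExportedWasmFunction = false;
};

// True for null or an exported wasm function; false otherwise (where the
// real code reports a type error to the JS caller).
static bool checkFuncRefValue(const ValueModel& v, bool* outIsFunction) {
  if (v.isNull) {
    *outIsFunction = false;  // callers fill the slot with null
    return true;
  }
  if (v.isExportedWasmFunction) {
    *outIsFunction = true;
    return true;
  }
  return false;  // JSMSG_WASM_BAD_FUNCREF_VALUE in the real code
}

int main() {
  bool isFn;
  assert(checkFuncRefValue(ValueModel{true, false}, &isFn));    // null: ok
  assert(checkFuncRefValue(ValueModel{false, true}, &isFn));    // wasm fn: ok
  assert(!checkFuncRefValue(ValueModel{false, false}, &isFn));  // rejected
}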
+
 Instance& wasm::ExportedFunctionToInstance(JSFunction* fun) {
   return ExportedFunctionToInstanceObject(fun)->instance();
 }
 
 WasmInstanceObject* wasm::ExportedFunctionToInstanceObject(JSFunction* fun) {
   MOZ_ASSERT(fun->kind() == JSFunction::Wasm ||
              fun->kind() == JSFunction::AsmJS);
   const Value& v = fun->getExtendedSlot(FunctionExtended::WASM_INSTANCE_SLOT);
@@ -2001,17 +2033,17 @@ bool WasmTableObject::construct(JSContex
   RootedLinearString elementLinearStr(cx, elementStr->ensureLinear(cx));
   if (!elementLinearStr) {
     return false;
   }
 
   TableKind tableKind;
   if (StringEqualsAscii(elementLinearStr, "anyfunc") ||
       StringEqualsAscii(elementLinearStr, "funcref")) {
-    tableKind = TableKind::AnyFunction;
+    tableKind = TableKind::FuncRef;
 #ifdef ENABLE_WASM_REFTYPES
   } else if (StringEqualsAscii(elementLinearStr, "anyref")) {
     if (!HasReftypesSupport(cx)) {
       JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                JSMSG_WASM_BAD_ELEMENT);
       return false;
     }
     tableKind = TableKind::AnyRef;
@@ -2090,18 +2122,18 @@ bool WasmTableObject::getImpl(JSContext*
   }
 
   uint32_t index;
   if (!ToTableIndex(cx, args.get(0), table, "get index", &index)) {
     return false;
   }
 
   switch (table.kind()) {
-    case TableKind::AnyFunction: {
-      const FunctionTableElem& elem = table.getAnyFunc(index);
+    case TableKind::FuncRef: {
+      const FunctionTableElem& elem = table.getFuncRef(index);
       if (!elem.code) {
         args.rval().setNull();
         return true;
       }
 
       Instance& instance = *elem.tls->instance;
       const CodeRange& codeRange = *instance.code().lookupFuncRange(elem.code);
 
@@ -2129,16 +2161,23 @@ bool WasmTableObject::getImpl(JSContext*
 /* static */
 bool WasmTableObject::get(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   return CallNonGenericMethod<IsTable, getImpl>(cx, args);
 }
 
 static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value,
                               uint32_t index, uint32_t limit) {
+  if (!value) {
+    while (index < limit) {
+      table->setNull(index++);
+    }
+    return;
+  }
+
   RootedWasmInstanceObject instanceObj(cx,
                                        ExportedFunctionToInstanceObject(value));
   uint32_t funcIndex = ExportedFunctionToFuncIndex(value);
 
 #ifdef DEBUG
   RootedFunction f(cx);
   MOZ_ASSERT(instanceObj->getExportedFunction(cx, instanceObj, funcIndex, &f));
   MOZ_ASSERT(value == f);
@@ -2146,32 +2185,18 @@ static void TableFunctionFill(JSContext*
 
   Instance& instance = instanceObj->instance();
   Tier tier = instance.code().bestTier();
   const MetadataTier& metadata = instance.metadata(tier);
   const CodeRange& codeRange =
       metadata.codeRange(metadata.lookupFuncExport(funcIndex));
   void* code = instance.codeBase(tier) + codeRange.funcTableEntry();
   while (index < limit) {
-    table->setAnyFunc(index++, code, &instance);
-  }
-}
-
-static bool IsWasmExportedFunction(const Value& v, MutableHandleFunction f) {
-  if (!v.isObject()) {
-    return false;
-  }
-
-  JSObject& obj = v.toObject();
-  if (!obj.is<JSFunction>() || !IsWasmExportedFunction(&obj.as<JSFunction>())) {
-    return false;
-  }
-
-  f.set(&obj.as<JSFunction>());
-  return true;
+    table->setFuncRef(index++, code, &instance);
+  }
 }
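
TableFunctionFill now accepts a null function and clears the whole range
itself, which is what lets the setImpl and growImpl changes below hand it
the output of CheckFuncRefValue directly. A trimmed model of the fill
contract (the real code additionally resolves the function's table-entry
code pointer once up front, and routes the null case through setNull for
its GC barriers):

#include <cstddef>
#include <vector>

struct SlotModel {
  const void* code = nullptr;
};

// Null clears [index, limit); otherwise the already-resolved code pointer
// is written to every slot in the range.
static void tableFunctionFill(std::vector<SlotModel>& table,
                              const void* codeOrNull, std::size_t index,
                              std::size_t limit) {
  for (; index < limit; index++) {
    table[index].code = codeOrNull;
  }
}

int main() {
  std::vector<SlotModel> table(4);
  int marker = 0;
  tableFunctionFill(table, &marker, 1, 3);   // slots 1..2 point at "code"
  tableFunctionFill(table, nullptr, 1, 2);   // slot 1 cleared again
}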
 
 /* static */
 bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) {
   RootedWasmTableObject tableObj(
       cx, &args.thisv().toObject().as<WasmTableObject>());
   Table& table = tableObj->table();
 
@@ -2181,31 +2206,24 @@ bool WasmTableObject::setImpl(JSContext*
 
   uint32_t index;
   if (!ToTableIndex(cx, args.get(0), table, "set index", &index)) {
     return false;
   }
 
   RootedValue fillValue(cx, args[1]);
   switch (table.kind()) {
-    case TableKind::AnyFunction: {
-      RootedFunction value(cx);
-      if (!IsWasmExportedFunction(fillValue, &value) && !fillValue.isNull()) {
-        JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
-                                 JSMSG_WASM_BAD_TABLE_VALUE);
+    case TableKind::FuncRef: {
+      RootedFunction fun(cx);
+      if (!CheckFuncRefValue(cx, fillValue, &fun)) {
         return false;
       }
-
-      if (value) {
-        MOZ_ASSERT(index < MaxTableLength);
-        static_assert(MaxTableLength < UINT32_MAX, "Invariant");
-        TableFunctionFill(cx, &table, value, index, index + 1);
-      } else {
-        table.setNull(index);
-      }
+      MOZ_ASSERT(index < MaxTableLength);
+      static_assert(MaxTableLength < UINT32_MAX, "Invariant");
+      TableFunctionFill(cx, &table, fun, index, index + 1);
       break;
     }
     case TableKind::AnyRef: {
       RootedAnyRef tmp(cx, AnyRef::null());
       if (!BoxAnyRef(cx, fillValue, &tmp)) {
         return false;
       }
       table.setAnyRef(index, tmp);
@@ -2255,31 +2273,30 @@ bool WasmTableObject::growImpl(JSContext
   }
 
   MOZ_ASSERT(delta <= MaxTableLength);              // grow() should ensure this
   MOZ_ASSERT(oldLength <= MaxTableLength - delta);  // ditto
 
   static_assert(MaxTableLength < UINT32_MAX, "Invariant");
 
   switch (table->table().kind()) {
-    case TableKind::AnyFunction: {
-      RootedFunction value(cx);
+    case TableKind::FuncRef: {
       if (fillValue.isNull()) {
 #ifdef DEBUG
         for (uint32_t index = oldLength; index < oldLength + delta; index++) {
-          MOZ_ASSERT(table->table().getAnyFunc(index).code == nullptr);
+          MOZ_ASSERT(table->table().getFuncRef(index).code == nullptr);
         }
 #endif
-      } else if (IsWasmExportedFunction(fillValue, &value)) {
-        TableFunctionFill(cx, &table->table(), value, oldLength,
+      } else {
+        RootedFunction fun(cx);
+        if (!CheckFuncRefValue(cx, fillValue, &fun)) {
+          return false;
+        }
+        TableFunctionFill(cx, &table->table(), fun, oldLength,
                           oldLength + delta);
-      } else {
-        JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
-                                 JSMSG_WASM_BAD_TBL_GROW_INIT, "funcref");
-        return false;
       }
       break;
     }
     case TableKind::AnyRef: {
       RootedAnyRef tmp(cx, AnyRef::null());
       if (!BoxAnyRef(cx, fillValue, &tmp)) {
         return false;
       }
@@ -2348,24 +2365,24 @@ void WasmGlobalObject::trace(JSTracer* t
   WasmGlobalObject* global = reinterpret_cast<WasmGlobalObject*>(obj);
   if (global->isNewborn()) {
-    // This can happen while we're allocating the object, in which case
-    // every single slot of the object is not defined yet. In particular,
-    // there's nothing to trace yet.
+    // This can happen while we're allocating the object, in which case
+    // none of the object's slots are defined yet. In particular, there's
+    // nothing to trace yet.
     return;
   }
   switch (global->type().code()) {
+    case ValType::FuncRef:
     case ValType::AnyRef:
-      if (!global->cell()->anyref.isNull()) {
+      if (!global->cell()->ref.isNull()) {
         // TODO/AnyRef-boxing: With boxed immediates and strings, the write
         // barrier is going to have to be more complicated.
         ASSERT_ANYREF_IS_JSOBJECT;
-        TraceManuallyBarrieredEdge(trc,
-                                   global->cell()->anyref.asJSObjectAddress(),
-                                   "wasm anyref global");
+        TraceManuallyBarrieredEdge(trc, global->cell()->ref.asJSObjectAddress(),
+                                   "wasm reference-typed global");
       }
       break;
     case ValType::I32:
     case ValType::F32:
     case ValType::I64:
     case ValType::F64:
       break;
     case ValType::Ref:
@@ -2417,32 +2434,32 @@ WasmGlobalObject* WasmGlobalObject::crea
       cell->i64 = val.i64();
       break;
     case ValType::F32:
       cell->f32 = val.f32();
       break;
     case ValType::F64:
       cell->f64 = val.f64();
       break;
-    case ValType::NullRef:
-      MOZ_ASSERT(!cell->ref, "value should be null already");
-      break;
+    case ValType::FuncRef:
     case ValType::AnyRef:
-      MOZ_ASSERT(cell->anyref.isNull(), "no prebarriers needed");
-      cell->anyref = val.anyref();
-      if (!cell->anyref.isNull()) {
+      MOZ_ASSERT(cell->ref.isNull(), "no prebarriers needed");
+      cell->ref = val.ref();
+      if (!cell->ref.isNull()) {
         // TODO/AnyRef-boxing: With boxed immediates and strings, the write
         // barrier is going to have to be more complicated.
         ASSERT_ANYREF_IS_JSOBJECT;
-        JSObject::writeBarrierPost(&cell->anyref, nullptr,
-                                   cell->anyref.asJSObject());
+        JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(), nullptr,
+                                   cell->ref.asJSObject());
       }
       break;
     case ValType::Ref:
       MOZ_CRASH("Ref NYI");
+    case ValType::NullRef:
+      MOZ_CRASH("NullRef not expressible");
   }
 
   obj->initReservedSlot(TYPE_SLOT,
                         Int32Value(int32_t(val.type().bitsUnsafe())));
   obj->initReservedSlot(MUTABLE_SLOT, JS::BooleanValue(isMutable));
   obj->initReservedSlot(CELL_SLOT, PrivateValue(cell));
 
   MOZ_ASSERT(!obj->isNewborn());
@@ -2500,16 +2517,19 @@ bool WasmGlobalObject::construct(JSConte
     // initializing value.
     globalType = ValType::I64;
   } else if (StringEqualsAscii(typeLinearStr, "f32")) {
     globalType = ValType::F32;
   } else if (StringEqualsAscii(typeLinearStr, "f64")) {
     globalType = ValType::F64;
 #ifdef ENABLE_WASM_REFTYPES
   } else if (HasReftypesSupport(cx) &&
+             StringEqualsAscii(typeLinearStr, "funcref")) {
+    globalType = ValType::FuncRef;
+  } else if (HasReftypesSupport(cx) &&
              StringEqualsAscii(typeLinearStr, "anyref")) {
     globalType = ValType::AnyRef;
 #endif
   } else {
     JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_BAD_GLOBAL_TYPE);
     return false;
   }
@@ -2528,29 +2548,32 @@ bool WasmGlobalObject::construct(JSConte
       globalVal = Val(uint64_t(0));
       break;
     case ValType::F32:
       globalVal = Val(float(0.0));
       break;
     case ValType::F64:
       globalVal = Val(double(0.0));
       break;
+    case ValType::FuncRef:
+      globalVal = Val(ValType::FuncRef, AnyRef::null());
+      break;
     case ValType::AnyRef:
-      globalVal = Val(AnyRef::null());
+      globalVal = Val(ValType::AnyRef, AnyRef::null());
       break;
     case ValType::Ref:
       MOZ_CRASH("Ref NYI");
     case ValType::NullRef:
       MOZ_CRASH("NullRef not expressible");
   }
 
   // Override with non-undefined value, if provided.
   RootedValue valueVal(cx, args.get(1));
   if (!valueVal.isUndefined() ||
-      (args.length() >= 2 && globalType == ValType::AnyRef)) {
+      (args.length() >= 2 && globalType.isReference())) {
     if (!ToWebAssemblyValue(cx, globalType, valueVal, &globalVal)) {
       return false;
     }
   }
 
   WasmGlobalObject* global = WasmGlobalObject::create(cx, globalVal, isMutable);
   if (!global) {
     return false;
@@ -2565,16 +2588,17 @@ static bool IsGlobal(HandleValue v) {
 }
 
 /* static */
 bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) {
   switch (args.thisv().toObject().as<WasmGlobalObject>().type().code()) {
     case ValType::I32:
     case ValType::F32:
     case ValType::F64:
+    case ValType::FuncRef:
     case ValType::AnyRef:
       args.rval().set(args.thisv().toObject().as<WasmGlobalObject>().value(cx));
       return true;
     case ValType::I64:
       JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                JSMSG_WASM_BAD_I64_TYPE);
       return false;
     case ValType::Ref:
@@ -2622,27 +2646,28 @@ bool WasmGlobalObject::valueSetterImpl(J
       cell->i32 = val.get().i32();
       break;
     case ValType::F32:
       cell->f32 = val.get().f32();
       break;
     case ValType::F64:
       cell->f64 = val.get().f64();
       break;
+    case ValType::FuncRef:
     case ValType::AnyRef: {
-      AnyRef prevPtr = cell->anyref;
+      AnyRef prevPtr = cell->ref;
       // TODO/AnyRef-boxing: With boxed immediates and strings, the write
       // barrier is going to have to be more complicated.
       ASSERT_ANYREF_IS_JSOBJECT;
       JSObject::writeBarrierPre(prevPtr.asJSObject());
-      cell->anyref = val.get().anyref();
-      if (!cell->anyref.isNull()) {
-        JSObject::writeBarrierPost(cell->anyref.asJSObjectAddress(),
+      cell->ref = val.get().ref();
+      if (!cell->ref.isNull()) {
+        JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(),
                                    prevPtr.asJSObject(),
-                                   cell->anyref.asJSObject());
+                                   cell->ref.asJSObject());
       }
       break;
     }
     case ValType::I64:
       MOZ_CRASH("unexpected i64 when setting global's value");
     case ValType::Ref:
       MOZ_CRASH("Ref NYI");
     case ValType::NullRef:
@@ -2688,18 +2713,21 @@ void WasmGlobalObject::val(MutableHandle
       outval.set(Val(uint64_t(cell->i64)));
       return;
     case ValType::F32:
       outval.set(Val(cell->f32));
       return;
     case ValType::F64:
       outval.set(Val(cell->f64));
       return;
+    case ValType::FuncRef:
+      outval.set(Val(ValType::FuncRef, cell->ref));
+      return;
     case ValType::AnyRef:
-      outval.set(Val(cell->anyref));
+      outval.set(Val(ValType::AnyRef, cell->ref));
       return;
     case ValType::Ref:
       MOZ_CRASH("Ref NYI");
     case ValType::NullRef:
       MOZ_CRASH("NullRef not expressible");
   }
   MOZ_CRASH("unexpected Global type");
 }
--- a/js/src/wasm/WasmJS.h
+++ b/js/src/wasm/WasmJS.h
@@ -97,23 +97,24 @@ MOZ_MUST_USE bool DeserializeModule(JSCo
 
 // A WebAssembly "Exported Function" is the spec name for the JS function
 // objects created to wrap wasm functions. This predicate returns false
 // for asm.js functions which are semantically just normal JS functions
 // (even if they are implemented via wasm under the hood). The accessor
 // functions for extracting the instance and func-index of a wasm function
 // can be used for both wasm and asm.js, however.
 
-extern bool IsWasmExportedFunction(JSFunction* fun);
+bool IsWasmExportedFunction(JSFunction* fun);
+bool CheckFuncRefValue(JSContext* cx, HandleValue v, MutableHandleFunction fun);
 
-extern Instance& ExportedFunctionToInstance(JSFunction* fun);
-extern WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
-extern uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
+Instance& ExportedFunctionToInstance(JSFunction* fun);
+WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
+uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
 
-extern bool IsSharedWasmMemoryObject(JSObject* obj);
+bool IsSharedWasmMemoryObject(JSObject* obj);
 
 }  // namespace wasm
 
 // The class of the WebAssembly global namespace object.
 
 extern const Class WebAssemblyClass;
 
 JSObject* InitWebAssemblyClass(JSContext* cx, Handle<GlobalObject*> global);
@@ -171,18 +172,17 @@ class WasmGlobalObject : public NativeOb
  public:
   // For exposed globals the Cell holds the value of the global; the
   // instance's global area holds a pointer to the Cell.
   union Cell {
     int32_t i32;
     int64_t i64;
     float f32;
     double f64;
-    JSObject* ref;  // Note, this breaks an abstraction boundary
-    wasm::AnyRef anyref;
+    wasm::AnyRef ref;
     Cell() : i64(0) {}
     ~Cell() {}
   };
 
   static const unsigned RESERVED_SLOTS = 3;
   static const Class class_;
   static const JSPropertySpec properties[];
   static const JSFunctionSpec methods[];
--- a/js/src/wasm/WasmModule.cpp
+++ b/js/src/wasm/WasmModule.cpp
@@ -1223,16 +1223,17 @@ static bool MakeStructField(JSContext* c
     case ValType::F64:
       t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
                                                    Scalar::Float64);
       break;
     case ValType::Ref:
       t = GlobalObject::getOrCreateReferenceTypeDescr(
           cx, cx->global(), ReferenceType::TYPE_OBJECT);
       break;
+    case ValType::FuncRef:
     case ValType::AnyRef:
       t = GlobalObject::getOrCreateReferenceTypeDescr(
           cx, cx->global(), ReferenceType::TYPE_WASM_ANYREF);
       break;
     default:
       MOZ_CRASH("Bad field type");
   }
   MOZ_ASSERT(t != nullptr);
--- a/js/src/wasm/WasmOpIter.h
+++ b/js/src/wasm/WasmOpIter.h
@@ -41,16 +41,17 @@ class StackType {
 #ifdef DEBUG
   bool isValidCode() {
     switch (UnpackTypeCodeType(tc_)) {
       case TypeCode::I32:
       case TypeCode::I64:
       case TypeCode::F32:
       case TypeCode::F64:
       case TypeCode::AnyRef:
+      case TypeCode::FuncRef:
       case TypeCode::Ref:
       case TypeCode::NullRef:
       case TypeCode::Limit:
         return true;
       default:
         return false;
     }
   }
@@ -59,16 +60,17 @@ class StackType {
  public:
   enum Code {
     I32 = uint8_t(ValType::I32),
     I64 = uint8_t(ValType::I64),
     F32 = uint8_t(ValType::F32),
     F64 = uint8_t(ValType::F64),
 
     AnyRef = uint8_t(ValType::AnyRef),
+    FuncRef = uint8_t(ValType::FuncRef),
     Ref = uint8_t(ValType::Ref),
     NullRef = uint8_t(ValType::NullRef),
 
     TVar = uint8_t(TypeCode::Limit),
   };
 
   StackType() : tc_(InvalidPackedTypeCode()) {}
 
@@ -78,34 +80,26 @@ class StackType {
 
   explicit StackType(const ValType& t) : tc_(t.packed()) {}
 
   PackedTypeCode packed() const { return tc_; }
 
   Code code() const { return Code(UnpackTypeCodeType(tc_)); }
 
   uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
-
   bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
 
-  bool isReference() const {
-    TypeCode tc = UnpackTypeCodeType(tc_);
-    return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
-           tc == TypeCode::NullRef;
-  }
+  bool isReference() const { return IsReferenceType(tc_); }
 
   bool operator==(const StackType& that) const { return tc_ == that.tc_; }
-
   bool operator!=(const StackType& that) const { return tc_ != that.tc_; }
-
   bool operator==(Code that) const {
     MOZ_ASSERT(that != Code::Ref);
     return code() == that;
   }
-
   bool operator!=(Code that) const { return !(*this == that); }
 };
 
 static inline ValType NonTVarToValType(StackType type) {
   MOZ_ASSERT(type != StackType::TVar);
   return ValType(type.packed());
 }
 
@@ -760,16 +754,17 @@ inline bool OpIter<Policy>::readBlockTyp
   switch (uncheckedCode) {
     case uint8_t(ExprType::Void):
     case uint8_t(ExprType::I32):
     case uint8_t(ExprType::I64):
     case uint8_t(ExprType::F32):
     case uint8_t(ExprType::F64):
       known = true;
       break;
+    case uint8_t(ExprType::FuncRef):
     case uint8_t(ExprType::AnyRef):
 #ifdef ENABLE_WASM_REFTYPES
       known = true;
 #endif
       break;
     case uint8_t(ExprType::Ref):
       known = env_.gcTypesEnabled() && uncheckedRefTypeIndex < MaxTypes &&
               uncheckedRefTypeIndex < env_.types.length();
@@ -1559,17 +1554,17 @@ inline bool OpIter<Policy>::readCallIndi
   }
   if (*tableIndex >= env_.tables.length()) {
     // Special case this for improved user experience.
     if (!env_.tables.length()) {
       return fail("can't call_indirect without a table");
     }
     return fail("table index out of range for call_indirect");
   }
-  if (env_.tables[*tableIndex].kind != TableKind::AnyFunction) {
+  if (env_.tables[*tableIndex].kind != TableKind::FuncRef) {
     return fail("indirect calls must go through a table of 'funcref'");
   }
 
   if (!popWithType(ValType::I32, callee)) {
     return false;
   }
 
   if (!env_.types[*funcTypeIndex].isFuncType()) {
@@ -1950,17 +1945,17 @@ inline bool OpIter<Policy>::readMemOrTab
   } else {
     if (memOrTableIndex >= env_.tables.length()) {
       return fail("table index out of range for table.init");
     }
     *dstTableIndex = memOrTableIndex;
 
     // Element segments must carry functions exclusively and funcref is not
     // yet a subtype of anyref.
-    if (env_.tables[*dstTableIndex].kind != TableKind::AnyFunction) {
+    if (env_.tables[*dstTableIndex].kind != TableKind::FuncRef) {
       return fail("only tables of 'funcref' may have element segments");
     }
     if (*segIndex >= env_.elemSegments.length()) {
       return fail("table.init segment index out of range");
     }
   }
 
   return true;
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -307,16 +307,17 @@ static void StoreABIReturn(MacroAssemble
       masm.canonicalizeFloat(ReturnFloat32Reg);
       masm.storeFloat32(ReturnFloat32Reg, Address(argv, 0));
       break;
     case ExprType::F64:
       masm.canonicalizeDouble(ReturnDoubleReg);
       masm.storeDouble(ReturnDoubleReg, Address(argv, 0));
       break;
     case ExprType::Ref:
+    case ExprType::FuncRef:
     case ExprType::AnyRef:
       masm.storePtr(ReturnReg, Address(argv, 0));
       break;
     case ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case ExprType::Limit:
       MOZ_CRASH("Limit");
   }
@@ -895,20 +896,19 @@ static bool GenerateJitEntry(MacroAssemb
     case ExprType::F64: {
       masm.canonicalizeDouble(ReturnDoubleReg);
       GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg);
       ScratchDoubleScope fpscratch(masm);
       masm.boxDouble(ReturnDoubleReg, JSReturnOperand, fpscratch);
       break;
     }
     case ExprType::Ref:
-      MOZ_CRASH("return ref in jitentry NYI");
-      break;
+    case ExprType::FuncRef:
     case ExprType::AnyRef:
-      MOZ_CRASH("return anyref in jitentry NYI");
+      MOZ_CRASH("returning reference in jitentry NYI");
       break;
     case ExprType::I64:
       MOZ_CRASH("unexpected return type when calling from ion to wasm");
     case ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case ExprType::Limit:
       MOZ_CRASH("Limit");
   }
@@ -1146,16 +1146,17 @@ void wasm::GenerateDirectCallFromJit(Mac
       masm.canonicalizeFloat(ReturnFloat32Reg);
       GenPrintF32(DebugChannel::Function, masm, ReturnFloat32Reg);
       break;
     case wasm::ExprType::F64:
       masm.canonicalizeDouble(ReturnDoubleReg);
       GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg);
       break;
     case wasm::ExprType::Ref:
+    case wasm::ExprType::FuncRef:
     case wasm::ExprType::AnyRef:
     case wasm::ExprType::I64:
       MOZ_CRASH("unexpected return type when calling from ion to wasm");
     case wasm::ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case wasm::ExprType::Limit:
       MOZ_CRASH("Limit");
   }
@@ -1547,16 +1548,24 @@ static bool GenerateImportInterpExit(Mac
       masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
       masm.loadDouble(argv, ReturnDoubleReg);
       GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ",
                 funcImportIndex);
       GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg);
       break;
     case ExprType::Ref:
       MOZ_CRASH("No Ref support here yet");
+    case ExprType::FuncRef:
+      masm.call(SymbolicAddress::CallImport_FuncRef);
+      masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
+      masm.loadPtr(argv, ReturnReg);
+      GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ",
+                funcImportIndex);
+      GenPrintPtr(DebugChannel::Import, masm, ReturnReg);
+      break;
     case ExprType::AnyRef:
       masm.call(SymbolicAddress::CallImport_AnyRef);
       masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
       masm.loadPtr(argv, ReturnReg);
       GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ",
                 funcImportIndex);
       GenPrintPtr(DebugChannel::Import, masm, ReturnReg);
       break;
@@ -1748,20 +1757,19 @@ static bool GenerateImportJitExit(MacroA
       masm.convertValueToFloat(JSReturnOperand, ReturnFloat32Reg, &oolConvert);
       GenPrintF32(DebugChannel::Import, masm, ReturnFloat32Reg);
       break;
     case ExprType::F64:
       masm.convertValueToDouble(JSReturnOperand, ReturnDoubleReg, &oolConvert);
       GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg);
       break;
     case ExprType::Ref:
-      MOZ_CRASH("ref returned by import (jit exit) NYI");
-      break;
+    case ExprType::FuncRef:
     case ExprType::AnyRef:
-      MOZ_CRASH("anyref returned by import (jit exit) NYI");
+      MOZ_CRASH("reference returned by import (jit exit) NYI");
       break;
     case ExprType::NullRef:
       MOZ_CRASH("NullRef not expressible");
     case ExprType::Limit:
       MOZ_CRASH("Limit");
   }
 
   GenPrintf(DebugChannel::Import, masm, "\n");
--- a/js/src/wasm/WasmTable.cpp
+++ b/js/src/wasm/WasmTable.cpp
@@ -25,17 +25,17 @@
 #include "wasm/WasmInstance.h"
 #include "wasm/WasmJS.h"
 
 using namespace js;
 using namespace js::wasm;
 using mozilla::CheckedInt;
 
 Table::Table(JSContext* cx, const TableDesc& desc,
-             HandleWasmTableObject maybeObject, UniqueAnyFuncArray functions)
+             HandleWasmTableObject maybeObject, UniqueFuncRefArray functions)
     : maybeObject_(maybeObject),
       observers_(cx->zone()),
       functions_(std::move(functions)),
       kind_(desc.kind),
       length_(desc.limits.initial),
       maximum_(desc.limits.maximum) {
   MOZ_ASSERT(kind_ != TableKind::AnyRef);
 }
@@ -50,19 +50,19 @@ Table::Table(JSContext* cx, const TableD
       maximum_(desc.limits.maximum) {
   MOZ_ASSERT(kind_ == TableKind::AnyRef);
 }
 
 /* static */
 SharedTable Table::create(JSContext* cx, const TableDesc& desc,
                           HandleWasmTableObject maybeObject) {
   switch (desc.kind) {
-    case TableKind::AnyFunction:
-    case TableKind::TypedFunction: {
-      UniqueAnyFuncArray functions(
+    case TableKind::FuncRef:
+    case TableKind::AsmJS: {
+      UniqueFuncRefArray functions(
           cx->pod_calloc<FunctionTableElem>(desc.limits.initial));
       if (!functions) {
         return nullptr;
       }
       return SharedTable(
           cx->new_<Table>(cx, desc, maybeObject, std::move(functions)));
     }
     case TableKind::AnyRef: {
@@ -85,31 +85,31 @@ void Table::tracePrivate(JSTracer* trc) 
   // GC. TraceWeakEdge may sound better, but it is less efficient given that
   // we know object_ is already marked.
   if (maybeObject_) {
     MOZ_ASSERT(!gc::IsAboutToBeFinalized(&maybeObject_));
     TraceEdge(trc, &maybeObject_, "wasm table object");
   }
 
   switch (kind_) {
-    case TableKind::AnyFunction: {
+    case TableKind::FuncRef: {
       for (uint32_t i = 0; i < length_; i++) {
         if (functions_[i].tls) {
           functions_[i].tls->instance->trace(trc);
         } else {
           MOZ_ASSERT(!functions_[i].code);
         }
       }
       break;
     }
     case TableKind::AnyRef: {
       objects_.trace(trc);
       break;
     }
-    case TableKind::TypedFunction: {
+    case TableKind::AsmJS: {
 #ifdef DEBUG
       for (uint32_t i = 0; i < length_; i++) {
         MOZ_ASSERT(!functions_[i].tls);
       }
 #endif
       break;
     }
   }
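
The renames make the per-kind tracing obligations easy to read off: a
FuncRef entry keeps its owning instance alive through its tls pointer, an
AnyRef table traces its vector of JSObject pointers, and an AsmJS table
stores bare code pointers with nothing for the GC to see. A minimal model
of that dispatch, with the engine types reduced to stand-ins:

struct InstanceModel {
  void trace() {}  // stands in for Instance::trace(JSTracer*)
};

struct ElemModel {
  InstanceModel* instance = nullptr;  // null means an empty slot
};

enum class TableKindModel { FuncRef, AnyRef, AsmJS };

static void tracePrivateModel(TableKindModel kind, ElemModel* elems,
                              unsigned length) {
  switch (kind) {
    case TableKindModel::FuncRef:
      for (unsigned i = 0; i < length; i++) {
        if (elems[i].instance) {
          elems[i].instance->trace();
        }
      }
      break;
    case TableKindModel::AnyRef:
      // The real code traces objects_, a GCVector of JSObject*.
      break;
    case TableKindModel::AsmJS:
      // Bare code pointers, no tls: nothing to trace.
      break;
  }
}

int main() {
  InstanceModel inst;
  ElemModel elems[2] = {{&inst}, {nullptr}};
  tracePrivateModel(TableKindModel::FuncRef, elems, 2);
}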
@@ -130,51 +130,45 @@ void Table::trace(JSTracer* trc) {
 
 uint8_t* Table::functionBase() const {
   if (kind() == TableKind::AnyRef) {
     return nullptr;
   }
   return (uint8_t*)functions_.get();
 }
 
-const FunctionTableElem& Table::getAnyFunc(uint32_t index) const {
+const FunctionTableElem& Table::getFuncRef(uint32_t index) const {
   MOZ_ASSERT(isFunction());
   return functions_[index];
 }
 
 AnyRef Table::getAnyRef(uint32_t index) const {
   MOZ_ASSERT(!isFunction());
   // TODO/AnyRef-boxing: With boxed immediates and strings, the write barrier
   // is going to have to be more complicated.
   ASSERT_ANYREF_IS_JSOBJECT;
   return AnyRef::fromJSObject(objects_[index]);
 }
 
-const void* Table::getShortlivedAnyRefLocForCompiledCode(uint32_t index) const {
-  MOZ_ASSERT(!isFunction());
-  return const_cast<HeapPtr<JSObject*>&>(objects_[index])
-      .unsafeUnbarrieredForTracing();
-}
-
-void Table::setAnyFunc(uint32_t index, void* code, const Instance* instance) {
+void Table::setFuncRef(uint32_t index, void* code, const Instance* instance) {
   MOZ_ASSERT(isFunction());
 
   FunctionTableElem& elem = functions_[index];
   if (elem.tls) {
     JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered());
   }
 
   switch (kind_) {
-    case TableKind::AnyFunction:
+    case TableKind::FuncRef:
       elem.code = code;
       elem.tls = instance->tlsData();
       MOZ_ASSERT(elem.tls->instance->objectUnbarriered()->isTenured(),
                  "no writeBarrierPost (Table::set)");
       break;
-    case TableKind::TypedFunction:
+    case TableKind::AsmJS:
       elem.code = code;
       elem.tls = nullptr;
       break;
     case TableKind::AnyRef:
       MOZ_CRASH("Bad table type");
   }
 }
 
@@ -183,39 +177,39 @@ void Table::setAnyRef(uint32_t index, An
   // TODO/AnyRef-boxing: With boxed immediates and strings, the write barrier
   // is going to have to be more complicated.
   ASSERT_ANYREF_IS_JSOBJECT;
   objects_[index] = new_obj.asJSObject();
 }
 
 void Table::setNull(uint32_t index) {
   switch (kind_) {
-    case TableKind::AnyFunction: {
+    case TableKind::FuncRef: {
       FunctionTableElem& elem = functions_[index];
       if (elem.tls) {
         JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered());
       }
 
       elem.code = nullptr;
       elem.tls = nullptr;
       break;
     }
     case TableKind::AnyRef: {
       setAnyRef(index, AnyRef::null());
       break;
     }
-    case TableKind::TypedFunction: {
+    case TableKind::AsmJS: {
       MOZ_CRASH("Should not happen");
     }
   }
 }
 
 void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) {
   switch (kind_) {
-    case TableKind::AnyFunction: {
+    case TableKind::FuncRef: {
       FunctionTableElem& dst = functions_[dstIndex];
       if (dst.tls) {
         JSObject::writeBarrierPre(dst.tls->instance->objectUnbarriered());
       }
 
       FunctionTableElem& src = srcTable.functions_[srcIndex];
       dst.code = src.code;
       dst.tls = src.tls;
@@ -228,17 +222,17 @@ void Table::copy(const Table& srcTable, 
         MOZ_ASSERT(!dst.code);
       }
       break;
     }
     case TableKind::AnyRef: {
       setAnyRef(dstIndex, srcTable.getAnyRef(srcIndex));
       break;
     }
-    case TableKind::TypedFunction: {
+    case TableKind::AsmJS: {
       MOZ_CRASH("Bad table type");
     }
   }
 }
 
 uint32_t Table::grow(uint32_t delta, JSContext* cx) {
   // This isn't just an optimization: movingGrowable() assumes that
   // onMovingGrowTable does not fire when length == maximum.
@@ -259,17 +253,17 @@ uint32_t Table::grow(uint32_t delta, JSC
   }
 
   MOZ_ASSERT(movingGrowable());
 
   JSRuntime* rt =
       cx->runtime();  // Use JSRuntime's MallocProvider to avoid throwing.
 
   switch (kind_) {
-    case TableKind::AnyFunction: {
+    case TableKind::FuncRef: {
       // Note that realloc does not release functions_'s pointee on failure
       // which is exactly what we need here.
       FunctionTableElem* newFunctions = rt->pod_realloc<FunctionTableElem>(
           functions_.get(), length_, newLength.value());
       if (!newFunctions) {
         return -1;
       }
       Unused << functions_.release();
@@ -280,17 +274,17 @@ uint32_t Table::grow(uint32_t delta, JSC
       break;
     }
     case TableKind::AnyRef: {
       if (!objects_.resize(newLength.value())) {
         return -1;
       }
       break;
     }
-    case TableKind::TypedFunction: {
+    case TableKind::AsmJS: {
       MOZ_CRASH("Bad table type");
     }
   }
 
   length_ = newLength.value();
 
   for (InstanceSet::Range r = observers_.all(); !r.empty(); r.popFront()) {
     r.front()->instance().onMovingGrowTable(this);
--- a/js/src/wasm/WasmTable.h
+++ b/js/src/wasm/WasmTable.h
@@ -24,75 +24,73 @@
 
 namespace js {
 namespace wasm {
 
 // A Table is an indexable array of opaque values. Tables are first-class
 // stateful objects exposed to WebAssembly. asm.js also uses Tables to represent
 // its homogeneous function-pointer tables.
 //
-// A table of AnyFunction holds FunctionTableElems, which are (instance*,index)
+// A table of FuncRef holds FunctionTableElems, which are (code*, tls*)
 // pairs, where the instance must be traced.
 //
 // A table of AnyRef holds JSObject pointers, which must be traced.
 
 // TODO/AnyRef-boxing: With boxed immediates and strings, JSObject* is no longer
-// the most appropriate representation for Cell::anyref.
+// the most appropriate representation for Cell::ref.
 STATIC_ASSERT_ANYREF_IS_JSOBJECT;
 
 typedef GCVector<HeapPtr<JSObject*>, 0, SystemAllocPolicy> TableAnyRefVector;
 
 class Table : public ShareableBase<Table> {
   using InstanceSet = JS::WeakCache<GCHashSet<
       WeakHeapPtrWasmInstanceObject,
       MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>;
-  using UniqueAnyFuncArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
+  using UniqueFuncRefArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
 
   WeakHeapPtrWasmTableObject maybeObject_;
   InstanceSet observers_;
-  UniqueAnyFuncArray functions_;  // either functions_ has data
+  UniqueFuncRefArray functions_;  // either functions_ has data
   TableAnyRefVector objects_;     //   or objects_, but not both
   const TableKind kind_;
   uint32_t length_;
   const Maybe<uint32_t> maximum_;
 
   template <class>
   friend struct js::MallocProvider;
   Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
-        UniqueAnyFuncArray functions);
+        UniqueFuncRefArray functions);
   Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
         TableAnyRefVector&& objects);
 
   void tracePrivate(JSTracer* trc);
   friend class js::WasmTableObject;
 
  public:
   static RefPtr<Table> create(JSContext* cx, const TableDesc& desc,
                               HandleWasmTableObject maybeObject);
   void trace(JSTracer* trc);
 
   TableKind kind() const { return kind_; }
-  bool isTypedFunction() const { return kind_ == TableKind::TypedFunction; }
   bool isFunction() const {
-    return kind_ == TableKind::AnyFunction || kind_ == TableKind::TypedFunction;
+    return kind_ == TableKind::FuncRef || kind_ == TableKind::AsmJS;
   }
   uint32_t length() const { return length_; }
   Maybe<uint32_t> maximum() const { return maximum_; }
 
   // Only for function values.  Raw pointer to the table.
   uint8_t* functionBase() const;
 
-  // get/setAnyFunc is allowed only on table-of-funcref.
+  // get/setFuncRef is allowed only on table-of-funcref.
   // get/setAnyRef is allowed only on table-of-anyref.
   // setNull is allowed on either.
-  const FunctionTableElem& getAnyFunc(uint32_t index) const;
-  void setAnyFunc(uint32_t index, void* code, const Instance* instance);
+  const FunctionTableElem& getFuncRef(uint32_t index) const;
+  void setFuncRef(uint32_t index, void* code, const Instance* instance);
 
   AnyRef getAnyRef(uint32_t index) const;
-  const void* getShortlivedAnyRefLocForCompiledCode(uint32_t index) const;
   void setAnyRef(uint32_t index, AnyRef);
 
   void setNull(uint32_t index);
 
   // Copy entry from |srcTable| at |srcIndex| to this table at |dstIndex|.
   // Used by table.copy.
   void copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex);
 
--- a/js/src/wasm/WasmTextToBinary.cpp
+++ b/js/src/wasm/WasmTextToBinary.cpp
@@ -85,17 +85,16 @@ class WasmToken {
     EndOfFile,
     Equal,
     Error,
     Export,
     ExtraConversionOpcode,
     Field,
     Float,
     Func,
-    FuncRef,
 #ifdef ENABLE_WASM_GC
     GcFeatureOptIn,
 #endif
     GetGlobal,
     GetLocal,
     Global,
     If,
     Import,
@@ -371,17 +370,16 @@ class WasmToken {
       case EndOfFile:
       case Equal:
       case End:
       case Error:
       case Export:
       case Field:
       case Float:
       case Func:
-      case FuncRef:
 #ifdef ENABLE_WASM_GC
       case GcFeatureOptIn:
 #endif
       case Global:
       case Mutable:
       case Import:
       case Index:
       case Memory:
@@ -949,17 +947,17 @@ WasmToken WasmTokenStream::next() {
     case '9':
       return literal(begin);
 
     case 'a':
       if (consume(u"align")) {
         return WasmToken(WasmToken::Align, begin, cur_);
       }
       if (consume(u"anyfunc")) {
-        return WasmToken(WasmToken::FuncRef, begin, cur_);
+        return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
       }
       if (consume(u"anyref")) {
         return WasmToken(WasmToken::ValueType, ValType::AnyRef, begin, cur_);
       }
       if (consume(u"atomic.")) {
         if (consume(u"wake") || consume(u"notify")) {
           return WasmToken(WasmToken::Wake, ThreadOp::Wake, begin, cur_);
         }
@@ -1032,17 +1030,17 @@ WasmToken WasmTokenStream::next() {
       break;
 
     case 'f':
       if (consume(u"field")) {
         return WasmToken(WasmToken::Field, begin, cur_);
       }
 
       if (consume(u"funcref")) {
-        return WasmToken(WasmToken::FuncRef, begin, cur_);
+        return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
       }
 
       if (consume(u"func")) {
         return WasmToken(WasmToken::Func, begin, cur_);
       }
 
       if (consume(u"f32")) {
         if (!consume(u".")) {
@@ -3968,27 +3966,27 @@ static AstExpr* ParseStructSet(WasmParse
 }
 
 static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) {
   AstValType inputType;
   if (!ParseValType(c, &inputType)) {
     return nullptr;
   }
 
-  if (!inputType.isRefType()) {
+  if (!inputType.isNarrowType()) {
     c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
     return nullptr;
   }
 
   AstValType outputType;
   if (!ParseValType(c, &outputType)) {
     return nullptr;
   }
 
-  if (!outputType.isRefType()) {
+  if (!outputType.isNarrowType()) {
     c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
     return nullptr;
   }
 
   AstExpr* ptr = ParseExpr(c, inParens);
   if (!ptr) {
     return nullptr;
   }
@@ -4714,30 +4712,29 @@ static bool ParseGlobalType(WasmParseCon
     return false;
   }
 
   return true;
 }
 
 static bool ParseElemType(WasmParseContext& c, TableKind* tableKind) {
   WasmToken token;
-  if (c.ts.getIf(WasmToken::FuncRef, &token)) {
-    *tableKind = TableKind::AnyFunction;
-    return true;
-  }
+  if (c.ts.getIf(WasmToken::ValueType, &token)) {
+    if (token.valueType() == ValType::FuncRef) {
+      *tableKind = TableKind::FuncRef;
+      return true;
+    }
 #ifdef ENABLE_WASM_REFTYPES
-  if (c.ts.getIf(WasmToken::ValueType, &token) &&
-      token.valueType() == ValType::AnyRef) {
-    *tableKind = TableKind::AnyRef;
-    return true;
+    if (token.valueType() == ValType::AnyRef) {
+      *tableKind = TableKind::AnyRef;
+      return true;
+    }
+#endif
   }
   c.ts.generateError(token, "'funcref' or 'anyref' required", c.error);
-#else
-  c.ts.generateError(token, "'funcref' required", c.error);
-#endif
   return false;
 }
 
 static bool ParseTableSig(WasmParseContext& c, Limits* table,
                           TableKind* tableKind) {
   return ParseLimits(c, table, Shareable::False) && ParseElemType(c, tableKind);
 }
 
@@ -6853,18 +6850,18 @@ static bool EncodeLimits(Encoder& e, con
   }
 
   return true;
 }
 
 static bool EncodeTableLimits(Encoder& e, const Limits& limits,
                               TableKind tableKind) {
   switch (tableKind) {
-    case TableKind::AnyFunction:
-      if (!e.writeVarU32(uint32_t(TypeCode::AnyFunc))) {
+    case TableKind::FuncRef:
+      if (!e.writeVarU32(uint32_t(TypeCode::FuncRef))) {
         return false;
       }
       break;
     case TableKind::AnyRef:
       if (!e.writeVarU32(uint32_t(TypeCode::AnyRef))) {
         return false;
       }
       break;
@@ -7275,17 +7272,17 @@ static bool EncodeDataCountSection(Encod
 
 static bool EncodeElemSegment(Encoder& e, AstElemSegment& segment) {
   if (!EncodeDestinationOffsetOrFlags(e, segment.targetTable().index(),
                                       segment.offsetIfActive())) {
     return false;
   }
 
   if (segment.isPassive()) {
-    if (!e.writeFixedU8(uint8_t(TypeCode::AnyFunc))) {
+    if (!e.writeFixedU8(uint8_t(TypeCode::FuncRef))) {
       return false;
     }
   }
 
   if (!e.writeVarU32(segment.elems().length())) {
     return false;
   }
 
--- a/js/src/wasm/WasmTypes.cpp
+++ b/js/src/wasm/WasmTypes.cpp
@@ -73,38 +73,33 @@ Val::Val(const LitVal& val) {
       return;
     case ValType::I64:
       u.i64_ = val.i64();
       return;
     case ValType::F64:
       u.f64_ = val.f64();
       return;
     case ValType::Ref:
+    case ValType::FuncRef:
+    case ValType::AnyRef:
       u.ref_ = val.ref();
       return;
-    case ValType::AnyRef:
-      u.anyref_ = val.anyref();
-      return;
     case ValType::NullRef:
       break;
   }
   MOZ_CRASH();
 }
 
 void Val::trace(JSTracer* trc) {
-  if (type_.isValid()) {
-    if (type_.isRef() && u.ref_) {
-      TraceManuallyBarrieredEdge(trc, &u.ref_, "wasm ref/anyref global");
-    } else if (type_ == ValType::AnyRef && !u.anyref_.isNull()) {
-      // TODO/AnyRef-boxing: With boxed immediates and strings, the write
-      // barrier is going to have to be more complicated.
-      ASSERT_ANYREF_IS_JSOBJECT;
-      TraceManuallyBarrieredEdge(trc, u.anyref_.asJSObjectAddress(),
-                                 "wasm ref/anyref global");
-    }
+  if (type_.isValid() && type_.isReference() && !u.ref_.isNull()) {
+    // TODO/AnyRef-boxing: With boxed immediates and strings, the write
+    // barrier is going to have to be more complicated.
+    ASSERT_ANYREF_IS_JSOBJECT;
+    TraceManuallyBarrieredEdge(trc, u.ref_.asJSObjectAddress(),
+                               "wasm reference-typed global");
   }
 }
 
 void AnyRef::trace(JSTracer* trc) {
   if (value_) {
     TraceManuallyBarrieredEdge(trc, &value_, "wasm anyref referent");
   }
 }
@@ -268,16 +263,17 @@ static const unsigned sMaxTypes =
     (sTotalBits - sTagBits - sReturnBit - sLengthBits) / sTypeBits;
 
 static bool IsImmediateType(ValType vt) {
   switch (vt.code()) {
     case ValType::I32:
     case ValType::I64:
     case ValType::F32:
     case ValType::F64:
+    case ValType::FuncRef:
     case ValType::AnyRef:
       return true;
     case ValType::NullRef:
     case ValType::Ref:
       return false;
   }
   MOZ_CRASH("bad ValType");
 }
@@ -288,18 +284,20 @@ static unsigned EncodeImmediateType(ValT
     case ValType::I32:
       return 0;
     case ValType::I64:
       return 1;
     case ValType::F32:
       return 2;
     case ValType::F64:
       return 3;
+    case ValType::FuncRef:
+      return 4;
     case ValType::AnyRef:
-      return 4;
+      return 5;
     case ValType::NullRef:
     case ValType::Ref:
       break;
   }
   MOZ_CRASH("bad ValType");
 }
 
 /* static */
@@ -719,16 +717,17 @@ void DebugFrame::updateReturnJSValue() {
       cachedReturnJSValue_.setDouble(JS::CanonicalizeNaN(resultF32_));
       break;
     case ExprType::F64:
       cachedReturnJSValue_.setDouble(JS::CanonicalizeNaN(resultF64_));
       break;
     case ExprType::Ref:
       cachedReturnJSValue_ = ObjectOrNullValue((JSObject*)resultRef_);
       break;
+    case ExprType::FuncRef:
     case ExprType::AnyRef:
       cachedReturnJSValue_ = UnboxAnyRef(resultAnyRef_);
       break;
     default:
       MOZ_CRASH("result type");
   }
 }
 
--- a/js/src/wasm/WasmTypes.h
+++ b/js/src/wasm/WasmTypes.h
@@ -250,16 +250,22 @@ static inline TypeCode UnpackTypeCodeTyp
   return TypeCode(uint32_t(ptc) & 255);
 }
 
 static inline uint32_t UnpackTypeCodeIndex(PackedTypeCode ptc) {
   MOZ_ASSERT(UnpackTypeCodeType(ptc) == TypeCode::Ref);
   return uint32_t(ptc) >> 8;
 }
 
+static inline bool IsReferenceType(PackedTypeCode ptc) {
+  TypeCode tc = UnpackTypeCodeType(ptc);
+  return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
+         tc == TypeCode::FuncRef || tc == TypeCode::NullRef;
+}
+
 // The ExprType represents the type of a WebAssembly expression or return value
 // and may either be a ValType or void.
 //
 // (Soon, expression types will be generalized to a list of ValType and this
 // class will go away, replaced, wherever it is used, by a varU32 + list of
 // ValType.)
 
 class ValType;
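
IsReferenceType is now the single definition that ValType, ExprType, and
StackType delegate their isReference() to, replacing three copies of the
same TypeCode comparison and adding FuncRef in one place. Modeled in
isolation (the numeric code values are illustrative, not the tree's):

#include <cstdint>

// Low 8 bits of a packed code carry the TypeCode, as in UnpackTypeCodeType.
enum class TC : uint8_t {
  I32 = 0x7f,  // illustrative values only
  FuncRef = 0x70,
  AnyRef = 0x6f,
  NullRef = 0x6e,
  Ref = 0x6d,
};
using Packed = uint32_t;  // (index << 8) | code

static TC unpackType(Packed ptc) { return TC(ptc & 255); }

static bool isReferenceType(Packed ptc) {
  TC tc = unpackType(ptc);
  return tc == TC::Ref || tc == TC::AnyRef || tc == TC::FuncRef ||
         tc == TC::NullRef;
}

int main() {
  Packed funcref = uint32_t(TC::FuncRef);   // index bits zero
  return isReferenceType(funcref) ? 0 : 1;  // funcref is a reference type
}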
@@ -270,16 +276,17 @@ class ExprType {
 #ifdef DEBUG
   bool isValidCode() {
     switch (UnpackTypeCodeType(tc_)) {
       case TypeCode::I32:
       case TypeCode::I64:
       case TypeCode::F32:
       case TypeCode::F64:
       case TypeCode::AnyRef:
+      case TypeCode::FuncRef:
       case TypeCode::NullRef:
       case TypeCode::Ref:
       case TypeCode::BlockVoid:
       case TypeCode::Limit:
         return true;
       default:
         return false;
     }
@@ -290,16 +297,17 @@ class ExprType {
   enum Code {
     Void = uint8_t(TypeCode::BlockVoid),
 
     I32 = uint8_t(TypeCode::I32),
     I64 = uint8_t(TypeCode::I64),
     F32 = uint8_t(TypeCode::F32),
     F64 = uint8_t(TypeCode::F64),
     AnyRef = uint8_t(TypeCode::AnyRef),
+    FuncRef = uint8_t(TypeCode::FuncRef),
     NullRef = uint8_t(TypeCode::NullRef),
     Ref = uint8_t(TypeCode::Ref),
 
     Limit = uint8_t(TypeCode::Limit)
   };
 
   ExprType() : tc_() {}
 
@@ -316,42 +324,33 @@ class ExprType {
 
   explicit ExprType(PackedTypeCode ptc) : tc_(ptc) {
     MOZ_ASSERT(isValidCode());
   }
 
   explicit inline ExprType(const ValType& t);
 
   PackedTypeCode packed() const { return tc_; }
-
   PackedTypeCode* packedPtr() { return &tc_; }
 
   Code code() const { return Code(UnpackTypeCodeType(tc_)); }
 
+  bool isValid() const { return IsValid(tc_); }
+
   uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
-
-  bool isValid() const { return IsValid(tc_); }
-
   bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
 
-  bool isReference() const {
-    TypeCode tc = UnpackTypeCodeType(tc_);
-    return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
-           tc == TypeCode::NullRef;
-  }
+  bool isReference() const { return IsReferenceType(tc_); }
 
   bool operator==(const ExprType& that) const { return tc_ == that.tc_; }
-
   bool operator!=(const ExprType& that) const { return tc_ != that.tc_; }
-
   bool operator==(Code that) const {
     MOZ_ASSERT(that != Code::Ref);
     return code() == that;
   }
-
   bool operator!=(Code that) const { return !(*this == that); }
 };
 
 // The ValType represents the storage type of a WebAssembly location, whether
 // parameter, local, or global.
 
 class ValType {
   PackedTypeCode tc_;
@@ -359,16 +358,17 @@ class ValType {
 #ifdef DEBUG
   bool isValidCode() {
     switch (UnpackTypeCodeType(tc_)) {
       case TypeCode::I32:
       case TypeCode::I64:
       case TypeCode::F32:
       case TypeCode::F64:
       case TypeCode::AnyRef:
+      case TypeCode::FuncRef:
       case TypeCode::NullRef:
       case TypeCode::Ref:
         return true;
       default:
         return false;
     }
   }
 #endif
@@ -376,16 +376,17 @@ class ValType {
  public:
   enum Code {
     I32 = uint8_t(TypeCode::I32),
     I64 = uint8_t(TypeCode::I64),
     F32 = uint8_t(TypeCode::F32),
     F64 = uint8_t(TypeCode::F64),
 
     AnyRef = uint8_t(TypeCode::AnyRef),
+    FuncRef = uint8_t(TypeCode::FuncRef),
     NullRef = uint8_t(TypeCode::NullRef),
     Ref = uint8_t(TypeCode::Ref),
   };
 
   ValType() : tc_(InvalidPackedTypeCode()) {}
 
   MOZ_IMPLICIT ValType(Code c) : tc_(PackTypeCode(TypeCode(c))) {
     MOZ_ASSERT(isValidCode());
@@ -426,37 +427,29 @@ class ValType {
   }
 
   PackedTypeCode packed() const { return tc_; }
 
   uint32_t bitsUnsafe() const { return PackedTypeCodeToBits(tc_); }
 
   Code code() const { return Code(UnpackTypeCodeType(tc_)); }
 
+  bool isValid() const { return IsValid(tc_); }
+
   uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
-
-  bool isValid() const { return IsValid(tc_); }
-
   bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
 
-  bool isReference() const {
-    TypeCode tc = UnpackTypeCodeType(tc_);
-    return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
-           tc == TypeCode::NullRef;
-  }
+  bool isReference() const { return IsReferenceType(tc_); }
 
   bool operator==(const ValType& that) const { return tc_ == that.tc_; }
-
   bool operator!=(const ValType& that) const { return tc_ != that.tc_; }
-
   bool operator==(Code that) const {
     MOZ_ASSERT(that != Code::Ref);
     return code() == that;
   }
-
   bool operator!=(Code that) const { return !(*this == that); }
 };
 
 // The dominant use of this data type is for locals and args, and profiling
 // with ZenGarden and Tanks suggests an initial size of 16 minimises heap
 // allocation, both in terms of blocks and bytes.
 typedef Vector<ValType, 16, SystemAllocPolicy> ValTypeVector;
 
@@ -466,16 +459,17 @@ static inline unsigned SizeOf(ValType vt
   switch (vt.code()) {
     case ValType::I32:
     case ValType::F32:
       return 4;
     case ValType::I64:
     case ValType::F64:
       return 8;
     case ValType::AnyRef:
+    case ValType::FuncRef:
     case ValType::NullRef:
     case ValType::Ref:
       return sizeof(intptr_t);
   }
   MOZ_CRASH("Invalid ValType");
 }
 
 static inline jit::MIRType ToMIRType(ValType vt) {
@@ -485,16 +479,17 @@ static inline jit::MIRType ToMIRType(Val
     case ValType::I64:
       return jit::MIRType::Int64;
     case ValType::F32:
       return jit::MIRType::Float32;
     case ValType::F64:
       return jit::MIRType::Double;
     case ValType::Ref:
     case ValType::AnyRef:
+    case ValType::FuncRef:
     case ValType::NullRef:
       return jit::MIRType::RefOrNull;
   }
   MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("bad type");
 }
 
 static inline bool IsNumberType(ValType vt) { return !vt.isReference(); }
 
@@ -522,16 +517,18 @@ static inline const char* ToCString(Expr
     case ExprType::I64:
       return "i64";
     case ExprType::F32:
       return "f32";
     case ExprType::F64:
       return "f64";
     case ExprType::AnyRef:
       return "anyref";
+    case ExprType::FuncRef:
+      return "funcref";
     case ExprType::NullRef:
       return "nullref";
     case ExprType::Ref:
       return "ref";
     case ExprType::Limit:;
   }
   MOZ_CRASH("bad expression type");
 }
@@ -566,21 +563,26 @@ static inline const char* ToCString(ValT
 // For version 0, we simply equate AnyRef and JSObject* (this means that there
 // are technically no tags at all yet).  We use a simple boxing scheme that
 // wraps a JS value that is not already JSObject in a distinguishable JSObject
 // that holds the value, see WasmTypes.cpp for details.
 
 class AnyRef {
   JSObject* value_;
 
+  explicit AnyRef() : value_((JSObject*)-1) {}
   explicit AnyRef(JSObject* p) : value_(p) {
     MOZ_ASSERT(((uintptr_t)p & 0x03) == 0);
   }
 
  public:
+  // An invalid AnyRef cannot arise naturally from wasm and so can be used as
+  // a sentinel value to indicate failure from an AnyRef-returning function.
+  static AnyRef invalid() { return AnyRef(); }
+
   // Given a void* that comes from compiled wasm code, turn it into AnyRef.
   static AnyRef fromCompiledCode(void* p) { return AnyRef((JSObject*)p); }
 
   // Given a JSObject* that comes from JS, turn it into AnyRef.
   static AnyRef fromJSObject(JSObject* p) { return AnyRef(p); }
 
   // Generate an AnyRef null pointer.
   static AnyRef null() { return AnyRef(nullptr); }
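Note the sentinel representation: invalid() stores (JSObject*)-1, a value the aligned-pointer assertion in the main constructor would reject, so it can never collide with a real reference. A hedged sketch of the intended calling convention for fallible AnyRef-returning builtins (the function here is hypothetical):

  // Hypothetical builtin: on failure it reports an error to JS and then
  // returns the invalid sentinel, which the caller checks for (see
  // FailureMode::FailOnInvalidRef later in this patch).
  AnyRef LookupSomething(bool ok) {
    if (!ok) {
      return AnyRef::invalid();  // error already reported
    }
    return AnyRef::null();
  }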
@@ -697,40 +699,33 @@ class LitVal {
  protected:
   ValType type_;
   union U {
     U() : i32_(0) {}
     uint32_t i32_;
     uint64_t i64_;
     float f32_;
     double f64_;
-    JSObject* ref_;  // Note, this breaks an abstraction boundary
-    AnyRef anyref_;
+    AnyRef ref_;
   } u;
 
  public:
   LitVal() : type_(), u{} {}
 
   explicit LitVal(uint32_t i32) : type_(ValType::I32) { u.i32_ = i32; }
   explicit LitVal(uint64_t i64) : type_(ValType::I64) { u.i64_ = i64; }
 
   explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; }
   explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; }
 
-  explicit LitVal(AnyRef any) : type_(ValType::AnyRef) {
+  explicit LitVal(ValType type, AnyRef any) : type_(type) {
+    MOZ_ASSERT(type.isReference());
     MOZ_ASSERT(any.isNull(),
                "use Val for non-nullptr ref types to get tracing");
-    u.anyref_ = any;
-  }
-
-  explicit LitVal(ValType refType, JSObject* ref) : type_(refType) {
-    MOZ_ASSERT(refType.isRef());
-    MOZ_ASSERT(ref == nullptr,
-               "use Val for non-nullptr ref types to get tracing");
-    u.ref_ = ref;
+    u.ref_ = any;
   }
 
   ValType type() const { return type_; }
   static constexpr size_t sizeofLargestValue() { return sizeof(u); }
 
   uint32_t i32() const {
     MOZ_ASSERT(type_ == ValType::I32);
     return u.i32_;
@@ -742,42 +737,38 @@ class LitVal {
   const float& f32() const {
     MOZ_ASSERT(type_ == ValType::F32);
     return u.f32_;
   }
   const double& f64() const {
     MOZ_ASSERT(type_ == ValType::F64);
     return u.f64_;
   }
-  JSObject* ref() const {
-    MOZ_ASSERT(type_.isRef());
+  AnyRef ref() const {
+    MOZ_ASSERT(type_.isReference());
     return u.ref_;
   }
-  AnyRef anyref() const {
-    MOZ_ASSERT(type_ == ValType::AnyRef);
-    return u.anyref_;
-  }
 };
 
 // A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals
 // must be stored in Rooteds so that their trace() methods are called during
 // stack marking. Vals do not implement barriers and thus may not be stored on
 // the heap.
 
 class MOZ_NON_PARAM Val : public LitVal {
  public:
   Val() : LitVal() {}
   explicit Val(const LitVal& val);
   explicit Val(uint32_t i32) : LitVal(i32) {}
   explicit Val(uint64_t i64) : LitVal(i64) {}
   explicit Val(float f32) : LitVal(f32) {}
   explicit Val(double f64) : LitVal(f64) {}
-  explicit Val(AnyRef val) : LitVal(AnyRef::null()) { u.anyref_ = val; }
-  explicit Val(ValType type, JSObject* obj) : LitVal(type, (JSObject*)nullptr) {
-    u.ref_ = obj;
+  explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) {
+    MOZ_ASSERT(type.isReference());
+    u.ref_ = val;
   }
   void trace(JSTracer* trc);
 };
 
 typedef Rooted<Val> RootedVal;
 typedef Handle<Val> HandleVal;
 typedef MutableHandle<Val> MutableHandleVal;
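To make the new division of labour concrete: LitVal is untraced and therefore restricted to null references, while Val must be rooted so its trace() runs during stack marking. A minimal usage sketch (cx and obj are assumed to exist in the surrounding code):

  // LitVal: only a null reference is permitted for reference types.
  LitVal nullRef(ValType::FuncRef, AnyRef::null());

  // Val: may carry a live object, but must live in a Rooted so the GC
  // can find it via trace().
  RootedVal live(cx, Val(ValType::AnyRef, AnyRef::fromJSObject(obj)));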
 
@@ -1858,16 +1849,17 @@ enum class SymbolicAddress {
   HandleDebugTrap,
   HandleThrow,
   HandleTrap,
   ReportInt64JSCall,
   CallImport_Void,
   CallImport_I32,
   CallImport_I64,
   CallImport_F64,
+  CallImport_FuncRef,
   CallImport_AnyRef,
   CoerceInPlace_ToInt32,
   CoerceInPlace_ToNumber,
   CoerceInPlace_JitEntry,
   DivI64,
   UDivI64,
   ModI64,
   UModI64,
@@ -1908,31 +1900,45 @@ enum class SymbolicAddress {
   PrintPtr,
   PrintF32,
   PrintF64,
   PrintText,
 #endif
   Limit
 };
 
+// The FailureMode indicates whether, immediately after a call to a builtin
+// returns, the return value should be checked against an error condition
+// (and if so, which one).  A match signals that the C++ callee has already
+// reported an error, so wasm must execute wasmTrap(Trap::ThrowReported).
+
+enum class FailureMode : uint8_t {
+  Infallible,
+  FailOnNegI32,
+  FailOnNullPtr,
+  FailOnInvalidRef
+};
+
 // SymbolicAddressSignature carries type information for a function referred
 // to by a SymbolicAddress.  In order that |argTypes| can be written out as a
 // static initialiser, it has to have fixed length.  At present
 // SymbolicAddressSignature is used to describe functions with at most 6 arguments,
 // so |argTypes| has 7 entries in order to allow the last value to be
 // MIRType::None, in the hope of catching any accidental overruns of the
 // defined section of the array.
 
 static constexpr size_t SymbolicAddressSignatureMaxArgs = 6;
 
 struct SymbolicAddressSignature {
   // The SymbolicAddress that is described.
   const SymbolicAddress identity;
   // The return type, or MIRType::None to denote 'void'.
   const jit::MIRType retType;
+  // The failure mode, which is checked by masm.wasmCallBuiltinInstanceMethod.
+  const FailureMode failureMode;
   // The number of arguments, 0 .. SymbolicAddressSignatureMaxArgs only.
   const uint8_t numArgs;
   // The argument types; SymbolicAddressSignatureMaxArgs + 1 guard, which
   // should be MIRType::None.
   const jit::MIRType argTypes[SymbolicAddressSignatureMaxArgs + 1];
 };
 
 // The 16 in this assertion is derived as follows: SymbolicAddress is probably
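For concreteness, a hypothetical entry showing how failureMode and the MIRType::None guard slot fit together in a static initialiser (the identity and argument types here are illustrative, not the real CallImport_FuncRef signature):

  static const SymbolicAddressSignature SASigExample = {
      SymbolicAddress::CallImport_FuncRef,  // identity
      jit::MIRType::RefOrNull,              // retType
      FailureMode::FailOnInvalidRef,        // checked after the call returns
      2,                                    // numArgs
      {jit::MIRType::Pointer, jit::MIRType::Int32,  // the two real args
       jit::MIRType::None, jit::MIRType::None,      // unused slots ...
       jit::MIRType::None, jit::MIRType::None,
       jit::MIRType::None}                          // ... incl. the guard
  };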
@@ -1961,20 +1967,23 @@ struct Limits {
 
   Limits() = default;
   explicit Limits(uint32_t initial, const Maybe<uint32_t>& maximum = Nothing(),
                   Shareable shared = Shareable::False)
       : initial(initial), maximum(maximum), shared(shared) {}
 };
 
 // TableDesc describes a table as well as the offset of the table's base pointer
-// in global memory. Currently, wasm only has "any function" and asm.js only
-// "typed function".
-
-enum class TableKind { AnyFunction, AnyRef, TypedFunction };
+// in global memory. The TableKind determines the representation:
+//  - AnyRef: a wasm anyref word (wasm::AnyRef)
+//  - FuncRef: a two-word FunctionTableElem (wasm indirect call ABI)
+//  - AsmJS: a two-word FunctionTableElem (asm.js ABI)
+// Eventually there should be a single unified AnyRef representation.
+
+enum class TableKind { AnyRef, FuncRef, AsmJS };
 
 struct TableDesc {
   TableKind kind;
   bool importedOrExported;
   uint32_t globalDataOffset;
   Limits limits;
 
   TableDesc() = default;
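A hedged sketch of what the representation notes above imply for per-element storage (this helper is illustrative, not part of the patch):

  // Element size by representation: AnyRef tables hold one word per entry;
  // FuncRef/AsmJS tables hold a two-word FunctionTableElem (code + instance).
  static size_t ElemSize(TableKind kind) {
    switch (kind) {
      case TableKind::AnyRef:  return sizeof(void*);
      case TableKind::FuncRef:
      case TableKind::AsmJS:   return 2 * sizeof(void*);
    }
    MOZ_CRASH("bad TableKind");
  }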
@@ -2101,18 +2110,18 @@ struct TableTls {
   // Length of the table in number of elements (not bytes).
   uint32_t length;
 
   // Pointer to the array of elements (which can have various representations).
   // For tables of anyref this is null.
   void* functionBase;
 };
 
-// Table elements for TableKind::AnyFunctions carry both the code pointer and an
-// instance pointer.
+// A table element for TableKind::FuncRef carries both the code pointer and
+// an instance pointer.
 
 struct FunctionTableElem {
   // The code to call when calling this element. The table ABI is the system
   // ABI with the additional ABI requirements that:
   //  - WasmTlsReg and any pinned registers have been loaded appropriately
   //  - if this is a heterogeneous table that requires a signature check,
   //    WasmTableCallSigReg holds the signature id.
   void* code;
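The struct is truncated by the hunk here, but per the comment above it pairs the code pointer with an instance pointer. A self-contained sketch of the two-word claim (the second field's real name is not shown in this hunk, so it is hypothetical):

  struct FunctionTableElemSketch {
    void* code;      // call target, per the table ABI notes above
    void* instance;  // hypothetical name for the paired instance pointer
  };
  static_assert(sizeof(FunctionTableElemSketch) == 2 * sizeof(void*),
                "FuncRef/AsmJS table elements occupy two words");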
--- a/js/src/wasm/WasmValidate.cpp
+++ b/js/src/wasm/WasmValidate.cpp
@@ -1311,16 +1311,17 @@ static bool DecodeStructType(Decoder& d,
         offset = layout.addScalar(Scalar::Float32);
         break;
       case ValType::F64:
         offset = layout.addScalar(Scalar::Float64);
         break;
       case ValType::Ref:
         offset = layout.addReference(ReferenceType::TYPE_OBJECT);
         break;
+      case ValType::FuncRef:
       case ValType::AnyRef:
         offset = layout.addReference(ReferenceType::TYPE_WASM_ANYREF);
         break;
       default:
         MOZ_CRASH("Unknown type");
     }
     if (!offset.isValid()) {
       return d.fail("Object too large");
@@ -1557,18 +1558,18 @@ static bool DecodeLimits(Decoder& d, Lim
 static bool DecodeTableTypeAndLimits(Decoder& d, bool gcTypesEnabled,
                                      TableDescVector* tables) {
   uint8_t elementType;
   if (!d.readFixedU8(&elementType)) {
     return d.fail("expected table element type");
   }
 
   TableKind tableKind;
-  if (elementType == uint8_t(TypeCode::AnyFunc)) {
-    tableKind = TableKind::AnyFunction;
+  if (elementType == uint8_t(TypeCode::FuncRef)) {
+    tableKind = TableKind::FuncRef;
 #ifdef ENABLE_WASM_REFTYPES
   } else if (elementType == uint8_t(TypeCode::AnyRef)) {
     tableKind = TableKind::AnyRef;
 #endif
   } else {
 #ifdef ENABLE_WASM_REFTYPES
     return d.fail("expected 'funcref' or 'anyref' element type");
 #else
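The element-type byte matched above is the standard wasm type-code encoding; under the usual assumption that TypeCode::FuncRef keeps funcref's encoding 0x70 (formerly 'anyfunc'), with anyref at 0x6F per the reference-types proposal, the decode reduces to the following sketch:

  // Assumed encodings: funcref = 0x70, anyref = 0x6F.
  static bool DecodeTableKind(uint8_t elementType, TableKind* kind) {
    switch (elementType) {
      case 0x70: *kind = TableKind::FuncRef; return true;
  #ifdef ENABLE_WASM_REFTYPES
      case 0x6F: *kind = TableKind::AnyRef;  return true;
  #endif
      default:   return false;  // caller emits the validation error
    }
  }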
@@ -1597,16 +1598,17 @@ static bool DecodeTableTypeAndLimits(Dec
 }
 
 static bool GlobalIsJSCompatible(Decoder& d, ValType type, bool isMutable) {
   switch (type.code()) {
     case ValType::I32:
     case ValType::F32:
     case ValType::F64:
     case ValType::I64:
+    case ValType::FuncRef:
     case ValType::AnyRef:
       break;
 #ifdef WASM_PRIVATE_REFTYPES
     case ValType::Ref:
       return d.fail("cannot expose reference type");
 #endif
     default:
       return d.fail("unexpected variable type in global import/export");
@@ -1932,24 +1934,18 @@ static bool DecodeInitializerExpression(
       *init = InitExpr(LitVal(f64));
       break;
     }
     case uint16_t(Op::RefNull): {
       if (!expected.isReference()) {
         return d.fail(
             "type mismatch: initializer type and expected type don't match");
       }
-      if (expected == ValType::AnyRef) {
-        *init = InitExpr(LitVal(AnyRef::null()));
-      } else {
-        if (!env->gcTypesEnabled()) {
-          return d.fail("unexpected initializer expression");
-        }
-        *init = InitExpr(LitVal(expected, nullptr));
-      }
+      MOZ_ASSERT_IF(expected.isRef(), env->gcTypesEnabled());
+      *init = InitExpr(LitVal(expected, AnyRef::null()));
       break;
     }
     case uint16_t(Op::GetGlobal): {
       uint32_t i;
       const GlobalDescVector& globals = env->globals;
       if (!d.readVarU32(&i)) {
         return d.fail(
             "failed to read global.get index in initializer expression");
@@ -2266,17 +2262,17 @@ static bool DecodeElemSection(Decoder& d
         tableIndex >= env->tables.length()) {
       return d.fail("table index out of range for element segment");
     }
     if (initializerKind == InitializerKind::Passive) {
       // Too many bugs result from keeping this value zero.  For passive
      // segments, there really is no table index, and we should never
       // touch the field.
       tableIndex = (uint32_t)-1;
-    } else if (env->tables[tableIndex].kind != TableKind::AnyFunction) {
+    } else if (env->tables[tableIndex].kind != TableKind::FuncRef) {
       return d.fail("only tables of 'funcref' may have element segments");
     }
 
     seg->tableIndex = tableIndex;
 
     switch (initializerKind) {
       case InitializerKind::Active:
       case InitializerKind::ActiveWithIndex: {
@@ -2287,17 +2283,17 @@ static bool DecodeElemSection(Decoder& d
         seg->offsetIfActive.emplace(offset);
         break;
       }
       case InitializerKind::Passive: {
         uint8_t form;
         if (!d.readFixedU8(&form)) {
           return d.fail("expected type form");
         }
-        if (form != uint8_t(TypeCode::AnyFunc)) {
+        if (form != uint8_t(TypeCode::FuncRef)) {
           return d.fail(
               "passive segments can only contain function references");
         }
         break;
       }
     }
 
     uint32_t numElems;
--- a/js/src/wasm/WasmValidate.h
+++ b/js/src/wasm/WasmValidate.h
@@ -606,16 +606,17 @@ class Decoder {
     switch (code) {
       case uint8_t(ValType::I32):
       case uint8_t(ValType::F32):
       case uint8_t(ValType::F64):
       case uint8_t(ValType::I64):
         *type = ValType::Code(code);
         return true;
 #ifdef ENABLE_WASM_REFTYPES
+      case uint8_t(ValType::FuncRef):
       case uint8_t(ValType::AnyRef):
         *type = ValType::Code(code);
         return true;
 #  ifdef ENABLE_WASM_GC
       case uint8_t(ValType::Ref): {
         if (!gcTypesEnabled) {
           return fail("(ref T) types not enabled");
         }
--- a/security/nss/TAG-INFO
+++ b/security/nss/TAG-INFO
@@ -1,1 +1,1 @@
-NSS_3_44_BETA3
+NSS_3_44_RTM
--- a/security/nss/coreconf/coreconf.dep
+++ b/security/nss/coreconf/coreconf.dep
@@ -5,9 +5,8 @@
 
 /*
  * A dummy header file that is a dependency for all the object files.
  * Used to force a full recompilation of NSS in Mozilla's Tinderbox
  * depend builds.  See comments in rules.mk.
  */
 
 #error "Do not include this header file."
-
--- a/security/nss/lib/nss/nss.h
+++ b/security/nss/lib/nss/nss.h
@@ -17,22 +17,22 @@
 
 /*
  * NSS's major version, minor version, patch level, build number, and whether
  * this is a beta release.
  *
  * The format of the version string should be
  *     "<major version>.<minor version>[.<patch level>[.<build number>]][ <ECC>][ <Beta>]"
  */
-#define NSS_VERSION "3.44" _NSS_CUSTOMIZED " Beta"
+#define NSS_VERSION "3.44" _NSS_CUSTOMIZED
 #define NSS_VMAJOR 3
 #define NSS_VMINOR 44
 #define NSS_VPATCH 0
 #define NSS_VBUILD 0
-#define NSS_BETA PR_TRUE
+#define NSS_BETA PR_FALSE
 
 #ifndef RC_INVOKED
 
 #include "seccomon.h"
 
 typedef struct NSSInitParametersStr NSSInitParameters;
 
 /*
--- a/security/nss/lib/softoken/softkver.h
+++ b/security/nss/lib/softoken/softkver.h
@@ -12,16 +12,16 @@
 
 /*
  * Softoken's major version, minor version, patch level, build number,
  * and whether this is a beta release.
  *
  * The format of the version string should be
  *     "<major version>.<minor version>[.<patch level>[.<build number>]][ <ECC>][ <Beta>]"
  */
-#define SOFTOKEN_VERSION "3.44" SOFTOKEN_ECC_STRING " Beta"
+#define SOFTOKEN_VERSION "3.44" SOFTOKEN_ECC_STRING
 #define SOFTOKEN_VMAJOR 3
 #define SOFTOKEN_VMINOR 44
 #define SOFTOKEN_VPATCH 0
 #define SOFTOKEN_VBUILD 0
-#define SOFTOKEN_BETA PR_TRUE
+#define SOFTOKEN_BETA PR_FALSE
 
 #endif /* _SOFTKVER_H_ */
--- a/security/nss/lib/util/nssutil.h
+++ b/security/nss/lib/util/nssutil.h
@@ -14,22 +14,22 @@
 
 /*
  * NSS utilities's major version, minor version, patch level, build number,
  * and whether this is a beta release.
  *
  * The format of the version string should be
  *     "<major version>.<minor version>[.<patch level>[.<build number>]][ <Beta>]"
  */
-#define NSSUTIL_VERSION "3.44 Beta"
+#define NSSUTIL_VERSION "3.44"
 #define NSSUTIL_VMAJOR 3
 #define NSSUTIL_VMINOR 44
 #define NSSUTIL_VPATCH 0
 #define NSSUTIL_VBUILD 0
-#define NSSUTIL_BETA PR_TRUE
+#define NSSUTIL_BETA PR_FALSE
 
 SEC_BEGIN_PROTOS
 
 /*
  * Returns a const string of the UTIL library version.
  */
 extern const char *NSSUTIL_GetVersion(void);