Bug 1360390: wasm: Preserve the scratch register in builtin thunks if it's non-volatile; r=luke
authorBenjamin Bouvier <benj@benj.me>
Thu, 04 May 2017 13:07:52 +0200
changeset 572800 8d3adabc1d0b725872c79dcd8d7d125a32c32d4a
parent 572799 40fcf8d32f3ad5894bb42e8adcc0bfc43f8952af
child 572801 558841792eabd8a0c58d83272746b64b6171826d
push id57195
push userbmo:rbarker@mozilla.com
push dateThu, 04 May 2017 20:08:56 +0000
reviewersluke
bugs1360390
milestone55.0a1
Bug 1360390: wasm: Preserve the scratch register in builtin thunks if it's non-volatile; r=luke Callable prologues/epilogues use the ABINonArgReg0 register as a scratch register, which is non-volatile (callee-saved) on ARM. Native callers therefore expect it to be preserved, but the thunk clobbers it without restoring it. Instead, just preserve it manually.
js/src/jit-test/tests/asm.js/testBug1360390.js
js/src/wasm/WasmFrameIterator.cpp
js/src/wasm/WasmFrameIterator.h
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/asm.js/testBug1360390.js
@@ -0,0 +1,13 @@
+load(libdir + "asm.js");
+
+var code = `
+    "use asm";
+    var ff = foreign.ff;
+    function f(x) {
+        x = +x
+        return ~~x + (ff(3 ^ 9 / 7), 1) & 1;
+    }
+    return f;
+`;
+
+assertEq(asmLink(asmCompile("b", "foreign", code), 0, { ff: decodeURIComponent })(Infinity), 1);
--- a/js/src/wasm/WasmFrameIterator.cpp
+++ b/js/src/wasm/WasmFrameIterator.cpp
@@ -313,67 +313,84 @@ GenerateCallablePrologue(MacroAssembler&
     // instructions from entry. To save space, we make these offsets static
     // constants and assert that they match the actual codegen below. On ARM,
     // this requires AutoForbidPools to prevent a constant pool from being
     // randomly inserted between two instructions.
     {
 #if defined(JS_CODEGEN_ARM)
         AutoForbidPools afp(&masm, /* number of instructions in scope = */ 8);
 #endif
-
         *entry = masm.currentOffset();
 
         PushRetAddr(masm, *entry);
         MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
         masm.push(WasmTlsReg);
         MOZ_ASSERT_IF(!masm.oom(), PushedTLS == masm.currentOffset() - *entry);
         masm.push(FramePointer);
         MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
         masm.moveStackPtrTo(FramePointer);
         MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
     }
 
     if (!reason.isNone()) {
         Register scratch = ABINonArgReg0;
+
+        // Native callers expect the native ABI, which assumes that
+        // callee-saved (non-volatile) registers are preserved. Explicitly
+        // preserve the scratch register here when it is non-volatile.
+        if (reason.isNative() && !scratch.volatile_())
+            masm.Push(scratch);
+
         masm.loadWasmActivationFromTls(scratch);
         masm.wasmAssertNonExitInvariants(scratch);
         Address exitReason(scratch, WasmActivation::offsetOfExitReason());
         masm.store32(Imm32(reason.raw()), exitReason);
         Address exitFP(scratch, WasmActivation::offsetOfExitFP());
         masm.storePtr(FramePointer, exitFP);
+
+        if (reason.isNative() && !scratch.volatile_())
+            masm.Pop(scratch);
     }
 
     if (framePushed)
         masm.subFromStackPtr(Imm32(framePushed));
 }
 
 static void
 GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
                          uint32_t* ret)
 {
     if (framePushed)
         masm.addToStackPtr(Imm32(framePushed));
 
     if (!reason.isNone()) {
         Register scratch = ABINonArgReturnReg0;
+
+        // See comment in GenerateCallablePrologue.
+        if (reason.isNative() && !scratch.volatile_())
+            masm.Push(scratch);
+
         masm.loadWasmActivationFromTls(scratch);
         Address exitFP(scratch, WasmActivation::offsetOfExitFP());
         masm.storePtr(ImmWord(0), exitFP);
         Address exitReason(scratch, WasmActivation::offsetOfExitReason());
 #ifdef DEBUG
         // Check the passed exitReason is the same as the one on entry.
         masm.push(scratch);
         masm.loadPtr(exitReason, ABINonArgReturnReg0);
         Label ok;
         masm.branch32(Assembler::Condition::Equal, ABINonArgReturnReg0, Imm32(reason.raw()), &ok);
         masm.breakpoint();
         masm.bind(&ok);
         masm.pop(scratch);
 #endif
         masm.store32(Imm32(ExitReason::None().raw()), exitReason);
+
+        if (reason.isNative() && !scratch.volatile_())
+            masm.Pop(scratch);
     }
 
     // Forbid pools for the same reason as described in GenerateCallablePrologue.
 #if defined(JS_CODEGEN_ARM)
     AutoForbidPools afp(&masm, /* number of instructions in scope = */ 3);
 #endif
 
     // There is an important ordering constraint here: fp must be repointed to
--- a/js/src/wasm/WasmFrameIterator.h
+++ b/js/src/wasm/WasmFrameIterator.h
@@ -113,16 +113,17 @@ class ExitReason
         MOZ_ASSERT(uint32_t(sym) <= (UINT32_MAX << 1), "packing constraints");
         MOZ_ASSERT(!isFixed());
     }
 
     static ExitReason None() { return ExitReason(ExitReason::Fixed::None); }
 
     bool isFixed() const { return (payload_ & 0x1) == 0; }
     bool isNone() const { return isFixed() && fixed() == Fixed::None; }
+    bool isNative() const { return !isFixed() || fixed() == Fixed::BuiltinNative; }
 
     uint32_t raw() const {
         return payload_;
     }
     Fixed fixed() const {
         MOZ_ASSERT(isFixed());
         return Fixed(payload_ >> 1);
     }