Bug 1478616 - Generalize the wasm write barrier. r=bbouvier
author: Lars T Hansen <lhansen@mozilla.com>
date: Thu, 26 Jul 2018 15:54:49 +0200
changeset 428752 e713a94f6e551b1e7bff74841faacbfdaae3057e
parent 428751 55e008024f2463901dde8375b88046fe02920eb2
child 428753 c47ad4f17e198461cea48de023ba58b36598acb3
push id: 34341
push user: dvarga@mozilla.com
push date: Fri, 27 Jul 2018 17:42:47 +0000
reviewers: bbouvier
bugs: 1478616
milestone: 63.0a1
We need to generalize the barrier to handle not just globals, but also fields in structures. To do this we pass the location of the store (i.e., the Cell**) to the C++ barrier machinery, not the global index.
js/src/wasm/WasmBaselineCompile.cpp
js/src/wasm/WasmBuiltins.cpp
js/src/wasm/WasmInstance.cpp
js/src/wasm/WasmInstance.h
js/src/wasm/WasmTypes.h
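
The patch replaces the packed PostBarrierArg (a global index) with a raw gc::Cell** handed straight to the GC's store buffer. As a rough illustration of that shape only — this is not SpiderMonkey code, and Cell, StoreBuffer, and postBarrier below are hypothetical stand-ins — the generalized calling convention looks like this:

    #include <cassert>
    #include <unordered_set>

    struct Cell {};   // stand-in for js::gc::Cell

    // Stand-in for the GC store buffer: it records *locations* (Cell**), so it
    // does not care whether the slot is a global, a struct field, or anything else.
    struct StoreBuffer {
        std::unordered_set<Cell**> entries;
        void putCell(Cell** location) { entries.insert(location); }
    };

    static StoreBuffer storeBuffer;

    // Generalized barrier: the caller passes the raw address of the slot it just
    // wrote.  The old design passed a packed argument carrying a global index,
    // so it could only barrier globals.
    void postBarrier(Cell** location) {
        assert(location);
        storeBuffer.putCell(location);
    }

    int main() {
        Cell nurseryThing;

        // A "global" slot and a "struct field" slot are barriered identically.
        Cell* globalSlot = nullptr;
        struct Obj { Cell* field = nullptr; } obj;

        globalSlot = &nurseryThing;  postBarrier(&globalSlot);
        obj.field  = &nurseryThing;  postBarrier(&obj.field);

        assert(storeBuffer.entries.size() == 2);
    }

The diff below wires this up: the baseline compiler computes the slot address, emits the pre- and post-barriers around the store, and Instance::postBarrier simply forwards the location to the store buffer.
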
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -1886,16 +1886,17 @@ class BaseCompiler final : public BaseCo
     TempAllocator&              alloc_;
     const ValTypeVector&        locals_;         // Types of parameters and locals
     bool                        deadCode_;       // Flag indicating we should decode & discard the opcode
     BCESet                      bceSafe_;        // Locals that have been bounds checked and not updated since
     ValTypeVector               SigD_;
     ValTypeVector               SigF_;
     MIRTypeVector               SigP_;
     MIRTypeVector               SigPI_;
+    MIRTypeVector               SigPL_;
     MIRTypeVector               SigPII_;
     MIRTypeVector               SigPIII_;
     MIRTypeVector               SigPIIL_;
     MIRTypeVector               SigPILL_;
     NonAssertingLabel           returnLabel_;
 
     LatentOp                    latentOp_;       // Latent operation for branch (seen next)
     ValType                     latentType_;     // Operand type, if latentOp_ is true
@@ -5611,16 +5612,107 @@ class BaseCompiler final : public BaseCo
     }
 
     void trap(Trap t) const {
         masm.wasmTrap(t, bytecodeOffset());
     }
 
     ////////////////////////////////////////////////////////////
     //
+    // Object support.
+
+#ifdef ENABLE_WASM_GC
+    // This emits a GC pre-write barrier.  The pre-barrier is needed when we
+    // replace a member field with a new value, and the previous field value
+    // might have no other referents, and incremental GC is ongoing. The field
+    // might belong to an object or be a stack slot or a register or a heap
+    // allocated value.
+    //
+    // let obj = { field: previousValue };
+    // obj.field = newValue; // previousValue must be marked with a pre-barrier.
+    //
+    // The `valueAddr` is the address of the location that we are about to
+    // update.  This function preserves that register.
+
+    void emitPreBarrier(RegPtr valueAddr) {
+        Label skipBarrier;
+
+        MOZ_ASSERT(valueAddr == PreBarrierReg);
+
+        ScratchPtr scratch(*this);
+
+        // If no incremental GC has started, we don't need the barrier.
+        masm.loadWasmTlsRegFromFrame(scratch);
+        masm.loadPtr(Address(scratch, offsetof(TlsData, addressOfNeedsIncrementalBarrier)), scratch);
+        masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1), &skipBarrier);
+
+        // If the previous value is null, we don't need the barrier.
+        masm.loadPtr(Address(valueAddr, 0), scratch);
+        masm.branchTestPtr(Assembler::Zero, scratch, scratch, &skipBarrier);
+
+        // Call the barrier. This assumes PreBarrierReg contains the address of
+        // the stored value.
+        //
+        // PreBarrierReg is volatile and is preserved by the barrier.
+        masm.loadWasmTlsRegFromFrame(scratch);
+        masm.loadPtr(Address(scratch, offsetof(TlsData, instance)), scratch);
+        masm.loadPtr(Address(scratch, Instance::offsetOfPreBarrierCode()), scratch);
+        masm.call(scratch);
+
+        masm.bind(&skipBarrier);
+    }
+
+    // This emits a GC post-write barrier. This is needed to ensure that the GC
+    // is aware of slots of tenured things containing references to nursery
+    // values. Pass None for object when the field's owner object is known to
+    // be tenured or heap-allocated.
+    //
+    // This frees the register `valueAddr`.
+
+    void emitPostBarrier(const Maybe<RegPtr>& object, RegPtr otherScratch, RegPtr valueAddr, RegPtr setValue) {
+        Label skipBarrier;
+
+        // If the pointer being stored is null, no barrier.
+        masm.branchTestPtr(Assembler::Zero, setValue, setValue, &skipBarrier);
+
+        // If there is a containing object and it is in the nursery, no barrier.
+        if (object)
+            masm.branchPtrInNurseryChunk(Assembler::Equal, *object, otherScratch, &skipBarrier);
+
+        // If the pointer being stored is to a tenured object, no barrier.
+        masm.branchPtrInNurseryChunk(Assembler::NotEqual, setValue, otherScratch, &skipBarrier);
+
+        // Need a barrier.
+        uint32_t bytecodeOffset = iter_.lastOpcodeOffset();
+
+        // The `valueAddr` is a raw pointer to the cell within some GC object or
+        // TLS area, and we guarantee that the GC will not run while the
+        // postbarrier call is active, so push a uintptr_t value.
+# ifdef JS_64BIT
+        pushI64(RegI64(Register64(valueAddr)));
+        emitInstanceCall(bytecodeOffset, SigPL_, ExprType::Void, SymbolicAddress::PostBarrier);
+# else
+        pushI32(RegI32(valueAddr));
+        emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void, SymbolicAddress::PostBarrier);
+# endif
+
+        masm.bind(&skipBarrier);
+    }
+#endif
+
+    void emitBarrieredStore(const Maybe<RegPtr>& object, RegPtr valueAddr, RegPtr value) {
+        emitPreBarrier(valueAddr); // Preserves valueAddr
+        masm.storePtr(value, Address(valueAddr, 0));
+        RegPtr otherScratch = needRef();
+        emitPostBarrier(object, otherScratch, valueAddr, value); // Consumes valueAddr
+        freeRef(otherScratch);
+    }
+
+    ////////////////////////////////////////////////////////////
+    //
     // Machinery for optimized conditional branches.
     //
     // To disable this optimization it is enough always to return false from
     // sniffConditionalControl{Cmp,Eqz}.
 
     struct BranchState {
         union {
             struct {
@@ -5716,91 +5808,16 @@ class BaseCompiler final : public BaseCo
     void branchTo(Assembler::Condition c, RegI64 lhs, RegI64 rhs, Label* l) {
         masm.branch64(c, lhs, rhs, l);
     }
 
     void branchTo(Assembler::Condition c, RegI64 lhs, Imm64 rhs, Label* l) {
         masm.branch64(c, lhs, rhs, l);
     }
 
-#ifdef ENABLE_WASM_GC
-    // The following couple of functions emit a GC pre-write barrier. This is
-    // needed when we replace a member field with a new value, and the previous
-    // field value might have no other referents. The field might belong to an
-    // object or be a stack slot or a register or a heap allocated value.
-    //
-    // let obj = { field: previousValue };
-    // obj.field = newValue; // previousValue must be marked with a pre-barrier.
-    //
-    // Implementing a pre-barrier looks like this:
-    // - call `testNeedPreBarrier` with a fresh label.
-    // - user code must put the address of the field we're about to clobber in
-    // PreBarrierReg (to avoid explicit pushing/popping).
-    // - call `emitPreBarrier`, which binds the label.
-
-    void testNeedPreBarrier(Label* skipBarrier) {
-        MOZ_ASSERT(!skipBarrier->used());
-        MOZ_ASSERT(!skipBarrier->bound());
-
-        // If no incremental GC has started, we don't need the barrier.
-        ScratchPtr scratch(*this);
-        masm.loadWasmTlsRegFromFrame(scratch);
-        masm.loadPtr(Address(scratch, offsetof(TlsData, addressOfNeedsIncrementalBarrier)), scratch);
-        masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1), skipBarrier);
-    }
-
-    void emitPreBarrier(RegPtr valueAddr, Label* skipBarrier) {
-        MOZ_ASSERT(valueAddr == PreBarrierReg);
-
-        // If the previous value is null, we don't need the barrier.
-        ScratchPtr scratch(*this);
-        masm.loadPtr(Address(valueAddr, 0), scratch);
-        masm.branchTestPtr(Assembler::Zero, scratch, scratch, skipBarrier);
-
-        // Call the barrier. This assumes PreBarrierReg contains the address of
-        // the stored value.
-        masm.loadWasmTlsRegFromFrame(scratch);
-        masm.loadPtr(Address(scratch, offsetof(TlsData, instance)), scratch);
-        masm.loadPtr(Address(scratch, Instance::offsetOfPreBarrierCode()), scratch);
-        masm.call(scratch);
-
-        masm.bind(skipBarrier);
-    }
-
-    // This emits a GC post-write barrier. This is needed to ensure that the GC
-    // is aware of slots of tenured things containing references to nursery
-    // values. Pass None for object when the field's owner object is known to
-    // be tenured or heap-allocated.
-
-    void emitPostBarrier(const Maybe<RegPtr>& object, RegPtr setValue, PostBarrierArg arg) {
-        Label skipBarrier;
-
-        // If the set value is null, no barrier.
-        masm.branchTestPtr(Assembler::Zero, setValue, setValue, &skipBarrier);
-
-        RegPtr scratch = needRef();
-        if (object) {
-            // If the object value isn't tenured, no barrier.
-            masm.branchPtrInNurseryChunk(Assembler::Equal, *object, scratch, &skipBarrier);
-        }
-
-        // If the set value is tenured, no barrier.
-        masm.branchPtrInNurseryChunk(Assembler::NotEqual, setValue, scratch, &skipBarrier);
-
-        freeRef(scratch);
-
-        // Need a barrier.
-        uint32_t bytecodeOffset = iter_.lastOpcodeOffset();
-        pushI32(arg.rawPayload());
-        emitInstanceCall(bytecodeOffset, SigPI_, ExprType::Void, SymbolicAddress::PostBarrier);
-
-        masm.bind(&skipBarrier);
-    }
-#endif
-
     // Emit a conditional branch that optionally and optimally cleans up the CPU
     // stack before we branch.
     //
     // Cond is either Assembler::Condition or Assembler::DoubleCondition.
     //
     // Lhs is RegI32, RegI64, or RegF32, or RegF64.
     //
     // Rhs is either the same as Lhs, or an immediate expression compatible with
@@ -8433,37 +8450,24 @@ BaseCompiler::emitSetGlobal()
         RegF64 rv = popF64();
         ScratchI32 tmp(*this);
         masm.storeDouble(rv, addressOfGlobalVar(global, tmp));
         freeF64(rv);
         break;
       }
 #ifdef ENABLE_WASM_GC
       case ValType::AnyRef: {
-        Label skipBarrier;
-        testNeedPreBarrier(&skipBarrier);
-
         RegPtr valueAddr(PreBarrierReg);
         needRef(valueAddr);
         {
             ScratchI32 tmp(*this);
             masm.computeEffectiveAddress(addressOfGlobalVar(global, tmp), valueAddr);
         }
-        emitPreBarrier(valueAddr, &skipBarrier);
-        freeRef(valueAddr);
-
         RegPtr rv = popRef();
-        {
-            // Actual store.
-            ScratchI32 tmp(*this);
-            masm.storePtr(rv, addressOfGlobalVar(global, tmp));
-        }
-
-        emitPostBarrier(Nothing(), rv, PostBarrierArg::Global(id));
-
+        emitBarrieredStore(Nothing(), valueAddr, rv); // Consumes valueAddr
         freeRef(rv);
         break;
       }
 #endif
       default:
         MOZ_CRASH("Global variable type");
         break;
     }
@@ -10255,16 +10259,18 @@ BaseCompiler::init()
     if (!SigD_.append(ValType::F64))
         return false;
     if (!SigF_.append(ValType::F32))
         return false;
     if (!SigP_.append(MIRType::Pointer))
         return false;
     if (!SigPI_.append(MIRType::Pointer) || !SigPI_.append(MIRType::Int32))
         return false;
+    if (!SigPL_.append(MIRType::Pointer) || !SigPL_.append(MIRType::Int64))
+        return false;
     if (!SigPII_.append(MIRType::Pointer) || !SigPII_.append(MIRType::Int32) ||
         !SigPII_.append(MIRType::Int32))
     {
         return false;
     }
     if (!SigPIII_.append(MIRType::Pointer) || !SigPIII_.append(MIRType::Int32) ||
         !SigPIII_.append(MIRType::Int32) || !SigPIII_.append(MIRType::Int32))
     {
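
Before the runtime-side changes, here is the store sequence that emitBarrieredStore above generates — pre-barrier on the old value, the store itself, then the post-barrier on the location — as a self-contained sketch. All names here (needsIncrementalBarrier, preMarked, inNursery, barrieredStore) are illustrative stand-ins for the real masm/GC hooks, not actual APIs:

    #include <cassert>
    #include <unordered_set>

    struct Cell { bool tenured = true; };

    static bool needsIncrementalBarrier = false;      // is an incremental GC in progress?
    static std::unordered_set<Cell*>  preMarked;      // old values kept alive by the pre-barrier
    static std::unordered_set<Cell**> storeBuffer;    // locations remembered by the post-barrier

    static bool inNursery(const Cell* c) { return c && !c->tenured; }

    // owner is the object containing the slot, or nullptr when it is unknown or
    // known to be tenured (the Nothing() case in emitSetGlobal).
    void barrieredStore(Cell** valueAddr, Cell* newValue, const Cell* owner) {
        // Pre-barrier: if an incremental GC is on and the slot holds a non-null
        // value, keep the old value alive before we overwrite the edge to it.
        if (needsIncrementalBarrier && *valueAddr)
            preMarked.insert(*valueAddr);

        // The store itself.
        *valueAddr = newValue;

        // Post-barrier: remember the location only if we stored a nursery thing
        // into a slot whose owner is not itself in the nursery.
        if (newValue && !inNursery(owner) && inNursery(newValue))
            storeBuffer.insert(valueAddr);
    }

    int main() {
        Cell tenuredOwner;                              // tenured by default
        Cell nurseryThing; nurseryThing.tenured = false;

        Cell* slot = nullptr;
        barrieredStore(&slot, &nurseryThing, &tenuredOwner);
        assert(storeBuffer.count(&slot) == 1);
    }
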
--- a/js/src/wasm/WasmBuiltins.cpp
+++ b/js/src/wasm/WasmBuiltins.cpp
@@ -673,17 +673,16 @@ AddressOf(SymbolicAddress imm, ABIFuncti
         *abiType = Args_General4;
         return FuncCast(Instance::memCopy, *abiType);
       case SymbolicAddress::MemFill:
         *abiType = Args_General4;
         return FuncCast(Instance::memFill, *abiType);
 #ifdef ENABLE_WASM_GC
       case SymbolicAddress::PostBarrier:
         *abiType = Args_General2;
-        static_assert(sizeof(PostBarrierArg) == sizeof(uint32_t), "passed arg is a u32");
         return FuncCast(Instance::postBarrier, *abiType);
 #endif
 #if defined(JS_CODEGEN_MIPS32)
       case SymbolicAddress::js_jit_gAtomic64Lock:
         return &js::jit::gAtomic64Lock;
 #endif
       case SymbolicAddress::Limit:
         break;
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -462,35 +462,20 @@ Instance::memFill(Instance* instance, ui
 
     JSContext* cx = TlsContext.get();
     JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS);
     return -1;
 }
 
 #ifdef ENABLE_WASM_GC
 /* static */ void
-Instance::postBarrier(Instance* instance, PostBarrierArg arg)
+Instance::postBarrier(Instance* instance, gc::Cell** location)
 {
-    gc::Cell** cell = nullptr;
-    switch (arg.type()) {
-      case PostBarrierArg::Type::Global: {
-        const GlobalDesc& global = instance->metadata().globals[arg.globalIndex()];
-        MOZ_ASSERT(!global.isConstant());
-        MOZ_ASSERT(global.type().isRefOrAnyRef());
-        uint8_t* globalAddr = instance->globalData() + global.offset();
-        if (global.isIndirect())
-            globalAddr = *(uint8_t**)globalAddr;
-        MOZ_ASSERT(*(JSObject**)globalAddr, "shouldn't call postbarrier if null");
-        cell = (gc::Cell**) globalAddr;
-        break;
-      }
-    }
-
-    MOZ_ASSERT(cell);
-    TlsContext.get()->runtime()->gc.storeBuffer().putCell(cell);
+    MOZ_ASSERT(location);
+    TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
 }
 #endif // ENABLE_WASM_GC
 
 Instance::Instance(JSContext* cx,
                    Handle<WasmInstanceObject*> object,
                    SharedCode code,
                    UniqueDebugState debug,
                    UniqueTlsData tlsDataIn,
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -175,17 +175,17 @@ class Instance
     static uint32_t growMemory_i32(Instance* instance, uint32_t delta);
     static uint32_t currentMemory_i32(Instance* instance);
     static int32_t wait_i32(Instance* instance, uint32_t byteOffset, int32_t value, int64_t timeout);
     static int32_t wait_i64(Instance* instance, uint32_t byteOffset, int64_t value, int64_t timeout);
     static int32_t wake(Instance* instance, uint32_t byteOffset, int32_t count);
     static int32_t memCopy(Instance* instance, uint32_t destByteOffset, uint32_t srcByteOffset, uint32_t len);
     static int32_t memFill(Instance* instance, uint32_t byteOffset, uint32_t value, uint32_t len);
 #ifdef ENABLE_WASM_GC
-    static void postBarrier(Instance* instance, PostBarrierArg arg);
+    static void postBarrier(Instance* instance, gc::Cell** location);
 #endif
 };
 
 typedef UniquePtr<Instance> UniqueInstance;
 
 } // namespace wasm
 } // namespace js
 
--- a/js/src/wasm/WasmTypes.h
+++ b/js/src/wasm/WasmTypes.h
@@ -2370,54 +2370,12 @@ class DebugFrame
 
     // DebugFrames are aligned to 8-byte aligned, allowing them to be placed in
     // an AbstractFramePtr.
 
     static const unsigned Alignment = 8;
     static void alignmentStaticAsserts();
 };
 
-# ifdef ENABLE_WASM_GC
-// A packed format for an argument to the Instance::postBarrier function.
-class PostBarrierArg
-{
-  public:
-    enum class Type {
-        Global = 0x0,
-        Last = Global
-    };
-
-  private:
-    uint32_t type_: 1;
-    uint32_t payload_: 31;
-
-    PostBarrierArg(uint32_t payload, Type type)
-      : type_(uint32_t(type)),
-        payload_(payload)
-    {
-        MOZ_ASSERT(payload < (UINT32_MAX >> 1));
-        MOZ_ASSERT(uint32_t(type) <= uint32_t(Type::Last));
-    }
-
-  public:
-    static PostBarrierArg Global(uint32_t globalIndex) {
-        return PostBarrierArg(globalIndex, Type::Global);
-    }
-
-    Type type() const {
-        MOZ_ASSERT(type_ <= uint32_t(Type::Last));
-        return Type(type_);
-    }
-    uint32_t globalIndex() const {
-        MOZ_ASSERT(type() == Type::Global);
-        return payload_;
-    }
-
-    uint32_t rawPayload() const {
-        return (payload_ << 1) | type_;
-    }
-};
-# endif
-
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_types_h