Bug 1329195 - Update old stubs instead of attaching a new stub when a DOM proxy's generation changes. r=evilpie
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 19 Jan 2017 14:04:07 +0100
changeset 375111 80e4fe7ff7cb78b5774caa19f9c340132a06202b
parent 375110 52a8ae1e1a82e5f7166001b70d5ab5adb2f26336
child 375112 30f65ce798246377f94798a4fa88c102e9224680
push id 6996
push user jlorenzo@mozilla.com
push date Mon, 06 Mar 2017 20:48:21 +0000
treeherder mozilla-beta@d89512dab048 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers evilpie
bugs 1329195
milestone 53.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1329195 - Update old stubs instead of attaching a new stub when a DOM proxy's generation changes. r=evilpie
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/CacheIR.cpp
js/src/jit/CacheIR.h
js/src/jit/CacheIRCompiler.cpp
js/src/jit/IonCacheIRCompiler.cpp
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -960,27 +960,27 @@ jit::AttachBaselineCacheIRStub(JSContext
     for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
         switch (stubKind) {
           case CacheIRStubKind::Monitored: {
             if (!iter->isCacheIR_Monitored())
                 continue;
             auto otherStub = iter->toCacheIR_Monitored();
             if (otherStub->stubInfo() != stubInfo)
                 continue;
-            if (!writer.stubDataEquals(otherStub->stubDataStart()))
+            if (!writer.stubDataEqualsMaybeUpdate(otherStub->stubDataStart()))
                 continue;
             break;
           }
           case CacheIRStubKind::Updated: {
             if (!iter->isCacheIR_Updated())
                 continue;
             auto otherStub = iter->toCacheIR_Updated();
             if (otherStub->stubInfo() != stubInfo)
                 continue;
-            if (!writer.stubDataEquals(otherStub->stubDataStart()))
+            if (!writer.stubDataEqualsMaybeUpdate(otherStub->stubDataStart()))
                 continue;
             break;
           }
         }
 
         // We found a stub that's exactly the same as the stub we're about to
         // attach. Just return nullptr, the caller should do nothing in this
         // case.
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -674,19 +674,21 @@ CheckDOMProxyExpandoDoesNotShadow(CacheI
 }
 
 bool
 GetPropIRGenerator::tryAttachDOMProxyUnshadowed(HandleObject obj, ObjOperandId objId, HandleId id)
 {
     MOZ_ASSERT(IsCacheableDOMProxy(obj));
 
     RootedObject checkObj(cx_, obj->staticPrototype());
+    if (!checkObj)
+        return false;
+
     RootedNativeObject holder(cx_);
     RootedShape shape(cx_);
-
     NativeGetPropCacheability canCache = CanAttachNativeGetProp(cx_, checkObj, id, &holder, &shape,
                                                                 pc_, engine_, canAttachGetter_,
                                                                 isTemporarilyUnoptimizable_);
     MOZ_ASSERT_IF(idempotent(),
                   canCache == CanAttachNone || (canCache == CanAttachReadSlot && holder));
     if (canCache == CanAttachNone)
         return false;
 
--- a/js/src/jit/CacheIR.h
+++ b/js/src/jit/CacheIR.h
@@ -218,16 +218,17 @@ class StubField
         JSObject,
         Symbol,
         String,
         Id,
 
         // These fields take up 64 bits on all platforms.
         RawInt64,
         First64BitType = RawInt64,
+        DOMExpandoGeneration,
         Value,
 
         Limit
     };
 
     static bool sizeIsWord(Type type) {
         MOZ_ASSERT(type != Type::Limit);
         return type < Type::First64BitType;
@@ -373,17 +374,17 @@ class MOZ_RAII CacheIRWriter : public JS
         // For now, assert we only GC before we append stub fields.
         MOZ_RELEASE_ASSERT(stubFields_.empty());
     }
 
     size_t stubDataSize() const {
         return stubDataSize_;
     }
     void copyStubData(uint8_t* dest) const;
-    bool stubDataEquals(const uint8_t* stubData) const;
+    bool stubDataEqualsMaybeUpdate(uint8_t* stubData) const;
 
     bool operandIsDead(uint32_t operandId, uint32_t currentInstruction) const {
         if (operandId >= operandLastUsed_.length())
             return false;
         return currentInstruction > operandLastUsed_[operandId];
     }
     const uint8_t* codeStart() const {
         MOZ_ASSERT(!failed());
@@ -535,17 +536,17 @@ class MOZ_RAII CacheIRWriter : public JS
         addStubField(uintptr_t(shape), StubField::Type::Shape);
     }
     ValOperandId loadDOMExpandoValueGuardGeneration(ObjOperandId obj,
                                                     ExpandoAndGeneration* expandoAndGeneration)
     {
         ValOperandId res(nextOperandId_++);
         writeOpWithOperandId(CacheOp::LoadDOMExpandoValueGuardGeneration, obj);
         addStubField(uintptr_t(expandoAndGeneration), StubField::Type::RawWord);
-        addStubField(expandoAndGeneration->generation, StubField::Type::RawInt64);
+        addStubField(expandoAndGeneration->generation, StubField::Type::DOMExpandoGeneration);
         writeOperandId(res);
         return res;
     }
     ValOperandId loadDOMExpandoValueIgnoreGeneration(ObjOperandId obj) {
         ValOperandId res(nextOperandId_++);
         writeOpWithOperandId(CacheOp::LoadDOMExpandoValueIgnoreGeneration, obj);
         writeOperandId(res);
         return res;
--- a/js/src/jit/CacheIRCompiler.cpp
+++ b/js/src/jit/CacheIRCompiler.cpp
@@ -668,16 +668,17 @@ CacheIRStubInfo::copyStubData(ICStub* sr
     while (true) {
         StubField::Type type = fieldType(field);
         switch (type) {
           case StubField::Type::RawWord:
             *reinterpret_cast<uintptr_t*>(destBytes + offset) =
                 *reinterpret_cast<uintptr_t*>(srcBytes + offset);
             break;
           case StubField::Type::RawInt64:
+          case StubField::Type::DOMExpandoGeneration:
             *reinterpret_cast<uint64_t*>(destBytes + offset) =
                 *reinterpret_cast<uint64_t*>(srcBytes + offset);
             break;
           case StubField::Type::Shape:
             getStubField<ICStub, Shape*>(dest, offset).init(getStubField<ICStub, Shape*>(src, offset));
             break;
           case StubField::Type::JSObject:
             getStubField<ICStub, JSObject*>(dest, offset).init(getStubField<ICStub, JSObject*>(src, offset));
@@ -763,16 +764,17 @@ CacheIRWriter::copyStubData(uint8_t* des
             break;
           case StubField::Type::String:
             InitGCPtr<JSString*>(destWords, field.asWord());
             break;
           case StubField::Type::Id:
             InitGCPtr<jsid>(destWords, field.asWord());
             break;
           case StubField::Type::RawInt64:
+          case StubField::Type::DOMExpandoGeneration:
             *reinterpret_cast<uint64_t*>(destWords) = field.asInt64();
             break;
           case StubField::Type::Value:
             InitGCPtr<JS::Value>(destWords, field.asInt64());
             break;
           case StubField::Type::Limit:
             MOZ_CRASH("Invalid type");
         }
@@ -786,16 +788,17 @@ jit::TraceCacheIRStub(JSTracer* trc, T* 
 {
     uint32_t field = 0;
     size_t offset = 0;
     while (true) {
         StubField::Type fieldType = stubInfo->fieldType(field);
         switch (fieldType) {
           case StubField::Type::RawWord:
           case StubField::Type::RawInt64:
+          case StubField::Type::DOMExpandoGeneration:
             break;
           case StubField::Type::Shape:
             TraceNullableEdge(trc, &stubInfo->getStubField<T, Shape*>(stub, offset),
                               "cacheir-shape");
             break;
           case StubField::Type::ObjectGroup:
             TraceNullableEdge(trc, &stubInfo->getStubField<T, ObjectGroup*>(stub, offset),
                               "cacheir-group");
@@ -829,35 +832,48 @@ jit::TraceCacheIRStub(JSTracer* trc, T* 
 
 template
 void jit::TraceCacheIRStub(JSTracer* trc, ICStub* stub, const CacheIRStubInfo* stubInfo);
 
 template
 void jit::TraceCacheIRStub(JSTracer* trc, IonICStub* stub, const CacheIRStubInfo* stubInfo);
 
 bool
-CacheIRWriter::stubDataEquals(const uint8_t* stubData) const
+CacheIRWriter::stubDataEqualsMaybeUpdate(uint8_t* stubData) const
 {
     MOZ_ASSERT(!failed());
 
     const uintptr_t* stubDataWords = reinterpret_cast<const uintptr_t*>(stubData);
 
+    // If DOMExpandoGeneration fields are different but all other stub fields
+    // are exactly the same, we overwrite the old stub data instead of attaching
+    // a new stub, as the old stub is never going to succeed. This works because
+    // even Ion stubs read the DOMExpandoGeneration field from the stub instead
+    // of baking it in.
+    bool expandoGenerationIsDifferent = false;
+
     for (const StubField& field : stubFields_) {
         if (field.sizeIsWord()) {
             if (field.asWord() != *stubDataWords)
                 return false;
             stubDataWords++;
             continue;
         }
 
-        if (field.asInt64() != *reinterpret_cast<const uint64_t*>(stubDataWords))
-            return false;
+        if (field.asInt64() != *reinterpret_cast<const uint64_t*>(stubDataWords)) {
+            if (field.type() != StubField::Type::DOMExpandoGeneration)
+                return false;
+            expandoGenerationIsDifferent = true;
+        }
         stubDataWords += sizeof(uint64_t) / sizeof(uintptr_t);
     }
 
+    if (expandoGenerationIsDifferent)
+        copyStubData(stubData);
+
     return true;
 }
 
 HashNumber
 CacheIRStubKey::hash(const CacheIRStubKey::Lookup& l)
 {
     HashNumber hash = mozilla::HashBytes(l.code, l.length);
     hash = mozilla::AddToHash(hash, uint32_t(l.kind));
--- a/js/src/jit/IonCacheIRCompiler.cpp
+++ b/js/src/jit/IonCacheIRCompiler.cpp
@@ -1,59 +1,68 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "mozilla/DebugOnly.h"
+
 #include "jit/CacheIRCompiler.h"
 #include "jit/IonCaches.h"
 #include "jit/IonIC.h"
 
 #include "jit/Linker.h"
 #include "jit/SharedICHelpers.h"
 #include "proxy/Proxy.h"
 
 #include "jit/MacroAssembler-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
+using mozilla::DebugOnly;
+
 namespace js {
 namespace jit {
 
 // IonCacheIRCompiler compiles CacheIR to IonIC native code.
 class MOZ_RAII IonCacheIRCompiler : public CacheIRCompiler
 {
   public:
     friend class AutoSaveLiveRegisters;
 
-    IonCacheIRCompiler(JSContext* cx, const CacheIRWriter& writer, IonIC* ic, IonScript* ionScript)
+    IonCacheIRCompiler(JSContext* cx, const CacheIRWriter& writer, IonIC* ic, IonScript* ionScript,
+                       IonICStub* stub)
       : CacheIRCompiler(cx, writer, Mode::Ion),
         writer_(writer),
         ic_(ic),
         ionScript_(ionScript),
+        stub_(stub),
         nextStubField_(0),
 #ifdef DEBUG
         calledPrepareVMCall_(false),
 #endif
         savedLiveRegs_(false)
     {
         MOZ_ASSERT(ic_);
         MOZ_ASSERT(ionScript_);
     }
 
     MOZ_MUST_USE bool init();
-    JitCode* compile(IonICStub* stub);
+    JitCode* compile();
 
   private:
     const CacheIRWriter& writer_;
     IonIC* ic_;
     IonScript* ionScript_;
 
+    // The stub we're generating code for.
+    IonICStub* stub_;
+
     CodeOffsetJump rejoinOffset_;
     Vector<CodeOffset, 4, SystemAllocPolicy> nextCodeOffsets_;
     Maybe<LiveRegisterSet> liveRegs_;
     Maybe<CodeOffset> stubJitCodeOffset_;
     uint32_t nextStubField_;
 
 #ifdef DEBUG
     bool calledPrepareVMCall_;
@@ -95,16 +104,24 @@ class MOZ_RAII IonCacheIRCompiler : publ
         return (T)readStubWord(offset, StubField::Type::RawWord);
     }
     template <typename T>
     T rawInt64StubField(uint32_t offset) {
         static_assert(sizeof(T) == sizeof(int64_t), "T musthave int64 size");
         return (T)readStubInt64(offset, StubField::Type::RawInt64);
     }
 
+    uint64_t* expandoGenerationStubFieldPtr(uint32_t offset) {
+        DebugOnly<uint64_t> generation =
+            readStubInt64(offset, StubField::Type::DOMExpandoGeneration);
+        uint64_t* ptr = reinterpret_cast<uint64_t*>(stub_->stubDataStart() + offset);
+        MOZ_ASSERT(*ptr == generation);
+        return ptr;
+    }
+
     void prepareVMCall(MacroAssembler& masm);
     MOZ_MUST_USE bool callVM(MacroAssembler& masm, const VMFunction& fun);
 
     void pushStubCodePointer() {
         stubJitCodeOffset_.emplace(masm.PushWithPatch(ImmPtr((void*)-1)));
     }
 
 #define DEFINE_OP(op) MOZ_MUST_USE bool emit##op();
@@ -355,17 +372,17 @@ IonCacheIRCompiler::init()
     }
 
     allocator.initAvailableRegs(available);
     allocator.initAvailableRegsAfterSpill();
     return true;
 }
 
 JitCode*
-IonCacheIRCompiler::compile(IonICStub* stub)
+IonCacheIRCompiler::compile()
 {
     masm.setFramePushed(ionScript_->frameSize());
     if (cx_->spsProfiler.enabled())
         masm.enableProfilingInstrumentation();
 
     do {
         switch (reader.readOp()) {
 #define DEFINE_OP(op)                   \
@@ -407,17 +424,17 @@ IonCacheIRCompiler::compile(IonICStub* s
     }
 
     rejoinOffset_.fixup(&masm);
     CodeLocationJump rejoinJump(newStubCode, rejoinOffset_);
     PatchJump(rejoinJump, ic_->rejoinLabel());
 
     for (CodeOffset offset : nextCodeOffsets_) {
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, offset),
-                                           ImmPtr(stub->nextCodeRawPtr()),
+                                           ImmPtr(stub_->nextCodeRawPtr()),
                                            ImmPtr((void*)-1));
     }
     if (stubJitCodeOffset_) {
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, *stubJitCodeOffset_),
                                            ImmPtr(newStubCode.get()),
                                            ImmPtr((void*)-1));
     }
 
@@ -903,38 +920,41 @@ IonCacheIRCompiler::emitGuardDOMExpandoM
 }
 
 bool
 IonCacheIRCompiler::emitLoadDOMExpandoValueGuardGeneration()
 {
     Register obj = allocator.useRegister(masm, reader.objOperandId());
     ExpandoAndGeneration* expandoAndGeneration =
         rawWordStubField<ExpandoAndGeneration*>(reader.stubOffset());
-    uint64_t generation = rawInt64StubField<uint64_t>(reader.stubOffset());
+    uint64_t* generationFieldPtr = expandoGenerationStubFieldPtr(reader.stubOffset());
 
-    AutoScratchRegister scratch(allocator, masm);
+    AutoScratchRegister scratch1(allocator, masm);
+    AutoScratchRegister scratch2(allocator, masm);
     ValueOperand output = allocator.defineValueRegister(masm, reader.valOperandId());
 
     FailurePath* failure;
     if (!addFailurePath(&failure))
         return false;
 
-    masm.loadPtr(Address(obj, ProxyObject::offsetOfValues()), scratch);
-    Address expandoAddr(scratch, ProxyObject::offsetOfExtraSlotInValues(GetDOMProxyExpandoSlot()));
+    masm.loadPtr(Address(obj, ProxyObject::offsetOfValues()), scratch1);
+    Address expandoAddr(scratch1, ProxyObject::offsetOfExtraSlotInValues(GetDOMProxyExpandoSlot()));
 
     // Guard the ExpandoAndGeneration* matches the proxy's ExpandoAndGeneration.
     masm.loadValue(expandoAddr, output);
     masm.branchTestValue(Assembler::NotEqual, output, PrivateValue(expandoAndGeneration),
                          failure->label());
 
     // Guard expandoAndGeneration->generation matches the expected generation.
     masm.movePtr(ImmPtr(expandoAndGeneration), output.scratchReg());
+    masm.movePtr(ImmPtr(generationFieldPtr), scratch1);
     masm.branch64(Assembler::NotEqual,
                   Address(output.scratchReg(), ExpandoAndGeneration::offsetOfGeneration()),
-                  Imm64(generation),
+                  Address(scratch1, 0),
+                  scratch2,
                   failure->label());
 
     // Load expandoAndGeneration->expando into the output Value register.
     masm.loadValue(Address(output.scratchReg(), ExpandoAndGeneration::offsetOfExpando()), output);
     return true;
 }
 
 bool
@@ -945,21 +965,16 @@ IonIC::attachCacheIRStub(JSContext* cx, 
     AutoAssertNoPendingException aanpe(cx);
     JS::AutoCheckCannotGC nogc;
 
     // Do nothing if the IR generator failed or triggered a GC that invalidated
     // the script.
     if (writer.failed() || !outerScript->hasIonScript())
         return false;
 
-    JitContext jctx(cx, nullptr);
-    IonCacheIRCompiler compiler(cx, writer, this, outerScript->ionScript());
-    if (!compiler.init())
-        return false;
-
     JitZone* jitZone = cx->zone()->jitZone();
     uint32_t stubDataOffset = sizeof(IonICStub);
 
     // Try to reuse a previously-allocated CacheIRStubInfo.
     CacheIRStubKey::Lookup lookup(kind, ICStubEngine::IonIC,
                                   writer.codeStart(), writer.codeLength());
     CacheIRStubInfo* stubInfo = jitZone->getIonCacheIRStubInfo(lookup);
     if (!stubInfo) {
@@ -983,17 +998,17 @@ IonIC::attachCacheIRStub(JSContext* cx, 
     MOZ_ASSERT(stubInfo);
 
     // Ensure we don't attach duplicate stubs. This can happen if a stub failed
     // for some reason and the IR generator doesn't check for exactly the same
     // conditions.
     for (IonICStub* stub = firstStub_; stub; stub = stub->next()) {
         if (stub->stubInfo() != stubInfo)
             continue;
-        if (!writer.stubDataEquals(stub->stubDataStart()))
+        if (!writer.stubDataEqualsMaybeUpdate(stub->stubDataStart()))
             continue;
         return true;
     }
 
     size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
 
     // Allocate the IonICStub in the optimized stub space. Ion stubs and
     // CacheIRStubInfo instances for Ion stubs can be purged on GC. That's okay
@@ -1001,18 +1016,22 @@ IonIC::attachCacheIRStub(JSContext* cx, 
     // stub code should never access the IonICStub after making a VM call. The
     // IonICStub::poison method poisons the stub to catch bugs in this area.
     ICStubSpace* stubSpace = cx->zone()->jitZone()->optimizedStubSpace();
     void* newStubMem = stubSpace->alloc(bytesNeeded);
     if (!newStubMem)
         return false;
 
     IonICStub* newStub = new(newStubMem) IonICStub(fallbackLabel_.raw(), stubInfo);
+    writer.copyStubData(newStub->stubDataStart());
 
-    JitCode* code = compiler.compile(newStub);
+    JitContext jctx(cx, nullptr);
+    IonCacheIRCompiler compiler(cx, writer, this, outerScript->ionScript(), newStub);
+    if (!compiler.init())
+        return false;
+
+    JitCode* code = compiler.compile();
     if (!code)
         return false;
 
-    writer.copyStubData(newStub->stubDataStart());
-
     attachStub(newStub, code);
     return true;
 }