Bug 1141865 - Part 1: Make two ICCall_Fallback stubs, one for constructing invocations. (r=jandem)
☠☠ backed out by 8d2064e2c511 ☠ ☠
author Eric Faust <efaustbmo@mozilla.com>
Wed, 03 Jun 2015 02:01:14 -0700
changeset 246971 2901436c9047202f7cc30fd89474e1bd2075294d
parent 246970 58aab1e6a65b2dd684fa70a85c85ba721212ace1
child 246972 d038c5da19b0d926d7c6ca3a7d78389548312ee3
push id 28848
push user ryanvm@gmail.com
push date Wed, 03 Jun 2015 20:00:13 +0000
treeherder mozilla-central@0920f2325a6d
reviewers jandem
bugs 1141865
milestone 41.0a1
Bug 1141865 - Part 1: Make two ICCall_Fallback stubs, one for constructing invocations. (r=jandem)
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/Ion.cpp
js/src/jit/JitCompartment.h
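
This patch splits the shared ICCall_Fallback stub code into two cached variants, one for regular calls and one for constructing (JSOP_NEW) invocations, and keeps a separate baseline return address for each. A minimal sketch of the keying scheme, using the field and constant names that appear in the patch:

    // Sketch only: how the stub code map distinguishes the two variants.
    // Bit 16 already encodes isSpread_; this patch adds isConstructing_ in bit 17.
    int32_t key = static_cast<int32_t>(ICStub::Call_Fallback) |
                  (static_cast<int32_t>(isSpread_) << 16) |
                  (static_cast<int32_t>(isConstructing_) << 17);
    // With isSpread_ == false this evaluates to CALL_KEY (not constructing)
    // or CONSTRUCT_KEY (constructing), the constants defined in BaselineIC.h.
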
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -449,17 +449,17 @@ static inline void*
 GetStubReturnAddress(JSContext* cx, jsbytecode* pc)
 {
     if (IsGetPropPC(pc))
         return cx->compartment()->jitCompartment()->baselineGetPropReturnAddr();
     if (IsSetPropPC(pc))
         return cx->compartment()->jitCompartment()->baselineSetPropReturnAddr();
     // This should be a call op of some kind, now.
     MOZ_ASSERT(IsCallPC(pc));
-    return cx->compartment()->jitCompartment()->baselineCallReturnAddr();
+    return cx->compartment()->jitCompartment()->baselineCallReturnAddr(JSOp(*pc) == JSOP_NEW);
 }
 
 static inline jsbytecode*
 GetNextNonLoopEntryPc(jsbytecode* pc)
 {
     JSOp op = JSOp(*pc);
     if (op == JSOP_GOTO)
         return pc + GET_JUMP_OFFSET(pc);
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -10830,34 +10830,30 @@ ICCall_Fallback::Compiler::generateStubC
 
     // Load passed-in ThisV into R1 just in case it's needed.  Need to do this before
     // we leave the stub frame since that info will be lost.
     // Current stack:  [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
     masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
 
     leaveStubFrame(masm, true);
 
-    // R1 and R0 are taken.
-    regs = availableGeneralRegs(2);
-    Register scratch = regs.takeAny();
-
     // If this is a |constructing| call, if the callee returns a non-object, we replace it with
     // the |this| object passed in.
-    MOZ_ASSERT(JSReturnOperand == R0);
-    Label skipThisReplace;
-    masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
-    masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
-                      &skipThisReplace);
-    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
-    masm.moveValue(R1, R0);
+    if (isConstructing_) {
+        MOZ_ASSERT(JSReturnOperand == R0);
+        Label skipThisReplace;
+
+        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
+        masm.moveValue(R1, R0);
 #ifdef DEBUG
-    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
-    masm.assumeUnreachable("Failed to return object in constructing call.");
+        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
+        masm.assumeUnreachable("Failed to return object in constructing call.");
 #endif
-    masm.bind(&skipThisReplace);
+        masm.bind(&skipThisReplace);
+    }
 
     // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
     // a MonitoredStub, but rather a MonitoredFallbackStub.  To use EmitEnterTypeMonitorIC,
     // first load the ICTypeMonitor_Fallback stub into BaselineStubReg.  Then, use
     // EmitEnterTypeMonitorIC with a custom struct offset.
     masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                  BaselineStubReg);
     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
@@ -10868,17 +10864,18 @@ ICCall_Fallback::Compiler::generateStubC
 bool
 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
     if (MOZ_UNLIKELY(isSpread_))
         return true;
 
     CodeOffsetLabel offset(returnOffset_);
     offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset());
+    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset(),
+                                                                    isConstructing_);
     return true;
 }
 
 typedef bool (*CreateThisFn)(JSContext* cx, HandleObject callee, MutableHandleValue rval);
 static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
 
 bool
 ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
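
With the compiler now keyed on isConstructing_, the BaselineIC.cpp changes above drop the run-time CONSTRUCTING_FLAG test (and the scratch register it needed) in favour of compile-time specialization: only the constructing variant of the stub code contains the "replace a non-object return value with |this|" fixup, and postGenerateStubCode records the return address in the matching slot. A hedged sketch of the resulting shape, register setup elided:

    // Sketch only: the fixup is emitted solely in the constructing variant.
    if (isConstructing_) {
        Label skipThisReplace;
        // Keep an object return value as-is ...
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
        // ... otherwise substitute the |this| value preserved in R1.
        masm.moveValue(R1, R0);
        masm.bind(&skipThisReplace);
    }
    // Later, each compiled variant registers its own return address:
    // initBaselineCallReturnAddr(code->raw() + offset.offset(), isConstructing_);
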
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -5533,37 +5533,28 @@ class ICCallStubCompiler : public ICStub
     void pushArrayArguments(MacroAssembler& masm, Address arrayVal,
                             AllocatableGeneralRegisterSet regs);
 };
 
 class ICCall_Fallback : public ICMonitoredFallbackStub
 {
     friend class ICStubSpace;
   public:
-    static const unsigned CONSTRUCTING_FLAG = 0x1;
-    static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x2;
+    static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x1;
 
     static const uint32_t MAX_OPTIMIZED_STUBS = 16;
     static const uint32_t MAX_SCRIPTED_STUBS = 7;
     static const uint32_t MAX_NATIVE_STUBS = 7;
   private:
 
-    ICCall_Fallback(JitCode* stubCode, bool isConstructing)
+    explicit ICCall_Fallback(JitCode* stubCode)
       : ICMonitoredFallbackStub(ICStub::Call_Fallback, stubCode)
-    {
-        extra_ = 0;
-        if (isConstructing)
-            extra_ |= CONSTRUCTING_FLAG;
-    }
-
-  public:
-    bool isConstructing() const {
-        return extra_ & CONSTRUCTING_FLAG;
-    }
-
+    { }
+
+  public:
     void noteUnoptimizableCall() {
         extra_ |= UNOPTIMIZABLE_CALL_FLAG;
     }
     bool hadUnoptimizableCall() const {
         return extra_ & UNOPTIMIZABLE_CALL_FLAG;
     }
 
     unsigned scriptedStubCount() const {
@@ -5578,36 +5569,40 @@ class ICCall_Fallback : public ICMonitor
     }
     bool nativeStubsAreGeneralized() const {
         // Return hasStub(Call_AnyNative) after Call_AnyNative stub is added.
         return false;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
+      public:
+        static const int32_t CALL_KEY = static_cast<int32_t>(ICStub::Call_Fallback);
+        static const int32_t CONSTRUCT_KEY = static_cast<int32_t>(ICStub::Call_Fallback) | (1 << 17);
       protected:
         bool isConstructing_;
         bool isSpread_;
         uint32_t returnOffset_;
         bool generateStubCode(MacroAssembler& masm);
         bool postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code);
 
         virtual int32_t getKey() const {
-            return static_cast<int32_t>(kind) | (static_cast<int32_t>(isSpread_) << 16);
+            return static_cast<int32_t>(kind) | (static_cast<int32_t>(isSpread_) << 16) |
+                   (static_cast<int32_t>(isConstructing_) << 17);
         }
 
       public:
         Compiler(JSContext* cx, bool isConstructing, bool isSpread)
           : ICCallStubCompiler(cx, ICStub::Call_Fallback),
             isConstructing_(isConstructing),
             isSpread_(isSpread)
         { }
 
         ICStub* getStub(ICStubSpace* space) {
-            ICCall_Fallback* stub = newStub<ICCall_Fallback>(space, getStubCode(), isConstructing_);
+            ICCall_Fallback* stub = newStub<ICCall_Fallback>(space, getStubCode());
             if (!stub || !stub->initMonitoringChain(cx, space))
                 return nullptr;
             return stub;
         }
     };
 };
 
 class ICCall_Scripted : public ICMonitoredStub
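
The new CALL_KEY and CONSTRUCT_KEY constants exist so that JitCompartment::sweep() can look up each variant's shared code without recomputing a compiler key; they mirror what getKey() returns for a non-spread compiler. A hedged consistency check one could write (not part of the patch):

    // Sketch only: the public constants must stay in sync with getKey(),
    // since sweep() looks the shared code up by these exact values.
    //   getKey() == CALL_KEY       when !isConstructing_ (non-spread)
    //   getKey() == CONSTRUCT_KEY  when  isConstructing_ (non-spread)
    static_assert(ICCall_Fallback::Compiler::CONSTRUCT_KEY ==
                  (ICCall_Fallback::Compiler::CALL_KEY | (1 << 17)),
                  "construct key is the call key with bit 17 set");
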
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -357,23 +357,23 @@ JitRuntime::patchIonBackedges(JSRuntime*
             PatchBackedge(patchableBackedge->backedge, patchableBackedge->loopHeader, target);
         else
             PatchBackedge(patchableBackedge->backedge, patchableBackedge->interruptCheck, target);
     }
 }
 
 JitCompartment::JitCompartment()
   : stubCodes_(nullptr),
-    baselineCallReturnAddr_(nullptr),
     baselineGetPropReturnAddr_(nullptr),
     baselineSetPropReturnAddr_(nullptr),
     stringConcatStub_(nullptr),
     regExpExecStub_(nullptr),
     regExpTestStub_(nullptr)
 {
+    baselineCallReturnAddrs_[0] = baselineCallReturnAddrs_[1] = nullptr;
 }
 
 JitCompartment::~JitCompartment()
 {
     js_delete(stubCodes_);
 }
 
 bool
@@ -647,18 +647,21 @@ JitCompartment::sweep(FreeOp* fop, JSCom
     // do this for minor GCs.
     MOZ_ASSERT(!fop->runtime()->isHeapMinorCollecting());
     CancelOffThreadIonCompile(compartment, nullptr);
     FinishAllOffThreadCompilations(compartment);
 
     stubCodes_->sweep(fop);
 
     // If the sweep removed the ICCall_Fallback stub, nullptr the baselineCallReturnAddr_ field.
-    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback)))
-        baselineCallReturnAddr_ = nullptr;
+    if (!stubCodes_->lookup(ICCall_Fallback::Compiler::CALL_KEY))
+        baselineCallReturnAddrs_[0] = nullptr;
+    if (!stubCodes_->lookup(ICCall_Fallback::Compiler::CONSTRUCT_KEY))
+        baselineCallReturnAddrs_[1] = nullptr;
+
     // Similarly for the ICGetProp_Fallback stub.
     if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback)))
         baselineGetPropReturnAddr_ = nullptr;
     if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback)))
         baselineSetPropReturnAddr_ = nullptr;
 
     if (stringConcatStub_ && !IsMarkedUnbarriered(&stringConcatStub_))
         stringConcatStub_ = nullptr;
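
Because there are now two cache entries, JitCompartment::sweep() has to test each key and clear the corresponding return-address slot independently; a stale pointer into collected stub code would otherwise survive. The two checks in the patch are equivalent to this loop form (sketch only, not the patch's wording):

    // Sketch only: CALL_KEY maps to slot 0 (constructing == false),
    // CONSTRUCT_KEY to slot 1 (constructing == true).
    const uint32_t keys[2] = { ICCall_Fallback::Compiler::CALL_KEY,
                               ICCall_Fallback::Compiler::CONSTRUCT_KEY };
    for (size_t i = 0; i < 2; i++) {
        if (!stubCodes_->lookup(keys[i]))
            baselineCallReturnAddrs_[i] = nullptr;
    }
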
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -417,17 +417,17 @@ class JitCompartment
     friend class JitActivation;
 
     // Map ICStub keys to ICStub shared code objects.
     typedef WeakValueCache<uint32_t, ReadBarrieredJitCode> ICStubCodeMap;
     ICStubCodeMap* stubCodes_;
 
     // Keep track of offset into various baseline stubs' code at return
     // point from called script.
-    void* baselineCallReturnAddr_;
+    void* baselineCallReturnAddrs_[2];
     void* baselineGetPropReturnAddr_;
     void* baselineSetPropReturnAddr_;
 
     // Stubs to concatenate two strings inline, or perform RegExp calls inline.
     // These bake in zone and compartment specific pointers and can't be stored
     // in JitRuntime. These are weak pointers, but are not declared as
     // ReadBarriered since they are only read from during Ion compilation,
     // which may occur off thread and whose barriers are captured during
@@ -476,23 +476,23 @@ class JitCompartment
     bool putStubCode(uint32_t key, Handle<JitCode*> stubCode) {
         // Make sure to do a lookupForAdd(key) and then insert into that slot, because
         // that way if stubCode gets moved due to a GC caused by lookupForAdd, then
         // we still write the correct pointer.
         MOZ_ASSERT(!stubCodes_->has(key));
         ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key);
         return stubCodes_->add(p, key, stubCode.get());
     }
-    void initBaselineCallReturnAddr(void* addr) {
-        MOZ_ASSERT(baselineCallReturnAddr_ == nullptr);
-        baselineCallReturnAddr_ = addr;
+    void initBaselineCallReturnAddr(void* addr, bool constructing) {
+        MOZ_ASSERT(baselineCallReturnAddrs_[constructing] == nullptr);
+        baselineCallReturnAddrs_[constructing] = addr;
     }
-    void* baselineCallReturnAddr() {
-        MOZ_ASSERT(baselineCallReturnAddr_ != nullptr);
-        return baselineCallReturnAddr_;
+    void* baselineCallReturnAddr(bool constructing) {
+        MOZ_ASSERT(baselineCallReturnAddrs_[constructing] != nullptr);
+        return baselineCallReturnAddrs_[constructing];
     }
     void initBaselineGetPropReturnAddr(void* addr) {
         MOZ_ASSERT(baselineGetPropReturnAddr_ == nullptr);
         baselineGetPropReturnAddr_ = addr;
     }
     void* baselineGetPropReturnAddr() {
         MOZ_ASSERT(baselineGetPropReturnAddr_ != nullptr);
         return baselineGetPropReturnAddr_;
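
The single baselineCallReturnAddr_ pointer becomes a two-element array indexed by the constructing flag, relying on bool converting to 0 or 1. The bailout code in BaselineBailouts.cpp then selects the right slot directly from the bytecode at the call site. A hedged usage sketch combining the two sides of the patch:

    // Sketch only: selecting the per-variant return address during bailout.
    bool constructing = JSOp(*pc) == JSOP_NEW;
    void* retAddr = cx->compartment()->jitCompartment()
                      ->baselineCallReturnAddr(constructing);
    // baselineCallReturnAddrs_[false] holds the plain-call address,
    // baselineCallReturnAddrs_[true] the constructing-call address.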