Backed out changeset 2901436c9047 (bug 1141865) for e10-s asan m4 test failures on a CLOSED TREE
author: Carsten "Tomcat" Book <cbook@mozilla.com>
date: Wed, 03 Jun 2015 12:46:33 +0200
changeset: 269645 8d2064e2c5111114ba60a0c475bc6482f62a5ccf
parent: 269644 23fcf07dcd9edda77c36a361eb752ac0d6f067e4
child: 269646 5d8c22617d0dc0ab571903f572849da42051dc2a
push id: 2540
push user: wcosta@mozilla.com
push date: Wed, 03 Jun 2015 20:55:41 +0000
bugs: 1141865
milestone: 41.0a1
backs out: 2901436c9047202f7cc30fd89474e1bd2075294d
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/Ion.cpp
js/src/jit/JitCompartment.h
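
The hunks below revert bug 1141865's split of the Baseline call fallback into per-constructing stub code: the constructing bit moves out of the stub-code key (getKey) and back into the stub instance's extra_ flags, and JitCompartment drops the two-element baselineCallReturnAddrs_ array in favor of a single baselineCallReturnAddr_. For orientation only, here is a minimal, hypothetical standalone sketch of the flag scheme being restored; this is simplified illustrative C++, not SpiderMonkey code, and the class name below is invented.

// Illustrative sketch of the restored scheme: one shared fallback stub whose
// "constructing" nature is a per-instance bit in extra_, rather than a
// separate compiled stub-code variant keyed by a constructing bit.
#include <cassert>
#include <cstdint>
#include <cstdio>

class CallFallbackStub {              // hypothetical stand-in for ICCall_Fallback
    uint16_t extra_ = 0;              // per-instance flag word, as in ICStub::extra_
  public:
    static const unsigned CONSTRUCTING_FLAG = 0x1;
    static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x2;

    explicit CallFallbackStub(bool isConstructing) {
        if (isConstructing)
            extra_ |= CONSTRUCTING_FLAG;
    }
    bool isConstructing() const { return extra_ & CONSTRUCTING_FLAG; }
    void noteUnoptimizableCall() { extra_ |= UNOPTIMIZABLE_CALL_FLAG; }
    bool hadUnoptimizableCall() const { return extra_ & UNOPTIMIZABLE_CALL_FLAG; }
};

int main() {
    CallFallbackStub regular(false);
    CallFallbackStub ctor(true);
    ctor.noteUnoptimizableCall();
    // The two flags live in the same word but do not interfere.
    assert(!regular.isConstructing() && ctor.isConstructing());
    assert(!regular.hadUnoptimizableCall() && ctor.hadUnoptimizableCall());
    std::printf("constructing and unoptimizable flags are independent\n");
    return 0;
}

In the actual patch the generated stub code reads this bit at runtime (load16ZeroExtend of ICStub::offsetOfExtra, then branchTest32 against CONSTRUCTING_FLAG) instead of specializing the stub at compile time on isConstructing_.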
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -449,17 +449,17 @@ static inline void*
 GetStubReturnAddress(JSContext* cx, jsbytecode* pc)
 {
     if (IsGetPropPC(pc))
         return cx->compartment()->jitCompartment()->baselineGetPropReturnAddr();
     if (IsSetPropPC(pc))
         return cx->compartment()->jitCompartment()->baselineSetPropReturnAddr();
     // This should be a call op of some kind, now.
     MOZ_ASSERT(IsCallPC(pc));
-    return cx->compartment()->jitCompartment()->baselineCallReturnAddr(JSOp(*pc) == JSOP_NEW);
+    return cx->compartment()->jitCompartment()->baselineCallReturnAddr();
 }
 
 static inline jsbytecode*
 GetNextNonLoopEntryPc(jsbytecode* pc)
 {
     JSOp op = JSOp(*pc);
     if (op == JSOP_GOTO)
         return pc + GET_JUMP_OFFSET(pc);
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -10830,30 +10830,34 @@ ICCall_Fallback::Compiler::generateStubC
 
     // Load passed-in ThisV into R1 just in case it's needed.  Need to do this before
     // we leave the stub frame since that info will be lost.
     // Current stack:  [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
     masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
 
     leaveStubFrame(masm, true);
 
+    // R1 and R0 are taken.
+    regs = availableGeneralRegs(2);
+    Register scratch = regs.takeAny();
+
     // If this is a |constructing| call, if the callee returns a non-object, we replace it with
     // the |this| object passed in.
-    if (isConstructing_) {
-        MOZ_ASSERT(JSReturnOperand == R0);
-        Label skipThisReplace;
-
-        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
-        masm.moveValue(R1, R0);
+    MOZ_ASSERT(JSReturnOperand == R0);
+    Label skipThisReplace;
+    masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
+    masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
+                      &skipThisReplace);
+    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
+    masm.moveValue(R1, R0);
 #ifdef DEBUG
-        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
-        masm.assumeUnreachable("Failed to return object in constructing call.");
+    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
+    masm.assumeUnreachable("Failed to return object in constructing call.");
 #endif
-        masm.bind(&skipThisReplace);
-    }
+    masm.bind(&skipThisReplace);
 
     // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
     // a MonitoredStub, but rather a MonitoredFallbackStub.  To use EmitEnterTypeMonitorIC,
     // first load the ICTypeMonitor_Fallback stub into BaselineStubReg.  Then, use
     // EmitEnterTypeMonitorIC with a custom struct offset.
     masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                  BaselineStubReg);
     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
@@ -10864,18 +10868,17 @@ ICCall_Fallback::Compiler::generateStubC
 bool
 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
     if (MOZ_UNLIKELY(isSpread_))
         return true;
 
     CodeOffsetLabel offset(returnOffset_);
     offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset(),
-                                                                    isConstructing_);
+    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset());
     return true;
 }
 
 typedef bool (*CreateThisFn)(JSContext* cx, HandleObject callee, MutableHandleValue rval);
 static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
 
 bool
 ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -5533,28 +5533,37 @@ class ICCallStubCompiler : public ICStub
     void pushArrayArguments(MacroAssembler& masm, Address arrayVal,
                             AllocatableGeneralRegisterSet regs);
 };
 
 class ICCall_Fallback : public ICMonitoredFallbackStub
 {
     friend class ICStubSpace;
   public:
-    static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x1;
+    static const unsigned CONSTRUCTING_FLAG = 0x1;
+    static const unsigned UNOPTIMIZABLE_CALL_FLAG = 0x2;
 
     static const uint32_t MAX_OPTIMIZED_STUBS = 16;
     static const uint32_t MAX_SCRIPTED_STUBS = 7;
     static const uint32_t MAX_NATIVE_STUBS = 7;
   private:
 
-    explicit ICCall_Fallback(JitCode* stubCode)
+    ICCall_Fallback(JitCode* stubCode, bool isConstructing)
       : ICMonitoredFallbackStub(ICStub::Call_Fallback, stubCode)
-    { }
-
-  public:
+    {
+        extra_ = 0;
+        if (isConstructing)
+            extra_ |= CONSTRUCTING_FLAG;
+    }
+
+  public:
+    bool isConstructing() const {
+        return extra_ & CONSTRUCTING_FLAG;
+    }
+
     void noteUnoptimizableCall() {
         extra_ |= UNOPTIMIZABLE_CALL_FLAG;
     }
     bool hadUnoptimizableCall() const {
         return extra_ & UNOPTIMIZABLE_CALL_FLAG;
     }
 
     unsigned scriptedStubCount() const {
@@ -5569,40 +5578,36 @@ class ICCall_Fallback : public ICMonitor
     }
     bool nativeStubsAreGeneralized() const {
         // Return hasStub(Call_AnyNative) after Call_AnyNative stub is added.
         return false;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
-      public:
-        static const int32_t CALL_KEY = static_cast<int32_t>(ICStub::Call_Fallback);
-        static const int32_t CONSTRUCT_KEY = static_cast<int32_t>(ICStub::Call_Fallback) | (1 << 17);
       protected:
         bool isConstructing_;
         bool isSpread_;
         uint32_t returnOffset_;
         bool generateStubCode(MacroAssembler& masm);
         bool postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code);
 
         virtual int32_t getKey() const {
-            return static_cast<int32_t>(kind) | (static_cast<int32_t>(isSpread_) << 16) |
-                   (static_cast<int32_t>(isConstructing_) << 17);
+            return static_cast<int32_t>(kind) | (static_cast<int32_t>(isSpread_) << 16);
         }
 
       public:
         Compiler(JSContext* cx, bool isConstructing, bool isSpread)
           : ICCallStubCompiler(cx, ICStub::Call_Fallback),
             isConstructing_(isConstructing),
             isSpread_(isSpread)
         { }
 
         ICStub* getStub(ICStubSpace* space) {
-            ICCall_Fallback* stub = newStub<ICCall_Fallback>(space, getStubCode());
+            ICCall_Fallback* stub = newStub<ICCall_Fallback>(space, getStubCode(), isConstructing_);
             if (!stub || !stub->initMonitoringChain(cx, space))
                 return nullptr;
             return stub;
         }
     };
 };
 
 class ICCall_Scripted : public ICMonitoredStub
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -357,23 +357,23 @@ JitRuntime::patchIonBackedges(JSRuntime*
             PatchBackedge(patchableBackedge->backedge, patchableBackedge->loopHeader, target);
         else
             PatchBackedge(patchableBackedge->backedge, patchableBackedge->interruptCheck, target);
     }
 }
 
 JitCompartment::JitCompartment()
   : stubCodes_(nullptr),
+    baselineCallReturnAddr_(nullptr),
     baselineGetPropReturnAddr_(nullptr),
     baselineSetPropReturnAddr_(nullptr),
     stringConcatStub_(nullptr),
     regExpExecStub_(nullptr),
     regExpTestStub_(nullptr)
 {
-    baselineCallReturnAddrs_[0] = baselineCallReturnAddrs_[1] = nullptr;
 }
 
 JitCompartment::~JitCompartment()
 {
     js_delete(stubCodes_);
 }
 
 bool
@@ -647,21 +647,18 @@ JitCompartment::sweep(FreeOp* fop, JSCom
     // do this for minor GCs.
     MOZ_ASSERT(!fop->runtime()->isHeapMinorCollecting());
     CancelOffThreadIonCompile(compartment, nullptr);
     FinishAllOffThreadCompilations(compartment);
 
     stubCodes_->sweep(fop);
 
     // If the sweep removed the ICCall_Fallback stub, nullptr the baselineCallReturnAddr_ field.
-    if (!stubCodes_->lookup(ICCall_Fallback::Compiler::CALL_KEY))
-        baselineCallReturnAddrs_[0] = nullptr;
-    if (!stubCodes_->lookup(ICCall_Fallback::Compiler::CONSTRUCT_KEY))
-        baselineCallReturnAddrs_[1] = nullptr;
-
+    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback)))
+        baselineCallReturnAddr_ = nullptr;
     // Similarly for the ICGetProp_Fallback stub.
     if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback)))
         baselineGetPropReturnAddr_ = nullptr;
     if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback)))
         baselineSetPropReturnAddr_ = nullptr;
 
     if (stringConcatStub_ && !IsMarkedUnbarriered(&stringConcatStub_))
         stringConcatStub_ = nullptr;
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -417,17 +417,17 @@ class JitCompartment
     friend class JitActivation;
 
     // Map ICStub keys to ICStub shared code objects.
     typedef WeakValueCache<uint32_t, ReadBarrieredJitCode> ICStubCodeMap;
     ICStubCodeMap* stubCodes_;
 
     // Keep track of offset into various baseline stubs' code at return
     // point from called script.
-    void* baselineCallReturnAddrs_[2];
+    void* baselineCallReturnAddr_;
     void* baselineGetPropReturnAddr_;
     void* baselineSetPropReturnAddr_;
 
     // Stubs to concatenate two strings inline, or perform RegExp calls inline.
     // These bake in zone and compartment specific pointers and can't be stored
     // in JitRuntime. These are weak pointers, but are not declared as
     // ReadBarriered since they are only read from during Ion compilation,
     // which may occur off thread and whose barriers are captured during
@@ -476,23 +476,23 @@ class JitCompartment
     bool putStubCode(uint32_t key, Handle<JitCode*> stubCode) {
         // Make sure to do a lookupForAdd(key) and then insert into that slot, because
         // that way if stubCode gets moved due to a GC caused by lookupForAdd, then
         // we still write the correct pointer.
         MOZ_ASSERT(!stubCodes_->has(key));
         ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key);
         return stubCodes_->add(p, key, stubCode.get());
     }
-    void initBaselineCallReturnAddr(void* addr, bool constructing) {
-        MOZ_ASSERT(baselineCallReturnAddrs_[constructing] == nullptr);
-        baselineCallReturnAddrs_[constructing] = addr;
+    void initBaselineCallReturnAddr(void* addr) {
+        MOZ_ASSERT(baselineCallReturnAddr_ == nullptr);
+        baselineCallReturnAddr_ = addr;
     }
-    void* baselineCallReturnAddr(bool constructing) {
-        MOZ_ASSERT(baselineCallReturnAddrs_[constructing] != nullptr);
-        return baselineCallReturnAddrs_[constructing];
+    void* baselineCallReturnAddr() {
+        MOZ_ASSERT(baselineCallReturnAddr_ != nullptr);
+        return baselineCallReturnAddr_;
     }
     void initBaselineGetPropReturnAddr(void* addr) {
         MOZ_ASSERT(baselineGetPropReturnAddr_ == nullptr);
         baselineGetPropReturnAddr_ = addr;
     }
     void* baselineGetPropReturnAddr() {
         MOZ_ASSERT(baselineGetPropReturnAddr_ != nullptr);
         return baselineGetPropReturnAddr_;