[INFER] More robust handling of constant object frame entries.
author Brian Hackett <bhackett1024@gmail.com>
Tue, 22 Mar 2011 05:27:03 -0700
changeset 74830 99a3fe34ccc6570b911ba84c0ed5866d84476b3f
parent 74829 0e427e383bfdc706d7c71c2f8211c33a7084a063
child 74831 17e44b678d36742576a3af602eb0300c441e786c
push id 2
push user bsmedberg@mozilla.com
push date Fri, 19 Aug 2011 14:38:13 +0000
milestone 2.0b13pre
js/src/methodjit/Compiler.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.h
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -2367,29 +2367,20 @@ mjit::Compiler::generateMethod()
 
           BEGIN_CASE(JSOP_UNBRANDTHIS)
             jsop_this();
             jsop_unbrand();
             frame.pop();
           END_CASE(JSOP_UNBRANDTHIS)
 
           BEGIN_CASE(JSOP_GETGLOBAL)
+          BEGIN_CASE(JSOP_CALLGLOBAL)
             jsop_getglobal(GET_SLOTNO(PC));
-          END_CASE(JSOP_GETGLOBAL)
-
-          BEGIN_CASE(JSOP_CALLGLOBAL)
-          {
-            JSObject *singleton = pushedSingleton(0);
-            uint32 slot = GET_SLOTNO(PC);
-            if (singleton && !globalObj->getSlot(slot).isUndefined())
-                frame.push(ObjectValue(*singleton));
-            else
-                jsop_getglobal(slot);
-            frame.push(UndefinedValue());
-          }
+            if (op == JSOP_CALLGLOBAL)
+                frame.push(UndefinedValue());
           END_CASE(JSOP_GETGLOBAL)
 
           default:
            /* Sorry, this opcode isn't implemented yet. */
 #ifdef JS_METHODJIT_SPEW
             JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
                        script->filename, js_PCToLineNumber(cx, script, PC));
 #endif
@@ -2487,32 +2478,38 @@ mjit::Compiler::jumpInScript(Jump j, jsb
 }
 
 void
 mjit::Compiler::jsop_getglobal(uint32 index)
 {
     JS_ASSERT(globalObj);
     uint32 slot = script->getGlobalSlot(index);
 
+    JSObject *singleton = pushedSingleton(0);
+    if (singleton && !globalObj->getSlot(slot).isUndefined()) {
+        frame.push(ObjectValue(*singleton));
+        return;
+    }
+
     RegisterID reg = frame.allocReg();
     Address address = masm.objSlotRef(globalObj, reg, slot);
     frame.push(address, knownPushedType(0));
     frame.freeReg(reg);
 
     /*
      * If the global is currently undefined, it might still be undefined at the point
      * of this access, which type inference will not account for. Insert a check.
      */
     if (globalObj->getSlot(slot).isUndefined() &&
         (JSOp(*PC) == JSOP_CALLGLOBAL || PC[JSOP_GETGLOBAL_LENGTH] != JSOP_POP)) {
         Jump jump = masm.testUndefined(Assembler::Equal, address);
         stubcc.linkExit(jump, Uses(0));
         stubcc.leave();
         OOL_STUBCALL(stubs::UndefinedHelper);
-        stubcc.rejoin(Changes(0));
+        stubcc.rejoin(Changes(1));
     }
 }
 
 void
 mjit::Compiler::emitFinalReturn(Assembler &masm)
 {
     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
     masm.jump(Registers::ReturnReg);
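The fast path that JSOP_CALLGLOBAL previously carried now lives in jsop_getglobal itself: a known singleton whose slot is already defined is pushed as a constant frame entry, while everything else loads the slot and, if the value might still be undefined and is actually used, adds the out-of-line UndefinedHelper guard. A minimal sketch of that decision, using plain C++ stand-ins rather than the real FrameState/Assembler interfaces:

    // Sketch: how a global slot read is compiled after this patch.
    enum class GetGlobalPlan {
        PushConstant,     // known singleton and slot already defined: push a constant entry
        Load,             // plain slot load
        LoadWithGuard     // slot load plus OOL guard in case the slot is still undefined
    };

    static GetGlobalPlan planGetGlobal(bool haveSingleton, bool slotUndefined,
                                       bool resultUsed /* a call, or next op is not POP */)
    {
        if (haveSingleton && !slotUndefined)
            return GetGlobalPlan::PushConstant;
        if (slotUndefined && resultUsed)
            return GetGlobalPlan::LoadWithGuard;
        return GetGlobalPlan::Load;
    }
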
@@ -2914,25 +2911,19 @@ mjit::Compiler::inlineCallHelper(uint32 
      * js::Invoke to ultimately call 'this'. We can do much better by having
      * the callIC cache and call 'this' directly. However, if it turns out that
      * we are not actually calling js_fun_call, the callIC must act as normal.
      */
     bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
 
     bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
 
-    /*
-     * Currently, constant values are not functions, so don't even try to
-     * optimize. This lets us assume that callee/this have regs below.
-     */
 #ifdef JS_MONOIC
-    if (debugMode() || newType ||
-        origCallee->isConstant() || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
-        (lowerFunCallOrApply &&
-         (origThis->isConstant() || origThis->isNotType(JSVAL_TYPE_OBJECT)))) {
+    if (debugMode() || newType || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
+        (lowerFunCallOrApply && origThis->isNotType(JSVAL_TYPE_OBJECT))) {
 #endif
         if (applyTricks == LazyArgsObj) {
             /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
             jsop_arguments();
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         }
         emitUncachedCall(callImmArgc, callingNew);
         applyTricks = NoApplyTricks;
@@ -2941,16 +2932,20 @@ mjit::Compiler::inlineCallHelper(uint32 
         if (recompiling) {
             OOL_STUBCALL(stubs::SlowCall);
             stubcc.rejoin(Changes(1));
         }
         return true;
 #ifdef JS_MONOIC
     }
 
+    frame.forgetConstantData(origCallee);
+    if (lowerFunCallOrApply)
+        frame.forgetConstantData(origThis);
+
     /* Initialized by both branches below. */
     CallGenInfo     callIC(PC);
     CallPatchInfo   callPatch;
     MaybeRegisterID icCalleeType; /* type to test for function-ness */
     RegisterID      icCalleeData; /* data to call */
     Address         icRvalAddr;   /* return slot on slow-path rejoin */
 
     /*
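With singleton callees now representable as constant object entries, the call path no longer bails to the uncached call just because origCallee or origThis is a constant; forgetConstantData moves the constant into a register first so the IC code below can keep assuming register operands. Roughly, the relaxed gating condition looks like this (useUncachedCall is an illustrative name, not a real helper):

    static bool useUncachedCall(bool debugMode, bool newType,
                                bool calleeNotObject,
                                bool lowerFunCallOrApply, bool thisNotObject)
    {
        // Constant callee/this values no longer force the slow path; they are
        // handled by forgetConstantData before the call IC is generated.
        return debugMode || newType || calleeNotObject ||
               (lowerFunCallOrApply && thisNotObject);
    }
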
@@ -3221,18 +3216,24 @@ mjit::Compiler::inlineCallHelper(uint32 
         stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
 
     callICs.append(callIC);
     callPatches.append(callPatch);
     if (lowerFunCallOrApply)
         callPatches.append(uncachedCallPatch);
 
     if (!lowerFunCallOrApply && recompiling) {
+        /* Recompiled from inlined native slow path. */
+        if (!callingNew) {
+            OOL_STUBCALL(stubs::SlowCall);
+            stubcc.rejoin(Changes(1));
+        }
+
         /* Recompiled uncached call to cached call. */
-        OOL_STUBCALL(stubs::UncachedCall);
+        OOL_STUBCALL(callingNew ? stubs::UncachedNew : stubs::UncachedCall);
         stubcc.rejoin(Changes(1));
     }
 
     applyTricks = NoApplyTricks;
 
     return true;
 #endif
 }
@@ -3450,16 +3451,18 @@ mjit::Compiler::jsop_getprop(JSAtom *ato
     /* If the incoming type will never PIC, take slow path. */
     if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
         JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
                      top->getKnownType() != JSVAL_TYPE_STRING);
         jsop_getprop_slow(atom, usePropCache);
         return true;
     }
 
+    frame.forgetConstantData(top);
+
     /*
      * These two must be loaded first. The objReg because the string path
      * wants to read it, and the shapeReg because it could cause a spill that
      * the string path wouldn't sink back.
      */
     RegisterID objReg = Registers::ReturnReg;
     RegisterID shapeReg = Registers::ReturnReg;
     if (atom == cx->runtime->atomState.lengthAtom) {
@@ -3824,16 +3827,29 @@ mjit::Compiler::jsop_callprop_obj(JSAtom
     pics.append(pic);
 
     return true;
 }
 
 bool
 mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id)
 {
+    /*
+     * We would like to completely no-op property/global accesses which can
+     * produce only a particular JSObject or undefined, provided we can
+     * determine the pushed value must not be undefined (or, if it could be
+     * undefined, a recompilation will be triggered).
+     *
+     * If the access definitely goes through obj, either directly or on the
+     * prototype chain, then if obj has a defined property now, and the
+     * property has a default or method shape, the only way it can produce
+     * undefined in the future is if it is deleted. Deletion causes type
+     * properties to be explicitly marked with undefined.
+     */
+
     if (!obj->isNative())
         return false;
     if (obj->getClass()->ops.lookupProperty)
         return false;
 
     JSObject *holder;
     JSProperty *prop = NULL;
     if (!obj->lookupProperty(cx, id, &holder, &prop))
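The comment above is the heart of the optimization: an access that must go through obj can be folded to the object it currently holds (for example, Math and then Math.floor on a standard global) because, with a default or method shape, only deletion can later make the access produce undefined, and deletion is visible to the type system. A compact model of the check with placeholder types; the remaining lookup details of the real routine fall outside this hunk:

    struct ShapeModel { bool defaultOrMethodShape; bool hasSlot; };
    struct ObjectModel {
        bool native;
        bool customLookupHook;
        const ShapeModel *ownProp;      // property found directly on this object, if any
        const ObjectModel *proto;
    };

    // True if the property access can be folded to its current value: the
    // holder is native, uses the default lookup, and holds the property with a
    // default or method shape, so only a (type-visible) deletion could make
    // the access produce undefined later.
    static bool canFoldSingletonProperty(const ObjectModel *obj)
    {
        for (const ObjectModel *o = obj; o; o = o->proto) {
            if (!o->native || o->customLookupHook)
                return false;
            if (o->ownProp)
                return o->ownProp->defaultOrMethodShape && o->ownProp->hasSlot;
        }
        return false;
    }
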
@@ -3983,16 +3999,18 @@ mjit::Compiler::jsop_setprop(JSAtom *ato
         typeCheck = stubcc.masm.jump();
         pic.hasTypeCheck = true;
     } else {
         pic.fastPathStart = masm.label();
         pic.hasTypeCheck = false;
         pic.typeReg = Registers::ReturnReg;
     }
 
+    frame.forgetConstantData(lhs);
+
     /* Get the object into a mutable register. */
     RegisterID objReg = frame.copyDataIntoReg(lhs);
     pic.objReg = objReg;
 
     /* Get info about the RHS and pin it. */
     ValueRemat vr;
     frame.pinEntry(rhs, vr);
     pic.vr = vr;
@@ -4084,24 +4102,36 @@ mjit::Compiler::jsop_name(JSAtom *atom, 
 
     /* Initialize op labels. */
     ScopeNameLabels &labels = pic.scopeNameLabels();
     labels.setInlineJump(masm, pic.fastPathStart, inlineJump);
 
     /* Always test for undefined. */
     Jump undefinedGuard = masm.testUndefined(Assembler::Equal, pic.shapeReg);
 
-    frame.pushRegs(pic.shapeReg, pic.objReg, type);
+    /*
+     * We can't optimize away the PIC for the NAME access itself, but if we've
+     * only seen a single value pushed by this access, mark it as such and
+     * recompile if a different value becomes possible.
+     */
+    JSObject *singleton = pushedSingleton(0);
+    if (singleton) {
+        frame.push(ObjectValue(*singleton));
+        frame.freeReg(pic.shapeReg);
+        frame.freeReg(pic.objReg);
+    } else {
+        frame.pushRegs(pic.shapeReg, pic.objReg, type);
+    }
 
     stubcc.rejoin(Changes(1));
 
     stubcc.linkExit(undefinedGuard, Uses(0));
     stubcc.leave();
     OOL_STUBCALL(stubs::UndefinedHelper);
-    stubcc.rejoin(Changes(0));
+    stubcc.rejoin(Changes(1));
 
     pics.append(pic);
 }
 
 bool
 mjit::Compiler::jsop_xname(JSAtom *atom)
 {
     PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC), true);
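For NAME accesses the PIC still has to run, but when type inference has only ever observed one pushed object, the result is replaced by that constant and the PIC's output registers are released; a different value showing up later triggers recompilation. Reduced to its essentials (illustrative names only):

    enum class NamePushPlan {
        Constant,     // push the known singleton; free the PIC's result registers
        Registers     // push the (type, data) register pair produced by the PIC
    };

    static NamePushPlan planNamePush(bool haveSingleton)
    {
        // The NAME PIC runs either way; only what gets pushed as the result
        // differs between the two plans.
        return haveSingleton ? NamePushPlan::Constant : NamePushPlan::Registers;
    }
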
@@ -4111,16 +4141,18 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
         return jsop_getprop(atom, knownPushedType(0));
     }
 
     if (!fe->isTypeKnown()) {
         Jump notObject = frame.testObject(Assembler::NotEqual, fe);
         stubcc.linkExit(notObject, Uses(1));
     }
 
+    frame.forgetConstantData(fe);
+
     RESERVE_IC_SPACE(masm);
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.copyDataIntoReg(fe);
     pic.typeReg = Registers::ReturnReg;
     pic.atom = atom;
     pic.hasTypeCheck = false;
     pic.fastPathStart = masm.label();
@@ -4151,17 +4183,17 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
     /* Always test for undefined. */
     Jump undefinedGuard = masm.testUndefined(Assembler::Equal, pic.shapeReg);
 
     stubcc.rejoin(Changes(1));
 
     stubcc.linkExit(undefinedGuard, Uses(0));
     stubcc.leave();
     OOL_STUBCALL(stubs::UndefinedHelper);
-    stubcc.rejoin(Changes(0));
+    stubcc.rejoin(Changes(1));
 
     pics.append(pic);
     return true;
 }
 
 void
 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
 {
@@ -4588,16 +4620,18 @@ mjit::Compiler::iter(uintN flags)
         return true;
     }
 
     if (!fe->isTypeKnown()) {
         Jump notObject = frame.testObject(Assembler::NotEqual, fe);
         stubcc.linkExit(notObject, Uses(1));
     }
 
+    frame.forgetConstantData(fe);
+
     RegisterID reg = frame.tempRegForData(fe);
 
     frame.pinReg(reg);
     RegisterID ioreg = frame.allocReg();  /* Will hold iterator JSObject */
     RegisterID nireg = frame.allocReg();  /* Will hold NativeIterator */
     RegisterID T1 = frame.allocReg();
     RegisterID T2 = frame.allocReg();
     frame.unpinReg(reg);
@@ -4885,26 +4919,23 @@ mjit::Compiler::jsop_getgname(uint32 ind
         frame.push(cx->runtime->NaNValue);
         return;
     }
     if (atom == cx->runtime->atomState.InfinityAtom) {
         frame.push(cx->runtime->positiveInfinityValue);
         return;
     }
 
-    /*
-     * Optimize singletons like Math for JSOP_CALLPROP.
-     * :FIXME: Fix other ops to support constant objects.
-     */
+    /* Optimize singletons like Math for JSOP_CALLPROP. */
     JSObject *obj = pushedSingleton(0);
-    if (obj && *(PC+JSOP_GETGNAME_LENGTH) == JSOP_CALLPROP &&
-        testSingletonProperty(globalObj, ATOM_TO_JSID(atom))) {
+    if (obj && testSingletonProperty(globalObj, ATOM_TO_JSID(atom))) {
         frame.push(ObjectValue(*obj));
         return;
     }
+
 #if defined JS_MONOIC
     jsop_bindgname();
 
     FrameEntry *fe = frame.peek(-1);
     JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);
 
     GetGlobalNameICInfo ic;
     RESERVE_IC_SPACE(masm);
@@ -4990,16 +5021,29 @@ mjit::Compiler::jsop_callgname_epilogue(
 
     /* Fast path for known-not-an-object callee. */
     FrameEntry *fval = frame.peek(-1);
     if (fval->isNotType(JSVAL_TYPE_OBJECT)) {
         frame.push(UndefinedValue());
         return;
     }
 
+    /* Paths for known object callee. */
+    if (fval->isConstant()) {
+        JSObject *obj = &fval->getValue().toObject();
+        if (obj->getParent() == globalObj) {
+            frame.push(UndefinedValue());
+        } else {
+            prepareStubCall(Uses(1));
+            INLINE_STUBCALL(stubs::PushImplicitThisForGlobal);
+            frame.pushSynced(JSVAL_TYPE_UNKNOWN);
+        }
+        return;
+    }
+
     /*
      * Optimized version. This inlines the common case, calling a
      * (non-proxied) function that has the same global as the current
      * script. To make the code simpler, we:
      *      1. test the stronger property that the callee's parent is
      *         equal to the global of the current script, and
      *      2. bake in the global of the current script, which is why
      *         this optimized path requires compile-and-go.
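When the GETGNAME result is a known constant object, the implicit |this| for the upcoming call can be decided at compile time: a callee parented by the current compile-and-go global gets undefined |this| pushed directly, and anything else falls back to the PushImplicitThisForGlobal stub. A minimal model of that decision (stand-in types for the callee and global):

    struct GlobalModel;                              // stand-in for the script's global object
    struct CalleeModel { const GlobalModel *parent; };

    enum class ImplicitThisPlan {
        PushUndefined,   // callee's parent is the current global: |this| is undefined
        CallStub         // otherwise: stubs::PushImplicitThisForGlobal computes it
    };

    static ImplicitThisPlan planImplicitThis(const CalleeModel &callee,
                                             const GlobalModel *scriptGlobal)
    {
        return callee.parent == scriptGlobal ? ImplicitThisPlan::PushUndefined
                                             : ImplicitThisPlan::CallStub;
    }
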
@@ -5192,16 +5236,19 @@ mjit::Compiler::jsop_instanceof()
     }
 
     MaybeJump firstSlow;
     if (!rhs->isTypeKnown()) {
         Jump j = frame.testObject(Assembler::NotEqual, rhs);
         stubcc.linkExit(j, Uses(2));
     }
 
+    frame.forgetConstantData(lhs);
+    frame.forgetConstantData(rhs);
+
     RegisterID obj = frame.tempRegForData(rhs);
     Jump notFunction = masm.testFunction(Assembler::NotEqual, obj);
     stubcc.linkExit(notFunction, Uses(2));
 
     /* Test for bound functions. */
     Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
                                      Imm32(JSObject::BOUND_FUNCTION));
     {
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -575,16 +575,18 @@ mjit::Compiler::jsop_equality(JSOp op, B
         types::ObjectKind lhsKind =
             lhsTypes ? lhsTypes->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
         types::ObjectKind rhsKind =
             rhsTypes ? rhsTypes->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
 
         if (lhsKind != types::OBJECT_UNKNOWN && rhsKind != types::OBJECT_UNKNOWN) {
             /* :TODO: Merge with jsop_relational_int? */
             JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
+            frame.forgetConstantData(lhs);
+            frame.forgetConstantData(rhs);
             Assembler::Condition cond = GetCompareCondition(op, fused);
             if (target) {
                 fixDoubleTypes(Uses(2));
                 if (!frame.syncForBranch(target, Uses(2)))
                     return false;
                 Jump fast = masm.branchPtr(cond, frame.tempRegForData(lhs), frame.tempRegForData(rhs));
                 frame.popn(2);
                 return jumpAndTrace(fast, target);
@@ -1265,16 +1267,18 @@ mjit::Compiler::jsop_setelem(bool popGua
     FrameEntry *id = frame.peek(-2);
     FrameEntry *value = frame.peek(-1);
 
     if (!IsCacheableSetElem(obj, id, value) || monitored(PC)) {
         jsop_setelem_slow();
         return true;
     }
 
+    frame.forgetConstantData(obj);
+
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.getTypeSet(obj);
         types::ObjectKind kind = types ? types->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
         if (id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
             !arrayPrototypeHasIndexedProperty()) {
             // This is definitely a dense array, generate code directly without
             // using an inline cache.
@@ -1581,16 +1585,18 @@ mjit::Compiler::jsop_getelem(bool isCall
     if (!IsCacheableGetElem(obj, id)) {
         if (isCall)
             jsop_callelem_slow();
         else
             jsop_getelem_slow();
         return true;
     }
 
+    frame.forgetConstantData(obj);
+
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.getTypeSet(obj);
         types::ObjectKind kind = types ? types->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
 
         if (!isCall && id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
             !arrayPrototypeHasIndexedProperty()) {
             // this is definitely a dense array, generate code directly without
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -492,16 +492,30 @@ FrameState::tempRegForData(FrameEntry *f
     if (fe->data.inRegister())
         return fe->data.reg();
 
     RegisterID reg = allocAndLoadReg(fe, false, RematInfo::DATA).reg();
     fe->data.setRegister(reg);
     return reg;
 }
 
+inline void
+FrameState::forgetConstantData(FrameEntry *fe)
+{
+    if (!fe->data.isConstant())
+        return;
+    JS_ASSERT(fe->isType(JSVAL_TYPE_OBJECT));
+
+    RegisterID reg = allocReg();
+    regstate(reg).associate(fe, RematInfo::DATA);
+
+    masm.move(JSC::MacroAssembler::ImmPtr(&fe->getValue().toObject()), reg);
+    fe->data.setRegister(reg);
+}
+
 inline JSC::MacroAssembler::FPRegisterID
 FrameState::tempFPRegForData(FrameEntry *fe)
 {
     JS_ASSERT(!fe->data.isConstant());
     JS_ASSERT(fe->isType(JSVAL_TYPE_DOUBLE));
 
     if (fe->isCopy())
         fe = fe->copyOf();
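forgetConstantData is what makes the new constant object entries safe to hand to PIC and register-based paths: the known object pointer is moved into a freshly allocated register and the entry stops being tracked as a constant, so later tempRegForData or copyDataIntoReg calls see an ordinary object-typed value. A simplified model of the state change (illustrative types; the real version goes through FrameState's register bookkeeping and the macro assembler):

    struct EntryModel {
        bool dataConstant;    // object pointer known at compile time
        void *objectPtr;      // meaningful only while dataConstant is true
        int dataReg;          // -1 when the payload is not in a register
    };

    // Model of the helper above: non-constant entries are untouched; a constant
    // object entry has its payload retagged as living in freshReg (the real
    // code also emits the immediate move of the object pointer and updates
    // the register state).
    static void forgetConstantDataModel(EntryModel &fe, int freshReg)
    {
        if (!fe.dataConstant)
            return;
        fe.dataReg = freshReg;
        fe.dataConstant = false;
    }

As the FrameState.h comment below puts it, this is a stopgap; the longer-term fix is for those Compiler paths to handle constant object entries directly.
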
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -399,16 +399,23 @@ class FrameState
 
     /*
      * Same as above, except loads into reg (using masm) if the entry does not
      * already have a register, and does not change the frame state in doing so.
      */
     inline RegisterID tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const;
 
     /*
+     * If fe is a constant, allocate a register and forget its payload. This
+     * function is a stopgap to cover missing paths in the Compiler; uses of it
+     * should be fixed.
+     */
+    inline void forgetConstantData(FrameEntry *fe);
+
+    /*
      * Convert an integer to a double without applying
      * additional Register pressure.
      */
     inline void convertInt32ToDouble(Assembler &masm, FrameEntry *fe,
                                      FPRegisterID fpreg) const;
 
     /*
      * Dive into a FrameEntry and check whether it's in a register.