[JAEGER] Added JSOP_ENTERBLOCK, JSOP_LEAVEBLOCK, and support for |let|.
author David Anderson <danderson@mozilla.com>
Wed, 09 Jun 2010 01:03:58 -0700
changeset 52785 b1b07d563ca527e35e7d84de30b32b0c1d826dfc
parent 52784 741e1b67fd0d9827369269fab00393def69cdc9d
child 52786 87e881456cde6d8d6993448f1049216a431d4817
push id 1
push user shaver@mozilla.com
push date Tue, 04 Jan 2011 17:58:04 +0000
milestone 1.9.3a5pre
[JAEGER] Added JSOP_ENTERBLOCK, JSOP_LEAVEBLOCK, and support for |let|.
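A block containing |let| bindings is bracketed by these opcodes in the emitted bytecode; roughly:

    {
        let x = f();   // JSOP_ENTERBLOCK reserves the block's slots (set to undefined)
        g(x);          // x lives in a stack slot above the frame's fixed locals
    }                  // JSOP_LEAVEBLOCK pops the slots and unwinds fp->blockChain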
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.cpp
js/src/methodjit/FrameState.h
js/src/methodjit/MethodJIT.h
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCalls.h
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -904,17 +904,17 @@ mjit::Compiler::generateMethod()
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_MASK32_BOOLEAN, Registers::ReturnReg);
           END_CASE(JSOP_INSTANCEOF)
 
           BEGIN_CASE(JSOP_LINENO)
           END_CASE(JSOP_LINENO)
 
           BEGIN_CASE(JSOP_DEFFUN)
-            JS_ASSERT(frame.stackDepth() == 0);
+            prepareStubCall();
             masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
             stubCall(stubs::DefFun, Uses(0), Defs(0));
           END_CASE(JSOP_DEFFUN)
 
           BEGIN_CASE(JSOP_LAMBDA)
           {
             JSFunction *fun = script->getFunction(fullAtomIndex(PC));
             prepareStubCall();
@@ -1015,16 +1015,45 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_CALLELEM)
 
           BEGIN_CASE(JSOP_STOP)
             /* Safe point! */
             emitReturn();
             goto done;
           END_CASE(JSOP_STOP)
 
+          BEGIN_CASE(JSOP_ENTERBLOCK)
+          {
+            // If this is an exception entry point, then js_InternalThrow has set
+            // VMFrame::fp to the correct fp for the entry point. We need to copy
+            // that value here to FpReg so that FpReg also holds the correct frame
+            // pointer. Otherwise, we would simply be using a stale FpReg value.
+            if (analysis[PC].exceptionEntry)
+                restoreFrameRegs();
+
+            /* Flush the frame; the EnterBlock stub pushes the block's locals. */
+            JSObject *obj = script->getObject(fullAtomIndex(PC));
+            frame.forgetEverything();
+            masm.move(ImmPtr(obj), Registers::ArgReg1);
+            uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
+            stubCall(stubs::EnterBlock, Uses(0), Defs(n));
+            frame.enterBlock(n);
+          }
+          END_CASE(JSOP_ENTERBLOCK)
+
+          BEGIN_CASE(JSOP_LEAVEBLOCK)
+          {
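+            /* Let the stub unwind the block chain, then pop the block's slots here. */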
+            uint32 n = js_GetVariableStackUses(op, PC);
+            prepareStubCall();
+            stubCall(stubs::LeaveBlock, Uses(n), Defs(0));
+            frame.leaveBlock(n);
+          }
+          END_CASE(JSOP_LEAVEBLOCK)
+
           BEGIN_CASE(JSOP_CALLLOCAL)
             frame.pushLocal(GET_SLOTNO(PC));
             frame.push(NullTag());
           END_CASE(JSOP_CALLLOCAL)
 
           BEGIN_CASE(JSOP_INT8)
             frame.push(Value(Int32Tag(GET_INT8(PC))));
           END_CASE(JSOP_INT8)
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -164,16 +164,17 @@ class Compiler
     STUB_CALL_TYPE(VoidPtrStub);
     STUB_CALL_TYPE(BoolStub);
     STUB_CALL_TYPE(JSObjStubUInt32);
     STUB_CALL_TYPE(JSObjStubFun);
     STUB_CALL_TYPE(JSObjStubJSObj);
     STUB_CALL_TYPE(VoidStubAtom);
     STUB_CALL_TYPE(JSStrStub);
     STUB_CALL_TYPE(JSStrStubUInt32);
+    STUB_CALL_TYPE(VoidStubJSObj);
 
 #undef STUB_CALL_TYPE
     void prepareStubCall();
     Call stubCall(void *ptr, Uses uses, Defs defs);
 };
 
 } /* namespace js */
 } /* namespace mjit */
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -486,13 +486,30 @@ FrameState::dup2()
 }
 
 inline void
 FrameState::pushLocal(uint32 n)
 {
     pushCopyOf(indexOfFe(getLocal(n)));
 }
 
+inline void
+FrameState::leaveBlock(uint32 n)
+{
+    popn(n);
+}
+
+inline void
+FrameState::enterBlock(uint32 n)
+{
+    /* Expect the tracker to have no entries, for now. */
+    JS_ASSERT(!tracker.nentries);
+    JS_ASSERT(uint32(sp + n - locals) <= script->nslots);
+
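+    /* The EnterBlock stub has set these slots to undefined; just grow the stack. */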
+    sp += n;
+}
+
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* include */
 
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -604,46 +604,82 @@ FrameState::storeLocal(uint32 n)
         localFe->setConstant(Jsvalify(top->getValue()));
         return;
     }
 
     /*
      * When dealing with copies, there are two important invariants:
      *
      * 1) The backing store precedes all copies in the tracker.
-     * 2) The backing store of a local is never a stack slot.
+     * 2) The backing store of a local is never a stack slot, UNLESS the local
+     *    variable itself is a stack slot (as with blocks) that precedes the
+     *    backing stack slot.
      *
      * If the top is a copy, and the second condition holds true, the local
      * can be rewritten as a copy of the original backing slot. If the first
      * condition does not hold, force it to hold by swapping in-place.
      */
     FrameEntry *backing = top;
-    uint32 searchPoint = InvalidIndex;
     if (top->isCopy()) {
         backing = top->copyOf();
         JS_ASSERT(backing->trackerIndex() < top->trackerIndex());
 
-        if (indexOfFe(backing) < uint32(spBase - base)) {
+        uint32 backingIndex = indexOfFe(backing);
+        uint32 tol = uint32(spBase - base);
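+        /* Indices below |tol| are locals, not operation-stack slots. */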
+        if (backingIndex < tol || backingIndex < localIndex(n)) {
             /* local.idx < backing.idx means local cannot be a copy yet */
             if (localFe->trackerIndex() < backing->trackerIndex())
                 swapInTracker(backing, localFe);
             localFe->setNotCopied();
             localFe->setCopyOf(backing);
             if (backing->isTypeKnown())
                 localFe->setTypeTag(backing->getTypeTag());
             else
                 localFe->type.invalidate();
             localFe->data.invalidate();
             return;
         }
 
-        searchPoint = backing->trackerIndex();
-    } else if (top->trackerIndex() < localFe->trackerIndex()) {
-        swapInTracker(top, localFe);
+        /*
+         * If control flow lands here, then there was a bytecode sequence like
+         *
+         *  ENTERBLOCK 2
+         *  GETLOCAL 1
+         *  SETLOCAL 0
+         *
+         * The problem is slot N can't be backed by M if M could be popped
+         * before N. We want a guarantee that when we pop M, even if it was
+         * copied, it has no outstanding copies.
+         * 
+         * Because of |let| expressions, it's kind of hard to really know
+         * whether a region on the stack will be popped all at once. Bleh!
+         *
+         * This should be rare except in browser code (and maybe even then),
+         * but even so there's a quick workaround. We take all copies of the
+         * backing fe, and redirect them to be copies of the destination.
+         */
+        FrameEntry *tos = tosFe();
+        for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
+            FrameEntry *fe = tracker[i];
+            if (fe >= tos)
+                continue;
+            if (fe->isCopy() && fe->copyOf() == backing)
+                fe->setCopyOf(localFe);
+        }
     }
+    backing->setNotCopied();
+    
+    /*
+     * This is valid from the top->isCopy() path because we're guaranteed a
+     * consistent ordering - all copies of |backing| are tracked after 
+     * |backing|. Transitively, only one swap is needed.
+     */
+    if (backing->trackerIndex() < localFe->trackerIndex())
+        swapInTracker(backing, localFe);
 
     /*
      * Move the backing store down - we spill registers here, but we could be
      * smarter and re-use the type reg.
      */
     RegisterID reg = tempRegForData(backing);
     localFe->data.setRegister(reg);
     moveOwnership(reg, localFe);
@@ -657,13 +692,12 @@ FrameState::storeLocal(uint32 n)
     }
 
     if (!backing->isTypeKnown())
         backing->type.invalidate();
     backing->data.invalidate();
     backing->setNotCopied();
     backing->setCopyOf(localFe);
 
-    JS_ASSERT(searchPoint == InvalidIndex);
     JS_ASSERT(top->copyOf() == localFe);
 }
 
 
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -186,16 +186,22 @@ class FrameState
 
     /*
      * Pops a number of values off the operation stack, freeing any of their
      * resources.
      */
     inline void popn(uint32 n);
 
     /*
+     * Temporarily increase and decrease local variable depth.
+     */
+    inline void enterBlock(uint32 n);
+    inline void leaveBlock(uint32 n);
+
+    /*
      * Pushes a copy of a local variable.
      */
     void pushLocal(uint32 n);
 
     /*
      * Allocates a temporary register for a FrameEntry's type. The register
      * can be spilled or clobbered by the frame. The compiler may only operate
      * on it temporarily, and must take care not to clobber it.
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -189,16 +189,17 @@ typedef void * (JS_FASTCALL *VoidPtrStub
 typedef void * (JS_FASTCALL *VoidPtrStubUInt32)(VMFrame &, uint32);
 typedef JSObject * (JS_FASTCALL *JSObjStub)(VMFrame &);
 typedef JSObject * (JS_FASTCALL *JSObjStubUInt32)(VMFrame &, uint32);
 typedef JSObject * (JS_FASTCALL *JSObjStubFun)(VMFrame &, JSFunction *);
 typedef JSObject * (JS_FASTCALL *JSObjStubJSObj)(VMFrame &, JSObject *);
 typedef void (JS_FASTCALL *VoidStubAtom)(VMFrame &, JSAtom *);
 typedef JSString * (JS_FASTCALL *JSStrStub)(VMFrame &);
 typedef JSString * (JS_FASTCALL *JSStrStubUInt32)(VMFrame &, uint32);
+typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
 
 #define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(-1))
 
 namespace mjit {
 
 JSBool
 JaegerShot(JSContext *cx);
 
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -2509,8 +2509,78 @@ stubs::ArgCnt(VMFrame &f)
     JSStackFrame *fp = f.fp;
 
     jsid id = ATOM_TO_JSID(rt->atomState.lengthAtom);
     f.regs.sp++;
     if (!js_GetArgsProperty(cx, fp, id, &f.regs.sp[-1]))
         THROW();
 }
 
+void JS_FASTCALL
+stubs::EnterBlock(VMFrame &f, JSObject *obj)
+{
+    JSContext *cx = f.cx;
+    JSFrameRegs &regs = f.regs;
+    JSStackFrame *fp = f.fp;
+
+    JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
+    JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
+    Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
+    JS_ASSERT(regs.sp < vp);
+    JS_ASSERT(vp <= fp->slots() + fp->script->nslots);
+    SetValueRangeToUndefined(regs.sp, vp);
+    regs.sp = vp;
+
+#ifdef DEBUG
+    JS_ASSERT(fp->blockChain == obj->getParent());
+
+    /*
+     * The young end of fp->scopeChain may omit blocks if we haven't closed
+     * over them, but if there are any closure blocks on fp->scopeChain, they'd
+     * better be (clones of) ancestors of the block we're entering now;
+     * anything else we should have popped off fp->scopeChain when we left its
+     * static scope.
+     */
+    JSObject *obj2 = fp->scopeChain;
+    Class *clasp;
+    while ((clasp = obj2->getClass()) == &js_WithClass)
+        obj2 = obj2->getParent();
+    if (clasp == &js_BlockClass &&
+        obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
+        JSObject *youngestProto = obj2->getProto();
+        JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
+        JSObject *parent = obj;
+        while ((parent = parent->getParent()) != youngestProto)
+            JS_ASSERT(parent);
+    }
+#endif
+
+    fp->blockChain = obj;
+}
+
+void JS_FASTCALL
+stubs::LeaveBlock(VMFrame &f)
+{
+    JSContext *cx = f.cx;
+    JSStackFrame *fp = f.fp;
+
+#ifdef DEBUG
+    JS_ASSERT(fp->blockChain->getClass() == &js_BlockClass);
+    uintN blockDepth = OBJ_BLOCK_DEPTH(cx, fp->blockChain);
+
+    JS_ASSERT(blockDepth <= StackDepth(fp->script));
+#endif
+    /*
+     * If we're about to leave the dynamic scope of a block that has been
+     * cloned onto fp->scopeChain, clear its private data, move its locals from
+     * the stack into the clone, and pop it off the chain.
+     */
+    JSObject *obj = fp->scopeChain;
+    if (obj->getProto() == fp->blockChain) {
+        JS_ASSERT(obj->getClass() == &js_BlockClass);
+        if (!js_PutBlockObject(cx, JS_TRUE))
+            THROW();
+    }
+
+    /* Pop the block chain, too.  */
+    fp->blockChain = fp->blockChain->getParent();
+}
+
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -85,16 +85,18 @@ void JS_FASTCALL DecProp(VMFrame &f, JSA
 void JS_FASTCALL CallProp(VMFrame &f, JSAtom *atom);
 
 void JS_FASTCALL DefFun(VMFrame &f, uint32 index);
 JSObject * JS_FASTCALL DefLocalFun(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL RegExp(VMFrame &f, JSObject *regex);
 JSObject * JS_FASTCALL Lambda(VMFrame &f, JSFunction *fun);
 JSObject * JS_FASTCALL FlatLambda(VMFrame &f, JSFunction *fun);
 void JS_FASTCALL Arguments(VMFrame &f);
+void JS_FASTCALL EnterBlock(VMFrame &f, JSObject *obj);
+void JS_FASTCALL LeaveBlock(VMFrame &f);
 
 void JS_FASTCALL VpInc(VMFrame &f, Value *vp);
 void JS_FASTCALL VpDec(VMFrame &f, Value *vp);
 void JS_FASTCALL DecVp(VMFrame &f, Value *vp);
 void JS_FASTCALL IncVp(VMFrame &f, Value *vp);
 
 JSBool JS_FASTCALL LessThan(VMFrame &f);
 JSBool JS_FASTCALL LessEqual(VMFrame &f);