[INFER] Inline scripted calls, bug 639099.
author     Brian Hackett <bhackett1024@gmail.com>
date       Sat, 26 Mar 2011 19:07:13 -0700
changeset 75864 2a6ecf5af25ce78f36bffd8c34bd4cb179cd92ee
parent 75863 3d71887519174eeea07dc5ad5aa5ba089f61f311
child 75865 e2b96219a262097d8e008e23f46cddfa38280d92
push id    67
push user  clegnitto@mozilla.com
push date  Fri, 04 Nov 2011 22:39:41 +0000
bugs       639099
milestone  2.0b13pre
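
This patch teaches JaegerMonkey (the method JIT) to inline calls to scripted functions, guided by type inference. The analysis changes in jsanalyze.cpp below compute a per-script isInlineable flag: a callee qualifies only if, roughly, it is loop-free and avoids eval, 'arguments', closed-over variables, try blocks, switches, and NAME-based scope accesses. A minimal sketch of the distinction (function names here are illustrative, not from the patch):

    // Likely inlineable: small, loop-free, touches only its formals.
    function add(x, y) {
      return x + y;
    }

    // Never inlineable: the loop's back edge sets isInlineable = false
    // during analysis, as do eval, 'arguments', try, and similar constructs.
    function sum(a) {
      var t = 0;
      for (var i = 0; i < a.length; i++)
        t += a[i];
      return t;
    }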
js/src/jit-test/tests/jaeger/inline/scripted-01.js
js/src/jit-test/tests/jaeger/inline/scripted-02.js
js/src/jit-test/tests/jaeger/inline/scripted-03.js
js/src/jit-test/tests/jaeger/inline/scripted-04.js
js/src/jit-test/tests/jaeger/inline/scripted-05.js
js/src/jit-test/tests/jaeger/inline/scripted-06.js
js/src/jit-test/tests/jaeger/inline/scripted-07.js
js/src/jit-test/tests/jaeger/inline/scripted-08.js
js/src/jit-test/tests/jaeger/inline/scripted-09.js
js/src/jit-test/tests/jaeger/inline/scripted-10.js
js/src/jit-test/tests/jaeger/inline/scripted-11.js
js/src/jsanalyze.cpp
js/src/jsanalyze.h
js/src/jsapi.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/jsdbgapi.cpp
js/src/jsexn.cpp
js/src/jsfun.cpp
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsinferinlines.h
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsinterpinlines.h
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsopcode.cpp
js/src/jspropertycache.cpp
js/src/jsscript.cpp
js/src/jsvalue.h
js/src/jswrapper.cpp
js/src/jsxml.cpp
js/src/methodjit/BaseAssembler.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastArithmetic.cpp
js/src/methodjit/FastBuiltins.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameEntry.h
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.cpp
js/src/methodjit/FrameState.h
js/src/methodjit/ImmutableSync.cpp
js/src/methodjit/ImmutableSync.h
js/src/methodjit/InlineFrameAssembler.h
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MachineRegs.h
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/MonoIC.h
js/src/methodjit/PolyIC.cpp
js/src/methodjit/Retcon.cpp
js/src/methodjit/Retcon.h
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCompiler.cpp
js/src/methodjit/StubCompiler.h
js/src/methodjit/TrampolineCompiler.cpp
js/src/tests/manifest.py
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-01.js
@@ -0,0 +1,17 @@
+function bar(x, y) {
+  return x + y;
+}
+
+function foo(x, y) {
+  var a = 0;
+  for (var i = 0; i < 1000; i++) {
+    a += bar(x, y);
+    a += bar(x, y);
+    a += bar(x, y);
+    a += bar(x, y);
+  }
+  return a;
+}
+
+var q = foo(0, 1);
+assertEq(q, 4000);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-02.js
@@ -0,0 +1,22 @@
+function getter(a, i) {
+  return a[i];
+}
+
+function foo(a, n) {
+  var res = 0;
+  for (var i = 0; i < 10; i++) {
+    res = 0;
+    for (var j = 0; j < n; j++) {
+      res += getter(a, j);
+    }
+  }
+  return res;
+}
+
+var n = 100;
+var a = Array(n);
+for (var i = 0; i < n; i++)
+  a[i] = i;
+
+var q = foo(a, n);
+assertEq(q, 4950);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-03.js
@@ -0,0 +1,14 @@
+function choose(x, y, z) {
+  return x ? y : z;
+}
+
+function foo(x, y, z) {
+  var a = 0;
+  for (var i = 0; i < 100; i++) {
+    a += choose(x, y, z);
+  }
+  return a;
+}
+
+var q = foo(true, 10, 0);
+assertEq(q, 1000);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-04.js
@@ -0,0 +1,12 @@
+function adder(x, y) {
+  return x + y;
+}
+
+function foo(x) {
+  for (var i = 0; i < 100; i++)
+    var a = adder(x, i);
+  return a;
+}
+
+var q = foo(0x7ffffff0);
+assertEq(q, 2147483731);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-05.js
@@ -0,0 +1,13 @@
+function adder(x, y) {
+  return Math.floor(x + y);
+}
+
+function foo(x) {
+  for (var i = 0; i < 100; i++) {
+    var a = adder(x, i);
+  }
+  return a;
+}
+
+var q = foo(0x7ffffff0 + .5);
+assertEq(q, 2147483731);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-06.js
@@ -0,0 +1,17 @@
+function popper(a) {
+  return a.pop();
+}
+
+function foo(x) {
+  for (var i = 0; i < 10; i++) {
+    var q = popper(x);
+    if (i < 5)
+      assertEq(q, 5 - i);
+    else
+      assertEq(q, undefined);
+  }
+  return q;
+}
+
+var q = foo([1,2,3,4,5]);
+assertEq(q, undefined);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-07.js
@@ -0,0 +1,15 @@
+function multiple(a) {
+  if (a > 10)
+    return 1;
+  return 0;
+}
+
+function foo(x) {
+  var a = 0;
+  for (var i = 0; i < 100; i++)
+    a += multiple(i);
+  return a;
+}
+
+var q = foo([1,2,3,4,5]);
+assertEq(q, 89);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-08.js
@@ -0,0 +1,21 @@
+function first(a, b) {
+  return second(a, b);
+}
+
+function second(a, b) {
+  return third(a, b, a + b);
+}
+
+function third(a, b, c) {
+  return a + b + c;
+}
+
+function foo(x) {
+  var a = 0;
+  for (var i = 0; i < 100; i++)
+    a += first(x, i);
+  return a;
+}
+
+var q = foo(10);
+assertEq(q, 11900);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-09.js
@@ -0,0 +1,19 @@
+function multiple(a) {
+  if (a > 10)
+    return a * 20;
+  return 0;
+}
+
+function deeper(a, b) {
+  return multiple(a + b);
+}
+
+function foo() {
+  var a = 0;
+  for (var i = 0; i < 10; i++)
+    a += deeper(0x7ffffff0, i);
+  return a;
+}
+
+var q = foo();
+assertEq(q, 429496727300);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-10.js
@@ -0,0 +1,13 @@
+function copied(x, y) {
+  return x + y;
+}
+
+function foo(x) {
+  var a = 0;
+  for (var i = 0; i < 100; i++)
+    a += copied(x, x);
+  return a;
+}
+
+var q = foo(5);
+assertEq(q, 1000);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/inline/scripted-11.js
@@ -0,0 +1,13 @@
+what = 0;
+
+function f(x) {
+  g(x);
+}
+
+function g(x) {
+  var a = <a></a>;
+  eval("what = true");
+}
+
+f(10);
+assertEq(what, true);
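
The tests above cover the main inlining paths: simple arithmetic callees, nested inlining (scripted-08), doubles and int overflow (scripted-04/05/09), a callee that invokes a native (scripted-06), and a callee that uses eval and is therefore excluded (scripted-11). One condition they all rely on is enforced by Script::inlineable() in jsanalyze.h below: the call site's argc must equal the callee's declared formal count. A sketch of a call that fails that check (not part of the patch):

    function bar(x, y) {
      return x + y;
    }
    function foo() {
      var a = 0;
      for (var i = 0; i < 1000; i++)
        a += bar(i);   // argc 1 != nargs 2: call is not inlined; y is
      return a;        // undefined, so each term (and a) is NaN
    }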
--- a/js/src/jsanalyze.cpp
+++ b/js/src/jsanalyze.cpp
@@ -47,17 +47,16 @@ namespace analyze {
 
 /////////////////////////////////////////////////////////////////////
 // Script
 /////////////////////////////////////////////////////////////////////
 
 Script::Script()
 {
     PodZero(this);
-    JS_InitArenaPool(&pool, "script_analyze", 256, 8, NULL);
 }
 
 Script::~Script()
 {
     JS_FinishArenaPool(&pool);
 }
 
 /////////////////////////////////////////////////////////////////////
@@ -161,16 +160,19 @@ Script::addJump(JSContext *cx, unsigned 
         }
     }
 
     if (!code->mergeDefines(cx, this, initial, stackDepth, defineArray, defineCount))
         return false;
     code->jumpTarget = true;
 
     if (offset < *currentOffset) {
+        /* Scripts containing loops are never inlined. */
+        isInlineable = false;
+
         /* Don't follow back edges to bytecode which has already been analyzed. */
         if (!code->analyzed) {
             if (*forwardJump == 0)
                 *forwardJump = *currentOffset;
             *currentOffset = offset;
         }
     } else if (offset > *forwardJump) {
         *forwardJump = offset;
@@ -239,16 +241,18 @@ BytecodeNoFallThrough(JSOp op)
       default:
         return false;
     }
 }
 
 void
 Script::analyze(JSContext *cx, JSScript *script)
 {
+    JS_InitArenaPool(&pool, "script_analyze", 256, 8, NULL);
+
     JS_ASSERT(script && !codeArray && !locals);
     this->script = script;
 
     unsigned length = script->length;
     unsigned nargs = script->fun ? script->fun->nargs : 0;
     unsigned nfixed = localCount();
 
     codeArray = ArenaArray<Bytecode*>(pool, length);
@@ -298,16 +302,22 @@ Script::analyze(JSContext *cx, JSScript 
 
     /*
      * If the script is in debug mode, JS_SetFrameReturnValue can be called at
      * any safe point.
      */
     if (cx->compartment->debugMode)
         usesRval = true;
 
+    isInlineable = true;
+    if (script->nClosedArgs || script->nClosedVars ||
+        script->usesEval || script->usesArguments || cx->compartment->debugMode) {
+        isInlineable = false;
+    }
+
     /*
      * If we are in the middle of one or more jumps, the offset of the highest
      * target jumping over this bytecode.  Includes implicit jumps from
      * try/catch/finally blocks.
      */
     unsigned forwardJump = 0;
 
     /*
@@ -365,18 +375,20 @@ Script::analyze(JSContext *cx, JSScript 
             continue;
         }
 
         code->analyzed = true;
 
         if (forwardCatch)
             code->inTryBlock = true;
 
-        if (untrap.trap)
+        if (untrap.trap) {
             code->safePoint = true;
+            isInlineable = false;
+        }
 
         unsigned stackDepth = code->stackDepth;
         uint32 *defineArray = code->defineArray;
         unsigned defineCount = code->defineCount;
 
         if (!forwardJump) {
             /*
              * There is no jump over this bytecode, nor a containing try block.
@@ -406,33 +418,41 @@ Script::analyze(JSContext *cx, JSScript 
         stackDepth -= nuses;
         stackDepth += ndefs;
 
         switch (op) {
 
           case JSOP_SETRVAL:
           case JSOP_POPV:
             usesRval = true;
+            isInlineable = false;
             break;
 
           case JSOP_NAME:
           case JSOP_CALLNAME:
           case JSOP_BINDNAME:
           case JSOP_SETNAME:
           case JSOP_DELNAME:
           case JSOP_INCNAME:
           case JSOP_DECNAME:
           case JSOP_NAMEINC:
           case JSOP_NAMEDEC:
           case JSOP_FORNAME:
             usesScope = true;
+            isInlineable = false;
+            break;
+
+          case JSOP_THIS:
+          case JSOP_GETTHISPROP:
+            usesThis = true;
             break;
 
           case JSOP_TABLESWITCH:
           case JSOP_TABLESWITCHX: {
+            isInlineable = false;
             jsbytecode *pc2 = pc;
             unsigned jmplen = (op == JSOP_TABLESWITCH) ? JUMP_OFFSET_LEN : JUMPX_OFFSET_LEN;
             unsigned defaultOffset = offset + GetJumpOffset(pc, pc2);
             pc2 += jmplen;
             jsint low = GET_JUMP_OFFSET(pc2);
             pc2 += JUMP_OFFSET_LEN;
             jsint high = GET_JUMP_OFFSET(pc2);
             pc2 += JUMP_OFFSET_LEN;
@@ -456,16 +476,17 @@ Script::analyze(JSContext *cx, JSScript 
                 getCode(targetOffset).safePoint = true;
                 pc2 += jmplen;
             }
             break;
           }
 
           case JSOP_LOOKUPSWITCH:
           case JSOP_LOOKUPSWITCHX: {
+            isInlineable = false;
             jsbytecode *pc2 = pc;
             unsigned jmplen = (op == JSOP_LOOKUPSWITCH) ? JUMP_OFFSET_LEN : JUMPX_OFFSET_LEN;
             unsigned defaultOffset = offset + GetJumpOffset(pc, pc2);
             pc2 += jmplen;
             unsigned npairs = GET_UINT16(pc2);
             pc2 += UINT16_LEN;
 
             if (!addJump(cx, defaultOffset, &nextOffset, &forwardJump,
@@ -492,16 +513,17 @@ Script::analyze(JSContext *cx, JSScript 
 
           case JSOP_TRY: {
             /*
              * Everything between a try and corresponding catch or finally is conditional.
              * Note that there is no problem with code which is skipped by a thrown
              * exception but is not caught by a later handler in the same function:
              * no more code will execute, and it does not matter what is defined.
              */
+            isInlineable = false;
             JSTryNote *tn = script->trynotes()->vector;
             JSTryNote *tnlimit = tn + script->trynotes()->length;
             for (; tn < tnlimit; tn++) {
                 unsigned startOffset = script->main - script->code + tn->start;
                 if (startOffset == offset + 1) {
                     unsigned catchOffset = startOffset + tn->length;
 
                     /* This will overestimate try block code, for multiple catch/finally. */
@@ -524,30 +546,34 @@ Script::analyze(JSContext *cx, JSScript 
           case JSOP_GETLOCAL:
             /*
              * Watch for uses of variables not known to be defined, and mark
              * them as having possible uses before definitions.  Ignore GETLOCAL
              * followed by a POP, these are generated for, e.g. 'var x;'
              */
             if (pc[JSOP_GETLOCAL_LENGTH] != JSOP_POP) {
                 uint32 local = GET_SLOTNO(pc);
-                if (local < nfixed && !localDefined(local, offset))
+                if (local < nfixed && !localDefined(local, offset)) {
                     setLocal(local, LOCAL_USE_BEFORE_DEF);
+                    isInlineable = false;
+                }
             }
             break;
 
           case JSOP_CALLLOCAL:
           case JSOP_GETLOCALPROP:
           case JSOP_INCLOCAL:
           case JSOP_DECLOCAL:
           case JSOP_LOCALINC:
           case JSOP_LOCALDEC: {
             uint32 local = GET_SLOTNO(pc);
-            if (local < nfixed && !localDefined(local, offset))
+            if (local < nfixed && !localDefined(local, offset)) {
                 setLocal(local, LOCAL_USE_BEFORE_DEF);
+                isInlineable = false;
+            }
             break;
           }
 
           case JSOP_SETLOCAL:
           case JSOP_FORLOCAL: {
             uint32 local = GET_SLOTNO(pc);
 
             /*
@@ -572,16 +598,47 @@ Script::analyze(JSContext *cx, JSScript 
                 } else {
                     /* This local is unconditionally defined by this bytecode. */
                     setLocal(local, offset);
                 }
             }
             break;
           }
 
+          /* Additional opcodes which can be compiled but which can't be inlined. */
+          case JSOP_ARGUMENTS:
+          case JSOP_EVAL:
+          case JSOP_FORARG:
+          case JSOP_SETARG:
+          case JSOP_INCARG:
+          case JSOP_DECARG:
+          case JSOP_ARGINC:
+          case JSOP_ARGDEC:
+          case JSOP_THROW:
+          case JSOP_EXCEPTION:
+          case JSOP_DEFFUN:
+          case JSOP_DEFVAR:
+          case JSOP_DEFCONST:
+          case JSOP_SETCONST:
+          case JSOP_DEFLOCALFUN:
+          case JSOP_DEFLOCALFUN_FC:
+          case JSOP_LAMBDA:
+          case JSOP_LAMBDA_FC:
+          case JSOP_GETFCSLOT:
+          case JSOP_CALLFCSLOT:
+          case JSOP_ARGSUB:
+          case JSOP_ARGCNT:
+          case JSOP_DEBUGGER:
+          case JSOP_ENTERBLOCK:
+          case JSOP_LEAVEBLOCK:
+          case JSOP_FUNCALL:
+          case JSOP_FUNAPPLY:
+            isInlineable = false;
+            break;
+
           default:
             break;
         }
 
         uint32 type = JOF_TYPE(js_CodeSpec[op].format);
 
         /* Check basic jump opcodes, which may or may not have a fallthrough. */
         if (type == JOF_JUMP || type == JOF_JUMPX) {
@@ -654,45 +711,45 @@ Script::analyze(JSContext *cx, JSScript 
 
 /////////////////////////////////////////////////////////////////////
 // Live Range Analysis
 /////////////////////////////////////////////////////////////////////
 
 LifetimeScript::LifetimeScript()
 {
     PodZero(this);
-    JS_InitArenaPool(&pool, "script_liverange", 256, 8, NULL);
 }
 
 LifetimeScript::~LifetimeScript()
 {
     JS_FinishArenaPool(&pool);
 }
 
 bool
-LifetimeScript::analyze(JSContext *cx, analyze::Script *analysis, JSScript *script, JSFunction *fun)
+LifetimeScript::analyze(JSContext *cx, analyze::Script *analysis, JSScript *script)
 {
     JS_ASSERT(analysis->hasAnalyzed() && !analysis->failed());
 
+    JS_InitArenaPool(&pool, "script_liverange", 256, 8, NULL);
+
     this->analysis = analysis;
     this->script = script;
-    this->fun = fun;
 
     codeArray = ArenaArray<LifetimeBytecode>(pool, script->length);
     if (!codeArray)
         return false;
     PodZero(codeArray, script->length);
 
     unsigned nfixed = analysis->localCount();
     locals = ArenaArray<LifetimeVariable>(pool, nfixed);
     if (!locals)
         return false;
     PodZero(locals, nfixed);
 
-    unsigned nargs = fun ? fun->nargs : 0;
+    unsigned nargs = script->fun ? script->fun->nargs : 0;
     args = ArenaArray<LifetimeVariable>(pool, nargs);
     if (!args)
         return false;
     PodZero(args, nargs);
 
     PodZero(&thisVar);
 
     saved = ArenaArray<LifetimeVariable*>(pool, nfixed + nargs + 1);
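
Besides the opcode blacklist above, two subtler conditions clear isInlineable: a bytecode carrying a debugger trap (the untrap.trap case), and a local slot that may be read before it is unconditionally defined (LOCAL_USE_BEFORE_DEF). A sketch of the latter, with illustrative names:

    function f(b) {
      if (b)
        var x = 1;
      return x;   // 'x' may be read before definition: the GETLOCAL marks
    }             // the slot LOCAL_USE_BEFORE_DEF and disables inlining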
--- a/js/src/jsanalyze.h
+++ b/js/src/jsanalyze.h
@@ -134,38 +134,45 @@ class Script
 
     static const uint32 LOCAL_USE_BEFORE_DEF = uint32(-1);
     static const uint32 LOCAL_CONDITIONALLY_DEFINED = uint32(-2);
 
     bool outOfMemory;
     bool hadFailure;
     bool usesRval;
     bool usesScope;
+    bool usesThis;
+
+    bool isInlineable;
 
     JSPackedBool *closedVars;
     JSPackedBool *closedArgs;
 
   public:
     /* Pool for allocating analysis structures which will not outlive this script. */
     JSArenaPool pool;
 
     Script();
     ~Script();
 
     void analyze(JSContext *cx, JSScript *script);
 
     bool OOM() { return outOfMemory; }
     bool failed() { return hadFailure; }
+    bool inlineable(uint32 argc) { return isInlineable && argc == script->fun->nargs; }
 
     /* Whether there are POPV/SETRVAL bytecodes which can write to the frame's rval. */
     bool usesReturnValue() const { return usesRval; }
 
     /* Whether there are NAME bytecodes which can access the frame's scope chain. */
     bool usesScopeChain() const { return usesScope; }
 
+    /* Whether there are THIS bytecodes. */
+    bool usesThisValue() const { return usesThis; }
+
     bool hasAnalyzed() const { return !!codeArray; }
     JSScript *getScript() const { return script; }
 
     /* Accessors for bytecode information. */
 
     Bytecode& getCode(uint32 offset) {
         JS_ASSERT(offset < script->length);
         JS_ASSERT(codeArray[offset]);
@@ -381,48 +388,47 @@ struct LifetimeVariable
  * Analysis approximating variable liveness information at points in a script.
  * This is separate from analyze::Script as it is computed on every compilation
  * and thrown away afterwards.
  */
 class LifetimeScript
 {
     analyze::Script *analysis;
     JSScript *script;
-    JSFunction *fun;
 
     LifetimeBytecode *codeArray;
     LifetimeVariable *locals;
     LifetimeVariable *args;
     LifetimeVariable thisVar;
 
     LifetimeVariable **saved;
     unsigned savedCount;
 
   public:
     JSArenaPool pool;
 
     LifetimeScript();
     ~LifetimeScript();
 
-    bool analyze(JSContext *cx, analyze::Script *analysis, JSScript *script, JSFunction *fun);
+    bool analyze(JSContext *cx, analyze::Script *analysis, JSScript *script);
 
     LifetimeBytecode &getCode(uint32 offset) {
         JS_ASSERT(analysis->maybeCode(offset));
         return codeArray[offset];
     }
     LifetimeBytecode &getCode(jsbytecode *pc) { return getCode(pc - script->code); }
 
 #ifdef DEBUG
     void dumpVariable(LifetimeVariable &var);
     void dumpLocal(unsigned i) { dumpVariable(locals[i]); }
     void dumpArg(unsigned i) { dumpVariable(args[i]); }
 #endif
 
     Lifetime * argLive(uint32 arg, uint32 offset) {
-        JS_ASSERT(fun && arg < fun->nargs);
+        JS_ASSERT(script->fun && arg < script->fun->nargs);
         return args[arg].live(offset);
     }
     Lifetime * localLive(uint32 local, uint32 offset) {
         JS_ASSERT(local < analysis->localCount());
         return locals[local].live(offset);
     }
     Lifetime * thisLive(uint32 offset) { return thisVar.live(offset); }
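
Note the asymmetry in the flags above: NAME accesses set usesScope and disable inlining, but THIS bytecodes only set usesThis, which does not block inlining (inlined frames share the outer frame's scope chain, and 'this' is available at the call site). So a method like the following remains a candidate, assuming the rest of the analysis passes (sketch only):

    function getX() {
      return this.x;   // JSOP_THIS: tracked via usesThis, still inlineable
    }
    var o = { x: 1, getX: getX };
    o.getX();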
 
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -5444,17 +5444,17 @@ JS_IsRunning(JSContext *cx)
         fp = fp->prev();
     return fp != NULL;
 }
 
 JS_PUBLIC_API(JSStackFrame *)
 JS_SaveFrameChain(JSContext *cx)
 {
     CHECK_REQUEST(cx);
-    JSStackFrame *fp = js_GetTopStackFrame(cx);
+    JSStackFrame *fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
     if (!fp)
         return NULL;
     cx->saveActiveSegment();
     return fp;
 }
 
 JS_PUBLIC_API(void)
 JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp)
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -381,29 +381,31 @@ StackSpace::getExecuteFrame(JSContext *c
 void
 StackSpace::pushExecuteFrame(JSContext *cx, JSObject *initialVarObj, ExecuteFrameGuard *fg)
 {
     JSStackFrame *fp = fg->fp();
     JSScript *script = fp->script();
     fg->regs_.pc = script->code;
     fg->regs_.fp = fp;
     fg->regs_.sp = fp->base();
+    fg->regs_.inlined = NULL;
     pushSegmentAndFrame(cx, &fg->regs_, fg);
     fg->seg_->setInitialVarObj(initialVarObj);
 }
 
 bool
 StackSpace::pushDummyFrame(JSContext *cx, JSObject &scopeChain, DummyFrameGuard *fg)
 {
     if (!getSegmentAndFrame(cx, 0 /*vplen*/, 0 /*nslots*/, fg))
         return false;
     fg->fp()->initDummyFrame(cx, scopeChain);
     fg->regs_.fp = fg->fp();
     fg->regs_.pc = NULL;
     fg->regs_.sp = fg->fp()->slots();
+    fg->regs_.inlined = NULL;
     pushSegmentAndFrame(cx, &fg->regs_, fg);
     return true;
 }
 
 bool
 StackSpace::getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots, GeneratorFrameGuard *fg)
 {
     return getSegmentAndFrame(cx, vplen, nslots, fg);
@@ -1232,17 +1234,17 @@ ReportError(JSContext *cx, const char *m
 /* The report must be initially zeroed. */
 static void
 PopulateReportBlame(JSContext *cx, JSErrorReport *report)
 {
     /*
      * Walk stack until we find a frame that is associated with some script
      * rather than a native frame.
      */
-    for (JSStackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
+    for (JSStackFrame *fp = js_GetTopStackFrame(cx, FRAME_EXPAND_TOP); fp; fp = fp->prev()) {
         if (fp->pc(cx)) {
             report->filename = fp->script()->filename;
             report->lineno = js_FramePCToLineNumber(cx, fp);
             break;
         }
     }
 }
 
@@ -1830,17 +1832,17 @@ TriggerAllOperationCallbacks(JSRuntime *
 }
 
 } /* namespace js */
 
 JSStackFrame *
 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
 {
     if (!fp)
-        fp = js_GetTopStackFrame(cx);
+        fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
     while (fp && fp->isDummyFrame())
         fp = fp->prev();
     JS_ASSERT_IF(fp, fp->isScriptFrame());
     return fp;
 }
 
 jsbytecode*
 js_GetCurrentBytecodePC(JSContext* cx)
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -2153,16 +2153,19 @@ public:
     inline js::types::TypeObject *getTypeEmpty();
 
     /* Alias two properties in the type information for obj. */
     inline bool aliasTypeProperties(js::types::TypeObject *obj, jsid first, jsid second);
 
     /* Mark an array type as being not packed and, possibly, not dense. */
     inline bool markTypeArrayNotPacked(js::types::TypeObject *obj, bool notDense);
 
+    /* Mark a function as being uninlineable (its .arguments property has been accessed). */
+    inline bool markTypeFunctionUninlineable(js::types::TypeObject *obj);
+
     /* Monitor all properties of a type object as unknown. */
     inline bool markTypeObjectUnknownProperties(js::types::TypeObject *obj);
 
     /*
      * For an array or object which has not yet escaped and been referenced elsewhere,
      * pick a new type based on the object's current contents.
      */
     inline bool fixArrayType(JSObject *obj);
@@ -3219,28 +3222,49 @@ js_CurrentPCIsInImacro(JSContext *cx);
 
 namespace js {
 
 class RegExpStatics;
 
 extern JS_FORCES_STACK JS_FRIEND_API(void)
 LeaveTrace(JSContext *cx);
 
+enum FrameExpandKind {
+    FRAME_EXPAND_NONE,
+    FRAME_EXPAND_TOP,
+    FRAME_EXPAND_ALL
+};
+
+#ifdef JS_METHODJIT
+namespace mjit {
+    void ExpandInlineFrames(JSContext *cx, bool all);
+}
+#endif
+
 } /* namespace js */
 
 /*
  * Get the current frame, first lazily instantiating stack frames if needed.
  * (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
  *
- * Defined in jstracer.cpp if JS_TRACER is defined.
+ * LeaveTrace is defined in jstracer.cpp if JS_TRACER is defined.
+ *
+ * If the stack contains frames inlined by the method JIT, kind specifies
+ * which ones to expand.
  */
 static JS_FORCES_STACK JS_INLINE JSStackFrame *
-js_GetTopStackFrame(JSContext *cx)
+js_GetTopStackFrame(JSContext *cx, js::FrameExpandKind expand)
 {
     js::LeaveTrace(cx);
+
+#ifdef JS_METHODJIT
+    if (expand != js::FRAME_EXPAND_NONE)
+        js::mjit::ExpandInlineFrames(cx, expand == js::FRAME_EXPAND_ALL);
+#endif
+
     return cx->maybefp();
 }
 
 static JS_INLINE JSBool
 js_IsPropertyCacheDisabled(JSContext *cx)
 {
     return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
 }
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -122,18 +122,18 @@ StackSpace::firstUnused() const
     if (!seg) {
         JS_ASSERT(invokeArgEnd == NULL);
         return base;
     }
     if (seg->inContext()) {
         Value *sp = seg->getCurrentRegs()->sp;
         if (invokeArgEnd > sp) {
             JS_ASSERT(invokeSegment == currentSegment);
-            JS_ASSERT_IF(seg->maybeContext()->hasfp(),
-                         invokeFrame == seg->maybeContext()->fp());
+            //JS_ASSERT_IF(seg->maybeContext()->hasfp(),  FIXME
+            //             invokeFrame == seg->maybeContext()->fp());
             return invokeArgEnd;
         }
         return sp;
     }
     JS_ASSERT(invokeArgEnd);
     JS_ASSERT(invokeSegment == currentSegment);
     return invokeArgEnd;
 }
@@ -256,17 +256,17 @@ StackSpace::popInvokeArgs(const InvokeAr
 {
     if (JS_UNLIKELY(ag.seg != NULL)) {
         popSegmentForInvoke(ag);
         return;
     }
 
     JS_ASSERT(isCurrentAndActive(ag.cx));
     JS_ASSERT(invokeSegment == currentSegment);
-    JS_ASSERT(invokeFrame == ag.cx->maybefp());
+    // JS_ASSERT(invokeFrame == ag.cx->maybefp()); FIXME
     JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc());
 
 #ifdef DEBUG
     invokeSegment = ag.prevInvokeSegment;
     invokeFrame = ag.prevInvokeFrame;
 #endif
     invokeArgEnd = ag.prevInvokeArgEnd;
 }
@@ -327,16 +327,17 @@ StackSpace::getInvokeFrame(JSContext *cx
 {
     JS_ASSERT(firstUnused() == args.argv() + args.argc());
 
     Value *firstUnused = args.argv() + args.argc();
     fg->regs_.fp = getCallFrame(cx, firstUnused, args.argc(), fun, script, flags,
                                 EnsureSpaceCheck());
     fg->regs_.sp = fg->regs_.fp->slots() + script->nfixed;
     fg->regs_.pc = script->code;
+    fg->regs_.inlined = NULL;
 
     return fg->regs_.fp != NULL;
 }
 
 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
 StackSpace::pushInvokeFrame(JSContext *cx, const CallArgs &args,
                             InvokeFrameGuard *fg)
 {
@@ -405,30 +406,31 @@ JS_REQUIRES_STACK JS_ALWAYS_INLINE void
 StackSpace::pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp,
                             JSFrameRegs *regs)
 {
     JS_ASSERT(isCurrentAndActive(cx));
     JS_ASSERT(cx->regs == regs && script == fp->script());
 
     regs->fp = fp;
     regs->pc = script->code;
+    regs->inlined = NULL;
     regs->sp = fp->slots() + script->nfixed;
 }
 
 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
 StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *prev, Value *newsp)
 {
     JS_ASSERT(isCurrentAndActive(cx));
     JS_ASSERT(cx->hasActiveSegment());
     JS_ASSERT(cx->regs->fp->prev_ == prev);
     JS_ASSERT(!cx->regs->fp->hasImacropc());
     JS_ASSERT(prev->base() <= newsp && newsp <= cx->regs->fp->formalArgsEnd());
 
     JSFrameRegs *regs = cx->regs;
-    regs->pc = prev->pc(cx, regs->fp);
+    regs->pc = prev->pc(cx, regs->fp, &regs->inlined);
     regs->fp = prev;
     regs->sp = newsp;
 }
 
 JS_ALWAYS_INLINE Value *
 StackSpace::getStackLimit(JSContext *cx)
 {
     Value *sp = cx->regs->sp;
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -1327,17 +1327,20 @@ JS_GetScriptPrincipals(JSContext *cx, JS
 /************************************************************************/
 
 /*
  *  Stack Frame Iterator
  */
 JS_PUBLIC_API(JSStackFrame *)
 JS_FrameIterator(JSContext *cx, JSStackFrame **iteratorp)
 {
-    *iteratorp = (*iteratorp == NULL) ? js_GetTopStackFrame(cx) : (*iteratorp)->prev();
+    if (*iteratorp == NULL)
+        *iteratorp = js_GetTopStackFrame(cx, FRAME_EXPAND_ALL);
+    else
+        *iteratorp = (*iteratorp)->prev();
     return *iteratorp;
 }
 
 JS_PUBLIC_API(JSScript *)
 JS_GetFrameScript(JSContext *cx, JSStackFrame *fp)
 {
     return fp->maybeScript();
 }
@@ -1962,17 +1965,17 @@ JS_GetScriptTotalSize(JSContext *cx, JSS
 
     return nbytes;
 }
 
 JS_PUBLIC_API(uint32)
 JS_GetTopScriptFilenameFlags(JSContext *cx, JSStackFrame *fp)
 {
     if (!fp)
-        fp = js_GetTopStackFrame(cx);
+        fp = js_GetTopStackFrame(cx, FRAME_EXPAND_TOP);
     while (fp) {
         if (fp->isScriptFrame())
             return JS_GetScriptFilenameFlags(fp->script());
         fp = fp->prev();
     }
     return 0;
  }
 
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -291,17 +291,17 @@ InitExnPrivate(JSContext *cx, JSObject *
                   ? Valueify(callbacks->checkObjectAccess)
                   : NULL;
     older = JS_SetErrorReporter(cx, NULL);
     state = JS_SaveExceptionState(cx);
 
     callerid = ATOM_TO_JSID(cx->runtime->atomState.callerAtom);
     stackDepth = 0;
     valueCount = 0;
-    for (fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
+    for (fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE); fp; fp = fp->prev()) {
         if (fp->isFunctionFrame() && !fp->isEvalFrame()) {
             Value v = NullValue();
             if (checkAccess &&
                 !checkAccess(cx, &fp->callee(), callerid, JSACC_READ, &v)) {
                 break;
             }
             valueCount += fp->numActualArgs();
         }
@@ -332,17 +332,17 @@ InitExnPrivate(JSContext *cx, JSObject *
     priv->errorReport = NULL;
     priv->message = message;
     priv->filename = filename;
     priv->lineno = lineno;
     priv->stackDepth = stackDepth;
 
     values = GetStackTraceValueBuffer(priv);
     elem = priv->stackElems;
-    for (fp = js_GetTopStackFrame(cx); fp != fpstop; fp = fp->prev()) {
+    for (fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE); fp != fpstop; fp = fp->prev()) {
         if (!fp->isFunctionFrame() || fp->isEvalFrame()) {
             elem->funName = NULL;
             elem->argc = 0;
         } else {
             elem->funName = fp->fun()->atom
                             ? fp->fun()->atom
                             : cx->runtime->emptyString;
             elem->argc = fp->numActualArgs();
@@ -710,16 +710,20 @@ FilenameToString(JSContext *cx, const ch
 }
 
 static JSBool
 Exception(JSContext *cx, uintN argc, Value *vp)
 {
     JSString *message, *filename;
     JSStackFrame *fp;
 
+#ifdef JS_METHODJIT
+    js::mjit::ExpandInlineFrames(cx, true);
+#endif
+
     /*
      * ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when
      * called as functions, without operator new.  But as we do not give
      * each constructor a distinct JSClass, whose .name member is used by
      * NewNativeClassInstance to find the class prototype, we must get the
      * class prototype ourselves.
      */
     JSObject &callee = vp[0].toObject();
@@ -1114,16 +1118,20 @@ js_ErrorToException(JSContext *cx, const
     JSErrNum errorNumber;
     const JSErrorFormatString *errorString;
     JSExnType exn;
     jsval tv[4];
     JSBool ok;
     JSObject *errProto, *errObject;
     JSString *messageStr, *filenameStr;
 
+#ifdef JS_METHODJIT
+    js::mjit::ExpandInlineFrames(cx, true);
+#endif
+
     /*
      * Tell our caller to report immediately if this report is just a warning.
      */
     JS_ASSERT(reportp);
     if (JSREPORT_IS_WARNING(reportp->flags))
         return JS_FALSE;
 
     /* Find the exception index associated with this error. */
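
Both Exception() and js_ErrorToException() now expand inline frames before walking the stack, so errors created or reported while inlined code is running see the logical frame chain. The observable effect, sketched in shell-style code (illustrative):

    function inner() { throw new Error("boom"); }  // may have been inlined
    function outer() { inner(); }
    try {
      outer();
    } catch (e) {
      // ExpandInlineFrames runs first, so the report's filename/lineno and
      // e.stack reflect inner and outer as distinct frames.
      print(e.stack);
    }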
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1599,24 +1599,46 @@ fun_getProperty(JSContext *cx, JSObject 
                    GetInstancePrivate(cx, obj, &js_FunctionClass, NULL))) {
         if (slot != FUN_LENGTH)
             return true;
         obj = obj->getProto();
         if (!obj)
             return true;
     }
 
+    if (slot == FUN_ARGUMENTS || slot == FUN_CALLER) {
+        /*
+         * Mark the function's script as uninlineable, to expand any of its
+         * frames on the stack before we go looking for them.
+         */
+        if (fun->isInterpreted() && !cx->markTypeFunctionUninlineable(fun->getType()))
+            return false;
+    }
+
     /* Find fun's top-most activation record. */
     JSStackFrame *fp;
-    for (fp = js_GetTopStackFrame(cx);
+    for (fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
          fp && (fp->maybeFun() != fun || fp->isEvalOrDebuggerFrame());
          fp = fp->prev()) {
         continue;
     }
 
+#ifdef JS_METHODJIT
+    if (slot == FUN_CALLER && fp && fp->prev()) {
+        /* Also make sure the caller is uninlineable. */
+        JSInlinedSite *inlined;
+        fp->prev()->pc(cx, fp, &inlined);
+        if (inlined) {
+            JSFunction *fun = fp->prev()->jit()->inlineFrames()[inlined->inlineIndex].fun;
+            if (!cx->markTypeFunctionUninlineable(fun->getType()))
+                return false;
+        }
+    }
+#endif
+
     JSAtom *atom = NULL;
 
     switch (slot) {
       case FUN_ARGUMENTS:
         /* Warn if strict about f.arguments or equivalent unqualified uses. */
         if (!JS_ReportErrorFlagsAndNumber(cx,
                                           JSREPORT_WARNING | JSREPORT_STRICT,
                                           js_GetErrorMessage, NULL,
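
fun_getProperty above also wires .arguments and .caller into the inlining story: touching either marks the function's type object uninlineable (and, for .caller, the inlined caller as well), which queues any script that inlined it for recompilation. A sketch of the de-optimization (illustrative names):

    function callee(x) { return x + 1; }
    function hot() {
      var a = 0;
      for (var i = 0; i < 1000; i++)
        a += callee(i);          // eligible for inlining initially
      return a;
    }
    hot();
    // Reading .arguments (or .caller) calls markTypeFunctionUninlineable on
    // callee's type; scripts that inlined callee get recompiled.
    var args = callee.arguments;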
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -1373,45 +1373,57 @@ TypeSet::getKnownTypeTag(JSContext *cx, 
         type = GetValueTypeFromTypeFlags(flags);
 
     if (script && type != JSVAL_TYPE_UNKNOWN)
         add(cx, ArenaNew<TypeConstraintFreezeTypeTag>(cx->compartment->types.pool, script), false);
 
     return type;
 }
 
+static inline bool
+ObjectKindPair(ObjectKind v0, ObjectKind v1, ObjectKind cmp0, ObjectKind cmp1)
+{
+    JS_ASSERT(v0 != v1);
+    return (v0 == cmp0 && v1 == cmp1) || (v0 == cmp1 && v1 == cmp0);
+}
+
 /* Compute the meet of kind with the kind of object, per the ObjectKind lattice. */
 static inline ObjectKind
 CombineObjectKind(TypeObject *object, ObjectKind kind)
 {
     /*
      * All type objects with unknown properties are considered interchangeable
      * with one another, as they can be freely exchanged in type sets to handle
      * objects whose __proto__ has been changed.
      */
     if (object->unknownProperties || object->hasSpecialEquality || kind == OBJECT_UNKNOWN)
         return OBJECT_UNKNOWN;
 
     ObjectKind nkind;
-    if (object->isFunction)
-        nkind = object->asFunction()->script ? OBJECT_SCRIPTED_FUNCTION : OBJECT_NATIVE_FUNCTION;
+    if (object->isFunction && object->asFunction()->script && !object->isUninlineable)
+        nkind = OBJECT_INLINEABLE_FUNCTION;
+    else if (object->isFunction && object->asFunction()->script)
+        nkind = OBJECT_SCRIPTED_FUNCTION;
+    else if (object->isFunction)
+        nkind = OBJECT_NATIVE_FUNCTION;
     else if (object->isPackedArray)
         nkind = OBJECT_PACKED_ARRAY;
     else if (object->isDenseArray)
         nkind = OBJECT_DENSE_ARRAY;
     else
         nkind = OBJECT_NO_SPECIAL_EQUALITY;
 
     if (kind == nkind || kind == OBJECT_NONE)
         return nkind;
 
-    if ((kind == OBJECT_PACKED_ARRAY && nkind == OBJECT_DENSE_ARRAY) ||
-        (kind == OBJECT_DENSE_ARRAY && nkind == OBJECT_PACKED_ARRAY)) {
+    if (ObjectKindPair(kind, nkind, OBJECT_INLINEABLE_FUNCTION, OBJECT_SCRIPTED_FUNCTION))
+        return OBJECT_SCRIPTED_FUNCTION;
+
+    if (ObjectKindPair(kind, nkind, OBJECT_PACKED_ARRAY, OBJECT_DENSE_ARRAY))
         return OBJECT_DENSE_ARRAY;
-    }
 
     return OBJECT_NO_SPECIAL_EQUALITY;
 }
 
 /* Constraint which triggers recompilation if an object changes state. */
 class TypeConstraintFreezeObjectKind : public TypeConstraint
 {
 public:
@@ -1517,16 +1529,34 @@ TypeSet::getKnownObjectKind(JSContext *c
          * in this set to add any needed FreezeArray constraints.
          */
         add(cx, ArenaNew<TypeConstraintFreezeObjectKindSet>(cx->compartment->types.pool, kind, script));
     }
 
     return kind;
 }
 
+static inline void
+ObjectStateChange(JSContext *cx, TypeObject *object, bool markingUnknown)
+{
+    /* All constraints listening to state changes are on the element types. */
+    TypeSet *elementTypes = object->getProperty(cx, JSID_VOID, false);
+    if (!elementTypes)
+        return;
+    if (markingUnknown) {
+        /* Mark as unknown after getting the element types, to avoid assert. */
+        object->unknownProperties = true;
+    }
+    TypeConstraint *constraint = elementTypes->constraintList;
+    while (constraint) {
+        constraint->newObjectState(cx);
+        constraint = constraint->next;
+    }
+}
+
 bool
 TypeSet::knownNonEmpty(JSContext *cx, JSScript *script)
 {
     if ((typeFlags & ~TYPE_FLAG_INTERMEDIATE_SET) != 0 || objectCount != 0)
         return true;
 
     add(cx, ArenaNew<TypeConstraintFreeze>(cx->compartment->types.pool, script), false);
 
@@ -1561,16 +1591,17 @@ TypeCompartment::init(JSContext *cx)
      * Initialize the empty type object. This is not threaded onto the objects list,
      * will never be collected during GC, and does not have a proto or any properties
      * that need to be marked. It *can* have empty shapes, which are weak references.
      */
 #ifdef DEBUG
     typeEmpty.name_ = JSID_VOID;
 #endif
     typeEmpty.hasSpecialEquality = true;
+    typeEmpty.isUninlineable = true;
     typeEmpty.unknownProperties = true;
 
     if (cx && cx->getRunOptions() & JSOPTION_TYPE_INFERENCE)
         inferenceEnabled = true;
 
     JS_InitArenaPool(&pool, "typeinfer", 512, 8, NULL);
 }
 
@@ -1608,17 +1639,17 @@ TypeCompartment::newTypeObject(JSContext
         new(object) TypeObject(id, proto);
     }
 
     TypeObject *&objects = script ? script->typeObjects : this->objects;
     object->next = objects;
     objects = object;
 
     if (!cx->typeInferenceEnabled())
-        object->hasSpecialEquality = true;
+        object->hasSpecialEquality = true;  /* Avoid syncSpecialEquality assert */
 
     return object;
 }
 
 TypeObject *
 TypeCompartment::newInitializerTypeObject(JSContext *cx, JSScript *script,
                                           uint32 offset, bool isArray)
 {
@@ -1813,16 +1844,27 @@ TypeCompartment::dynamicPush(JSContext *
         TypeSet *pushed = script->types->pushed(offset, 0);
         pushed->addType(cx, type);
     } else if (script->analyzed) {
         /* Any new dynamic result triggers reanalysis and recompilation. */
         AnalyzeScriptTypes(cx, script);
     }
 
     /*
+     * If this script was inlined into a parent, we need to make sure the
+     * parent has constraints listening to type changes in this one (it won't
+     * necessarily, if we have condensed the constraints but not reanalyzed the
+     * parent). The parent is listening for isUninlineable changes on the
+     * function, so we can treat this as a state change on the function to
+     * trigger any necessary reanalysis.
+     */
+    if (script->fun)
+        ObjectStateChange(cx, script->fun->getType(), false);
+
+    /*
      * For inc/dec ops, we need to go back and reanalyze the affected opcode
      * taking the overflow into account. We won't see an explicit adjustment
      * of the type of the thing being inc/dec'ed, nor will adding TYPE_DOUBLE to
      * the pushed value affect that type. We only handle inc/dec operations
      * that do not have an object lvalue; INCNAME/INCPROP/INCELEM and friends
      * should call typeMonitorAssign to update the property type.
      */
     jsbytecode *pc = script->code + offset;
@@ -1874,29 +1916,35 @@ TypeCompartment::dynamicPush(JSContext *
 
 bool
 TypeCompartment::processPendingRecompiles(JSContext *cx)
 {
     /* Steal the list of scripts to recompile, else we will try to recursively recompile them. */
     Vector<JSScript*> *pending = pendingRecompiles;
     pendingRecompiles = NULL;
 
+    JS_ASSERT(!pending->empty());
+
+#ifdef JS_METHODJIT
+
+    mjit::ExpandInlineFrames(cx, true);
+
     for (unsigned i = 0; i < pending->length(); i++) {
-#ifdef JS_METHODJIT
         JSScript *script = (*pending)[i];
         mjit::Recompiler recompiler(cx, script);
         if (!recompiler.recompile()) {
             pendingNukeTypes = true;
-            cx->free(pending);
+            js_delete< Vector<JSScript*> >(pending);
             return nukeTypes(cx);
         }
-#endif
     }
 
-    cx->free(pending);
+#endif /* JS_METHODJIT */
+
+    js_delete< Vector<JSScript*> >(pending);
     return true;
 }
 
 void
 TypeCompartment::setPendingNukeTypes(JSContext *cx)
 {
     if (!pendingNukeTypes) {
         js_ReportOutOfMemory(cx);
@@ -1934,22 +1982,21 @@ void
 TypeCompartment::addPendingRecompile(JSContext *cx, JSScript *script)
 {
     if (!script->jitNormal && !script->jitCtor) {
         /* Scripts which haven't been compiled yet don't need to be recompiled. */
         return;
     }
 
     if (!pendingRecompiles) {
-        pendingRecompiles = (Vector<JSScript*>*) cx->calloc(sizeof(Vector<JSScript*>));
+        pendingRecompiles = js_new< Vector<JSScript*> >(cx);
         if (!pendingRecompiles) {
             cx->compartment->types.setPendingNukeTypes(cx);
             return;
         }
-        new(pendingRecompiles) Vector<JSScript*>(cx);
     }
 
     for (unsigned i = 0; i < pendingRecompiles->length(); i++) {
         if (script == (*pendingRecompiles)[i])
             return;
     }
 
     if (!pendingRecompiles->append(script)) {
@@ -2054,16 +2101,17 @@ TypeCompartment::monitorBytecode(JSConte
       default:
         TypeFailure(cx, "Monitoring unknown bytecode: %s", js_CodeNameTwo[op]);
     }
 
     InferSpew(ISpewOps, "addMonitorNeeded: #%u:%05u", script->id(), offset);
 
     script->types->setMonitored(offset);
 
+    /* :FIXME: Also mark scripts this was inlined into as needing recompilation? */
     if (script->hasJITCode())
         cx->compartment->types.addPendingRecompile(cx, script);
 }
 
 void
 TypeCompartment::print(JSContext *cx, JSCompartment *compartment)
 {
     JS_ASSERT(this == &compartment->types);
@@ -2469,34 +2517,16 @@ TypeObject::addProperty(JSContext *cx, j
         storeToInstances(cx, base);
 
     /* Pull in this property from all prototypes up the chain. */
     getFromPrototypes(cx, base);
 
     return true;
 }
 
-static inline void
-ObjectStateChange(JSContext *cx, TypeObject *object, bool markingUnknown)
-{
-    /* All constraints listening to state changes are on the element types. */
-    TypeSet *elementTypes = object->getProperty(cx, JSID_VOID, false);
-    if (!elementTypes)
-        return;
-    if (markingUnknown) {
-        /* Mark as unknown after getting the element types, to avoid assert. */
-        object->unknownProperties = true;
-    }
-    TypeConstraint *constraint = elementTypes->constraintList;
-    while (constraint) {
-        constraint->newObjectState(cx);
-        constraint = constraint->next;
-    }
-}
-
 void
 TypeObject::markNotPacked(JSContext *cx, bool notDense)
 {
     JS_ASSERT(cx->compartment->types.inferenceDepth);
 
     if (notDense) {
         if (!isDenseArray)
             return;
@@ -2507,24 +2537,38 @@ TypeObject::markNotPacked(JSContext *cx,
     isPackedArray = false;
 
     InferSpew(ISpewOps, "%s: %s", notDense ? "NonDenseArray" : "NonPackedArray", name());
 
     ObjectStateChange(cx, this, false);
 }
 
 void
+TypeObject::markUninlineable(JSContext *cx)
+{
+    JS_ASSERT(cx->compartment->types.inferenceDepth);
+
+    JS_ASSERT(!isUninlineable);
+    isUninlineable = true;
+
+    InferSpew(ISpewOps, "Uninlineable: %s", name());
+
+    ObjectStateChange(cx, this, false);
+}
+
+void
 TypeObject::markUnknown(JSContext *cx)
 {
     JS_ASSERT(!unknownProperties);
 
     InferSpew(ISpewOps, "UnknownProperties: %s", name());
 
     isDenseArray = false;
     isPackedArray = false;
+    isUninlineable = true;
     hasSpecialEquality = true;
 
     ObjectStateChange(cx, this, true);
 
     /* Mark existing instances as unknown. */
 
     TypeObject *instance = instanceList;
     while (instance) {
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -197,30 +197,31 @@ public:
 };
 
 /*
  * Coarse kinds of a set of objects.  These form the following lattice:
  *
  *                    NONE
  *       ___________ /  | \______________
  *      /               |                \
- * PACKED_ARRAY  SCRIPTED_FUNCTION  NATIVE_FUNCTION
+ * PACKED_ARRAY  INLINEABLE_FUNCTION  NATIVE_FUNCTION
  *      |               |                 |
- * DENSE_ARRAY          |                 |
+ * DENSE_ARRAY    SCRIPTED_FUNCTION       |
  *      \____________   |   _____________/
  *                   \  |  /
  *             NO_SPECIAL_EQUALITY
  *                      |
  *                   UNKNOWN
  */
 enum ObjectKind {
     OBJECT_NONE,
     OBJECT_UNKNOWN,
     OBJECT_PACKED_ARRAY,
     OBJECT_DENSE_ARRAY,
+    OBJECT_INLINEABLE_FUNCTION,
     OBJECT_SCRIPTED_FUNCTION,
     OBJECT_NATIVE_FUNCTION,
     OBJECT_NO_SPECIAL_EQUALITY
 };
 
 /* Coarse flags for the contents of a type set. */
 enum {
     TYPE_FLAG_UNDEFINED = 1 << TYPE_UNDEFINED,
@@ -433,20 +434,23 @@ struct TypeObject
     bool unknownProperties;
 
     /* Whether all objects this represents are dense arrays. */
     bool isDenseArray;
 
     /* Whether all objects this represents are packed arrays (implies isDenseArray). */
     bool isPackedArray;
 
+    /* Whether any objects this represents have had their .arguments accessed. */
+    bool isUninlineable;
+
     /* Whether any objects this represents have an equality hook. */
     bool hasSpecialEquality;
 
-    /* Native object for which this type was created. */
+    /* If at most one JSObject can have this as its type, that object. */
     JSObject *singleton;
 
     TypeObject() {}
 
     /* Make an object with the specified name. */
     inline TypeObject(jsid id, JSObject *proto);
 
     /* Coerce this object to a function. */
@@ -481,16 +485,17 @@ struct TypeObject
     void splicePrototype(JSContext *cx, JSObject *proto);
 
     /* Helpers */
 
     bool addProperty(JSContext *cx, jsid id, Property **pprop);
     void addPrototype(JSContext *cx, TypeObject *proto);
     void markNotPacked(JSContext *cx, bool notDense);
     void markUnknown(JSContext *cx);
+    void markUninlineable(JSContext *cx);
     void storeToInstances(JSContext *cx, Property *base);
     void getFromPrototypes(JSContext *cx, Property *base);
 
     void print(JSContext *cx);
     void trace(JSTracer *trc);
 };
 
 /*
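
Per the CombineObjectKind lattice above, a call site whose callee set mixes kinds degrades to their meet: inlineable plus scripted stays SCRIPTED_FUNCTION, but scripted plus native falls to NO_SPECIAL_EQUALITY. A sketch of such a polymorphic site (illustrative):

    function scripted(x) { return x + 1; }
    function apply(f, x) {
      return f(x);           // this call site observes two object kinds
    }
    apply(scripted, 1);      // OBJECT_INLINEABLE_FUNCTION
    apply(Math.floor, 1.5);  // OBJECT_NATIVE_FUNCTION
    // meet = OBJECT_NO_SPECIAL_EQUALITY: the call compiles as a generic call.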
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -211,18 +211,21 @@ JSContext::getTypeNewObject(JSProtoKey k
     return proto->getNewType(this);
 }
 
 inline js::types::TypeObject *
 JSContext::getTypeCallerInitObject(bool isArray)
 {
     if (typeInferenceEnabled()) {
         JSStackFrame *caller = js_GetScriptedCaller(this, NULL);
-        if (caller && caller->script()->compartment == compartment)
-            return caller->script()->getTypeInitObject(this, caller->pc(this), isArray);
+        if (caller && caller->script()->compartment == compartment) {
+            JSScript *script;
+            jsbytecode *pc = caller->inlinepc(this, &script);
+            return script->getTypeInitObject(this, pc, isArray);
+        }
     }
     return getTypeNewObject(isArray ? JSProto_Array : JSProto_Object);
 }
 
 inline bool
 JSContext::markTypeCallerUnexpected(js::types::jstype type)
 {
     if (!typeInferenceEnabled())
@@ -243,28 +246,34 @@ JSContext::markTypeCallerUnexpected(js::
 
     /*
      * Watch out if the caller is in a different compartment from this one.
      * This must have gone through a cross-compartment wrapper.
      */
     if (caller->script()->compartment != compartment)
         return true;
 
-    switch ((JSOp)*caller->pc(this)) {
+    JSScript *script;
+    jsbytecode *pc = caller->inlinepc(this, &script);
+
+    switch ((JSOp)*pc) {
       case JSOP_CALL:
       case JSOP_EVAL:
       case JSOP_FUNCALL:
       case JSOP_FUNAPPLY:
       case JSOP_NEW:
         break;
+      case JSOP_ITER:
+        /* This is also used for handling custom iterators. */
+        break;
       default:
         return true;
     }
 
-    return caller->script()->typeMonitorResult(this, caller->pc(this), type);
+    return script->typeMonitorResult(this, pc, type);
 }
 
 inline bool
 JSContext::markTypeCallerUnexpected(const js::Value &value)
 {
     return markTypeCallerUnexpected(js::types::GetValueType(this, value));
 }
 
@@ -384,17 +393,29 @@ JSContext::markTypeArrayNotPacked(js::ty
         return true;
     js::types::AutoEnterTypeInference enter(this);
 
     obj->markNotPacked(this, notDense);
 
     return compartment->types.checkPendingRecompiles(this);
 }
 
-bool
+inline bool
+JSContext::markTypeFunctionUninlineable(js::types::TypeObject *obj)
+{
+    if (!typeInferenceEnabled() || obj->isUninlineable)
+        return true;
+    js::types::AutoEnterTypeInference enter(this);
+
+    obj->markUninlineable(this);
+
+    return compartment->types.checkPendingRecompiles(this);
+}
+
+inline bool
 JSContext::markTypeObjectUnknownProperties(js::types::TypeObject *obj)
 {
     if (!typeInferenceEnabled() || obj->unknownProperties)
         return true;
 
     js::types::AutoEnterTypeInference enter(this);
     obj->markUnknown(this);
     return compartment->types.checkPendingRecompiles(this);
@@ -1201,28 +1222,30 @@ TypeObject::name()
 #endif
 }
 
 inline TypeObject::TypeObject(jsid name, JSObject *proto)
     : proto(proto), emptyShapes(NULL), isFunction(false), marked(false),
       initializerObject(false), initializerArray(false), initializerOffset(0),
       contribution(0), propertySet(NULL), propertyCount(0),
       instanceList(NULL), instanceNext(NULL), next(NULL), unknownProperties(false),
-      isDenseArray(false), isPackedArray(false), hasSpecialEquality(false),
+      isDenseArray(false), isPackedArray(false),
+      isUninlineable(false), hasSpecialEquality(false),
       singleton(NULL)
 {
 #ifdef DEBUG
     this->name_ = name;
 #endif
 
     InferSpew(ISpewOps, "newObject: %s", this->name());
 
     if (proto) {
         TypeObject *prototype = proto->getType();
         if (prototype->unknownProperties) {
+            isUninlineable = true;
             hasSpecialEquality = true;
             unknownProperties = true;
         }
         instanceNext = prototype->instanceList;
         prototype->instanceList = this;
     }
 }
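
getTypeCallerInitObject and markTypeCallerUnexpected above now resolve the caller's pc with inlinepc, attributing type information to the innermost inlined script rather than to the frame's outermost script. Why that matters, sketched with illustrative names:

    function makeBox(v) {
      return [v];    // the initializer's type object is keyed to this op in
    }                // makeBox, even when makeBox has been inlined elsewhere
    function caller() {
      return makeBox(1);
    }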
 
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -107,53 +107,83 @@ using namespace js::types;
 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
 
 #ifdef DEBUG
 JSObject *const JSStackFrame::sInvalidScopeChain = (JSObject *)0xbeef;
 #endif
 
 jsbytecode *
-JSStackFrame::pc(JSContext *cx, JSStackFrame *next)
+JSStackFrame::pc(JSContext *cx, JSStackFrame *next, JSInlinedSite **pinlined)
 {
     JS_ASSERT_IF(next, next->prev_ == this);
 
     StackSegment *seg = cx->containingSegment(this);
     JSFrameRegs *regs = seg->getCurrentRegs();
-    if (regs->fp == this)
+    if (regs->fp == this) {
+        if (pinlined)
+            *pinlined = regs->inlined;
         return regs->pc;
+    }
 
     if (!next)
         next = seg->computeNextFrame(this);
 
-    if (next->flags_ & JSFRAME_HAS_PREVPC)
+    if (next->flags_ & JSFRAME_HAS_PREVPC) {
+        if (pinlined)
+            *pinlined = next->prevInline_;
         return next->prevpc_;
+    }
 
 #if defined(JS_METHODJIT) && defined(JS_MONOIC)
     js::mjit::JITScript *jit = script()->getJIT(isConstructing());
-    jsbytecode *pc = jit->nativeToPC(next->ncode_);
+    js::mjit::CallSite *inlined;
+    jsbytecode *pc = jit->nativeToPC(next->ncode_, &inlined);
 
     /*
      * Remember the PC in the next frame. This is needed during recompilation,
      * which fills in frame PCs as the JIT script may become unavailable.
      */
     next->flags_ |= JSFRAME_HAS_PREVPC;
     next->prevpc_ = pc;
-
+    next->prevInline_ = inlined;
+
+    if (pinlined)
+        *pinlined = inlined;
     return pc;
 #else
     JS_NOT_REACHED("Unknown PC for frame");
     return NULL;
 #endif
 }
 
+jsbytecode *
+JSStackFrame::inlinepc(JSContext *cx, JSScript **pscript)
+{
+    JSInlinedSite *inlined;
+    jsbytecode *pc = this->pc(cx, NULL, &inlined);
+
+#ifdef JS_METHODJIT
+    if (inlined) {
+        JS_ASSERT(inlined->inlineIndex < jit()->nInlineFrames);
+        js::mjit::InlineFrame *frame = &jit()->inlineFrames()[inlined->inlineIndex];
+        *pscript = frame->fun->script();
+        return (*pscript)->code + inlined->pcOffset;
+    }
+#endif
+
+    JS_ASSERT(!inlined);
+    *pscript = script();
+    return pc;
+}
+
 JSObject *
 js::GetScopeChain(JSContext *cx)
 {
-    JSStackFrame *fp = js_GetTopStackFrame(cx);
+    JSStackFrame *fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
     if (!fp) {
         /*
          * There is no code active on this context. In place of an actual
          * scope chain, use the context's global object, which is set in
          * js_InitFunctionAndObjectClasses, and which represents the default
          * scope chain for the embedding. See also js_FindClassObject.
          *
          * For embeddings that use the inner and outer object hooks, the inner
@@ -2211,22 +2241,16 @@ IteratorNext(JSContext *cx, JSObject *it
         }
     }
     return js_IteratorNext(cx, iterobj, rval);
 }
 
 static inline bool
 ScriptPrologue(JSContext *cx, JSStackFrame *fp, bool newType)
 {
-    /*
-     * Clear out the return address used by JIT frames, to mark this as an
-     * interpreter frame for the recompiler.
-     */
-    fp->setNativeReturnAddress(NULL);
-
     if (fp->isConstructing()) {
         JSObject *obj = js_CreateThisForFunction(cx, &fp->callee(), newType);
         if (!obj)
             return false;
         fp->functionThis().setObject(*obj);
     }
     if (fp->isExecuteFrame()) {
         if (JSInterpreterHook hook = cx->debugHooks->executeHook)
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -45,21 +45,29 @@
  */
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsfun.h"
 #include "jsopcode.h"
 #include "jsscript.h"
 #include "jsvalue.h"
 
+#ifdef JS_METHODJIT
+namespace js { namespace mjit { struct CallSite; } }
+typedef js::mjit::CallSite JSInlinedSite;
+#else
+struct JSInlinedSite {};
+#endif
+
 struct JSFrameRegs
 {
     STATIC_SKIP_INFERENCE
     js::Value       *sp;                  /* stack pointer */
     jsbytecode      *pc;                  /* program counter */
+    JSInlinedSite   *inlined;             /* inlined call site */
     JSStackFrame    *fp;                  /* active frame */
 };
 
 /* Flags to toggle js::Interpret() execution. */
 enum JSInterpMode
 {
     JSINTERP_NORMAL            =     0, /* interpreter is running normally */
     JSINTERP_RECORD            =     1, /* interpreter has been started to record/run traces */
@@ -95,17 +103,17 @@ enum JSFrameFlags
     /* Lazy frame initialization */
     JSFRAME_HAS_IMACRO_PC      =   0x8000, /* frame has imacpc value available */
     JSFRAME_HAS_CALL_OBJ       =  0x10000, /* frame has a callobj reachable from scopeChain_ */
     JSFRAME_HAS_ARGS_OBJ       =  0x20000, /* frame has an argsobj in JSStackFrame::args */
     JSFRAME_HAS_HOOK_DATA      =  0x40000, /* frame has hookData_ set */
     JSFRAME_HAS_ANNOTATION     =  0x80000, /* frame has annotation_ set */
     JSFRAME_HAS_RVAL           = 0x100000, /* frame has rval_ set */
     JSFRAME_HAS_SCOPECHAIN     = 0x200000, /* frame has scopeChain_ set */
-    JSFRAME_HAS_PREVPC         = 0x400000  /* frame has prevpc_ set */
+    JSFRAME_HAS_PREVPC         = 0x400000  /* frame has prevpc_ and prevInline_ set */
 };
 
 namespace js { namespace mjit { struct JITScript; } }
 
 /*
  * A stack frame is a part of a stack segment (see js::StackSegment) which is
  * on the per-thread VM stack (see js::StackSpace).
  */
@@ -124,20 +132,25 @@ struct JSStackFrame
     } args;
     mutable JSObject    *scopeChain_;   /* current scope chain */
     JSStackFrame        *prev_;         /* previous cx->regs->fp */
     void                *ncode_;        /* return address for method JIT */
 
     /* Lazily initialized */
     js::Value           rval_;          /* return value of the frame */
     jsbytecode          *prevpc_;       /* pc of previous frame */
+    JSInlinedSite       *prevInline_;   /* inlined site in previous frame */
     jsbytecode          *imacropc_;     /* pc of macro caller */
     void                *hookData_;     /* closure returned by call hook */
     void                *annotation_;   /* perhaps remove with bug 546848 */
 
+#if JS_BITS_PER_WORD == 32
+    void *padding;
+#endif
+
     friend class js::StackSpace;
     friend class js::FrameRegsIter;
     friend struct JSContext;
 
     inline void initPrev(JSContext *cx);
 
   public:
     /*
@@ -228,16 +241,19 @@ struct JSStackFrame
      * to set cx->regs->fp to when this frame is popped.
      */
 
     JSStackFrame *prev() const {
         return prev_;
     }
 
     inline void resetGeneratorPrev(JSContext *cx);
+    inline void resetInlinePrev(JSStackFrame *prevfp, jsbytecode *prevpc);
+
+    inline void initInlineFrame(JSFunction *fun, JSStackFrame *prevfp, jsbytecode *prevpc);
 
     /*
      * Frame slots
      *
      * A frame's 'slots' are the fixed slots associated with the frame (like
      * local variables) followed by an expression stack holding temporary
      * values. A frame's 'base' is the base of the expression stack.
      */
@@ -255,30 +271,48 @@ struct JSStackFrame
         JS_ASSERT_IF(maybeFun(), i < script()->bindings.countVars());
         return slots()[i];
     }
 
     /*
      * Script
      *
      * All function and global frames have an associated JSScript which holds
-     * the bytecode being executed for the frame.
+     * the bytecode being executed for the frame. This script/bytecode does
+     * not reflect any inlining that has been performed by the method JIT.
+     *
+     * If other frames were inlined into this one, the script/pc reflect the
+     * point of the outermost call. Use inlinepc to get the script/pc for
+     * the innermost inlined frame. Inlined frame invariants:
+     *
+     * - Inlined frames have the same scope chain as the outer frame.
+     * - Inlined frames have the same strictness as the outer frame.
      */
 
     /*
      * Get the frame's current bytecode, assuming |this| is in |cx|.
      * next is the frame whose prev == this, or NULL if not known or if this == cx->fp().
+     * If the pc is at a call site where the method JIT inlined frames, the
+     * state of those inlined frame(s) is returned through pinlined.
      */
-    jsbytecode *pc(JSContext *cx, JSStackFrame *next = NULL);
+    jsbytecode *pc(JSContext *cx, JSStackFrame *next = NULL, JSInlinedSite **pinlined = NULL);
 
     jsbytecode *prevpc() {
         JS_ASSERT((prev_ != NULL) && (flags_ & JSFRAME_HAS_PREVPC));
         return prevpc_;
     }
 
+    JSInlinedSite *prevInline() {
+        JS_ASSERT((prev_ != NULL) && (flags_ & JSFRAME_HAS_PREVPC));
+        return prevInline_;
+    }
+
+    /* Get the innermost pc/script in this frame, looking through any inlining. */
+    jsbytecode *inlinepc(JSContext *cx, JSScript **pscript);
+
     JSScript *script() const {
         JS_ASSERT(isScriptFrame());
         return isFunctionFrame()
                ? isEvalFrame() ? args.script : fun()->script()
                : exec.script;
     }
 
     JSScript *functionScript() const {
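A minimal sketch of the consumer pattern this header change sets up (the surrounding call site is hypothetical; inlinepc and its contract are from the patch): code that used to pair fp->script() with cx->regs->pc must now ask inlinepc for the innermost script/pc, as the Detecting, js_InferFlags and PropertyCache conversions below do.

    // Hypothetical call site: fetch the innermost script/pc, looking through
    // any frames the method JIT inlined into fp, then decode the opcode there.
    JSScript *script;
    jsbytecode *pc = cx->fp()->inlinepc(cx, &script);
    JSOp op = js_GetOpcode(cx, script, pc);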
--- a/js/src/jsinterpinlines.h
+++ b/js/src/jsinterpinlines.h
@@ -53,47 +53,68 @@
 
 inline void
 JSStackFrame::initPrev(JSContext *cx)
 {
     JS_ASSERT(flags_ & JSFRAME_HAS_PREVPC);
     if (JSFrameRegs *regs = cx->regs) {
         prev_ = regs->fp;
         prevpc_ = regs->pc;
+        prevInline_ = regs->inlined;
         JS_ASSERT_IF(!prev_->isDummyFrame() && !prev_->hasImacropc(),
                      uint32(prevpc_ - prev_->script()->code) < prev_->script()->length);
     } else {
         prev_ = NULL;
 #ifdef DEBUG
         prevpc_ = (jsbytecode *)0xbadc;
+        prevInline_ = (JSInlinedSite *)0xbadc;
 #endif
     }
 }
 
 inline void
 JSStackFrame::resetGeneratorPrev(JSContext *cx)
 {
     flags_ |= JSFRAME_HAS_PREVPC;
     initPrev(cx);
 }
 
 inline void
+JSStackFrame::initInlineFrame(JSFunction *fun, JSStackFrame *prevfp, jsbytecode *prevpc)
+{
+    flags_ = JSFRAME_FUNCTION;
+    exec.fun = fun;
+    resetInlinePrev(prevfp, prevpc);
+}
+
+inline void
+JSStackFrame::resetInlinePrev(JSStackFrame *prevfp, jsbytecode *prevpc)
+{
+    JS_ASSERT_IF(flags_ & JSFRAME_HAS_PREVPC, prevInline_);
+    flags_ |= JSFRAME_HAS_PREVPC;
+    prev_ = prevfp;
+    prevpc_ = prevpc;
+    prevInline_ = NULL;
+}
+
+inline void
 JSStackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun,
                             uint32 nactual, uint32 flagsArg)
 {
     JS_ASSERT((flagsArg & ~(JSFRAME_CONSTRUCTING |
                             JSFRAME_OVERFLOW_ARGS |
                             JSFRAME_UNDERFLOW_ARGS)) == 0);
     JS_ASSERT(fun == callee.getFunctionPrivate());
 
     /* Initialize stack frame members. */
     flags_ = JSFRAME_FUNCTION | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN | flagsArg;
     exec.fun = fun;
     args.nactual = nactual;  /* only need to write if over/under-flow */
     scopeChain_ = callee.getParent();
+    ncode_ = NULL;
     initPrev(cx);
     JS_ASSERT(!hasImacropc());
     JS_ASSERT(!hasHookData());
     JS_ASSERT(annotation() == NULL);
 
     JS_ASSERT(!hasCallObj());
 }
 
@@ -183,17 +204,17 @@ JSStackFrame::initEvalFrame(JSContext *c
     } else {
         exec.script = script;
     }
 
     scopeChain_ = &prev->scopeChain();
     JS_ASSERT_IF(isFunctionFrame(), &callObj() == &prev->callObj());
 
     prev_ = prev;
-    prevpc_ = prev->pc(cx);
+    prevpc_ = prev->pc(cx, NULL, &prevInline_);
     JS_ASSERT(!hasImacropc());
     JS_ASSERT(!hasHookData());
     setAnnotation(prev->annotation());
 }
 
 inline void
 JSStackFrame::initGlobalFrame(JSScript *script, JSObject &chain, uint32 flagsArg)
 {
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -372,25 +372,19 @@ GetCustomIterator(JSContext *cx, JSObjec
         return true;
     }
 
     /*
      * Notify type inference of the custom iterator.  This only needs to be done
      * if this is coming from a 'for in' loop, not a call to Iterator itself.
      * If an Iterator object is used in a for loop then the values fetched in
      * that loop are unknown, whether there is a custom __iterator__ or not.
-     * Watch out for the case where this iteration request came through a
-     * cross-compartment wrapper, where cx->regs->pc is NULL. The iteration
-     * value will have already been marked as unknown by the wrapper.
      */
-    if (!(flags & JSITER_OWNONLY) && cx->regs->pc) {
-        JS_ASSERT(JSOp(*cx->regs->pc) == JSOP_ITER);
-        if (!cx->fp()->script()->typeMonitorUnknown(cx, cx->regs->pc))
-            return false;
-    }
+    if (!(flags & JSITER_OWNONLY) && !cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
+        return false;
 
     /* Otherwise call it and return that object. */
     LeaveTrace(cx);
     Value arg = BooleanValue((flags & JSITER_FOREACH) == 0);
     if (!ExternalInvoke(cx, ObjectValue(*obj), *vp, 1, &arg, vp))
         return false;
     if (vp->isPrimitive()) {
         /*
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -3065,17 +3065,17 @@ JS_DEFINE_CALLINFO_3(extern, CONSTRUCTOR
 JS_REQUIRES_STACK JSBool
 Detecting(JSContext *cx, jsbytecode *pc)
 {
     JSScript *script;
     jsbytecode *endpc;
     JSOp op;
     JSAtom *atom;
 
-    script = cx->fp()->script();
+    cx->fp()->inlinepc(cx, &script);
     endpc = script->code + script->length;
     for (;; pc += js_CodeSpec[op].length) {
         JS_ASSERT_IF(!cx->fp()->hasImacropc(), script->code <= pc && pc < endpc);
 
         /* General case: a branch or equality op follows the access. */
         op = js_GetOpcode(cx, script, pc);
         if (js_CodeSpec[op].format & JOF_DETECTING)
             return JS_TRUE;
@@ -3136,28 +3136,31 @@ js_InferFlags(JSContext *cx, uintN defau
 
     JS_ASSERT_NOT_ON_TRACE(cx);
 
     jsbytecode *pc;
     const JSCodeSpec *cs;
     uint32 format;
     uintN flags = 0;
 
-    JSStackFrame *const fp = js_GetTopStackFrame(cx);
-    if (!fp || !(pc = cx->regs->pc))
+    JSStackFrame *const fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
+    if (!fp || !cx->regs->pc)
         return defaultFlags;
-    cs = &js_CodeSpec[js_GetOpcode(cx, fp->script(), pc)];
+
+    JSScript *script;
+    pc = fp->inlinepc(cx, &script);
+
+    cs = &js_CodeSpec[js_GetOpcode(cx, script, pc)];
     format = cs->format;
     if (JOF_MODE(format) != JOF_NAME)
         flags |= JSRESOLVE_QUALIFIED;
     if ((format & (JOF_SET | JOF_FOR)) || fp->isAssigning()) {
         flags |= JSRESOLVE_ASSIGNING;
     } else if (cs->length >= 0) {
         pc += cs->length;
-        JSScript *script = cx->fp()->script();
         if (pc < script->code + script->length && Detecting(cx, pc))
             flags |= JSRESOLVE_DETECTING;
     }
     if (format & JOF_DECLARING)
         flags |= JSRESOLVE_DECLARING;
     return flags;
 }
 
@@ -5121,17 +5124,17 @@ js_FindPropertyHelper(JSContext *cx, jsi
                       JSObject **objp, JSObject **pobjp, JSProperty **propp)
 {
     JSObject *scopeChain, *obj, *parent, *pobj;
     PropertyCacheEntry *entry;
     int scopeIndex, protoIndex;
     JSProperty *prop;
 
     JS_ASSERT_IF(cacheResult, !JS_ON_TRACE(cx));
-    scopeChain = &js_GetTopStackFrame(cx)->scopeChain();
+    scopeChain = &js_GetTopStackFrame(cx, FRAME_EXPAND_NONE)->scopeChain();
 
     /* Scan entries on the scope chain that we can cache across. */
     entry = JS_NO_PROP_CACHE_FILL;
     obj = scopeChain;
     parent = obj->getParent();
     for (scopeIndex = 0;
          parent
          ? IsCacheableNonGlobalScope(obj)
@@ -5589,17 +5592,17 @@ js_GetMethod(JSContext *cx, JSObject *ob
         return js_GetXMLMethod(cx, obj, id, vp);
 #endif
     return op(cx, obj, obj, id, vp);
 }
 
 JS_FRIEND_API(bool)
 js_CheckUndeclaredVarAssignment(JSContext *cx, JSString *propname)
 {
-    JSStackFrame *const fp = js_GetTopStackFrame(cx);
+    JSStackFrame *const fp = js_GetTopStackFrame(cx, FRAME_EXPAND_TOP);
     if (!fp)
         return true;
 
     /* If neither cx nor the code is strict, then no check is needed. */
     if (!(fp->isScriptFrame() && fp->script()->strictModeCode) &&
         !cx->hasStrictOption()) {
         return true;
     }
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -2897,17 +2897,17 @@ Decompile(SprintStack *ss, jsbytecode *p
                      * We must be in an eval called from jp->fun, where
                      * jp->script is the eval-compiled script.
                      *
                      * However, it's possible that a js_Invoke already
                      * pushed a frame trying to call Construct on an
                      * object that's not a constructor, causing us to be
                      * called with an intervening frame on the stack.
                      */
-                    JSStackFrame *fp = js_GetTopStackFrame(cx);
+                    JSStackFrame *fp = js_GetTopStackFrame(cx, FRAME_EXPAND_ALL);
                     if (fp) {
                         while (!fp->isEvalFrame())
                             fp = fp->prev();
                         JS_ASSERT(fp->script() == jp->script);
                         JS_ASSERT(fp->prev()->fun() == jp->fun);
                         JS_ASSERT(jp->fun->isInterpreted());
                         JS_ASSERT(jp->script != jp->fun->script());
                         JS_ASSERT(JSScript::isValidOffset(jp->script->upvarsOffset));
@@ -5080,22 +5080,21 @@ js_DecompileValueGenerator(JSContext *cx
     jsbytecode *pc;
 
     Value v = Valueify(v_in);
 
     JS_ASSERT(spindex < 0 ||
               spindex == JSDVG_IGNORE_STACK ||
               spindex == JSDVG_SEARCH_STACK);
 
-    LeaveTrace(cx);
-    
-    if (!cx->regs || !cx->regs->fp || !cx->regs->fp->isScriptFrame())
+    fp = js_GetTopStackFrame(cx, FRAME_EXPAND_TOP);
+
+    if (!cx->regs || !fp || !fp->isScriptFrame())
         goto do_fallback;
 
-    fp = cx->regs->fp;
     script = fp->script();
     pc = fp->hasImacropc() ? fp->imacropc() : cx->regs->pc;
     JS_ASSERT(script->code <= pc && pc < script->code + script->length);
 
     if (pc < script->main)
         goto do_fallback;
     
     if (spindex != JSDVG_IGNORE_STACK) {
--- a/js/src/jspropertycache.cpp
+++ b/js/src/jspropertycache.cpp
@@ -131,18 +131,19 @@ PropertyCache::fill(JSContext *cx, JSObj
         PCMETER(longchains++);
         return JS_NO_PROP_CACHE_FILL;
     }
 
     /*
      * Optimize the cached vword based on our parameters and the current pc's
      * opcode format flags.
      */
-    pc = cx->regs->pc;
-    op = js_GetOpcode(cx, cx->fp()->script(), pc);
+    JSScript *script;
+    pc = cx->fp()->inlinepc(cx, &script);
+    op = js_GetOpcode(cx, script, pc);
     cs = &js_CodeSpec[op];
     kshape = 0;
 
     do {
         /*
          * Check for a prototype "plain old method" callee computation. What
          * is a plain old method? It's a function-valued property with stub
          * getter, so get of a function is idempotent.
@@ -307,36 +308,42 @@ GetAtomFromBytecode(JSContext *cx, jsbyt
     if (op == JSOP_LENGTH)
         return cx->runtime->atomState.lengthAtom;
 
     // The method JIT's implementation of instanceof contains an internal lookup
     // of the prototype property.
     if (op == JSOP_INSTANCEOF)
         return cx->runtime->atomState.classPrototypeAtom;
 
+    JSScript *script;
+    cx->fp()->inlinepc(cx, &script);
+
     ptrdiff_t pcoff = (JOF_TYPE(cs.format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
     JSAtom *atom;
-    GET_ATOM_FROM_BYTECODE(cx->fp()->script(), pc, pcoff, atom);
+    GET_ATOM_FROM_BYTECODE(script, pc, pcoff, atom);
     return atom;
 }
 
 JS_REQUIRES_STACK JSAtom *
 PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject **pobjp,
                         PropertyCacheEntry *entry)
 {
     JSObject *obj, *pobj, *tmp;
     uint32 vcap;
 
     JSStackFrame *fp = cx->fp();
 
+    JSScript *script;
+    fp->inlinepc(cx, &script);
+
     JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
-    JS_ASSERT(uintN((fp->hasImacropc() ? fp->imacropc() : pc) - fp->script()->code)
-              < fp->script()->length);
+    JS_ASSERT(uintN((fp->hasImacropc() ? fp->imacropc() : pc) - script->code)
+              < script->length);
 
-    JSOp op = js_GetOpcode(cx, fp->script(), pc);
+    JSOp op = js_GetOpcode(cx, script, pc);
     const JSCodeSpec &cs = js_CodeSpec[op];
 
     obj = *objp;
     vcap = entry->vcap;
 
     if (entry->kpc != pc) {
         PCMETER(kpcmisses++);
 
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -1522,16 +1522,22 @@ JSScript::NewScriptFromCG(JSContext *cx,
 
 #ifdef CHECK_SCRIPT_OWNER
         script->owner = NULL;
 #endif
         if (cg->flags & TCF_FUN_HEAVYWEIGHT)
             fun->flags |= JSFUN_HEAVYWEIGHT;
         if (!script->typeSetFunction(cx, fun))
             goto bad;
+
+        /* Watch for scripts whose functions will not be cloned. These are singletons. */
+        if (cx->typeInferenceEnabled() && cg->parent && cg->parent->compiling() &&
+            cg->parent->asCodeGenerator()->checkSingletonContext()) {
+            fun->getType()->singleton = fun;
+        }
     }
 
     /* Tell the debugger about this compiled script. */
     js_CallNewScriptHook(cx, script, fun);
 #ifdef DEBUG
     {
         jsrefcount newEmptyLive, newLive, newTotal;
         if (script->isEmpty()) {
@@ -1657,16 +1663,18 @@ DestroyScript(JSContext *cx, JSScript *s
 
     types::TypeResult *result = script->typeResults;
     while (result) {
         types::TypeResult *next = result->next;
         cx->free(result);
         result = next;
     }
 
+    cx->free(script->varTypes);
+
 #if defined(JS_METHODJIT)
     mjit::ReleaseScriptCode(cx, script);
 #endif
 
     JS_REMOVE_LINK(&script->links);
 
     cx->free(script);
 }
@@ -1724,20 +1732,29 @@ js_TraceScript(JSTracer *trc, JSScript *
         } while (i != 0);
     }
 
     if (JSScript::isValidOffset(script->constOffset)) {
         JSConstArray *constarray = script->consts();
         MarkValueRange(trc, constarray->length, constarray->vector, "consts");
     }
 
+    /*
+     * Mark the object keeping this script alive. The script can be traced
+     * separately if, e.g., we are GC'ing while type inference code is active,
+     * and we need to make sure both the script and the object survive the GC.
+     */
     if (!script->isCachedEval && !script->isUncachedEval && script->u.object) {
         JS_SET_TRACING_NAME(trc, "object");
         Mark(trc, script->u.object);
     }
+    if (script->fun) {
+        JS_SET_TRACING_NAME(trc, "script_fun");
+        Mark(trc, script->fun);
+    }
 
     if (IS_GC_MARKING_TRACER(trc) && script->filename)
         js_MarkScriptFilename(script->filename);
 
     script->bindings.trace(trc);
 
     /*
      * Trace all type objects associated with the script, these can be freely
@@ -1745,20 +1762,22 @@ js_TraceScript(JSTracer *trc, JSScript *
      */
     types::TypeObject *obj = script->typeObjects;
     while (obj) {
         if (!obj->marked)
             obj->trace(trc);
         obj = obj->next;
     }
 
-    if (script->fun) {
-        JS_SET_TRACING_NAME(trc, "script_fun");
-        Mark(trc, script->fun);
-    }
+#ifdef JS_METHODJIT
+    if (script->jitNormal)
+        script->jitNormal->trace(trc);
+    if (script->jitCtor)
+        script->jitCtor->trace(trc);
+#endif
 }
 
 JSObject *
 js_NewScriptObject(JSContext *cx, JSScript *script)
 {
     AutoScriptRooter root(cx, script);
 
     JS_ASSERT(!script->u.object);
--- a/js/src/jsvalue.h
+++ b/js/src/jsvalue.h
@@ -197,21 +197,24 @@ JS_STATIC_ASSERT((JSVAL_TYPE_NONFUNOBJ &
 JS_STATIC_ASSERT((JSVAL_TYPE_FUNOBJ & 0xF) == JSVAL_TYPE_OBJECT);
 #endif
 
 static JS_ALWAYS_INLINE jsval_layout
 BOX_NON_DOUBLE_JSVAL(JSValueType type, uint64 *slot)
 {
     jsval_layout l;
     JS_ASSERT(type > JSVAL_TYPE_DOUBLE && type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
+    /*
+    // FIXME overasserting
     JS_ASSERT_IF(type == JSVAL_TYPE_STRING ||
                  type == JSVAL_TYPE_OBJECT ||
                  type == JSVAL_TYPE_NONFUNOBJ ||
                  type == JSVAL_TYPE_FUNOBJ,
                  *(uint32 *)slot != 0);
+    */
     l.s.tag = JSVAL_TYPE_TO_TAG(type & 0xF);
     /* A 32-bit value in a 64-bit slot always occupies the low-addressed end. */
     l.s.payload.u32 = *(uint32 *)slot;
     return l;
 }
 
 static JS_ALWAYS_INLINE void
 UNBOX_NON_DOUBLE_JSVAL(jsval_layout l, uint64 *out)
@@ -301,21 +304,24 @@ BOX_NON_DOUBLE_JSVAL(JSValueType type, u
 {
     /* N.B. for 32-bit payloads, the high 32 bits of the slot are trash. */
     jsval_layout l;
     JS_ASSERT(type > JSVAL_TYPE_DOUBLE && type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET);
     uint32 isI32 = (uint32)(type < JSVAL_LOWER_INCL_TYPE_OF_PTR_PAYLOAD_SET);
     uint32 shift = isI32 * 32;
     uint64 mask = ((uint64)-1) >> shift;
     uint64 payload = *slot & mask;
+    /*
+    // FIXME overasserting
     JS_ASSERT_IF(type == JSVAL_TYPE_STRING ||
                  type == JSVAL_TYPE_OBJECT ||
                  type == JSVAL_TYPE_NONFUNOBJ ||
                  type == JSVAL_TYPE_FUNOBJ,
                  payload != 0);
+    */
     l.asBits = payload | JSVAL_TYPE_TO_SHIFTED_TAG(type & 0xF);
     return l;
 }
 
 static JS_ALWAYS_INLINE void
 UNBOX_NON_DOUBLE_JSVAL(jsval_layout l, uint64 *out)
 {
     JS_ASSERT(!JSVAL_IS_DOUBLE_IMPL(l));
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -610,21 +610,18 @@ Reify(JSContext *cx, JSCompartment *orig
     if (isKeyIter)
         return VectorToKeyIterator(cx, obj, ni->flags, keys, vp);
     return VectorToValueIterator(cx, obj, ni->flags, keys, vp); 
 }
 
 bool
 JSCrossCompartmentWrapper::iterate(JSContext *cx, JSObject *wrapper, uintN flags, Value *vp)
 {
-    if (!(flags & JSITER_OWNONLY)) {
-        JS_ASSERT(JSOp(*cx->regs->pc) == JSOP_ITER);
-        if (!cx->fp()->script()->typeMonitorUnknown(cx, cx->regs->pc))
-            return false;
-    }
+    if (!(flags & JSITER_OWNONLY) && !cx->markTypeCallerUnexpected(types::TYPE_UNKNOWN))
+        return false;
 
     PIERCE(cx, wrapper, GET,
            NOTHING,
            JSWrapper::iterate(cx, wrapper, flags, vp),
            CanReify(vp) ? Reify(cx, call.origin, vp) : call.origin->wrap(cx, vp));
 }
 
 bool
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -7323,17 +7323,17 @@ js_SetDefaultXMLNamespace(JSContext *cx,
 {
     Value argv[2];
     argv[0].setString(cx->runtime->emptyString);
     argv[1] = v;
     JSObject *ns = js_ConstructObject(cx, &js_NamespaceClass, NULL, NULL, 2, argv);
     if (!ns)
         return JS_FALSE;
 
-    JSStackFrame *fp = js_GetTopStackFrame(cx);
+    JSStackFrame *fp = js_GetTopStackFrame(cx, FRAME_EXPAND_NONE);
     JSObject &varobj = fp->varobj(cx);
 
     if (!cx->addTypePropertyId(varobj.getType(), JS_DEFAULT_XML_NAMESPACE_ID, types::TYPE_UNKNOWN))
         return JS_FALSE;
 
     if (!varobj.defineProperty(cx, JS_DEFAULT_XML_NAMESPACE_ID, ObjectValue(*ns),
                                PropertyStub, StrictPropertyStub, JSPROP_PERMANENT)) {
         return JS_FALSE;
@@ -7456,17 +7456,17 @@ js_FindXMLProperty(JSContext *cx, const 
         JS_ASSERT(nameobj->getClass() == &js_AttributeNameClass ||
                   nameobj->getClass() == &js_QNameClass);
     }
 
     qn = nameobj;
     if (!IsFunctionQName(cx, qn, &funid))
         return JS_FALSE;
 
-    obj = &js_GetTopStackFrame(cx)->scopeChain();
+    obj = &js_GetTopStackFrame(cx, FRAME_EXPAND_NONE)->scopeChain();
     do {
         /* Skip any With object that can wrap XML. */
         target = obj;
         while (target->getClass() == &js_WithClass) {
              proto = target->getProto();
              if (!proto)
                  break;
              target = proto;
--- a/js/src/methodjit/BaseAssembler.h
+++ b/js/src/methodjit/BaseAssembler.h
@@ -575,19 +575,19 @@ static const JSC::MacroAssembler::Regist
     }
 
     // Wrap AbstractMacroAssembler::getLinkerCallReturnOffset which is protected.
     unsigned callReturnOffset(Call call) {
         return getLinkerCallReturnOffset(call);
     }
 
 
-#define STUB_CALL_TYPE(type)                                                \
-    Call callWithVMFrame(type stub, jsbytecode *pc, uint32 fd) {            \
-        return fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stub), pc, fd);   \
+#define STUB_CALL_TYPE(type)                                                             \
+    Call callWithVMFrame(type stub, jsbytecode *pc, DataLabelPtr *pinlined, uint32 fd) { \
+        return fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stub), pc, pinlined, fd);      \
     }
 
     STUB_CALL_TYPE(JSObjStub);
     STUB_CALL_TYPE(VoidPtrStubUInt32);
     STUB_CALL_TYPE(VoidStubUInt32);
     STUB_CALL_TYPE(VoidStub);
 
 #undef STUB_CALL_TYPE
@@ -605,41 +605,52 @@ static const JSC::MacroAssembler::Regist
         }
 
         // The JIT has moved Arg1 already, and we've guaranteed to not clobber
         // it. Move ArgReg0 into place now. setupFallibleVMFrame will not
         // clobber it either.
         move(MacroAssembler::stackPointerRegister, Registers::ArgReg0);
     }
 
-    void setupFallibleVMFrame(jsbytecode *pc, int32 frameDepth) {
+    void setupFallibleVMFrame(jsbytecode *pc, DataLabelPtr *pinlined, int32 frameDepth) {
         setupInfallibleVMFrame(frameDepth);
 
         /* regs->fp = fp */
         storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
 
         /* PC -> regs->pc :( */
-        storePtr(ImmPtr(pc),
-                 FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc)));
+        storePtr(ImmPtr(pc), FrameAddress(offsetof(VMFrame, regs.pc)));
+
+        /* inlined -> regs->inlined :( */
+        DataLabelPtr ptr = storePtrWithPatch(ImmPtr(NULL),
+                                             FrameAddress(offsetof(VMFrame, regs.inlined)));
+        if (pinlined)
+            *pinlined = ptr;
     }
 
     // An infallible VM call is a stub call (taking a VMFrame & and one
     // optional parameter) that does not need |pc| and |fp| updated, since
     // the call is guaranteed to not fail. However, |sp| is always coherent.
     Call infallibleVMCall(void *ptr, int32 frameDepth) {
         setupInfallibleVMFrame(frameDepth);
         return wrapVMCall(ptr);
     }
 
     // A fallible VM call is a stub call (taking a VMFrame & and one optional
     // parameter) that needs the entire VMFrame to be coherent, meaning that
-    // |pc| and |fp| are guaranteed to be up-to-date.
-    Call fallibleVMCall(void *ptr, jsbytecode *pc, int32 frameDepth) {
-        setupFallibleVMFrame(pc, frameDepth);
-        return wrapVMCall(ptr);
+    // |pc|, |inlined| and |fp| are guaranteed to be up-to-date.
+    Call fallibleVMCall(void *ptr, jsbytecode *pc, DataLabelPtr *pinlined, int32 frameDepth) {
+        setupFallibleVMFrame(pc, pinlined, frameDepth);
+        Call call = wrapVMCall(ptr);
+
+        // Restore the frame pointer from the VM, in case it pushed/popped
+        // some frames or expanded any inline frames.
+        loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
+
+        return call;
     }
 
     Call wrapVMCall(void *ptr) {
         JS_ASSERT(!callIsAligned);
 
         // Every stub call has at most two arguments.
         setupABICall(Registers::FastCall, 2);
 
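A sketch of the updated stub-call idiom (the call site is hypothetical; |stub| and frameDepth are placeholders): fallible VM calls now also sync regs.inlined, handing back the patchable store so the compiler can later point it at the call's CallSite.

    // Hypothetical use inside the compiler: make a fallible stub call at |pc|,
    // capturing the DataLabelPtr that stored regs.inlined (written as NULL here;
    // finishThisUp patches it for stub calls made from within inline frames).
    DataLabelPtr inlinePatch;
    Call c = masm.fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stub),
                                 pc, &inlinePatch, frameDepth);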
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -78,29 +78,28 @@ using namespace js::mjit::ic;
 #if defined(JS_METHODJIT_SPEW)
 static const char *OpcodeNames[] = {
 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
 # include "jsopcode.tbl"
 # undef OPDEF
 };
 #endif
 
-mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp, const Vector<PatchableFrame> *frames)
+mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp,
+                         const Vector<PatchableFrame> *patchFrames, bool recompiling)
   : BaseCompiler(cx),
-    fp(fp),
-    frames(frames),
-    script(fp->script()),
+    fp(fp), outerScript(fp->script()),
+    patchFrames(patchFrames),
     scopeChain(&fp->scopeChain()),
     globalObj(scopeChain->getGlobal()),
-    fun(fp->isFunctionFrame() && !fp->isEvalFrame()
-        ? fp->fun()
-        : NULL),
     isConstructing(fp->isConstructing()),
-    analysis(NULL), jumpMap(NULL), savedTraps(NULL),
-    frame(cx, script, fun, *this, masm, stubcc, liveness),
+    savedTraps(NULL),
+    frame(cx, *this, masm, stubcc),
+    a(NULL), outer(NULL), script(NULL), PC(NULL),
+    inlineFrames(CompilerAllocPolicy(cx, *thisFromCtor())),
     branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
 #if defined JS_MONOIC
     getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
     setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
     callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
@@ -110,46 +109,42 @@ mjit::Compiler::Compiler(JSContext *cx, 
     setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
     callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
     callSites(CompilerAllocPolicy(cx, *thisFromCtor())), 
     doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
     loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
-    stubcc(cx, *thisFromCtor(), frame, script),
+    stubcc(cx, *thisFromCtor(), frame),
     debugMode_(cx->compartment->debugMode),
 #if defined JS_TRACER
     addTraceHints(cx->traceJitEnabled),
 #else
     addTraceHints(false),
 #endif
-    recompiling(false),
-    hasThisType(false),
-    thisType(JSVAL_TYPE_UNKNOWN),
-    argumentTypes(ContextAllocPolicy(cx)),
-    localTypes(ContextAllocPolicy(cx)),
+    recompiling(recompiling),
     oomInVector(false),
     applyTricks(NoApplyTricks)
 {
     /* :FIXME: bug 637856 disabling traceJit if inference is enabled */
     if (cx->typeInferenceEnabled())
         addTraceHints = false;
 }
 
 CompileStatus
 mjit::Compiler::compile()
 {
-    JS_ASSERT_IF(isConstructing, !script->jitCtor);
-    JS_ASSERT_IF(!isConstructing, !script->jitNormal);
-
-    JITScript **jit = isConstructing ? &script->jitCtor : &script->jitNormal;
+    JS_ASSERT_IF(isConstructing, !outerScript->jitCtor);
+    JS_ASSERT_IF(!isConstructing, !outerScript->jitNormal);
+
+    JITScript **jit = isConstructing ? &outerScript->jitCtor : &outerScript->jitNormal;
     void **checkAddr = isConstructing
-                       ? &script->jitArityCheckCtor
-                       : &script->jitArityCheckNormal;
+                       ? &outerScript->jitArityCheckCtor
+                       : &outerScript->jitArityCheckNormal;
 
     CompileStatus status = performCompilation(jit);
     if (status == Compile_Okay) {
         // Global scripts don't have an arity check entry. That's okay, we
         // just need a pointer so the VM can quickly decide whether this
         // method can be JIT'd or not. Global scripts cannot be IC'd, since
         // they have no functions, so there is no danger.
         *checkAddr = (*jit)->arityCheckEntry
@@ -157,107 +152,141 @@ mjit::Compiler::compile()
                      : (*jit)->invokeEntry;
     } else {
         *checkAddr = JS_UNJITTABLE_SCRIPT;
     }
 
     return status;
 }
 
+CompileStatus
+mjit::Compiler::pushActiveFrame(JSScript *script, uint32 argc)
+{
+    ActiveFrame *newa = js_new<ActiveFrame>(cx);
+    if (!newa)
+        return Compile_Error;
+
+    newa->parent = a;
+    if (a)
+        newa->parentPC = PC;
+    newa->script = script;
+
+    if (outer) {
+        newa->inlineIndex = uint32(inlineFrames.length());
+        inlineFrames.append(newa);
+    } else {
+        newa->inlineIndex = uint32(-1);
+        outer = newa;
+    }
+
+    newa->analysis.analyze(cx, script);
+
+    if (newa->analysis.OOM())
+        return Compile_Error;
+    if (newa->analysis.failed()) {
+        JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
+        return Compile_Abort;
+    }
+
+    if (!newa->liveness.analyze(cx, &newa->analysis, script)) {
+        js_ReportOutOfMemory(cx);
+        return Compile_Error;
+    }
+
+#ifdef JS_METHODJIT_SPEW
+    if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
+        for (unsigned i = 0; i < script->nfixed; i++) {
+            if (!newa->analysis.localEscapes(i)) {
+                JaegerSpew(JSpew_Regalloc, "Local %u:", i);
+                newa->liveness.dumpLocal(i);
+            }
+        }
+        for (unsigned i = 0; script->fun && i < script->fun->nargs; i++) {
+            if (!newa->analysis.argEscapes(i)) {
+                JaegerSpew(JSpew_Regalloc, "Argument %u:", i);
+                newa->liveness.dumpArg(i);
+            }
+        }
+    }
+#endif
+
+    if (a)
+        frame.getUnsyncedEntries(&newa->depth, &newa->unsyncedEntries);
+
+    if (!frame.pushActiveFrame(script, argc, &newa->analysis, &newa->liveness)) {
+        js_ReportOutOfMemory(cx);
+        return Compile_Error;
+    }
+
+    newa->jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
+    if (!newa->jumpMap) {
+        js_ReportOutOfMemory(cx);
+        return Compile_Error;
+    }
+#ifdef DEBUG
+    for (uint32 i = 0; i < script->length; i++)
+        newa->jumpMap[i] = Label();
+#endif
+
+    if (cx->typeInferenceEnabled()) {
+        CompileStatus status = prepareInferenceTypes(script, newa);
+        if (status != Compile_Okay)
+            return status;
+    }
+
+    this->script = script;
+    this->PC = script->code;
+    this->a = newa;
+
+    return Compile_Okay;
+}
+
+void
+mjit::Compiler::popActiveFrame()
+{
+    JS_ASSERT(a->parent);
+    this->PC = a->parentPC;
+    this->a = a->parent;
+    this->script = a->script;
+
+    frame.popActiveFrame();
+}
+
 #define CHECK_STATUS(expr)                                           \
     JS_BEGIN_MACRO                                                   \
         CompileStatus status_ = (expr);                              \
         if (status_ != Compile_Okay) {                               \
             if (oomInVector || masm.oom() || stubcc.masm.oom())      \
                 js_ReportOutOfMemory(cx);                            \
             if (!cx->compartment->types.checkPendingRecompiles(cx))  \
                 return Compile_Error;                                \
             return status_;                                          \
         }                                                            \
     JS_END_MACRO
 
 CompileStatus
 mjit::Compiler::performCompilation(JITScript **jitp)
 {
+    outerScript = fp->script();
+
     JaegerSpew(JSpew_Scripts, "compiling script (file \"%s\") (line \"%d\") (length \"%d\")\n",
-               script->filename, script->lineno, script->length);
-
-    analyze::Script analysis_;
-    analysis_.analyze(cx, script);
-
-    this->analysis = &analysis_;
-
-    if (analysis->OOM())
-        return Compile_Error;
-    if (analysis->failed()) {
-        JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
-        return Compile_Abort;
-    }
-    frame.setAnalysis(analysis);
-
-    if (!liveness.analyze(cx, analysis, script, fun)) {
-        js_ReportOutOfMemory(cx);
-        return Compile_Error;
-    }
-
-#ifdef JS_METHODJIT_SPEW
-    if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
-        for (unsigned i = 0; i < script->nfixed; i++) {
-            if (!analysis->localEscapes(i)) {
-                JaegerSpew(JSpew_Regalloc, "Local %u:", i);
-                liveness.dumpLocal(i);
-            }
-        }
-        for (unsigned i = 0; fun && i < fun->nargs; i++) {
-            if (!analysis->argEscapes(i)) {
-                JaegerSpew(JSpew_Regalloc, "Argument %u:", i);
-                liveness.dumpArg(i);
-            }
-        }
-    }
-#endif
-
-    if (!frame.init()) {
-        js_ReportOutOfMemory(cx);
-        return Compile_Error;
-    }
-
-    jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
-    if (!jumpMap) {
-        js_ReportOutOfMemory(cx);
-        return Compile_Error;
-    }
-#ifdef DEBUG
-    for (uint32 i = 0; i < script->length; i++)
-        jumpMap[i] = Label();
-#endif
+               outerScript->filename, outerScript->lineno, outerScript->length);
 
 #ifdef JS_METHODJIT_SPEW
     Profiler prof;
     prof.start();
 #endif
 
-    /* Initialize PC early so stub calls in the prologue can be fallible. */
-    PC = script->code;
-
 #ifdef JS_METHODJIT
-    script->debugMode = debugMode();
+    outerScript->debugMode = debugMode();
 #endif
 
     types::AutoEnterTypeInference enter(cx, true);
 
-    if (cx->typeInferenceEnabled()) {
-        CompileStatus status = prepareInferenceTypes();
-        if (status != Compile_Okay) {
-            if (!cx->compartment->types.checkPendingRecompiles(cx))
-                return Compile_Error;
-            return status;
-        }
-    }
-
+    CHECK_STATUS(pushActiveFrame(outerScript, 0));
     CHECK_STATUS(generatePrologue());
     CHECK_STATUS(generateMethod());
     CHECK_STATUS(generateEpilogue());
     CHECK_STATUS(finishThisUp(jitp));
 
 #ifdef JS_METHODJIT_SPEW
     prof.stop();
     JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
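A sketch of the pairing pushActiveFrame/popActiveFrame establish above (illustrative only; the scripted-inlining path that drives them for callees is added elsewhere in this patch): each ActiveFrame owns its script's analysis, liveness, jump map and inferred types, and the compiler's a/script/PC members always describe the innermost frame being compiled.

    // Hypothetical inlining step: enter the callee, emit its bytecode with the
    // shared masm/frame state, then restore the caller's script and PC.
    CompileStatus status = pushActiveFrame(calleeScript, argc);
    if (status != Compile_Okay)
        return status;
    /* ... generate code for the callee; script/PC now describe the callee ... */
    popActiveFrame();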
@@ -272,54 +301,79 @@ mjit::Compiler::performCompilation(JITSc
     if (!*jitp)
         return Compile_Abort;
 
     return Compile_Okay;
 }
 
 #undef CHECK_STATUS
 
+mjit::Compiler::ActiveFrame::ActiveFrame(JSContext *cx)
+    : parent(NULL), parentPC(NULL), script(NULL), inlineIndex(uint32(-1)),
+      jumpMap(NULL), hasThisType(false), argumentTypes(NULL), localTypes(NULL),
+      unsyncedEntries(cx),
+      needReturnValue(false), syncReturnValue(false),
+      returnValueDouble(false), returnSet(false), returnParentRegs(0), returnJumps(cx)
+{}
+
+mjit::Compiler::ActiveFrame::~ActiveFrame()
+{
+    js_free(jumpMap);
+    js_array_delete(argumentTypes);
+    js_array_delete(localTypes);
+}
+
 mjit::Compiler::~Compiler()
 {
-    cx->free(jumpMap);
+    if (outer)
+        js_delete<ActiveFrame>(outer);
+    for (unsigned i = 0; i < inlineFrames.length(); i++)
+        js_delete<ActiveFrame>(inlineFrames[i]);
+
     cx->free(savedTraps);
 }
 
 CompileStatus
-mjit::Compiler::prepareInferenceTypes()
+mjit::Compiler::prepareInferenceTypes(JSScript *script, ActiveFrame *a)
 {
     /* Analyze the script if we have not already done so. */
     if (!script->types) {
         /* Uncached eval scripts are not analyzed or compiled. */
         if (script->isUncachedEval)
             return Compile_Abort;
         types::AnalyzeScriptTypes(cx, script);
         if (!script->types)
             return Compile_Error;
     }
 
     /* Get the known types of arguments and locals. */
 
-    uint32 nargs = fun ? fun->nargs : 0;
-    if (!argumentTypes.reserve(nargs))
-        return Compile_Error;
-    for (unsigned i = 0; i < nargs; i++) {
-        JSValueType type = JSVAL_TYPE_UNKNOWN;
-        if (!analysis->argEscapes(i))
-            type = script->argTypes(i)->getKnownTypeTag(cx, script);
-        argumentTypes.append(type);
+    uint32 nargs = script->fun ? script->fun->nargs : 0;
+    if (nargs) {
+        a->argumentTypes = js_array_new<JSValueType>(nargs);
+        if (!a->argumentTypes)
+            return Compile_Error;
+        for (unsigned i = 0; i < nargs; i++) {
+            JSValueType type = JSVAL_TYPE_UNKNOWN;
+            if (!a->analysis.argEscapes(i))
+                type = script->argTypes(i)->getKnownTypeTag(cx, outerScript);
+            a->argumentTypes[i] = type;
+        }
     }
 
-    if (!localTypes.reserve(script->nfixed))
-        return Compile_Error;
-    for (unsigned i = 0; i < script->nfixed; i++) {
-        JSValueType type = JSVAL_TYPE_UNKNOWN;
-        if (!analysis->localHasUseBeforeDef(i))
-            type = script->localTypes(i)->getKnownTypeTag(cx, script);
-        localTypes.append(type);
+    if (script->nfixed) {
+        a->localTypes = js_array_new<JSValueType>(script->nfixed);
+        if (!a->localTypes)
+            return Compile_Error;
+        for (unsigned i = 0; i < script->nfixed; i++) {
+            JSValueType type = JSVAL_TYPE_UNKNOWN;
+            if (!a->analysis.localHasUseBeforeDef(i))
+                type = script->localTypes(i)->getKnownTypeTag(cx, outerScript);
+            a->localTypes[i] = type;
+        }
     }
 
     return Compile_Okay;
 }
 
 CompileStatus JS_NEVER_INLINE
 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
 {
@@ -329,110 +383,113 @@ mjit::TryCompile(JSContext *cx, JSStackF
     if (fp->script()->hasSharps)
         return Compile_Abort;
 #endif
 
     // Ensure that constructors have at least one slot.
     if (fp->isConstructing() && !fp->script()->nslots)
         fp->script()->nslots++;
 
-    // If there are static overflows in the function, try recompiling it a few
-    // times, using a limit to handle scripts with many static overflows.
-    CompileStatus status = Compile_Overflow;
-    for (unsigned i = 0; status == Compile_Overflow && i < 5; i++) {
-        Compiler cc(cx, fp, NULL);
+    // If there were recoverable compilation failures in the function from
+    // static overflow or bad inline callees, try recompiling a few times
+    // before giving up.
+    CompileStatus status = Compile_Retry;
+    for (unsigned i = 0; status == Compile_Retry && i < 5; i++) {
+        Compiler cc(cx, fp, NULL, false);
         status = cc.compile();
     }
 
     return status;
 }
 
 bool
 mjit::Compiler::loadOldTraps(const Vector<CallSite> &sites)
 {
-    recompiling = true;
-
-    savedTraps = (bool *)cx->calloc(sizeof(bool) * script->length);
+    savedTraps = (bool *)cx->calloc(sizeof(bool) * outerScript->length);
     if (!savedTraps)
         return false;
     
     for (size_t i = 0; i < sites.length(); i++) {
         const CallSite &site = sites[i];
-        if (site.isTrap())
+        if (site.isTrap()) {
+            JS_ASSERT(site.inlineIndex == uint32(-1) && site.pcOffset < outerScript->length);
             savedTraps[site.pcOffset] = true;
+        }
     }
 
     return true;
 }
 
 CompileStatus
 mjit::Compiler::generatePrologue()
 {
     invokeLabel = masm.label();
 
     /*
      * If there is no function, then this can only be called via JaegerShot(),
      * which expects an existing frame to be initialized like the interpreter.
      */
-    if (fun) {
+    if (script->fun) {
         Jump j = masm.jump();
 
         /*
          * Entry point #2: The caller has partially constructed a frame, and
          * either argc >= nargs or the arity check has corrected the frame.
          */
         invokeLabel = masm.label();
 
         Label fastPath = masm.label();
 
         /* Store this early on so slow paths can access it. */
-        masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
+        masm.storePtr(ImmPtr(script->fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
 
         {
             /*
              * Entry point #3: The caller has partially constructed a frame,
              * but argc might be != nargs, so an arity check might be called.
              *
              * This loops back to entry point #2.
              */
             arityLabel = stubcc.masm.label();
 
             Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
-                                                 Imm32(fun->nargs));
+                                                 Imm32(script->fun->nargs));
 
             if (JSParamReg_Argc != Registers::ArgReg1)
                 stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
 
             /* Slow path - call the arity check function. Returns new fp. */
-            stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
-            stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
+            stubcc.masm.storePtr(ImmPtr(script->fun),
+                                 Address(JSFrameReg, JSStackFrame::offsetOfExec()));
             OOL_STUBCALL(stubs::FixupArity);
             stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
             argMatch.linkTo(stubcc.masm.label(), &stubcc.masm);
 
             /* Type check the arguments as well. */
             if (cx->typeInferenceEnabled()) {
 #ifdef JS_MONOIC
                 this->argsCheckJump = stubcc.masm.jump();
                 this->argsCheckStub = stubcc.masm.label();
                 this->argsCheckJump.linkTo(this->argsCheckStub, &stubcc.masm);
 #endif
-                stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
+                stubcc.masm.storePtr(ImmPtr(script->fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
                 OOL_STUBCALL(stubs::CheckArgumentTypes);
 #ifdef JS_MONOIC
                 this->argsCheckFallthrough = stubcc.masm.label();
 #endif
             }
 
             stubcc.crossJump(stubcc.masm.jump(), fastPath);
         }
 
         /*
          * Guard that there is enough stack space. Note we include the size of
          * a second frame, to ensure we can create a frame from call sites.
+         * :FIXME: this check does not currently account for space used by inlined
+         * frames, nor do the checks made when pushing the frame from the interpreter.
          */
         masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
                     JSFrameReg,
                     Registers::ReturnReg);
         Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
                                          FrameAddress(offsetof(VMFrame, stackLimit)));
 
         /* If the stack check fails... */
@@ -443,78 +500,91 @@ mjit::Compiler::generatePrologue()
         }
 
         /*
          * Set locals to undefined, as in initCallFrameLatePrologue.
          * Skip locals which aren't closed and are known to be defined before used,
          * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
          */
         for (uint32 i = 0; i < script->nfixed; i++) {
-            if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
+            if (a->analysis.localHasUseBeforeDef(i) || addTraceHints) {
                 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
                 masm.storeValue(UndefinedValue(), local);
             }
         }
 
         /* Create the call object. */
-        if (fun->isHeavyweight()) {
+        if (script->fun->isHeavyweight()) {
             prepareStubCall(Uses(0));
             INLINE_STUBCALL(stubs::GetCallObject);
         }
 
         j.linkTo(masm.label(), &masm);
 
-        if (analysis->usesScopeChain() && !fun->isHeavyweight()) {
+        if (a->analysis.usesScopeChain() && !script->fun->isHeavyweight()) {
             /*
              * Load the scope chain into the frame if necessary.  The scope chain
              * is always set for global and eval frames, and will have been set by
              * GetCallObject for heavyweight function frames.
              */
             RegisterID t0 = Registers::ReturnReg;
             Jump hasScope = masm.branchTest32(Assembler::NonZero,
                                               FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
-            masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
+            masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(script->fun)), t0);
             masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
             masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
             hasScope.linkTo(masm.label(), &masm);
         }
     }
 
     if (isConstructing)
         constructThis();
 
     if (debugMode() || Probes::callTrackingActive(cx))
         INLINE_STUBCALL(stubs::EnterScript);
 
     /*
-     * Set initial types of locals with known type.  These will stay synced
-     * through the rest of the script.
+     * Set initial types of locals with known type. These will stay synced
+     * through the rest of the script, allowing us to avoid syncing the types
+     * of locals after writing their payloads. Notes:
+     *
+     * - We don't call generatePrologue and perform this syncing when inlining
+     *   frames; such locals are not assumed to be synced after being assigned.
+     *
+     * - If we are recompiling, the earlier compilation might not have known
+     *   the type of the local (its type set was empty, say), in which case
+     *   it wouldn't have stored that type tag. We need to walk the frames and
+     *   fix up the type tags accordingly.
      */
     for (uint32 i = 0; i < script->nfixed; i++) {
         JSValueType type = knownLocalType(i);
         if (type != JSVAL_TYPE_UNKNOWN) {
-            JS_ASSERT(!analysis->localHasUseBeforeDef(i));
+            JS_ASSERT(!a->analysis.localHasUseBeforeDef(i));
             /* Doubles will be written entirely when syncing. */
             if (type != JSVAL_TYPE_DOUBLE) {
                 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
                 masm.storeTypeTag(ImmType(type), local);
+                for (unsigned j = 0; patchFrames && j < patchFrames->length(); j++) {
+                    JSStackFrame *patchfp = (*patchFrames)[j].fp;
+                    patchfp->varSlot(i).boxNonDoubleFrom(type, (uint64*) &patchfp->varSlot(i));
+                }
             }
             frame.learnType(frame.getLocal(i), type, false);
         }
     }
 
     /*
      * Learn types of arguments with known type, and make sure double arguments
      * are actually doubles and not ints.
      */
-    for (uint32 i = 0; fun && i < fun->nargs; i++) {
+    for (uint32 i = 0; script->fun && i < script->fun->nargs; i++) {
         JSValueType type = knownArgumentType(i);
         if (type != JSVAL_TYPE_UNKNOWN) {
             if (type == JSVAL_TYPE_DOUBLE) {
-                if (!analysis->argEscapes(i))
+                if (!a->analysis.argEscapes(i))
                     frame.ensureDouble(frame.getArg(i));
             } else {
                 frame.learnType(frame.getArg(i), type, false);
             }
         }
     }
 
     return Compile_Okay;
@@ -527,17 +597,17 @@ mjit::Compiler::generateEpilogue()
 }
 
 CompileStatus
 mjit::Compiler::finishThisUp(JITScript **jitp)
 {
     RETURN_IF_OOM(Compile_Error);
 
     for (size_t i = 0; i < branchPatches.length(); i++) {
-        Label label = labelOf(branchPatches[i].pc);
+        Label label = labelOf(branchPatches[i].pc, branchPatches[i].inlineIndex);
         branchPatches[i].jump.linkTo(label, &masm);
     }
 
 #ifdef JS_CPU_ARM
     masm.forceFlushConstantPool();
     stubcc.masm.forceFlushConstantPool();
 #endif
     JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %u, Slow code (stubcc) size = %u.\n", masm.size(), stubcc.size());
@@ -560,69 +630,82 @@ mjit::Compiler::finishThisUp(JITScript *
     masm.executableCopy(result);
     stubcc.masm.executableCopy(result + masm.size());
     
     JSC::LinkBuffer fullCode(result, totalSize);
     JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
 
     size_t nNmapLive = loopEntries.length();
     for (size_t i = 0; i < script->length; i++) {
-        analyze::Bytecode *opinfo = analysis->maybeCode(i);
-        if (opinfo && opinfo->safePoint && !liveness.getCode(i).loopBackedge)
+        analyze::Bytecode *opinfo = a->analysis.maybeCode(i);
+        if (opinfo && opinfo->safePoint && !a->liveness.getCode(i).loopBackedge)
             nNmapLive++;
     }
 
+    size_t nUnsyncedEntries = 0;
+    for (size_t i = 0; i < inlineFrames.length(); i++)
+        nUnsyncedEntries += inlineFrames[i]->unsyncedEntries.length();
+
     /* Please keep in sync with JITScript::scriptDataSize! */
     size_t totalBytes = sizeof(JITScript) +
                         sizeof(NativeMapEntry) * nNmapLive +
+                        sizeof(InlineFrame) * inlineFrames.length() +
+                        sizeof(CallSite) * callSites.length() +
 #if defined JS_MONOIC
                         sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
                         sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
                         sizeof(ic::CallICInfo) * callICs.length() +
                         sizeof(ic::EqualityICInfo) * equalityICs.length() +
                         sizeof(ic::TraceICInfo) * traceICs.length() +
 #endif
 #if defined JS_POLYIC
                         sizeof(ic::PICInfo) * pics.length() +
                         sizeof(ic::GetElementIC) * getElemICs.length() +
                         sizeof(ic::SetElementIC) * setElemICs.length() +
 #endif
-                        sizeof(CallSite) * callSites.length();
+                        sizeof(UnsyncedEntry) * nUnsyncedEntries;
 
     uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
     if (!cursor) {
         execPool->release();
         js_ReportOutOfMemory(cx);
         return Compile_Error;
     }
 
     JITScript *jit = new(cursor) JITScript;
     cursor += sizeof(JITScript);
 
+    JS_ASSERT(outerScript == script);
+
+    jit->script = script;
     jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
     jit->invokeEntry = result;
     jit->singleStepMode = script->singleStepMode;
-    if (fun) {
+    jit->rejoinPoints = recompiling;
+    if (script->fun) {
         jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
         jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
     }
 
     /* 
      * WARNING: mics(), callICs() et al depend on the ordering of these
      * variable-length sections.  See JITScript's declaration for details.
      */
 
+    /* ICs can only refer to bytecodes in the outermost script, not in inlined calls. */
+    Label *jumpMap = a->jumpMap;
+
     /* Build the pc -> ncode mapping. */
     NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
     jit->nNmapPairs = nNmapLive;
     cursor += sizeof(NativeMapEntry) * jit->nNmapPairs;
     size_t ix = 0;
     if (jit->nNmapPairs > 0) {
         for (size_t i = 0; i < script->length; i++) {
-            analyze::Bytecode *opinfo = analysis->maybeCode(i);
+            analyze::Bytecode *opinfo = a->analysis.maybeCode(i);
             if (opinfo && opinfo->safePoint) {
                 Label L = jumpMap[i];
                 JS_ASSERT(L.isValid());
                 jitNmap[ix].bcOff = i;
                 jitNmap[ix].ncode = (uint8 *)(result + masm.distanceOf(L));
                 ix++;
             }
         }
@@ -638,20 +721,62 @@ mjit::Compiler::finishThisUp(JITScript *
             }
             jitNmap[j].bcOff = entry.pcOffset;
             jitNmap[j].ncode = (uint8 *) stubCode.locationOf(entry.label).executableAddress();
             ix++;
         }
     }
     JS_ASSERT(ix == jit->nNmapPairs);
 
+    /* Build the table of inlined frames. */
+    InlineFrame *jitInlineFrames = (InlineFrame *)cursor;
+    jit->nInlineFrames = inlineFrames.length();
+    cursor += sizeof(InlineFrame) * jit->nInlineFrames;
+    for (size_t i = 0; i < jit->nInlineFrames; i++) {
+        InlineFrame &to = jitInlineFrames[i];
+        ActiveFrame *from = inlineFrames[i];
+        if (from->parent != outer)
+            to.parent = &jitInlineFrames[from->parent->inlineIndex];
+        else
+            to.parent = NULL;
+        to.parentpc = from->parentPC;
+        to.fun = from->script->fun;
+        to.depth = from->depth;
+    }
+
+    /* Build the table of call sites. */
+    CallSite *jitCallSites = (CallSite *)cursor;
+    jit->nCallSites = callSites.length();
+    cursor += sizeof(CallSite) * jit->nCallSites;
+    for (size_t i = 0; i < jit->nCallSites; i++) {
+        CallSite &to = jitCallSites[i];
+        InternalCallSite &from = callSites[i];
+
+        /* Patch stores of f.regs.inlined for stubs called from within inline frames. */
+        if (from.id != CallSite::NCODE_RETURN_ID &&
+            from.id != CallSite::MAGIC_TRAP_ID &&
+            from.inlineIndex != uint32(-1)) {
+            if (from.ool)
+                stubCode.patch(from.inlinePatch, &to);
+            else
+                fullCode.patch(from.inlinePatch, &to);
+        }
+
+        JSScript *script =
+            (from.inlineIndex == uint32(-1)) ? outerScript : inlineFrames[from.inlineIndex]->script;
+        uint32 codeOffset = from.ool
+                            ? masm.size() + from.returnOffset
+                            : from.returnOffset;
+        to.initialize(codeOffset, from.inlineIndex, from.inlinepc - script->code, from.id);
+    }
+
 #if defined JS_MONOIC
     JS_INIT_CLIST(&jit->callers);
 
-    if (fun && cx->typeInferenceEnabled()) {
+    if (script->fun && cx->typeInferenceEnabled()) {
         jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
         jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
         jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
         jit->argsCheckPool = NULL;
     }
 
     ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
     jit->nGetGlobalNames = getGlobalNames.length();
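A hedged helper sketch for reading the new call-site table (hypothetical; the member names follow CallSite::initialize above, and an inlineFrames() accessor is assumed by analogy with the mics()/callICs() accessors the WARNING comment mentions): inlineIndex == uint32(-1) marks a site in the outermost script.

    // Hypothetical reader: map a call site back to the script it belongs to.
    static JSScript *
    SiteScript(JITScript *jit, const CallSite &site)
    {
        if (site.inlineIndex == uint32(-1))
            return jit->script;
        return jit->inlineFrames()[site.inlineIndex].fun->script();
    }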
@@ -749,17 +874,17 @@ mjit::Compiler::finishThisUp(JITScript *
         JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);
 
         /* Compute the join point offset for continuing on the hot path. */
         offset = stubCode.locationOf(callICs[i].hotPathLabel) -
                  stubCode.locationOf(callICs[i].funGuard);
         jitCallICs[i].hotPathOffset = offset;
         JS_ASSERT(jitCallICs[i].hotPathOffset == offset);
 
-        jitCallICs[i].pc = callICs[i].pc;
+        jitCallICs[i].call = &jitCallSites[callICs[i].callIndex];
         jitCallICs[i].frameSize = callICs[i].frameSize;
         jitCallICs[i].funObjReg = callICs[i].funObjReg;
         jitCallICs[i].funPtrReg = callICs[i].funPtrReg;
         stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
         stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
     }
 
     ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
@@ -928,16 +1053,28 @@ mjit::Compiler::finishThisUp(JITScript *
                 JS_ASSERT(distance <= 0);
                 jitPics[i].u.get.typeCheckOffset = distance;
             }
         }
         stubCode.patch(pics[i].paramAddr, &jitPics[i]);
     }
 #endif
 
+    for (size_t i = 0; i < jit->nInlineFrames; i++) {
+        InlineFrame &to = jitInlineFrames[i];
+        ActiveFrame *from = inlineFrames[i];
+        to.nUnsyncedEntries = from->unsyncedEntries.length();
+        to.unsyncedEntries = (UnsyncedEntry *) cursor;
+        cursor += sizeof(UnsyncedEntry) * to.nUnsyncedEntries;
+        for (size_t j = 0; j < to.nUnsyncedEntries; j++)
+            to.unsyncedEntries[j] = from->unsyncedEntries[j];
+    }
+
+    JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);
+
     /* Link fast and slow paths together. */
     stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
 
     size_t doubleOffset = masm.size() + stubcc.size();
     double *inlineDoubles = (double *) (result + doubleOffset);
     double *oolDoubles = (double*) (result + doubleOffset +
                                     masm.numDoubles() * sizeof(double));
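
The double pools above are carved out of the same executable allocation, immediately after the in-line and out-of-line code sections. A sketch of the offset arithmetic with made-up sizes:

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    int main() {
        // Hypothetical sizes: fast-path code, out-of-line code, double pools.
        const size_t inlineCode = 4096, oolCode = 1024;
        const size_t nInline = 4, nOol = 2;
        size_t total = inlineCode + oolCode + (nInline + nOol) * sizeof(double);

        uint8_t *result = (uint8_t *) malloc(total);

        // Both double pools live immediately after the two code sections,
        // in-line constants first, then the out-of-line ones.
        size_t doubleOffset = inlineCode + oolCode;
        double *inlineDoubles = (double *) (result + doubleOffset);
        double *oolDoubles = inlineDoubles + nInline;

        assert((uint8_t *) (oolDoubles + nOol) == result + total);
        free(result);
        return 0;
    }
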
 
@@ -958,31 +1095,16 @@ mjit::Compiler::finishThisUp(JITScript *
 
     /* Patch all outgoing calls. */
     masm.finalize(fullCode, inlineDoubles);
     stubcc.masm.finalize(stubCode, oolDoubles);
 
     JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
     JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
 
-    /* Build the table of call sites. */
-    CallSite *jitCallSites = (CallSite *)cursor;
-    jit->nCallSites = callSites.length();
-    cursor += sizeof(CallSite) * jit->nCallSites;
-    for (size_t i = 0; i < jit->nCallSites; i++) {
-        CallSite &to = jitCallSites[i];
-        InternalCallSite &from = callSites[i];
-        uint32 codeOffset = from.ool
-                            ? masm.size() + from.returnOffset
-                            : from.returnOffset;
-        to.initialize(codeOffset, from.pc - script->code, from.id);
-    }
-
-    JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);
-
     *jitp = jit;
 
     /* We tolerate a race in the stats. */
     cx->runtime->mjitMemoryUsed += totalSize + totalBytes;
 
     return Compile_Okay;
 }
 
@@ -1055,17 +1177,17 @@ mjit::Compiler::generateMethod()
             if (!trapper.untrap(PC))
                 return Compile_Error;
             op = JSOp(*PC);
             trap |= stubs::JSTRAP_TRAP;
         }
         if (script->singleStepMode && scanner.firstOpInLine(PC - script->code))
             trap |= stubs::JSTRAP_SINGLESTEP;
 
-        analyze::Bytecode *opinfo = analysis->maybeCode(PC);
+        analyze::Bytecode *opinfo = a->analysis.maybeCode(PC);
 
         if (!opinfo) {
             if (op == JSOP_STOP)
                 break;
             if (js_CodeSpec[op].length != -1)
                 PC += js_CodeSpec[op].length;
             else
                 PC += js_GetVariableBytecodeLength(PC);
@@ -1079,17 +1201,17 @@ mjit::Compiler::generateMethod()
                 fixDoubleTypes(Uses(0));
 
                 /*
                  * Watch for fallthrough to the head of a 'do while' loop.
                 * We don't know what register state we will be using at the head
                 * of the loop, so sync, branch, and fix it up after the loop
                 * has been processed.
                  */
-                if (liveness.getCode(PC).loopBackedge) {
+                if (a->liveness.getCode(PC).loopBackedge) {
                     frame.syncAndForgetEverything();
                     Jump j = masm.jump();
                     if (!frame.pushLoop(PC, j, PC))
                         return Compile_Error;
                 } else {
                     if (!frame.syncForBranch(PC, Uses(0)))
                         return Compile_Error;
                     JS_ASSERT(frame.consistentRegisters(PC));
@@ -1097,29 +1219,29 @@ mjit::Compiler::generateMethod()
             }
 
             if (!frame.discardForJoin(PC, opinfo->stackDepth))
                 return Compile_Error;
             restoreAnalysisTypes(opinfo->stackDepth);
             fallthrough = true;
         }
 
-        jumpMap[uint32(PC - script->code)] = masm.label();
+        a->jumpMap[uint32(PC - script->code)] = masm.label();
 
         SPEW_OPCODE();
         JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
 
         if (trap) {
             prepareStubCall(Uses(0));
             masm.move(Imm32(trap), Registers::ArgReg1);
-            Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap));
-            InternalCallSite site(masm.callReturnOffset(cl), PC,
+            Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap), NULL);
+            InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
                                   CallSite::MAGIC_TRAP_ID, true, false);
             addCallSite(site);
-        } else if (savedTraps && savedTraps[PC - script->code]) {
+        } else if (!a->parent && savedTraps && savedTraps[PC - script->code]) {
             // Normally when we patch return addresses, we have generated the
             // same exact code at that site. For example, patching a stub call's
             // return address will resume at the same stub call.
             //
             // In the case we're handling here, we could potentially be
             // recompiling to remove a trap, and therefore we won't generate
             // a call to the trap. However, we could be re-entering from that
             // trap. The callsite will be missing, and fixing the stack will
@@ -1141,36 +1263,37 @@ mjit::Compiler::generateMethod()
             // amount of memory.
             uint32 offset = stubcc.masm.distanceOf(stubcc.masm.label());
             if (Assembler::ReturnStackAdjustment) {
                 stubcc.masm.addPtr(Imm32(Assembler::ReturnStackAdjustment),
                                    Assembler::stackPointerRegister);
             }
             stubcc.crossJump(stubcc.masm.jump(), masm.label());
 
-            InternalCallSite site(offset, PC, CallSite::MAGIC_TRAP_ID, false, true);
+            InternalCallSite site(offset, a->inlineIndex, PC,
+                                  CallSite::MAGIC_TRAP_ID, false, true);
             addCallSite(site);
         }
 
         /*
          * If we are recompiling, check for any frames on the stack at this
          * opcode, and patch the types of any arg/local/stack slots which are
          * integers but need to be doubles. Any value assumed to be a double in
          * this compilation may instead be an int in the earlier compilation
          * and stack frames. Other transitions between known types are not
          * possible --- type sets can only grow, and if new non-double type
         * tags become possible, we will treat that slot as unknown in this
          * compilation.
          */
-        for (unsigned i = 0; frames && i < frames->length(); i++) {
-            if ((*frames)[i].pc != PC)
+        for (unsigned i = 0; patchFrames && i < patchFrames->length(); i++) {
+            if ((*patchFrames)[i].pc != PC)
                 continue;
-            JSStackFrame *patchfp = (*frames)[i].fp;
-
-            for (unsigned j = 0; fun && j < fun->nargs; j++) {
+            JSStackFrame *patchfp = (*patchFrames)[i].fp;
+
+            for (unsigned j = 0; script->fun && j < script->fun->nargs; j++) {
                 FrameEntry *fe = frame.getArg(j);
                 if (fe->isType(JSVAL_TYPE_DOUBLE))
                     FixDouble(patchfp->formalArg(j));
             }
 
             for (unsigned j = 0; j < script->nfixed; j++) {
                 FrameEntry *fe = frame.getLocal(j);
                 if (fe->isType(JSVAL_TYPE_DOUBLE))
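
The comment above explains why this fixup is one-directional: a recompile only ever widens ints to doubles, never the reverse, because type sets only grow. A toy sketch of such an in-place widening over a simplified tagged value (the real code walks args, locals and stack slots through the FrameState):

    #include <cassert>
    #include <cstdint>

    enum Tag { TAG_INT32, TAG_DOUBLE };

    struct Value {
        Tag tag;
        union { int32_t i; double d; } u;
    };

    // Widen an int slot in place; doubles stay as they are. The reverse
    // direction never happens because type sets only grow.
    void fixDouble(Value &v) {
        if (v.tag == TAG_INT32) {
            v.u.d = double(v.u.i);
            v.tag = TAG_DOUBLE;
        }
    }

    int main() {
        Value v = { TAG_INT32, { 7 } };
        fixDouble(v);
        assert(v.tag == TAG_DOUBLE && v.u.d == 7.0);
        return 0;
    }
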
@@ -1203,16 +1326,19 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_SETRVAL)
           {
             RegisterID reg = frame.allocReg();
             masm.load32(FrameFlagsAddress(), reg);
             masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
             masm.store32(reg, FrameFlagsAddress());
             frame.freeReg(reg);
 
+            /* Scripts which write to the frame's return slot aren't inlined. */
+            JS_ASSERT(a == outer);
+
             FrameEntry *fe = frame.peek(-1);
             frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
             frame.pop();
           }
           END_CASE(JSOP_POPV)
 
           BEGIN_CASE(JSOP_RETURN)
             emitReturn(frame.peek(-1));
@@ -1240,17 +1366,17 @@ mjit::Compiler::generateMethod()
                 }
             }
 
             /*
              * Watch for gotos which are entering a 'for' or 'while' loop. These jump
              * to the loop condition test and are immediately followed by the head of the loop.
              */
             jsbytecode *next = PC + JSOP_GOTO_LENGTH;
-            if (analysis->maybeCode(next) && liveness.getCode(next).loopBackedge) {
+            if (a->analysis.maybeCode(next) && a->liveness.getCode(next).loopBackedge) {
                 frame.syncAndForgetEverything();
                 Jump j = masm.jump();
                 if (!frame.pushLoop(next, j, target))
                     return Compile_Error;
             } else {
                 if (!frame.syncForBranch(target, Uses(0)))
                     return Compile_Error;
                 Jump j = masm.jump();
@@ -1320,17 +1446,17 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_GT)
           BEGIN_CASE(JSOP_GE)
           BEGIN_CASE(JSOP_EQ)
           BEGIN_CASE(JSOP_NE)
           {
             /* Detect fusions. */
             jsbytecode *next = &PC[JSOP_GE_LENGTH];
             JSOp fused = JSOp(*next);
-            if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
+            if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || a->analysis.jumpTarget(next))
                 fused = JSOP_NOP;
 
             /* Get jump target, if any. */
             jsbytecode *target = NULL;
             if (fused != JSOP_NOP) {
                 target = next + GET_JUMP_OFFSET(next);
                 fixDoubleTypes(Uses(2));
             }
@@ -1432,37 +1558,37 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_RSH)
 
           BEGIN_CASE(JSOP_URSH)
             jsop_bitop(op);
           END_CASE(JSOP_URSH)
 
           BEGIN_CASE(JSOP_ADD)
             if (!jsop_binary(op, stubs::Add, knownPushedType(0), pushedTypeSet(0)))
-                return Compile_Overflow;
+                return Compile_Retry;
           END_CASE(JSOP_ADD)
 
           BEGIN_CASE(JSOP_SUB)
             if (!jsop_binary(op, stubs::Sub, knownPushedType(0), pushedTypeSet(0)))
-                return Compile_Overflow;
+                return Compile_Retry;
           END_CASE(JSOP_SUB)
 
           BEGIN_CASE(JSOP_MUL)
             if (!jsop_binary(op, stubs::Mul, knownPushedType(0), pushedTypeSet(0)))
-                return Compile_Overflow;
+                return Compile_Retry;
           END_CASE(JSOP_MUL)
 
           BEGIN_CASE(JSOP_DIV)
             if (!jsop_binary(op, stubs::Div, knownPushedType(0), pushedTypeSet(0)))
-                return Compile_Overflow;
+                return Compile_Retry;
           END_CASE(JSOP_DIV)
 
           BEGIN_CASE(JSOP_MOD)
             if (!jsop_mod())
-                return Compile_Overflow;
+                return Compile_Retry;
           END_CASE(JSOP_MOD)
 
           BEGIN_CASE(JSOP_NOT)
             jsop_not();
           END_CASE(JSOP_NOT)
 
           BEGIN_CASE(JSOP_BITNOT)
           {
@@ -1487,17 +1613,17 @@ mjit::Compiler::generateMethod()
                 ValueToNumber(cx, top->getValue(), &d);
                 d = -d;
                 Value v = NumberValue(d);
 
                 /* Watch for overflow in constant propagation. */
                 types::TypeSet *pushed = pushedTypeSet(0);
                 if (!v.isInt32() && pushed && !pushed->hasType(types::TYPE_DOUBLE)) {
                     script->typeMonitorResult(cx, PC, types::TYPE_DOUBLE);
-                    return Compile_Overflow;
+                    return Compile_Retry;
                 }
 
                 frame.pop();
                 frame.push(v);
             } else {
                 jsop_neg();
             }
           }
@@ -1672,17 +1798,17 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_GETELEM)
             if (!jsop_getelem(false))
                 return Compile_Error;
           END_CASE(JSOP_GETELEM)
 
           BEGIN_CASE(JSOP_SETELEM)
           {
             jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
-            bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
+            bool pop = (JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next));
             if (!jsop_setelem(pop))
                 return Compile_Error;
           }
           END_CASE(JSOP_SETELEM);
 
           BEGIN_CASE(JSOP_CALLNAME)
             prepareStubCall(Uses(0));
             masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
@@ -1703,17 +1829,24 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_FUNAPPLY)
           BEGIN_CASE(JSOP_FUNCALL)
           {
             bool done = false;
             if (op == JSOP_CALL) {
                 CompileStatus status = inlineNativeFunction(GET_ARGC(PC), false);
                 if (status == Compile_Okay)
                     done = true;
-                else if (status != Compile_Abort)
+                else if (status != Compile_InlineAbort)
+                    return status;
+            }
+            if (!done) {
+                CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), false);
+                if (status == Compile_Okay)
+                    done = true;
+                else if (status != Compile_InlineAbort)
                     return status;
             }
             if (!done) {
                 JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
                 inlineCallHelper(GET_ARGC(PC), false);
                 JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
             }
           }
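
The JSOP_CALL case now tries three strategies in order: inline a native, inline a scripted callee, then emit the generic call path; any status other than Compile_InlineAbort from the first two aborts compilation of the op. A sketch of that cascade with stand-in statuses:

    #include <cstdio>

    enum Status { Okay, InlineAbort, Error };

    // Stand-ins for inlineNativeFunction / inlineScriptedFunction: each either
    // succeeds, declines (InlineAbort), or reports a hard failure.
    Status tryNative()   { return InlineAbort; }
    Status tryScripted() { return Okay; }

    Status compileCall() {
        bool done = false;
        Status s = tryNative();
        if (s == Okay)
            done = true;
        else if (s != InlineAbort)
            return s;             // hard failure propagates immediately
        if (!done) {
            s = tryScripted();
            if (s == Okay)
                done = true;
            else if (s != InlineAbort)
                return s;
        }
        if (!done)
            printf("generic scripted call path\n");
        return Okay;
    }

    int main() { return compileCall() == Okay ? 0 : 1; }
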
@@ -1874,17 +2007,17 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_BINDGNAME)
             jsop_bindgname();
           END_CASE(JSOP_BINDGNAME)
 
           BEGIN_CASE(JSOP_SETARG)
           {
             uint32 arg = GET_SLOTNO(PC);
             jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
-            bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
+            bool pop = JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next);
             frame.storeArg(arg, knownArgumentType(arg), pop);
             if (pop) {
                 frame.pop();
                 PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
                 break;
             }
           }
           END_CASE(JSOP_SETARG)
@@ -1895,17 +2028,17 @@ mjit::Compiler::generateMethod()
             frame.pushLocal(slot, knownPushedType(0));
           }
           END_CASE(JSOP_GETLOCAL)
 
           BEGIN_CASE(JSOP_SETLOCAL)
           {
             uint32 slot = GET_SLOTNO(PC);
             jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
-            bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
+            bool pop = JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next);
             frame.storeLocal(slot, knownLocalType(slot), pop, true);
             if (pop) {
                 frame.pop();
                 PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
                 break;
             }
           }
           END_CASE(JSOP_SETLOCAL)
@@ -1957,39 +2090,39 @@ mjit::Compiler::generateMethod()
 
           BEGIN_CASE(JSOP_INCARG)
           BEGIN_CASE(JSOP_DECARG)
           BEGIN_CASE(JSOP_ARGINC)
           BEGIN_CASE(JSOP_ARGDEC)
           {
             jsbytecode *next = &PC[JSOP_ARGINC_LENGTH];
             bool popped = false;
-            if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
+            if (JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next))
                 popped = true;
             if (!jsop_arginc(op, GET_SLOTNO(PC), popped))
-                return Compile_Overflow;
+                return Compile_Retry;
             PC += JSOP_ARGINC_LENGTH;
             if (popped)
                 PC += JSOP_POP_LENGTH;
             break;
           }
           END_CASE(JSOP_ARGDEC)
 
           BEGIN_CASE(JSOP_INCLOCAL)
           BEGIN_CASE(JSOP_DECLOCAL)
           BEGIN_CASE(JSOP_LOCALINC)
           BEGIN_CASE(JSOP_LOCALDEC)
           {
             jsbytecode *next = &PC[JSOP_LOCALINC_LENGTH];
             bool popped = false;
-            if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
+            if (JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next))
                 popped = true;
             /* These manually advance the PC. */
             if (!jsop_localinc(op, GET_SLOTNO(PC), popped))
-                return Compile_Overflow;
+                return Compile_Retry;
             PC += JSOP_LOCALINC_LENGTH;
             if (popped)
                 PC += JSOP_POP_LENGTH;
             break;
           }
           END_CASE(JSOP_LOCALDEC)
 
           BEGIN_CASE(JSOP_FORNAME)
@@ -2087,17 +2220,17 @@ mjit::Compiler::generateMethod()
             /* No-op for the decompiler. */
           END_CASE(JSOP_CONDSWITCH)
 
           BEGIN_CASE(JSOP_DEFFUN)
           {
             uint32 index = fullAtomIndex(PC);
             JSFunction *innerFun = script->getFunction(index);
 
-            if (fun && script->bindings.hasBinding(cx, innerFun->atom))
+            if (script->fun && script->bindings.hasBinding(cx, innerFun->atom))
                 frame.syncAndForgetEverything();
 
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(innerFun), Registers::ArgReg1);
             INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun));
           }
           END_CASE(JSOP_DEFFUN)
 
@@ -2113,17 +2246,17 @@ mjit::Compiler::generateMethod()
           }
           END_CASE(JSOP_DEFVAR)
 
           BEGIN_CASE(JSOP_SETCONST)
           {
             uint32 index = fullAtomIndex(PC);
             JSAtom *atom = script->getAtom(index);
 
-            if (fun && script->bindings.hasBinding(cx, atom))
+            if (script->fun && script->bindings.hasBinding(cx, atom))
                 frame.syncAndForgetEverything();
 
             prepareStubCall(Uses(1));
             masm.move(ImmPtr(atom), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::SetConst);
           }
           END_CASE(JSOP_SETCONST)
 
@@ -2333,17 +2466,17 @@ mjit::Compiler::generateMethod()
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_LAMBDA_FC)
 
           BEGIN_CASE(JSOP_TRACE)
           BEGIN_CASE(JSOP_NOTRACE)
           {
-            if (analysis->jumpTarget(PC))
+            if (a->analysis.jumpTarget(PC))
                 interruptCheckHelper();
           }
           END_CASE(JSOP_TRACE)
 
           BEGIN_CASE(JSOP_DEBUGGER)
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(PC), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::Debugger);
@@ -2397,21 +2530,24 @@ mjit::Compiler::generateMethod()
   done:
     return Compile_Okay;
 }
 
 #undef END_CASE
 #undef BEGIN_CASE
 
 JSC::MacroAssembler::Label
-mjit::Compiler::labelOf(jsbytecode *pc)
+mjit::Compiler::labelOf(jsbytecode *pc, uint32 inlineIndex)
 {
-    uint32 offs = uint32(pc - script->code);
-    JS_ASSERT(jumpMap[offs].isValid());
-    return jumpMap[offs];
+    ActiveFrame *a = (inlineIndex == uint32(-1)) ? outer : inlineFrames[inlineIndex];
+    JS_ASSERT(uint32(pc - a->script->code) < a->script->length);
+
+    uint32 offs = uint32(pc - a->script->code);
+    JS_ASSERT(a->jumpMap[offs].isValid());
+    return a->jumpMap[offs];
 }
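
labelOf now resolves a pc against the jump map of the frame named by inlineIndex, with uint32(-1) selecting the outermost script. A self-contained sketch of the two-level lookup, with invented container types:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct Script   { const uint8_t *code; uint32_t length; };
    struct FrameMap { Script script; std::vector<int> jumpMap; };

    // Pick the outer map for inlineIndex == uint32_t(-1), else the inline
    // frame's map, then index it by the pc's offset into that script.
    int labelOf(const FrameMap &outer, const std::vector<FrameMap> &inlines,
                const uint8_t *pc, uint32_t inlineIndex) {
        const FrameMap &m = (inlineIndex == uint32_t(-1))
                            ? outer : inlines[inlineIndex];
        uint32_t offs = uint32_t(pc - m.script.code);
        assert(offs < m.script.length);
        return m.jumpMap[offs];
    }

    int main() {
        static const uint8_t code[4] = {};
        FrameMap outer = { { code, 4 }, { 10, 11, 12, 13 } };
        assert(labelOf(outer, {}, code + 2, uint32_t(-1)) == 12);
        return 0;
    }
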
 
 uint32
 mjit::Compiler::fullAtomIndex(jsbytecode *pc)
 {
     return GET_SLOTNO(pc);
 
     /* If we ever enable INDEXBASE garbage, use this below. */
@@ -2421,53 +2557,26 @@ mjit::Compiler::fullAtomIndex(jsbytecode
 }
 
 bool
 mjit::Compiler::knownJump(jsbytecode *pc)
 {
     return pc < PC;
 }
 
-void *
-mjit::Compiler::findCallSite(const CallSite &callSite)
-{
-    JS_ASSERT(callSite.pcOffset < script->length);
-
-    JITScript *jit = script->getJIT(fp->isConstructing());
-    uint8* ilPath = (uint8 *)jit->code.m_code.executableAddress();
-    uint8* oolPath = ilPath + masm.size();
-
-    for (uint32 i = 0; i < callSites.length(); i++) {
-        InternalCallSite &cs = callSites[i];
-        if (cs.pc == script->code + callSite.pcOffset && cs.id == callSite.id) {
-#ifdef DEBUG
-            for (i++; i < callSites.length(); i++)
-                JS_ASSERT(cs.pc != callSites[i].pc || cs.id != callSites[i].id);
-#endif
-            if (cs.ool)
-                return oolPath + cs.returnOffset;
-            return ilPath + cs.returnOffset;
-        }
-    }
-
-    /* We have no idea where to patch up to. */
-    JS_NOT_REACHED("Call site vanished.");
-    return NULL;
-}
-
 bool
 mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
 {
     JS_ASSERT(pc >= script->code && uint32(pc - script->code) < script->length);
 
     if (pc < PC) {
-        j.linkTo(jumpMap[uint32(pc - script->code)], &masm);
+        j.linkTo(a->jumpMap[uint32(pc - script->code)], &masm);
         return true;
     }
-    return branchPatches.append(BranchPatch(j, pc));
+    return branchPatches.append(BranchPatch(j, pc, a->inlineIndex));
 }
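
jumpInScript links backward jumps immediately and queues forward jumps as BranchPatch records, which now also remember the inline frame index so the later fixup can consult the right jump map. A sketch of the record-and-resolve idea:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct BranchPatch {
        uint32_t jumpId;      // stand-in for the assembler Jump
        uint32_t pcOffset;    // target bytecode offset
        uint32_t inlineIndex; // which (possibly inlined) script it targets
    };

    int main() {
        std::vector<BranchPatch> branchPatches;
        // A forward branch: the target label doesn't exist yet, so queue it.
        branchPatches.push_back(BranchPatch{ 1, 42, uint32_t(-1) });
        // Later, once labels exist, each patch is resolved via a labelOf-style
        // lookup against the frame named by inlineIndex.
        for (const BranchPatch &bp : branchPatches)
            printf("patch jump %u -> pc %u (frame %u)\n",
                   bp.jumpId, bp.pcOffset, bp.inlineIndex);
        return 0;
    }
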
 
 void
 mjit::Compiler::jsop_getglobal(uint32 index)
 {
     JS_ASSERT(globalObj);
     uint32 slot = script->getGlobalSlot(index);
 
@@ -2538,17 +2647,17 @@ mjit::Compiler::loadReturnValue(Assemble
             }
         } else {
             frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
         }
     } else {
          // Load a return value from POPV or SETRVAL into the return registers,
          // otherwise return undefined.
         masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
-        if (analysis->usesReturnValue()) {
+        if (a->analysis.usesReturnValue()) {
             Jump rvalClear = masm->branchTest32(Assembler::Zero,
                                                FrameFlagsAddress(),
                                                Imm32(JSFRAME_HAS_RVAL));
             Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
             masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
             rvalClear.linkTo(masm->label(), masm);
         }
     }
@@ -2559,22 +2668,22 @@ mjit::Compiler::loadReturnValue(Assemble
 // loaded out of the frame. Otherwise, the explicitly returned object is kept.
 //
 void
 mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
 {
     JS_ASSERT(isConstructing);
 
     bool ool = (masm != &this->masm);
-    Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));
+    Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(script->fun));
 
     // We can just load |thisv| if either of the following is true:
     //  (1) There is no explicit return value, AND fp->rval is not used.
     //  (2) There is an explicit return value, and it's known to be primitive.
-    if ((!fe && !analysis->usesReturnValue()) ||
+    if ((!fe && !a->analysis.usesReturnValue()) ||
         (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
     {
         if (ool)
             masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
         else
             frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
         return;
     }
@@ -2601,39 +2710,129 @@ mjit::Compiler::emitReturnValue(Assemble
 {
     if (isConstructing)
         fixPrimitiveReturn(masm, fe);
     else
         loadReturnValue(masm, fe);
 }
 
 void
+mjit::Compiler::emitInlineReturnValue(FrameEntry *fe)
+{
+    JS_ASSERT(!isConstructing && a->needReturnValue);
+
+    if (a->syncReturnValue) {
+        /* The return value is needed but its type is unknown; store it to the caller's synced entry. */
+        Address address = frame.addressForInlineReturn();
+        if (fe)
+            frame.storeTo(fe, address);
+        else
+            masm.storeValue(UndefinedValue(), address);
+        return;
+    }
+
+    if (a->returnValueDouble) {
+        JS_ASSERT(fe);
+        frame.ensureDouble(fe);
+        Registers mask(a->returnSet
+                       ? Registers::maskReg(a->returnRegister)
+                       : Registers::AvailFPRegs);
+        FPRegisterID fpreg;
+        if (!fe->isConstant()) {
+            fpreg = frame.tempRegInMaskForData(fe, mask.freeMask).fpreg();
+        } else {
+            fpreg = frame.allocReg(mask.freeMask).fpreg();
+            masm.slowLoadConstantDouble(fe->getValue().toDouble(), fpreg);
+        }
+        JS_ASSERT_IF(a->returnSet, fpreg == a->returnRegister.fpreg());
+        a->returnRegister = fpreg;
+    } else {
+        Registers mask(a->returnSet
+                       ? Registers::maskReg(a->returnRegister)
+                       : Registers::AvailRegs);
+        RegisterID reg;
+        if (fe && !fe->isConstant()) {
+            reg = frame.tempRegInMaskForData(fe, mask.freeMask).reg();
+        } else {
+            reg = frame.allocReg(mask.freeMask).reg();
+            Value val = fe ? fe->getValue() : UndefinedValue();
+            masm.loadValuePayload(val, reg);
+        }
+        JS_ASSERT_IF(a->returnSet, reg == a->returnRegister.reg());
+        a->returnRegister = reg;
+    }
+}
+
+void
 mjit::Compiler::emitReturn(FrameEntry *fe)
 {
-    JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);
+    JS_ASSERT_IF(!script->fun, JSOp(*PC) == JSOP_STOP);
 
     /* Only the top of the stack can be returned. */
     JS_ASSERT_IF(fe, fe == frame.peek(-1));
 
     if (debugMode() || Probes::callTrackingActive(cx)) {
         prepareStubCall(Uses(0));
         INLINE_STUBCALL(stubs::LeaveScript);
     }
 
+    if (a != outer) {
+        /*
+         * Returning from an inlined script. The inlineability checks we make,
+         * and the recompilation triggered by args object construction, ensure
+         * that there can't be an arguments or call object here.
+         */
+
+        if (a->needReturnValue)
+            emitInlineReturnValue(fe);
+
+        /* Make sure the parent's entries still in registers are consistent across all return sites. */
+        if (!a->returnSet) {
+            a->returnParentRegs = frame.getParentRegs();
+            if (a->needReturnValue && !a->syncReturnValue &&
+                a->returnParentRegs.hasReg(a->returnRegister)) {
+                a->returnParentRegs.takeReg(a->returnRegister);
+            }
+        }
+
+        frame.discardLocalRegisters();
+        frame.syncParentRegistersInMask(masm,
+            frame.getParentRegs().freeMask & ~a->returnParentRegs.freeMask, true);
+        frame.restoreParentRegistersInMask(masm,
+            a->returnParentRegs.freeMask & ~frame.getParentRegs().freeMask, true);
+
+        a->returnSet = true;
+
+        /*
+         * Simple tests to see whether we are at the end of the script and will
+         * fall through after the script body finishes, in which case no jump is needed.
+         */
+        bool endOfScript =
+            (JSOp(*PC) == JSOP_STOP) ||
+            (JSOp(*PC) == JSOP_RETURN &&
+             (JSOp(*(PC + JSOP_RETURN_LENGTH)) == JSOP_STOP &&
+              !a->analysis.maybeCode(PC + JSOP_RETURN_LENGTH)));
+        if (!endOfScript)
+            a->returnJumps.append(masm.jump());
+
+        frame.discardFrame();
+        return;
+    }
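
For returns from an inlined script, the value is either left in a register that every return site agrees on (when its pushed type is known) or synced to the caller's stack slot (when it is not), and the jump to the common rejoin point is skipped when the return is the last reachable op. A condensed sketch of that decision:

    #include <cstdio>

    enum ValueKind { KNOWN_INT, KNOWN_DOUBLE, UNKNOWN };

    // Decide how an inlined return hands its value to the caller: a typed
    // register when the pushed type is known, otherwise a synced stack slot.
    void emitInlineReturn(ValueKind kind, bool needReturnValue) {
        if (!needReturnValue) {
            printf("no value needed, just discard the inline frame\n");
        } else if (kind == UNKNOWN) {
            printf("sync value to the caller's stack slot\n");
        } else {
            printf("leave value in a %s register agreed on by all returns\n",
                   kind == KNOWN_DOUBLE ? "floating-point" : "general");
        }
    }

    int main() {
        emitInlineReturn(KNOWN_DOUBLE, true);
        emitInlineReturn(UNKNOWN, true);
        return 0;
    }
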
+
     /*
      * If there's a function object, deal with the fact that it can escape.
      * Note that after we've placed the call object, all tracked state can
      * be thrown away. This will happen anyway because the next live opcode
      * (if any) must have an incoming edge.
      *
      * However, it's an optimization to throw it away early - the tracker
      * won't be spilled on further exits or join points.
      */
-    if (fun) {
-        if (fun->isHeavyweight()) {
+    if (script->fun) {
+        if (script->fun->isHeavyweight()) {
             /* There will always be a call object. */
             prepareStubCall(Uses(fe ? 1 : 0));
             INLINE_STUBCALL(stubs::PutActivationObjects);
         } else {
             /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
             Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                              Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
                                              Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
@@ -2662,20 +2861,20 @@ void
 mjit::Compiler::prepareStubCall(Uses uses)
 {
     JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
     frame.syncAndKill(Registers(Registers::TempAnyRegs), uses);
     JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
 }
 
 JSC::MacroAssembler::Call
-mjit::Compiler::emitStubCall(void *ptr)
+mjit::Compiler::emitStubCall(void *ptr, DataLabelPtr *pinline)
 {
     JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
-    Call cl = masm.fallibleVMCall(ptr, PC, frame.stackDepth() + script->nfixed);
+    Call cl = masm.fallibleVMCall(ptr, outerPC(), pinline, frame.totalDepth());
     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }
 
 void
 mjit::Compiler::interruptCheckHelper()
 {
     /*
@@ -2705,19 +2904,20 @@ mjit::Compiler::interruptCheckHelper()
 
     frame.sync(stubcc.masm, Uses(0));
     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
     OOL_STUBCALL(stubs::Interrupt);
     stubcc.rejoin(Changes(0));
 }
 
 void
-mjit::Compiler::addReturnSite(Label joinPoint)
+mjit::Compiler::addReturnSite(Label joinPoint, bool ool)
 {
-    InternalCallSite site(masm.distanceOf(joinPoint), PC, CallSite::NCODE_RETURN_ID, false, false);
+    InternalCallSite site(masm.distanceOf(joinPoint), a->inlineIndex, PC,
+                          CallSite::NCODE_RETURN_ID, false, ool);
     addCallSite(site);
 }
 
 void
 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
 {
     CallPatchInfo callPatch;
 
@@ -2732,17 +2932,16 @@ mjit::Compiler::emitUncachedCall(uint32 
     if (recompiling) {
         /* In case we recompiled this call to an uncached call. */
         OOL_STUBCALL(JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call));
         stubcc.crossJump(stubcc.masm.jump(), masm.label());
     }
 
     Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
 
-    masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
     callPatch.hasFastNcode = true;
     callPatch.fastNcodePatch =
         masm.storePtrWithPatch(ImmPtr(NULL),
                                Address(JSFrameReg, JSStackFrame::offsetOfncode()));
 
     masm.jump(r0);
     callPatch.joinPoint = masm.label();
     addReturnSite(callPatch.joinPoint);
@@ -2815,23 +3014,22 @@ mjit::Compiler::checkCallApplySpeculatio
             frameDepthAdjust = +1;
         } else {
             frameDepthAdjust = 0;
         }
 
         stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
         OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
-                           frame.localSlots() + frameDepthAdjust);
+                                 frame.totalDepth() + frameDepthAdjust);
         JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
 
         RegisterID r0 = Registers::ReturnReg;
         Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
 
-        stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
         Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
         uncachedCallPatch->hasSlowNcode = true;
         uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);
 
         stubcc.masm.jump(r0);
         notCompiled.linkTo(stubcc.masm.label(), &stubcc.masm);
 
         /*
@@ -2862,18 +3060,18 @@ mjit::Compiler::checkCallApplySpeculatio
 /* This predicate must be called before the current op mutates the FrameState. */
 bool
 mjit::Compiler::canUseApplyTricks()
 {
     JS_ASSERT(*PC == JSOP_ARGUMENTS);
     jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
     return *nextpc == JSOP_FUNAPPLY &&
            IsLowerableFunCallOrApply(nextpc) &&
-           !analysis->jumpTarget(nextpc) &&
-           !debugMode();
+           !a->analysis.jumpTarget(nextpc) &&
+           !debugMode() && !a->parent;
 }
 
 /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
 bool
 mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
 {
     /* Check for interrupts on function call */
     interruptCheckHelper();
@@ -2894,24 +3092,28 @@ mjit::Compiler::inlineCallHelper(uint32 
         frame.discardFe(origThis);
 
     /*
      * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
      * going to call js_fun_{call,apply}. Normally, this call would go through
      * js::Invoke to ultimately call 'this'. We can do much better by having
      * the callIC cache and call 'this' directly. However, if it turns out that
      * we are not actually calling js_fun_call, the callIC must act as normal.
+     *
+     * Note: do *NOT* use type information or inline state in any way when
+     * deciding whether to lower a CALL or APPLY. The stub calls here store
+     * their return values in a different slot, so when recompiling we need
+     * to go down the exact same path.
      */
     bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
 
     bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
 
 #ifdef JS_MONOIC
-    if (debugMode() || newType || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
-        (lowerFunCallOrApply && origThis->isNotType(JSVAL_TYPE_OBJECT))) {
+    if (debugMode() || newType || origCallee->isNotType(JSVAL_TYPE_OBJECT)) {
 #endif
         if (applyTricks == LazyArgsObj) {
             /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
             jsop_arguments();
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         }
         emitUncachedCall(callImmArgc, callingNew);
         applyTricks = NoApplyTricks;
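
The comment above pins down an invariant: the lowering decision for CALL/APPLY must not consult type information or inline state, so that a recompile reproduces the same stack layout. A toy illustration of a bytecode-only decision (the real IsLowerableFunCallOrApply is more involved):

    #include <cassert>
    #include <cstdint>

    enum Op : uint8_t { OP_CALL, OP_FUNCALL, OP_FUNAPPLY, OP_OTHER };

    // The lowering decision may inspect only the opcode stream, never type
    // information or inlining state, so a recompile reaches the same answer.
    bool lowerFunCallOrApply(const uint8_t *pc) {
        return pc[0] == OP_FUNCALL || pc[0] == OP_FUNAPPLY;
    }

    int main() {
        uint8_t bytecode[] = { OP_FUNCALL };
        // Same bytecode, same answer, no matter what types we later learn.
        assert(lowerFunCallOrApply(bytecode) == lowerFunCallOrApply(bytecode));
        return 0;
    }
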
@@ -2921,21 +3123,24 @@ mjit::Compiler::inlineCallHelper(uint32 
             OOL_STUBCALL(stubs::SlowCall);
             stubcc.rejoin(Changes(1));
         }
         return true;
 #ifdef JS_MONOIC
     }
 
     frame.forgetConstantData(origCallee);
-    if (lowerFunCallOrApply)
+    if (lowerFunCallOrApply) {
         frame.forgetConstantData(origThis);
+        if (origThis->isNotType(JSVAL_TYPE_OBJECT))
+            frame.forgetType(origThis);
+    }
 
     /* Initialized by both branches below. */
-    CallGenInfo     callIC(PC);
+    CallGenInfo     callIC;
     CallPatchInfo   callPatch;
     MaybeRegisterID icCalleeType; /* type to test for function-ness */
     RegisterID      icCalleeData; /* data to call */
     Address         icRvalAddr;   /* return slot on slow-path rejoin */
 
     /*
      * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
      * following labels (as used in finishThisUp):
@@ -2989,45 +3194,45 @@ mjit::Compiler::inlineCallHelper(uint32 
 
             /*
              * For f.call(), since we compile the ic under the (checked)
              * assumption that call == js_fun_call, we still have a static
              * frame size. For f.apply(), the frame size depends on the dynamic
              * length of the array passed to apply.
              */
             if (*PC == JSOP_FUNCALL)
-                callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc - 1);
+                callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc - 1);
             else
                 callIC.frameSize.initDynamic();
         } else {
             /* Leaves pinned regs untouched. */
             frame.syncAndKill(Uses(speculatedArgc + 2));
 
             icCalleeType = origCalleeType;
             icCalleeData = origCalleeData;
             icRvalAddr = frame.addressOf(origCallee);
-            callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc);
+            callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc);
         }
     }
 
     callIC.argTypes = NULL;
     callIC.typeMonitored = monitored(PC);
     if (callIC.typeMonitored && callIC.frameSize.isStatic()) {
         unsigned argc = callIC.frameSize.staticArgc();
         callIC.argTypes = (types::ClonedTypeSet *)
             js_calloc((1 + argc) * sizeof(types::ClonedTypeSet));
         if (!callIC.argTypes) {
             js_ReportOutOfMemory(cx);
             return false;
         }
         types::TypeSet *types = frame.getTypeSet(frame.peek(-(argc + 1)));
-        types::TypeSet::Clone(cx, script, types, &callIC.argTypes[0]);
+        types::TypeSet::Clone(cx, outerScript, types, &callIC.argTypes[0]);
         for (unsigned i = 0; i < argc; i++) {
             types::TypeSet *types = frame.getTypeSet(frame.peek(-(argc - i)));
-            types::TypeSet::Clone(cx, script, types, &callIC.argTypes[i + 1]);
+            types::TypeSet::Clone(cx, outerScript, types, &callIC.argTypes[i + 1]);
         }
     }
 
     /* Test the type if necessary. Failing this always takes a really slow path. */
     MaybeJump notObjectJump;
     if (icCalleeType.isSet())
         notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
 
@@ -3099,35 +3304,33 @@ mjit::Compiler::inlineCallHelper(uint32 
         /*
          * At this point the function is definitely scripted, so we try to
          * compile it and patch either funGuard/funJump or oolJump. This code
          * is only executed once.
          */
         callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
         void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
         if (callIC.frameSize.isStatic())
-            callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.localSlots());
+            callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.totalDepth());
         else
             callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);
 
         callIC.funObjReg = icCalleeData;
         callIC.funPtrReg = funPtrReg;
 
         /*
          * The IC call either returns NULL, meaning call completed, or a
-         * function pointer to jump to. Caveat: Must restore JSFrameReg
-         * because a new frame has been pushed.
+         * function pointer to jump to.
          */
         rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                             Registers::ReturnReg);
         if (callIC.frameSize.isStatic())
             stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
         else
             stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
-        stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
         callPatch.hasSlowNcode = true;
         callPatch.slowNcodePatch =
             stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
                                           Address(JSFrameReg, JSStackFrame::offsetOfncode()));
         stubcc.masm.jump(Registers::ReturnReg);
 
         /*
          * This ool path is the catch-all for everything but scripted function
@@ -3158,16 +3361,17 @@ mjit::Compiler::inlineCallHelper(uint32 
         flags |= JSFRAME_CONSTRUCTING;
 
     InlineFrameAssembler inlFrame(masm, callIC, flags);
     callPatch.hasFastNcode = true;
     callPatch.fastNcodePatch = inlFrame.assemble(NULL);
 
     callIC.hotJump = masm.jump();
     callIC.joinPoint = callPatch.joinPoint = masm.label();
+    callIC.callIndex = callSites.length();
     addReturnSite(callPatch.joinPoint);
     if (lowerFunCallOrApply)
         uncachedCallPatch.joinPoint = callIC.joinPoint;
     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
 
     /*
      * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
      * in the in-line path, so we can check the IC space now.
@@ -3221,37 +3425,166 @@ mjit::Compiler::inlineCallHelper(uint32 
     }
 
     applyTricks = NoApplyTricks;
 
     return true;
 #endif
 }
 
+CompileStatus
+mjit::Compiler::inlineScriptedFunction(uint32 argc, bool callingNew)
+{
+    if (!cx->typeInferenceEnabled())
+        return Compile_InlineAbort;
+
+    /* :XXX: Not doing inlining yet when calling 'new' or calling from 'new'. */
+    if (isConstructing || callingNew)
+        return Compile_InlineAbort;
+
+    if (applyTricks == LazyArgsObj)
+        return Compile_InlineAbort;
+
+    FrameEntry *origCallee = frame.peek(-(argc + 2));
+    FrameEntry *origThis = frame.peek(-(argc + 1));
+
+    if (!origCallee->isConstant() || !origCallee->isType(JSVAL_TYPE_OBJECT))
+        return Compile_InlineAbort;
+
+    JSObject *callee = &origCallee->getValue().toObject();
+    if (!callee->isFunction())
+        return Compile_InlineAbort;
+
+    JSFunction *fun = callee->getFunctionPrivate();
+    if (!fun->isInterpreted())
+        return Compile_InlineAbort;
+
+    /*
+     * The outer and inner scripts must have the same scope. This means we
+     * only inline calls between non-inner functions that share the same global.
+     */
+    if (!outerScript->compileAndGo ||
+        (outerScript->fun && outerScript->fun->getParent() != globalObj) ||
+        !fun->script()->compileAndGo ||
+        fun->getParent() != globalObj) {
+        return Compile_InlineAbort;
+    }
+
+    /* The outer and inner scripts must have the same strictness. */
+    if (outerScript->strictModeCode != fun->script()->strictModeCode)
+        return Compile_InlineAbort;
+
+    /* We can't cope with inlining recursive functions yet. */
+    ActiveFrame *checka = a;
+    while (checka) {
+        if (checka->script == fun->script())
+            return Compile_InlineAbort;
+        checka = checka->parent;
+    }
+
+    /*
+     * Make sure the script has not had its .arguments accessed, and trigger
+     * recompilation if it ever is accessed.
+     */
+    types::TypeSet *types = frame.getTypeSet(origCallee);
+    types::ObjectKind kind = types->getKnownObjectKind(cx, outerScript);
+    if (kind != types::OBJECT_INLINEABLE_FUNCTION)
+        return Compile_InlineAbort;
+
+    /*
+     * For 'this' and arguments which are copies of other entries still in
+     * memory, try to get registers now. This will let us carry these entries
+     * around loops if possible. (Entries first accessed within the inlined
+     * call can't be loop-carried.)
+     */
+    frame.tryCopyRegister(origThis, origCallee);
+    for (unsigned i = 0; i < argc; i++)
+        frame.tryCopyRegister(frame.peek(-(i + 1)), origCallee);
+
+    JSValueType returnType = knownPushedType(0);
+
+    bool needReturnValue = JSOP_POP != (JSOp)*(PC + JSOP_CALL_LENGTH);
+    bool syncReturnValue = needReturnValue && returnType == JSVAL_TYPE_UNKNOWN;
+
+    CompileStatus status;
+
+    status = pushActiveFrame(fun->script(), argc);
+    if (status != Compile_Okay)
+        return status;
+
+    if (!a->analysis.inlineable(argc)) {
+        popActiveFrame();
+        return Compile_InlineAbort;
+    }
+
+    if (a->analysis.usesThisValue() && origThis->isNotType(JSVAL_TYPE_OBJECT)) {
+        popActiveFrame();
+        return Compile_InlineAbort;
+    }
+
+    a->needReturnValue = needReturnValue;
+    a->syncReturnValue = syncReturnValue;
+    a->returnValueDouble = returnType == JSVAL_TYPE_DOUBLE;
+
+    status = generateMethod();
+    if (status != Compile_Okay) {
+        popActiveFrame();
+        if (status == Compile_Abort) {
+            /* The callee is uncompileable, mark it as uninlineable and retry. */
+            if (!cx->markTypeFunctionUninlineable(fun->getType()))
+                return Compile_Error;
+            return Compile_Retry;
+        }
+        return status;
+    }
+
+    JS_ASSERT(a->returnSet);
+
+    AnyRegisterID returnRegister = a->returnRegister;
+    Registers evictedRegisters = Registers(Registers::AvailAnyRegs & ~a->returnParentRegs.freeMask);
+
+    for (unsigned i = 0; i < a->returnJumps.length(); i++)
+        a->returnJumps[i].linkTo(masm.label(), &masm);
+
+    popActiveFrame();
+
+    frame.evictInlineModifiedRegisters(evictedRegisters);
+
+    frame.popn(argc + 2);
+    if (needReturnValue && !syncReturnValue) {
+        frame.takeReg(returnRegister);
+        if (returnRegister.isReg())
+            frame.pushTypedPayload(returnType, returnRegister.reg());
+        else
+            frame.pushDouble(returnRegister.fpreg());
+    } else {
+        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
+    }
+
+    /* If we end up expanding the inline frame, it will need a return site to rejoin at. */
+    addReturnSite(stubcc.masm.label(), true);
+    stubcc.masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
+    stubcc.masm.storeValueFromComponents(JSReturnReg_Type, JSReturnReg_Data,
+                                         frame.addressOf(frame.peek(-1)));
+    stubcc.rejoin(Changes(1));
+
+    return Compile_Okay;
+}
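
inlineScriptedFunction declines through a long series of guards before committing: no constructing calls, interpreted callee only, matching global and strictness, no recursion, and a callee kind that type inference marked inlineable. A compressed sketch of that gate with stand-in predicates:

    #include <cstdio>

    struct Callee {
        bool interpreted, compileAndGo, strict, sameGlobal, inlineableKind;
    };
    struct Caller {
        bool constructing, compileAndGo, strict;
    };

    // Mirror the order of the early-out checks: constructing calls, wrong
    // scope/strictness, recursion and uninlineable kinds all bail out.
    bool canInline(const Caller &c, const Callee &f, bool recursive) {
        if (c.constructing) return false;
        if (!f.interpreted || !f.inlineableKind) return false;
        if (!c.compileAndGo || !f.compileAndGo || !f.sameGlobal) return false;
        if (c.strict != f.strict) return false;
        if (recursive) return false;
        return true;
    }

    int main() {
        Caller c = { false, true, false };
        Callee f = { true, true, false, true, true };
        printf("inlineable: %d\n", canInline(c, f, false));
        return 0;
    }
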
+
 /*
  * This function must be called immediately after any instruction which could
  * cause a new JSStackFrame to be pushed and could lead to a new debug trap
  * being set. This includes any API callbacks and any scripted or native call.
  */
 void
 mjit::Compiler::addCallSite(const InternalCallSite &site)
 {
-#if 1 /* Expensive assertion on some tests. */
-    for (unsigned i = 0; i < callSites.length(); i++)
-        JS_ASSERT(site.pc != callSites[i].pc || site.id != callSites[i].id);
-#endif
     callSites.append(site);
 }
 
-void
-mjit::Compiler::restoreFrameRegs(Assembler &masm)
-{
-    masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
-}
-
 bool
 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
 {
     JS_ASSERT(lhs.isPrimitive());
     JS_ASSERT(rhs.isPrimitive());
 
     if (lhs.isString() && rhs.isString()) {
         int32 cmp;
@@ -3602,17 +3935,17 @@ mjit::Compiler::jsop_callprop_generic(JS
     pic.objReg = objReg;
     pic.shapeReg = shapeReg;
     pic.atom = atom;
 
     /*
      * Store the type and object back. Don't bother keeping them in registers,
      * since a sync will be needed for the upcoming call.
      */
-    uint32 thisvSlot = frame.localSlots();
+    uint32 thisvSlot = frame.totalDepth();
     Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
 
 #if defined JS_NUNBOX32
     masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
 #elif defined JS_PUNBOX64
     masm.orPtr(pic.objReg, pic.typeReg);
     masm.storePtr(pic.typeReg, thisv);
 #endif
@@ -3985,17 +4318,17 @@ mjit::Compiler::jsop_setprop(JSAtom *ato
     if (monitored(PC)) {
         types::TypeSet *types = frame.getTypeSet(rhs);
         pic.typeMonitored = true;
         pic.rhsTypes = (types::ClonedTypeSet *) ::js_calloc(sizeof(types::ClonedTypeSet));
         if (!pic.rhsTypes) {
             js_ReportOutOfMemory(cx);
             return false;
         }
-        types::TypeSet::Clone(cx, script, types, pic.rhsTypes);
+        types::TypeSet::Clone(cx, outerScript, types, pic.rhsTypes);
     } else {
         pic.typeMonitored = false;
         pic.rhsTypes = NULL;
     }
 
     RESERVE_IC_SPACE(masm);
     RESERVE_OOL_SPACE(stubcc.masm);
 
@@ -4219,17 +4552,17 @@ void
 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
 {
     PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC), usePropCache);
 
     // This code does not check the frame flags to see if scopeChain has been
     // set. Rather, it relies on the up-front analysis statically determining
     // whether BINDNAME can be used, which reifies the scope chain at the
     // prologue.
-    JS_ASSERT(analysis->usesScopeChain());
+    JS_ASSERT(a->analysis.usesScopeChain());
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.allocReg();
     pic.typeReg = Registers::ReturnReg;
     pic.atom = atom;
     pic.hasTypeCheck = false;
 
     RESERVE_IC_SPACE(masm);
@@ -4331,17 +4664,17 @@ mjit::Compiler::jsop_this()
 {
     frame.pushThis();
 
     /* 
      * In strict mode code, we don't wrap 'this'.
      * In direct-call eval code, we wrapped 'this' before entering the eval.
      * In global code, 'this' is always an object.
      */
-    if (fun && !script->strictModeCode) {
+    if (script->fun && !script->strictModeCode) {
         FrameEntry *thisFe = frame.peek(-1);
         if (!thisFe->isTypeKnown()) {
             JSValueType type = knownThisType();
             if (type != JSVAL_TYPE_OBJECT) {
                 Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
                 stubcc.linkExit(notObj, Uses(1));
                 stubcc.leave();
                 OOL_STUBCALL(stubs::This);
@@ -4360,17 +4693,17 @@ mjit::Compiler::jsop_this()
 
 void
 mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
 {
     JSAtom *atom = script->getAtom(index);
 
 #if defined JS_MONOIC
     jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
-    bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
+    bool pop = (JSOp(*next) == JSOP_POP) && !a->analysis.jumpTarget(next);
     int amt = (op == JSOP_GNAMEINC || op == JSOP_INCGNAME) ? -1 : 1;
 
     if (pop || (op == JSOP_INCGNAME || op == JSOP_DECGNAME)) {
        /* These cases are easy: the original value is not observed. */
 
         jsop_getgname(index, JSVAL_TYPE_UNKNOWN);
         // V
 
@@ -4448,17 +4781,17 @@ mjit::Compiler::jsop_gnameinc(JSOp op, V
 }
 
 bool
 mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
 {
     JSAtom *atom = script->getAtom(index);
 #if defined JS_POLYIC
     jsbytecode *next = &PC[JSOP_NAMEINC_LENGTH];
-    bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
+    bool pop = (JSOp(*next) == JSOP_POP) && !a->analysis.jumpTarget(next);
     int amt = (op == JSOP_NAMEINC || op == JSOP_INCNAME) ? -1 : 1;
 
     if (pop || (op == JSOP_INCNAME || op == JSOP_DECNAME)) {
        /* These cases are easy: the original value is not observed. */
 
         jsop_bindname(atom, false);
         // OBJ
 
@@ -4525,17 +4858,17 @@ mjit::Compiler::jsop_nameinc(JSOp op, Vo
 bool
 mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
 {
     JSAtom *atom = script->getAtom(index);
 #if defined JS_POLYIC
     FrameEntry *objFe = frame.peek(-1);
     if (!objFe->isTypeKnown() || objFe->getKnownType() == JSVAL_TYPE_OBJECT) {
         jsbytecode *next = &PC[JSOP_PROPINC_LENGTH];
-        bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
+        bool pop = (JSOp(*next) == JSOP_POP) && !a->analysis.jumpTarget(next);
         int amt = (op == JSOP_PROPINC || op == JSOP_INCPROP) ? -1 : 1;
 
         if (pop || (op == JSOP_INCPROP || op == JSOP_DECPROP)) {
             /*
              * These cases are easier: the original value is not observed.
              * Use the same stack layout for the value as in the observed case,
              * so that if the operation overflows, the stub will be able to find
              * the modified object.
@@ -5420,17 +5753,17 @@ mjit::Compiler::finishLoop(jsbytecode *h
     jsbytecode *entryTarget;
     frame.popLoop(head, &entry, &entryTarget);
 
     if (!jumpInScript(entry, entryTarget))
         return false;
 
     fallthrough.linkTo(masm.label(), &masm);
 
-    if (!analysis->getCode(head).safePoint) {
+    if (!a->analysis.getCode(head).safePoint) {
         /*
          * Emit a stub into the OOL path which loads registers from a synced state
          * and jumps to the loop head, for rejoining from the interpreter.
          */
         LoopEntry entry;
         entry.pcOffset = head - script->code;
         entry.label = stubcc.masm.label();
         loopEntries.append(entry);
@@ -5463,19 +5796,19 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
 {
     if (trampoline)
         *trampoline = false;
 
     /*
      * Unless we are coming from a branch which synced everything, syncForBranch
      * must have been called and ensured an allocation at the target.
      */
-    RegisterAllocation *&lvtarget = liveness.getCode(target).allocation;
+    RegisterAllocation *&lvtarget = a->liveness.getCode(target).allocation;
     if (!lvtarget) {
-        lvtarget = ArenaNew<RegisterAllocation>(liveness.pool, false);
+        lvtarget = ArenaNew<RegisterAllocation>(a->liveness.pool, false);
         if (!lvtarget)
             return false;
     }
 
     bool consistent = frame.consistentRegisters(target);
 
     if (!addTraceHints || target >= PC ||
         (JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
@@ -5560,17 +5893,16 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
 
         OOL_STUBCALL(stubs::InvokeTracer);
 
         PC = pc;
     }
 
     Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                         Registers::ReturnReg);
-    restoreFrameRegs(stubcc.masm);
     stubcc.masm.jump(Registers::ReturnReg);
     no.linkTo(stubcc.masm.label(), &stubcc.masm);
 
 #ifdef JS_MONOIC
     ic.jumpTarget = target;
     ic.fastTrampoline = !consistent;
     ic.trampolineStart = stubcc.masm.label();
 
@@ -5610,18 +5942,18 @@ void
 mjit::Compiler::enterBlock(JSObject *obj)
 {
     // If this is an exception entry point, then jsl_InternalThrow has set
     // VMFrame::fp to the correct fp for the entry point. We need to copy
     // that value here to FpReg so that FpReg also has the correct sp.
     // Otherwise, we would simply be using a stale FpReg value.
     // Additionally, we check the interrupt flag to allow interrupting
     // deeply nested exception handling.
-    if (analysis->getCode(PC).exceptionEntry) {
-        restoreFrameRegs(masm);
+    if (a->analysis.getCode(PC).exceptionEntry) {
+        masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
         interruptCheckHelper();
     }
 
     /* For now, don't bother doing anything for this opcode. */
     frame.syncAndForgetEverything();
     masm.move(ImmPtr(obj), Registers::ArgReg1);
     uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
     INLINE_STUBCALL(stubs::EnterBlock);
@@ -5858,98 +6190,98 @@ mjit::Compiler::jsop_forgname(JSAtom *at
  */
 
 void
 mjit::Compiler::fixDoubleTypes(Uses uses)
 {
     if (!cx->typeInferenceEnabled())
         return;
 
-    for (uint32 i = 0; fun && i < fun->nargs; i++) {
+    for (uint32 i = 0; script->fun && i < script->fun->nargs; i++) {
         JSValueType type = knownArgumentType(i);
-        if (type == JSVAL_TYPE_DOUBLE && !analysis->argEscapes(i)) {
+        if (type == JSVAL_TYPE_DOUBLE && !a->analysis.argEscapes(i)) {
             FrameEntry *fe = frame.getArg(i);
             if (!fe->isType(JSVAL_TYPE_DOUBLE))
                 frame.ensureDouble(fe);
         }
     }
 
     for (uint32 i = 0; i < script->nfixed; i++) {
         JSValueType type = knownLocalType(i);
-        if (type == JSVAL_TYPE_DOUBLE && !analysis->localEscapes(i)) {
+        if (type == JSVAL_TYPE_DOUBLE && !a->analysis.localEscapes(i)) {
             FrameEntry *fe = frame.getLocal(i);
             if (!fe->isType(JSVAL_TYPE_DOUBLE))
                 frame.ensureDouble(fe);
         }
     }
 }
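
fixDoubleTypes forces args and locals whose inferred type is double into double representation before a branch, but only when analysis shows the slot never escapes. A sketch of the pass over a simplified slot array:

    #include <cstdio>

    enum Known { TYPE_INT, TYPE_DOUBLE, TYPE_UNKNOWN };

    struct Slot { Known inferred; bool escapes; bool isDoubleRep; };

    // Only slots that are known doubles and never escape the frame are
    // eagerly converted; escaping slots must keep their synced form.
    void fixDoubleTypes(Slot *slots, int n) {
        for (int i = 0; i < n; i++) {
            if (slots[i].inferred == TYPE_DOUBLE && !slots[i].escapes)
                slots[i].isDoubleRep = true;
        }
    }

    int main() {
        Slot slots[2] = { { TYPE_DOUBLE, false, false },
                          { TYPE_DOUBLE, true,  false } };
        fixDoubleTypes(slots, 2);
        printf("%d %d\n", slots[0].isDoubleRep, slots[1].isDoubleRep); // 1 0
        return 0;
    }
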
 
 void
 mjit::Compiler::restoreAnalysisTypes(uint32 stackDepth)
 {
     if (!cx->typeInferenceEnabled())
         return;
 
     /* Restore known types of locals/args, for join points or after forgetting everything. */
     for (uint32 i = 0; i < script->nfixed; i++) {
         JSValueType type = knownLocalType(i);
-        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !analysis->localEscapes(i))) {
+        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !a->analysis.localEscapes(i))) {
             FrameEntry *fe = frame.getLocal(i);
             JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
             if (!fe->isTypeKnown())
                 frame.learnType(fe, type, false);
         }
     }
-    for (uint32 i = 0; fun && i < fun->nargs; i++) {
+    for (uint32 i = 0; script->fun && i < script->fun->nargs; i++) {
         JSValueType type = knownArgumentType(i);
-        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !analysis->argEscapes(i))) {
+        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !a->analysis.argEscapes(i))) {
             FrameEntry *fe = frame.getArg(i);
             JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
             if (!fe->isTypeKnown())
                 frame.learnType(fe, type, false);
         }
     }
 }
 
 JSValueType
 mjit::Compiler::knownThisType()
 {
     if (!cx->typeInferenceEnabled())
         return JSVAL_TYPE_UNKNOWN;
-    if (hasThisType)
-        return thisType;
-    hasThisType = true;
-    thisType = script->thisTypes()->getKnownTypeTag(cx, script);
-    return thisType;
+    if (a->hasThisType)
+        return a->thisType;
+    a->hasThisType = true;
+    a->thisType = script->thisTypes()->getKnownTypeTag(cx, outerScript);
+    return a->thisType;
 }
 
 JSValueType
 mjit::Compiler::knownArgumentType(uint32 arg)
 {
     if (!cx->typeInferenceEnabled())
         return JSVAL_TYPE_UNKNOWN;
-    JS_ASSERT(fun && arg < fun->nargs);
-    return argumentTypes[arg];
+    JS_ASSERT(script->fun && arg < script->fun->nargs);
+    return a->argumentTypes[arg];
 }
 
 JSValueType
 mjit::Compiler::knownLocalType(uint32 local)
 {
     if (!cx->typeInferenceEnabled() || local >= script->nfixed)
         return JSVAL_TYPE_UNKNOWN;
-    return localTypes[local];
+    return a->localTypes[local];
 }
 
 JSValueType
 mjit::Compiler::knownPushedType(uint32 pushed)
 {
     if (!cx->typeInferenceEnabled())
         return JSVAL_TYPE_UNKNOWN;
     types::TypeSet *types = script->types->pushed(PC - script->code, pushed);
-    return types->getKnownTypeTag(cx, script);
+    return types->getKnownTypeTag(cx, outerScript);
 }
 
 bool
 mjit::Compiler::mayPushUndefined(uint32 pushed)
 {
     JS_ASSERT(cx->typeInferenceEnabled());
 
     /*
@@ -6017,11 +6349,11 @@ mjit::Compiler::arrayPrototypeHasIndexed
      * in the presence of multiple global objects, we should figure out the possible
      * prototype(s) from the objects in the type set that triggered this call.
      */
     JSObject *proto;
     if (!js_GetClassPrototype(cx, NULL, JSProto_Array, &proto, NULL))
         return false;
     types::TypeSet *arrayTypes = proto->getType()->getProperty(cx, JSID_VOID, false);
     types::TypeSet *objectTypes = proto->getProto()->getType()->getProperty(cx, JSID_VOID, false);
-    return arrayTypes->knownNonEmpty(cx, script)
-        || objectTypes->knownNonEmpty(cx, script);
+    return arrayTypes->knownNonEmpty(cx, outerScript)
+        || objectTypes->knownNonEmpty(cx, outerScript);
 }
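
A recurring change in the hunks above: every type query (getKnownTypeTag, knownNonEmpty, getKnownObjectKind) now passes outerScript instead of script, presumably so that the type-inference constraints generated by the query attach to the outermost compilation even when the query is made on behalf of an inline frame. The per-frame caching in knownThisType can be modeled standalone; all names below are simplified stand-ins, not the real API:

    #include <cstdio>

    enum JSValueType { JSVAL_TYPE_UNKNOWN, JSVAL_TYPE_INT32, JSVAL_TYPE_DOUBLE };

    // Stand-in for script->thisTypes()->getKnownTypeTag(cx, outerScript).
    static JSValueType computeThisType() { return JSVAL_TYPE_INT32; }

    struct ActiveFrameModel {
        bool hasThisType;
        JSValueType thisType;
    };

    static JSValueType knownThisType(ActiveFrameModel *a) {
        if (a->hasThisType)
            return a->thisType;          // cached for this inline frame
        a->hasThisType = true;
        a->thisType = computeThisType(); // query once per frame
        return a->thisType;
    }

    int main() {
        ActiveFrameModel a = { false, JSVAL_TYPE_UNKNOWN };
        printf("%d %d\n", knownThisType(&a), knownThisType(&a));
        return 0;
    }
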
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -58,22 +58,23 @@ struct PatchableFrame {
     jsbytecode *pc;
 };
 
 class Compiler : public BaseCompiler
 {
     friend class StubCompiler;
 
     struct BranchPatch {
-        BranchPatch(const Jump &j, jsbytecode *pc)
-          : jump(j), pc(pc)
+        BranchPatch(const Jump &j, jsbytecode *pc, uint32 inlineIndex)
+          : jump(j), pc(pc), inlineIndex(inlineIndex)
         { }
 
         Jump jump;
         jsbytecode *pc;
+        uint32 inlineIndex;
     };
 
 #if defined JS_MONOIC
     struct GlobalNameICInfo {
         Label fastPathStart;
         Call slowPathCall;
         DataLabel32 shape;
         DataLabelPtr addrLabel;
@@ -132,23 +133,21 @@ class Compiler : public BaseCompiler
         MaybeJump slowTraceHint;
 
         TraceGenInfo() : initialized(false) {}
     };
 
     /* InlineFrameAssembler wants to see this. */
   public:
     struct CallGenInfo {
-        CallGenInfo(jsbytecode *pc) : pc(pc) {}
-
         /*
          * These members map to members in CallICInfo. See that structure for
          * more comments.
          */
-        jsbytecode   *pc;
+        uint32       callIndex;
         DataLabelPtr funGuard;
         Jump         funJump;
         Jump         hotJump;
         Call         oolCall;
         Label        joinPoint;
         Label        slowJoinPoint;
         Label        slowPathStart;
         Label        hotPathLabel;
@@ -314,24 +313,30 @@ class Compiler : public BaseCompiler
         Defs(uint32 ndefs)
           : ndefs(ndefs)
         { }
         uint32 ndefs;
     };
 
     struct InternalCallSite {
         uint32 returnOffset;
-        jsbytecode *pc;
+        DataLabelPtr callPatch;
+        DataLabelPtr inlinePatch;
+        uint32 inlineIndex;
+        jsbytecode *inlinepc;
         size_t id;
         bool call;
         bool ool;
 
-        InternalCallSite(uint32 returnOffset, jsbytecode *pc, size_t id,
+        InternalCallSite(uint32 returnOffset,
+                         uint32 inlineIndex, jsbytecode *inlinepc, size_t id,
                          bool call, bool ool)
-          : returnOffset(returnOffset), pc(pc), id(id), call(call), ool(ool)
+          : returnOffset(returnOffset),
+            inlineIndex(inlineIndex), inlinepc(inlinepc), id(id),
+            call(call), ool(ool)
         { }
     };
 
     struct DoublePatch {
         double d;
         DataLabelPtr label;
         bool ool;
     };
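
A short aside on the InternalCallSite change above: call sites now key on (inlineIndex, inlinepc) rather than a bare pc, so a machine-code return offset can be mapped back to the inline frame that was active when the stub call was emitted. A minimal sketch of that lookup, with invented names (CallSiteModel, findSite) and data:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct CallSiteModel {
        uint32_t returnOffset; // return-address offset within the JIT code
        uint32_t inlineIndex;  // active inline frame (uint32_t(-1) = outermost)
        uint32_t pcOffset;     // bytecode offset within that frame's script
    };

    static const CallSiteModel *findSite(const std::vector<CallSiteModel> &sites,
                                         uint32_t returnOffset) {
        for (size_t i = 0; i < sites.size(); i++) {
            if (sites[i].returnOffset == returnOffset)
                return &sites[i];
        }
        return nullptr;
    }

    int main() {
        std::vector<CallSiteModel> sites;
        sites.push_back(CallSiteModel{0x40, uint32_t(-1), 12});
        sites.push_back(CallSiteModel{0x80, 0, 3});
        if (const CallSiteModel *s = findSite(sites, 0x80))
            printf("inline frame %u, pc offset %u\n", s->inlineIndex, s->pcOffset);
        return 0;
    }
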
@@ -342,32 +347,73 @@ class Compiler : public BaseCompiler
     };
 
     struct LoopEntry {
         uint32 pcOffset;
         Label label;
     };
 
     JSStackFrame *fp;
+    JSScript *outerScript;
 
     /* Existing frames on the stack whose slots may need to be updated. */
-    const Vector<PatchableFrame> *frames;
+    const Vector<PatchableFrame> *patchFrames;
 
-    JSScript *script;
     JSObject *scopeChain;
     JSObject *globalObj;
-    JSFunction *fun;
     bool isConstructing;
-    analyze::Script *analysis;
-    Label *jumpMap;
     bool *savedTraps;
-    jsbytecode *PC;
     Assembler masm;
     FrameState frame;
-    analyze::LifetimeScript liveness;
+
+    /*
+     * State for the current stack frame.
+     *
+     * When inlining function calls, we keep track of the state of each inline
+     * frame. The state of parent frames is not modified while analyzing an
+     * inner frame, though registers used by those parents can be spilled by
+     * the inner frame (reflected in the inner frame's active register state).
+     */
+
+    struct ActiveFrame {
+        ActiveFrame *parent;
+        jsbytecode *parentPC;
+        JSScript *script;
+        uint32 inlineIndex;
+        analyze::Script analysis;
+        analyze::LifetimeScript liveness;
+        Label *jumpMap;
+        bool hasThisType;
+        JSValueType thisType;
+        JSValueType *argumentTypes;
+        JSValueType *localTypes;
+        uint32 depth;
+        Vector<UnsyncedEntry> unsyncedEntries; // :XXX: handle OOM
+
+        /* State for managing return from inlined frames. */
+        bool needReturnValue;
+        bool syncReturnValue;
+        bool returnValueDouble;
+        bool returnSet;
+        AnyRegisterID returnRegister;
+        Registers returnParentRegs;
+        Vector<Jump> returnJumps; // :XXX: handle OOM
+
+        ActiveFrame(JSContext *cx);
+        ~ActiveFrame();
+    };
+    ActiveFrame *a;
+    ActiveFrame *outer;
+
+    JSScript *script;
+    jsbytecode *PC;
+
+    /* State spanning all stack frames. */
+
+    js::Vector<ActiveFrame*, 4, CompilerAllocPolicy> inlineFrames;
     js::Vector<BranchPatch, 64, CompilerAllocPolicy> branchPatches;
 #if defined JS_MONOIC
     js::Vector<GetGlobalNameICInfo, 16, CompilerAllocPolicy> getGlobalNames;
     js::Vector<SetGlobalNameICInfo, 16, CompilerAllocPolicy> setGlobalNames;
     js::Vector<CallGenInfo, 64, CompilerAllocPolicy> callICs;
     js::Vector<EqualityGenInfo, 64, CompilerAllocPolicy> equalityICs;
     js::Vector<TraceGenInfo, 64, CompilerAllocPolicy> traceICs;
 #endif
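
The ActiveFrame chain above is the heart of the inlining changes: the compiler keeps a stack of per-frame state, with `a` pointing at the frame currently being compiled and `outer` at the root. A simplified standalone model of the chain and of the outerPC() walk that appears further down in this header (all names here are illustrative):

    #include <cstdio>

    struct ActiveFrameModel {
        ActiveFrameModel *parent; // enclosing frame; null for the outermost
        int parentPC;             // bytecode offset of the call site in the parent
    };

    // Mirrors Compiler::outerPC(): report the outer-script location that the
    // current inline chain was entered from.
    static int outerPC(ActiveFrameModel *a, ActiveFrameModel *outer, int currentPC) {
        if (a == outer)
            return currentPC;
        ActiveFrameModel *scan = a;
        while (scan->parent != outer)
            scan = scan->parent;
        return scan->parentPC;
    }

    int main() {
        ActiveFrameModel outerFrame = { nullptr, -1 };
        ActiveFrameModel mid = { &outerFrame, 10 }; // inlined at pc 10 of outer
        ActiveFrameModel inner = { &mid, 4 };       // inlined at pc 4 of mid
        printf("%d\n", outerPC(&inner, &outerFrame, 0)); // prints 10
        return 0;
    }
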
@@ -388,56 +434,64 @@ class Compiler : public BaseCompiler
 #ifdef JS_MONOIC
     Label argsCheckStub;
     Label argsCheckFallthrough;
     Jump argsCheckJump;
 #endif
     bool debugMode_;
     bool addTraceHints;
     bool recompiling;
-    bool hasThisType;
-    JSValueType thisType;
-    js::Vector<JSValueType, 16> argumentTypes;
-    js::Vector<JSValueType, 16> localTypes;
     bool oomInVector;       // True if we have OOM'd appending to a vector. 
     enum { NoApplyTricks, LazyArgsObj } applyTricks;
 
     Compiler *thisFromCtor() { return this; }
 
     friend class CompilerAllocPolicy;
   public:
     // Special atom index used to indicate that the atom is 'length'. This
     // follows interpreter usage in JSOP_LENGTH.
     enum { LengthAtomIndex = uint32(-2) };
 
-    Compiler(JSContext *cx, JSStackFrame *fp, const Vector<PatchableFrame> *frames);
+    Compiler(JSContext *cx, JSStackFrame *fp, const Vector<PatchableFrame> *patchFrames, bool recompiling);
     ~Compiler();
 
     CompileStatus compile();
 
-    jsbytecode *getPC() { return PC; }
     Label getLabel() { return masm.label(); }
     bool knownJump(jsbytecode *pc);
-    Label labelOf(jsbytecode *target);
-    void *findCallSite(const CallSite &callSite);
+    Label labelOf(jsbytecode *target, uint32 inlineIndex);
     void addCallSite(const InternalCallSite &callSite);
-    void addReturnSite(Label joinPoint);
+    void addReturnSite(Label joinPoint, bool ool = false);
     bool loadOldTraps(const Vector<CallSite> &site);
 
     bool debugMode() { return debugMode_; }
 
+    jsbytecode *outerPC() {
+        if (a == outer)
+            return PC;
+        ActiveFrame *scan = a;
+        while (scan && scan->parent != outer)
+            scan = scan->parent;
+        return scan->parentPC;
+    }
+
+    jsbytecode *inlinePC() { return PC; }
+    uint32 inlineIndex() { return a->inlineIndex; }
+
   private:
     CompileStatus performCompilation(JITScript **jitp);
     CompileStatus generatePrologue();
     CompileStatus generateMethod();
     CompileStatus generateEpilogue();
     CompileStatus finishThisUp(JITScript **jitp);
+    CompileStatus pushActiveFrame(JSScript *script, uint32 argc);
+    void popActiveFrame();
 
     /* Analysis helpers. */
-    CompileStatus prepareInferenceTypes();
+    CompileStatus prepareInferenceTypes(JSScript *script, ActiveFrame *a);
     void fixDoubleTypes(Uses uses);
     void restoreAnalysisTypes(uint32 stackDepth);
     JSValueType knownThisType();
     JSValueType knownArgumentType(uint32 arg);
     JSValueType knownLocalType(uint32 local);
     JSValueType knownPushedType(uint32 pushed);
     bool arrayPrototypeHasIndexedProperty();
     bool mayPushUndefined(uint32 pushed);
@@ -490,16 +544,17 @@ class Compiler : public BaseCompiler
     void jsop_getprop_slow(JSAtom *atom, bool usePropCache = true);
     void jsop_getarg(uint32 slot);
     void jsop_setarg(uint32 slot, bool popped);
     void jsop_this();
     void emitReturn(FrameEntry *fe);
     void emitFinalReturn(Assembler &masm);
     void loadReturnValue(Assembler *masm, FrameEntry *fe);
     void emitReturnValue(Assembler *masm, FrameEntry *fe);
+    void emitInlineReturnValue(FrameEntry *fe);
     void dispatchCall(VoidPtrStubUInt32 stub, uint32 argc);
     void interruptCheckHelper();
     void emitUncachedCall(uint32 argc, bool callingNew);
     void checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedArgc,
                                    FrameEntry *origCallee, FrameEntry *origThis,
                                    MaybeRegisterID origCalleeType, RegisterID origCalleeData,
                                    MaybeRegisterID origThisType, RegisterID origThisData,
                                    Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch);
@@ -616,39 +671,42 @@ class Compiler : public BaseCompiler
             JS_NOT_REACHED("unrecognized op");
             return Assembler::Equal;
         }
     }
 
     /* Fast builtins. */
     JSObject *pushedSingleton(unsigned pushed);
     CompileStatus inlineNativeFunction(uint32 argc, bool callingNew);
+    CompileStatus inlineScriptedFunction(uint32 argc, bool callingNew);
     CompileStatus compileMathAbsInt(FrameEntry *arg);
     CompileStatus compileMathAbsDouble(FrameEntry *arg);
     CompileStatus compileMathSqrt(FrameEntry *arg);
     CompileStatus compileMathPowSimple(FrameEntry *arg1, FrameEntry *arg2);
 
     enum RoundingMode { Floor, Round };
     CompileStatus compileRound(FrameEntry *arg, RoundingMode mode);
 
     enum GetCharMode { GetChar, GetCharCode };
     CompileStatus compileGetChar(FrameEntry *thisValue, FrameEntry *arg, GetCharMode mode);
 
     void prepareStubCall(Uses uses);
-    Call emitStubCall(void *ptr);
+    Call emitStubCall(void *ptr, DataLabelPtr *pinline);
 };
 
 // Given a stub call, emits the call into the inline assembly path. If
 // debug mode is on, adds the appropriate instrumentation for recompilation.
 #define INLINE_STUBCALL(stub)                                               \
     do {                                                                    \
         void *nstub = JS_FUNC_TO_DATA_PTR(void *, (stub));                  \
-        Call cl = emitStubCall(nstub);                                      \
-        InternalCallSite site(masm.callReturnOffset(cl), PC, (size_t)nstub, \
+        DataLabelPtr inlinePatch;                                           \
+        Call cl = emitStubCall(nstub, &inlinePatch);                        \
+        InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC, (size_t)nstub, \
                               true, false);                                 \
+        site.inlinePatch = inlinePatch;                                     \
         addCallSite(site);                                                  \
     } while (0)                                                             \
 
 // Given a stub call, emits the call into the out-of-line assembly path. If
 // debug mode is on, adds the appropriate instrumentation for recompilation.
 // Unlike the INLINE_STUBCALL variant, this returns the Call offset.
 #define OOL_STUBCALL(stub)                                                  \
     stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)))
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -953,17 +953,17 @@ mjit::Compiler::jsop_mod()
         masm.move(Imm32(lhs->getValue().toInt32()), X86Registers::eax);
     }
 
     /* Get RHS into anything but EDX - could avoid more spilling? */
     MaybeRegisterID temp;
     RegisterID rhsReg;
     uint32 mask = Registers::AvailRegs & ~Registers::maskReg(X86Registers::edx);
     if (!rhs->isConstant()) {
-        rhsReg = frame.tempRegInMaskForData(rhs, mask);
+        rhsReg = frame.tempRegInMaskForData(rhs, mask).reg();
         JS_ASSERT(rhsReg != X86Registers::edx);
     } else {
         rhsReg = frame.allocReg(mask).reg();
         JS_ASSERT(rhsReg != X86Registers::edx);
         masm.move(Imm32(rhs->getValue().toInt32()), rhsReg);
         temp = rhsReg;
     }
     frame.takeReg(X86Registers::edx);
@@ -1136,30 +1136,30 @@ mjit::Compiler::jsop_equality_int_string
 
         ic.cond = cond;
         ic.tempReg = tempReg;
         ic.lvr = lvr;
         ic.rvr = rvr;
         ic.stubEntry = stubEntry;
         ic.stub = stub;
 
-        bool useIC = !addTraceHints || target >= PC;
+        bool useIC = (!addTraceHints || target >= PC) && !a->parent;
 
         /* Call the IC stub, which may generate a fast path. */
         if (useIC) {
             /* Adjust for the two values just pushed. */
             ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
             ic.stubCall = OOL_STUBCALL_LOCAL_SLOTS(ic::Equality,
-                                                   frame.stackDepth() + script->nfixed + 2);
+                                                   frame.totalDepth() + 2);
             needStub = false;
         }
 #endif
 
         if (needStub)
-            OOL_STUBCALL_LOCAL_SLOTS(stub, frame.stackDepth() + script->nfixed + 2);
+            OOL_STUBCALL_LOCAL_SLOTS(stub, frame.totalDepth() + 2);
 
         /*
          * The stub call has no need to rejoin, since state is synced.
          * Instead, we can just test the return value.
          */
         Assembler::Condition ncond = (fused == JSOP_IFEQ)
                                    ? Assembler::Zero
                                    : Assembler::NonZero;
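
Two things happen in the FastArithmetic hunks: equality ICs are disabled inside inline frames (the new !a->parent test), and stub calls now report frame.totalDepth() instead of stackDepth() + script->nfixed, since an inline frame's slots sit above every enclosing frame's. The accumulation can be sketched as follows; VALUES_PER_STACK_FRAME and the field names follow the patch, but the exact computation is an assumption:

    #include <cstdint>
    #include <cstdio>

    static const uint32_t VALUES_PER_STACK_FRAME = 4; // illustrative constant

    struct FrameModel {
        const FrameModel *parent;
        uint32_t nfixed;     // fixed locals in this frame's script
        uint32_t stackDepth; // current operand-stack height
    };

    // Assumed shape of totalDepth(): this frame's slots plus every ancestor's
    // slots and frame header.
    static uint32_t totalDepth(const FrameModel *f) {
        uint32_t depth = f->nfixed + f->stackDepth;
        for (const FrameModel *p = f->parent; p; p = p->parent)
            depth += p->nfixed + p->stackDepth + VALUES_PER_STACK_FRAME;
        return depth;
    }

    int main() {
        FrameModel outer = { nullptr, 3, 2 };
        FrameModel inlined = { &outer, 1, 0 };
        printf("%u\n", totalDepth(&inlined)); // 1 + (3 + 2 + 4) = 10
        return 0;
    }
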
--- a/js/src/methodjit/FastBuiltins.cpp
+++ b/js/src/methodjit/FastBuiltins.cpp
@@ -312,43 +312,50 @@ mjit::Compiler::compileGetChar(FrameEntr
     }
 
     stubcc.rejoin(Changes(1));
     return Compile_Okay;
 }
 
 
 CompileStatus
-mjit::Compiler::inlineNativeFunction(uint32 argc, bool callingNew) {
+mjit::Compiler::inlineNativeFunction(uint32 argc, bool callingNew)
+{
     JS_ASSERT(!callingNew);
 
+    if (applyTricks == LazyArgsObj)
+        return Compile_InlineAbort;
+
     FrameEntry *origCallee = frame.peek(-(argc + 2));
     FrameEntry *thisValue = frame.peek(-(argc + 1));
 
     if (!origCallee->isConstant() || !origCallee->isType(JSVAL_TYPE_OBJECT))
-        return Compile_Abort;
+        return Compile_InlineAbort;
 
     JSObject *callee = &origCallee->getValue().toObject();
     if (!callee->isFunction())
-        return Compile_Abort;
+        return Compile_InlineAbort;
 
     JSFunction *fun = callee->getFunctionPrivate();
     Native native = fun->maybeNative();
 
+    if (!native)
+        return Compile_InlineAbort;
+
     JSValueType type = knownPushedType(0);
     JSValueType thisType = thisValue->isTypeKnown()
                            ? thisValue->getKnownType()
                            : JSVAL_TYPE_UNKNOWN;
 
     /* All argument types must be known. */
     for (unsigned i = 0; i < argc; i++) {
         FrameEntry *arg = frame.peek(-(i + 1));
 
         if (!arg->isTypeKnown())
-            return Compile_Abort;
+            return Compile_InlineAbort;
     }
 
     if (argc == 1) {
         FrameEntry *arg = frame.peek(-1);
         JSValueType argType = arg->getKnownType();
 
         if (native == js_math_abs) {
             if (argType == JSVAL_TYPE_INT32 && type == JSVAL_TYPE_INT32)
@@ -388,11 +395,11 @@ mjit::Compiler::inlineNativeFunction(uin
             (arg1Type == JSVAL_TYPE_DOUBLE || arg1Type == JSVAL_TYPE_INT32) &&
             arg2Type == JSVAL_TYPE_DOUBLE && arg2->isConstant())
         {
             Value arg2Value = arg2->getValue();
             if (arg2Value.toDouble() == -0.5 || arg2Value.toDouble() == 0.5)
                 return compileMathPowSimple(arg1, arg2);
         }
     }
-    return Compile_Abort;
+    return Compile_InlineAbort;
 }
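
Note that inlineNativeFunction now returns Compile_InlineAbort rather than Compile_Abort: failing to inline should not kill the compilation, only route the call back to the generic path. A sketch of that three-way status; the enum values follow the patch, but the dispatch below is invented:

    #include <cstdio>

    enum CompileStatusModel { Compile_Okay, Compile_Abort, Compile_InlineAbort };

    static CompileStatusModel tryInlineNative(bool inlineable) {
        return inlineable ? Compile_Okay : Compile_InlineAbort;
    }

    // Invented caller: only a hard Compile_Abort gives up on the method JIT.
    static bool emitCall(bool inlineable) {
        switch (tryInlineNative(inlineable)) {
          case Compile_Okay:        return true;  // inlined fast path emitted
          case Compile_InlineAbort: return true;  // fall back to an ordinary call
          default:                  return false; // abandon compilation
        }
    }

    int main() {
        printf("%d %d\n", emitCall(true), emitCall(false)); // 1 1
        return 0;
    }
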
 
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -332,17 +332,17 @@ mjit::Compiler::jsop_bitop(JSOp op)
             frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
 
             stubcc.rejoin(Changes(1));
             return;
         }
 #if defined(JS_CPU_X86) || defined(JS_CPU_X64)
         /* Grosssssss! RHS _must_ be in ECX, on x86 */
         RegisterID rr = frame.tempRegInMaskForData(rhs,
-                                                   Registers::maskReg(JSC::X86Registers::ecx));
+                                                   Registers::maskReg(JSC::X86Registers::ecx)).reg();
 #else
         RegisterID rr = frame.tempRegForData(rhs);
 #endif
 
         if (frame.haveSameBacking(lhs, rhs)) {
             // It's okay to allocReg(). If |rr| is evicted, it won't result in
             // a load, and |rr == reg| is fine since this is (x << x).
             reg = frame.allocReg();
@@ -478,19 +478,19 @@ mjit::Compiler::jsop_equality(JSOp op, B
         /*
          * Handle equality between two objects. We have to ensure there is no
          * special equality operator on either object, if that passes then
          * this is a pointer comparison.
          */
         types::TypeSet *lhsTypes = frame.getTypeSet(lhs);
         types::TypeSet *rhsTypes = frame.getTypeSet(rhs);
         types::ObjectKind lhsKind =
-            lhsTypes ? lhsTypes->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
+            lhsTypes ? lhsTypes->getKnownObjectKind(cx, outerScript) : types::OBJECT_UNKNOWN;
         types::ObjectKind rhsKind =
-            rhsTypes ? rhsTypes->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
+            rhsTypes ? rhsTypes->getKnownObjectKind(cx, outerScript) : types::OBJECT_UNKNOWN;
 
         if (lhsKind != types::OBJECT_UNKNOWN && rhsKind != types::OBJECT_UNKNOWN) {
             /* :TODO: Merge with jsop_relational_int? */
             JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
             frame.forgetConstantData(lhs);
             frame.forgetConstantData(rhs);
             Assembler::Condition cond = GetCompareCondition(op, fused);
             if (target) {
@@ -826,17 +826,17 @@ mjit::Compiler::booleanJumpScript(JSOp o
     /* OOL path: Conversion to boolean. */
     MaybeJump jmpCvtExecScript;
     MaybeJump jmpCvtRejoin;
     Label lblCvtPath = stubcc.masm.label();
 
     if (!fe->isTypeKnown() ||
         !(fe->isType(JSVAL_TYPE_BOOLEAN) || fe->isType(JSVAL_TYPE_INT32))) {
         stubcc.masm.infallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::ValueToBoolean),
-                                     frame.localSlots());
+                                     frame.totalDepth());
 
         jmpCvtExecScript.setJump(stubcc.masm.branchTest32(cond, Registers::ReturnReg,
                                                           Registers::ReturnReg));
         jmpCvtRejoin.setJump(stubcc.masm.jump());
     }
 
     /* Rejoin tag. */
     Label lblAfterScript = masm.label();
@@ -1181,17 +1181,19 @@ mjit::Compiler::jsop_setelem(bool popGua
         jsop_setelem_slow();
         return true;
     }
 
     frame.forgetConstantData(obj);
 
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.getTypeSet(obj);
-        types::ObjectKind kind = types ? types->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
+        types::ObjectKind kind = types
+            ? types->getKnownObjectKind(cx, outerScript)
+            : types::OBJECT_UNKNOWN;
         if (id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
             !arrayPrototypeHasIndexedProperty()) {
             // This is definitely a dense array, generate code directly without
             // using an inline cache.
             jsop_setelem_dense();
             return true;
         }
@@ -1499,18 +1501,19 @@ mjit::Compiler::jsop_getelem(bool isCall
             jsop_getelem_slow();
         return true;
     }
 
     frame.forgetConstantData(obj);
 
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.getTypeSet(obj);
-        types::ObjectKind kind = types ? types->getKnownObjectKind(cx, script) : types::OBJECT_UNKNOWN;
-
+        types::ObjectKind kind = types
+            ? types->getKnownObjectKind(cx, outerScript)
+            : types::OBJECT_UNKNOWN;
         if (!isCall && id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
             !arrayPrototypeHasIndexedProperty()) {
             // This is definitely a dense array, generate code directly without
             // using an inline cache.
             jsop_getelem_dense(kind == types::OBJECT_PACKED_ARRAY);
             return true;
         }
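
The `.reg()` calls added at the x86 sites above exist because tempRegInMaskForData now returns AnyRegisterID, a handle that can name either a general-purpose or a floating-point register (its implementation is in the FrameState-inl.h hunks below). A toy model of such a tagged handle; the encoding here is invented:

    #include <cassert>
    #include <cstdio>

    struct AnyRegModel {
        unsigned raw; // invented encoding: 0..15 are GPRs, 16..31 are FP regs

        bool isReg() const { return raw < 16; }
        unsigned reg() const { assert(isReg()); return raw; }          // GPR number
        unsigned fpreg() const { assert(!isReg()); return raw - 16; }  // FP number
    };

    int main() {
        AnyRegModel ecx = { 1 };
        AnyRegModel xmm0 = { 16 };
        printf("gpr %u, fp %u\n", ecx.reg(), xmm0.fpreg());
        return 0;
    }
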
--- a/js/src/methodjit/FrameEntry.h
+++ b/js/src/methodjit/FrameEntry.h
@@ -258,16 +258,17 @@ class FrameEntry
     JSValueType knownType;
     jsval_layout v_;
     RematInfo  type;
     RematInfo  data;
     uint32     index_;
     FrameEntry *copy;
     bool       copied;
     bool       tracked;
+    bool       inlined;
     bool       initArray;
     JSObject   *initObject;
     jsbytecode *lastLoop;
 };
 
 } /* namespace mjit */
 } /* namespace js */
 
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -42,19 +42,19 @@
 
 namespace js {
 namespace mjit {
 
 inline void
 FrameState::addToTracker(FrameEntry *fe)
 {
     JS_ASSERT(!fe->isTracked());
-    fe->track(tracker.nentries);
-    tracker.add(fe);
-    JS_ASSERT(tracker.nentries <= feLimit());
+    fe->track(a->tracker.nentries);
+    a->tracker.add(fe);
+    JS_ASSERT(a->tracker.nentries <= feLimit(script));
 }
 
 inline FrameEntry *
 FrameState::peek(int32 depth)
 {
     JS_ASSERT(depth < 0);
     JS_ASSERT(sp + depth >= spBase);
     FrameEntry *fe = &sp[depth];
@@ -80,24 +80,26 @@ FrameState::haveSameBacking(FrameEntry *
     if (rhs->isCopy())
         rhs = rhs->copyOf();
     return lhs == rhs;
 }
 
 inline AnyRegisterID
 FrameState::allocReg(uint32 mask)
 {
-    if (freeRegs.hasRegInMask(mask)) {
-        AnyRegisterID reg = freeRegs.takeAnyReg(mask);
+    if (a->freeRegs.hasRegInMask(mask)) {
+        AnyRegisterID reg = a->freeRegs.takeAnyReg(mask);
         clearLoopReg(reg);
+        modifyReg(reg);
         return reg;
     }
 
     AnyRegisterID reg = evictSomeReg(mask);
     regstate(reg).forget();
+    modifyReg(reg);
     return reg;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::allocReg()
 {
     return allocReg(Registers::AvailRegs).reg();
 }
@@ -116,43 +118,54 @@ FrameState::allocAndLoadReg(FrameEntry *
 
     /*
      * Decide whether to retroactively mark a register as holding the entry
      * at the start of the current loop. We can do this if (a) the register has
      * not been touched since the start of the loop (it is in loopRegs), and (b)
      * the entry has also not been written to or already had a loop register
      * assigned.
      */
-    if (freeRegs.hasRegInMask(loopRegs.freeMask & mask) && type == RematInfo::DATA &&
-        (fe == this_ || isArg(fe) || isLocal(fe)) && fe->lastLoop < activeLoop->head) {
-        reg = freeRegs.takeAnyReg(loopRegs.freeMask & mask);
+    if (a->freeRegs.hasRegInMask(loopRegs.freeMask & mask) && type == RematInfo::DATA &&
+        (fe == this_ || isArg(fe) || isLocal(fe)) && fe->lastLoop < activeLoop->head &&
+        !a->parent) {
+        reg = a->freeRegs.takeAnyReg(loopRegs.freeMask & mask);
         setLoopReg(reg, fe);
         return reg;
     }
 
-    if (!freeRegs.empty(mask)) {
-        reg = freeRegs.takeAnyReg(mask);
+    if (!a->freeRegs.empty(mask)) {
+        reg = a->freeRegs.takeAnyReg(mask);
         clearLoopReg(reg);
     } else {
         reg = evictSomeReg(mask);
         regstate(reg).forget();
     }
+    modifyReg(reg);
 
     if (fp)
         masm.loadDouble(addressOf(fe), reg.fpreg());
     else if (type == RematInfo::TYPE)
         masm.loadTypeTag(addressOf(fe), reg.reg());
     else
         masm.loadPayload(addressOf(fe), reg.reg());
 
     regstate(reg).associate(fe, type);
     return reg;
 }
 
 inline void
+FrameState::modifyReg(AnyRegisterID reg)
+{
+    if (a->parentRegs.hasReg(reg)) {
+        a->parentRegs.takeReg(reg);
+        syncParentRegister(masm, reg);
+    }
+}
+
+inline void
 FrameState::clearLoopReg(AnyRegisterID reg)
 {
     JS_ASSERT(loopRegs.hasReg(reg) == (activeLoop && activeLoop->alloc->loop(reg)));
     if (loopRegs.hasReg(reg)) {
         loopRegs.takeReg(reg);
         activeLoop->alloc->setUnassigned(reg);
         JaegerSpew(JSpew_Regalloc, "clearing loop register %s\n", reg.name());
     }
@@ -186,51 +199,51 @@ FrameState::pop()
     JS_ASSERT(sp > spBase);
 
     FrameEntry *fe = --sp;
     if (!fe->isTracked())
         return;
 
     forgetAllRegs(fe);
 
-    typeSets[fe - spBase] = NULL;
+    a->typeSets[fe - spBase] = NULL;
 }
 
 inline void
 FrameState::freeReg(AnyRegisterID reg)
 {
     JS_ASSERT(!regstate(reg).usedBy());
 
-    freeRegs.putReg(reg);
+    a->freeRegs.putReg(reg);
 }
 
 inline void
 FrameState::forgetReg(AnyRegisterID reg)
 {
     /*
      * Important: Do not touch the fe here. We can peephole optimize away
      * loads and stores by re-using the contents of old FEs.
      */
     JS_ASSERT_IF(regstate(reg).fe(), !regstate(reg).fe()->isCopy());
 
     if (!regstate(reg).isPinned()) {
         regstate(reg).forget();
-        freeRegs.putReg(reg);
+        a->freeRegs.putReg(reg);
     }
 }
 
 inline FrameEntry *
 FrameState::rawPush()
 {
-    JS_ASSERT(unsigned(sp - entries) < feLimit());
+    JS_ASSERT(unsigned(sp - entries) < feLimit(script));
 
     if (!sp->isTracked())
         addToTracker(sp);
 
-    typeSets[sp - spBase] = NULL;
+    a->typeSets[sp - spBase] = NULL;
 
     return sp++;
 }
 
 inline void
 FrameState::push(const Value &v)
 {
     FrameEntry *fe = rawPush();
@@ -277,50 +290,50 @@ FrameState::push(Address address, JSValu
     // It's okay if either of these clobbers address.base, since we guarantee
     // eviction will not physically clobber. It's also safe, on x64, for
     // loadValueAsComponents() to take either type or data regs as address.base.
     RegisterID typeReg = allocReg();
     RegisterID dataReg = allocReg();
     masm.loadValueAsComponents(address, typeReg, dataReg);
 #elif JS_NUNBOX32
     // Prevent us from clobbering this reg.
-    bool free = freeRegs.hasReg(address.base);
+    bool free = a->freeRegs.hasReg(address.base);
     if (free)
-        freeRegs.takeReg(address.base);
+        a->freeRegs.takeReg(address.base);
 
     if (knownType != JSVAL_TYPE_UNKNOWN) {
         RegisterID dataReg = allocReg();
         if (free)
-            freeRegs.putReg(address.base);
+            a->freeRegs.putReg(address.base);
         masm.loadPayload(address, dataReg);
         pushTypedPayload(knownType, dataReg);
         return;
     }
 
     RegisterID typeReg = allocReg();
 
     masm.loadTypeTag(address, typeReg);
 
     // Allow re-use of the base register. This could avoid a spill, and
     // is safe because the following allocReg() won't actually emit any
     // writes to the register.
     if (free)
-        freeRegs.putReg(address.base);
+        a->freeRegs.putReg(address.base);
 
     RegisterID dataReg = allocReg();
     masm.loadPayload(address, dataReg);
 #endif
 
     pushRegs(typeReg, dataReg, knownType);
 }
 
 inline JSC::MacroAssembler::FPRegisterID
 FrameState::pushRegs(RegisterID type, RegisterID data, JSValueType knownType)
 {
-    JS_ASSERT(!freeRegs.hasReg(type) && !freeRegs.hasReg(data));
+    JS_ASSERT(!a->freeRegs.hasReg(type) && !a->freeRegs.hasReg(data));
 
     if (knownType == JSVAL_TYPE_UNKNOWN) {
         FrameEntry *fe = rawPush();
         fe->resetUnsynced();
         fe->type.setRegister(type);
         fe->data.setRegister(data);
         regstate(type).associate(fe, RematInfo::TYPE);
         regstate(data).associate(fe, RematInfo::DATA);
@@ -340,30 +353,30 @@ FrameState::pushRegs(RegisterID type, Re
     pushTypedPayload(knownType, data);
     return Registers::FPConversionTemp;
 }
 
 inline void
 FrameState::pushTypedPayload(JSValueType type, RegisterID payload)
 {
     JS_ASSERT(type != JSVAL_TYPE_DOUBLE);
-    JS_ASSERT(!freeRegs.hasReg(payload));
+    JS_ASSERT(!a->freeRegs.hasReg(payload));
 
     FrameEntry *fe = rawPush();
 
     fe->resetUnsynced();
     fe->setType(type);
     fe->data.setRegister(payload);
     regstate(payload).associate(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::pushNumber(RegisterID payload, bool asInt32)
 {
-    JS_ASSERT(!freeRegs.hasReg(payload));
+    JS_ASSERT(!a->freeRegs.hasReg(payload));
 
     FrameEntry *fe = rawPush();
     fe->clear();
 
     if (asInt32) {
         if (!fe->type.synced())
             masm.storeTypeTag(ImmType(JSVAL_TYPE_INT32), addressOf(fe));
         fe->type.setMemory();
@@ -398,17 +411,17 @@ FrameState::pushInitializerObject(Regist
     FrameEntry *fe = peek(-1);
     fe->initArray = array;
     fe->initObject = baseobj;
 }
 
 inline void
 FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
 {
-    JS_ASSERT(!freeRegs.hasReg(payload));
+    JS_ASSERT(!a->freeRegs.hasReg(payload));
 
     FrameEntry *fe = rawPush();
 
     fe->clear();
 
     masm.storeTypeTag(ImmType(type), addressOf(fe));
 
     /* The forceful type sync will assert otherwise. */
@@ -523,43 +536,56 @@ FrameState::tempFPRegForData(FrameEntry 
     if (fe->data.inFPRegister())
         return fe->data.fpreg();
 
     FPRegisterID reg = allocAndLoadReg(fe, true, RematInfo::DATA).fpreg();
     fe->data.setFPRegister(reg);
     return reg;
 }
 
-inline JSC::MacroAssembler::RegisterID
+inline AnyRegisterID
 FrameState::tempRegInMaskForData(FrameEntry *fe, uint32 mask)
 {
     JS_ASSERT(!fe->data.isConstant());
-    JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
-    JS_ASSERT(!(mask & ~Registers::AvailRegs));
+    JS_ASSERT_IF(fe->isType(JSVAL_TYPE_DOUBLE), !(mask & ~Registers::AvailFPRegs));
+    JS_ASSERT_IF(!fe->isType(JSVAL_TYPE_DOUBLE), !(mask & ~Registers::AvailRegs));
 
     if (fe->isCopy())
         fe = fe->copyOf();
 
-    RegisterID reg;
-    if (fe->data.inRegister()) {
-        RegisterID old = fe->data.reg();
+    AnyRegisterID reg;
+    if (fe->data.inRegister() || fe->data.inFPRegister()) {
+        AnyRegisterID old;
+        if (fe->data.inRegister())
+            old = fe->data.reg();
+        else
+            old = fe->data.fpreg();
         if (Registers::maskReg(old) & mask)
             return old;
 
         /* Keep the old register pinned. */
         regstate(old).forget();
-        reg = allocReg(mask).reg();
-        masm.move(old, reg);
+        reg = allocReg(mask);
+        if (reg.isReg())
+            masm.move(old.reg(), reg.reg());
+        else
+            masm.moveDouble(old.fpreg(), reg.fpreg());
         freeReg(old);
     } else {
-        reg = allocReg(mask).reg();
-        masm.loadPayload(addressOf(fe), reg);
+        reg = allocReg(mask);
+        if (reg.isReg())
+            masm.loadPayload(addressOf(fe), reg.reg());
+        else
+            masm.loadDouble(addressOf(fe), reg.fpreg());
     }
     regstate(reg).associate(fe, RematInfo::DATA);
-    fe->data.setRegister(reg);
+    if (reg.isReg())
+        fe->data.setRegister(reg.reg());
+    else
+        fe->data.setFPRegister(reg.fpreg());
     return reg;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const
 {
     JS_ASSERT(!fe->data.isConstant());
 
@@ -843,24 +869,24 @@ FrameState::forgetType(FrameEntry *fe)
     ensureTypeSynced(fe, masm);
     fe->type.setMemory();
 }
 
 inline types::TypeSet *
 FrameState::getTypeSet(FrameEntry *fe)
 {
     JS_ASSERT(fe >= spBase && fe < sp);
-    return typeSets[fe - spBase];
+    return a->typeSets[fe - spBase];
 }
 
 inline void
 FrameState::learnTypeSet(unsigned slot, types::TypeSet *types)
 {
     if (slot < unsigned(sp - spBase))
-        typeSets[slot] = types;
+        a->typeSets[slot] = types;
 }
 
 inline void
 FrameState::learnType(FrameEntry *fe, JSValueType type, bool unsync)
 {
     JS_ASSERT(!fe->isType(JSVAL_TYPE_DOUBLE));
     if (fe->type.inRegister())
         forgetReg(fe->type.reg());
@@ -883,30 +909,63 @@ FrameState::learnType(FrameEntry *fe, JS
 
     fe->data.setRegister(data);
     regstate(data).associate(fe, RematInfo::DATA);
 
     fe->data.unsync();
     fe->type.unsync();
 }
 
+inline int32
+FrameState::frameOffset(const FrameEntry *fe, ActiveFrame *a) const
+{
+    if (fe >= a->locals)
+        return JSStackFrame::offsetOfFixed(uint32(fe - a->locals));
+    if (fe >= a->args)
+        return JSStackFrame::offsetOfFormalArg(a->script->fun, uint32(fe - a->args));
+    if (fe == a->this_)
+        return JSStackFrame::offsetOfThis(a->script->fun);
+    if (fe == a->callee_)
+        return JSStackFrame::offsetOfCallee(a->script->fun);
+    JS_NOT_REACHED("Bad fe");
+    return 0;
+}
+
 inline JSC::MacroAssembler::Address
-FrameState::addressOf(const FrameEntry *fe) const
+FrameState::addressOf(const FrameEntry *fe, ActiveFrame *a) const
 {
-    int32 frameOffset = 0;
-    if (fe >= locals)
-        frameOffset = JSStackFrame::offsetOfFixed(uint32(fe - locals));
-    else if (fe >= args)
-        frameOffset = JSStackFrame::offsetOfFormalArg(fun, uint32(fe - args));
-    else if (fe == this_)
-        frameOffset = JSStackFrame::offsetOfThis(fun);
-    else if (fe == callee_)
-        frameOffset = JSStackFrame::offsetOfCallee(fun);
-    JS_ASSERT(frameOffset);
-    return Address(JSFrameReg, frameOffset);
+    if (fe->inlined) {
+        /*
+         * For arguments/this to inlined frames, we should only be using the
+         * backing store in the parent. The value in the argument/this slot may
+         * not be synced (even if it is marked as synced). This inlined address
+         * will only be used for loads (arguments can't yet be mutated by
+         * inlined calls), and the caller must ensure the parent's entry is
+         * definitely synced.
+         */
+        JS_ASSERT(a->parent);
+        const FrameEntry *parentFE;
+        if (fe == callee_)
+            parentFE = a->parentSP - (a->parentArgc + 2);
+        else if (fe == this_)
+            parentFE = a->parentSP - (a->parentArgc + 1);
+        else
+            parentFE = a->parentSP - (a->parentArgc - (fe - a->args));
+
+        return addressOf(parentFE->backing(), a->parent);
+    }
+
+    int32 offset = frameOffset(fe, a);
+    return Address(JSFrameReg, offset + (a->depth * sizeof(Value)));
+}
+
+inline JSC::MacroAssembler::Address
+FrameState::addressForInlineReturn() const
+{
+    return addressOf(callee_);
 }
 
 inline JSC::MacroAssembler::Address
 FrameState::addressForDataRemat(const FrameEntry *fe) const
 {
     if (fe->isCopy() && !fe->data.synced())
         fe = fe->copyOf();
     JS_ASSERT(fe->data.synced());
@@ -1008,44 +1067,44 @@ FrameState::getLocal(uint32 slot)
 {
     JS_ASSERT(slot < script->nslots);
     return getOrTrack(uint32(&locals[slot] - entries));
 }
 
 inline FrameEntry *
 FrameState::getArg(uint32 slot)
 {
-    JS_ASSERT(slot < nargs);
+    JS_ASSERT(script->fun && slot < script->fun->nargs);
     return getOrTrack(uint32(&args[slot] - entries));
 }
 
 inline FrameEntry *
 FrameState::getThis()
 {
     return getOrTrack(uint32(this_ - entries));
 }
 
 inline FrameEntry *
 FrameState::getCallee()
 {
     // Callee can only be used in function code, and it's always an object.
-    JS_ASSERT(fun);
+    JS_ASSERT(script->fun);
     if (!callee_->isTracked()) {
         addToTracker(callee_);
         callee_->resetSynced();
         callee_->setType(JSVAL_TYPE_OBJECT);
     }
     return callee_;
 }
 
 inline void
 FrameState::unpinKilledReg(RegisterID reg)
 {
     regstate(reg).unpinUnsafe();
-    freeRegs.putReg(reg);
+    a->freeRegs.putReg(reg);
 }
 
 inline void
 FrameState::forgetAllRegs(FrameEntry *fe)
 {
     if (fe->type.inRegister())
         forgetReg(fe->type.reg());
     if (fe->data.inRegister())
@@ -1054,20 +1113,20 @@ FrameState::forgetAllRegs(FrameEntry *fe
         forgetReg(fe->data.fpreg());
 }
 
 inline void
 FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs)
 {
     uint32 li = lhs->trackerIndex();
     uint32 ri = rhs->trackerIndex();
-    JS_ASSERT(tracker[li] == lhs);
-    JS_ASSERT(tracker[ri] == rhs);
-    tracker.entries[ri] = lhs;
-    tracker.entries[li] = rhs;
+    JS_ASSERT(a->tracker[li] == lhs);
+    JS_ASSERT(a->tracker[ri] == rhs);
+    a->tracker.entries[ri] = lhs;
+    a->tracker.entries[li] = rhs;
     lhs->index_ = ri;
     rhs->index_ = li;
 }
 
 inline void
 FrameState::dup()
 {
     dupAt(-1);
@@ -1097,17 +1156,17 @@ FrameState::syncAt(int32 n)
     FrameEntry *fe = peek(n);
     syncFe(fe);
 }
 
 inline void
 FrameState::pushLocal(uint32 n, JSValueType knownType)
 {
     FrameEntry *fe = getLocal(n);
-    if (!analysis->localEscapes(n)) {
+    if (!a->analysis->localEscapes(n)) {
         pushCopyOf(indexOfFe(fe));
     } else {
 #ifdef DEBUG
         /*
          * We really want to assert on local variables, but in the presence of
          * SETLOCAL equivocation of stack slots, and let expressions, just
          * weakly assert on the fixed local vars.
          */
@@ -1120,17 +1179,17 @@ FrameState::pushLocal(uint32 n, JSValueT
         push(addressOf(fe), knownType);
     }
 }
 
 inline void
 FrameState::pushArg(uint32 n, JSValueType knownType)
 {
     FrameEntry *fe = getArg(n);
-    if (!analysis->argEscapes(n)) {
+    if (!a->analysis->argEscapes(n)) {
         pushCopyOf(indexOfFe(fe));
     } else {
 #ifdef DEBUG
         FrameEntry *fe = &args[n];
         if (fe->isTracked())
             JS_ASSERT(fe->data.inMemory());
 #endif
         push(addressOf(fe), knownType);
@@ -1165,17 +1224,17 @@ FrameState::leaveBlock(uint32 n)
 {
     popn(n);
 }
 
 inline void
 FrameState::enterBlock(uint32 n)
 {
     /* expect that tracker has 0 entries, for now. */
-    JS_ASSERT(!tracker.nentries);
+    JS_ASSERT(!a->tracker.nentries);
     JS_ASSERT(uint32(sp + n - locals) <= script->nslots);
 
     sp += n;
 }
 
 inline void
 FrameState::eviscerate(FrameEntry *fe)
 {
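
The addressOf() change in the hunks above is central to inlining: entries of an inline frame live at a fixed displacement (a->depth * sizeof(Value)) above the outer frame, except for arguments/this/callee, which are marked `inlined` and redirected to their backing slots in the parent. A simplified model of that recursion, with invented layout and names:

    #include <cstdio>

    struct InlineFrameModel {
        const InlineFrameModel *parent; // null for the outermost frame
        int depth;                      // slot offset of this frame in the outer one
    };

    // Assumed shape of the addressing rule: an inlined argument reads the
    // parent's slot; everything else is frame-relative plus the inline depth.
    static int slotAddress(const InlineFrameModel *f, int frameOffset, bool inlinedArg,
                           int parentSlotOffset) {
        if (inlinedArg && f->parent)
            return slotAddress(f->parent, parentSlotOffset, false, 0);
        return frameOffset + f->depth;
    }

    int main() {
        InlineFrameModel outer = { nullptr, 0 };
        InlineFrameModel inner = { &outer, 9 };           // 9 slots above the outer frame
        printf("%d\n", slotAddress(&inner, 2, false, 0)); // local: 2 + 9 = 11
        printf("%d\n", slotAddress(&inner, 0, true, 7));  // arg: parent slot 7
        return 0;
    }
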
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -43,87 +43,349 @@
 
 using namespace js;
 using namespace js::mjit;
 using namespace js::analyze;
 
 /* Because of Value alignment */
 JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
 
-FrameState::FrameState(JSContext *cx, JSScript *script, JSFunction *fun,
-                       mjit::Compiler &cc, Assembler &masm, StubCompiler &stubcc,
-                       LifetimeScript &liveness)
-  : cx(cx), script(script), fun(fun),
-    nargs(fun ? fun->nargs : 0),
-    masm(masm), stubcc(stubcc), freeRegs(Registers::AvailAnyRegs), entries(NULL),
+FrameState::FrameState(JSContext *cx, mjit::Compiler &cc,
+                       Assembler &masm, StubCompiler &stubcc)
+  : cx(cx),
+    masm(masm), stubcc(stubcc),
+    a(NULL), script(NULL), entries(NULL),
+    callee_(NULL), this_(NULL), args(NULL), locals(NULL),
+    spBase(NULL), sp(NULL), PC(NULL),
     activeLoop(NULL), loopRegs(0),
     loopJoins(CompilerAllocPolicy(cx, cc)),
     loopPatches(CompilerAllocPolicy(cx, cc)),
-    analysis(NULL), liveness(liveness),
-#if defined JS_NUNBOX32
-    reifier(cx, *thisFromCtor()),
-#endif
     inTryBlock(false)
 {
 }
 
 FrameState::~FrameState()
 {
-    cx->free(entries);
+    while (a) {
+        ActiveFrame *parent = a->parent;
+#if defined JS_NUNBOX32
+        a->reifier.~ImmutableSync();
+#endif
+        cx->free(a);
+        a = parent;
+    }
+}
+
+void
+FrameState::getUnsyncedEntries(uint32 *pdepth, Vector<UnsyncedEntry> *unsyncedEntries)
+{
+    *pdepth = totalDepth() + VALUES_PER_STACK_FRAME;
+
+    /* Mark all unsynced entries in the frame. */
+    for (uint32 i = 0; i < a->tracker.nentries; i++) {
+        FrameEntry *fe = a->tracker[i];
+        if (fe->type.synced() && fe->data.synced())
+            continue;
+        if (fe->inlined)
+            continue;
+
+        UnsyncedEntry entry;
+        PodZero(&entry);
+
+        entry.offset = frameOffset(fe, a) + (a->depth * sizeof(Value));
+
+        if (fe->isCopy()) {
+            FrameEntry *nfe = fe->copyOf();
+            entry.copy = true;
+            entry.u.copiedOffset = frameOffset(nfe, a) + (a->depth * sizeof(Value));
+        } else if (fe->isConstant()) {
+            entry.constant = true;
+            entry.u.value = fe->getValue();
+        } else if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE) && !fe->type.synced()) {
+            entry.knownType = true;
+            entry.u.type = fe->getKnownType();
+        } else {
+            /*
+             * All the unsynced portions of this entry are in registers. When
+             * making a call from within an inline frame, these will be synced
+             * beforehand.
+             */
+            continue;
+        }
+
+        unsyncedEntries->append(entry);
+    }
 }
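
getUnsyncedEntries above records, for each unsynced slot, just enough to rematerialize it from the outer frame: a copy's backing offset, a constant's value, or a bare known type; everything else stays in registers and is synced explicitly before any call. A sketch of the classification step, with the struct and flags inferred from the uses in this patch:

    enum Kind { SYNCED, COPY, CONSTANT, KNOWN_TYPE, IN_REGISTERS };

    struct EntryModel {
        bool synced;
        bool isCopy;
        bool isConstant;
        bool typeKnown;
        bool isDouble;
        bool typeSynced;
    };

    // Mirrors the ladder in getUnsyncedEntries: fully synced and in-register
    // entries produce no record; the rest are classified for later syncing.
    static Kind classify(const EntryModel &fe) {
        if (fe.synced)
            return SYNCED;
        if (fe.isCopy)
            return COPY;
        if (fe.isConstant)
            return CONSTANT;
        if (fe.typeKnown && !fe.isDouble && !fe.typeSynced)
            return KNOWN_TYPE;
        return IN_REGISTERS;
    }

    int main() {
        EntryModel fe = { true, false, false, false, false, false };
        return classify(fe); // SYNCED
    }
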
 
 bool
-FrameState::init()
+FrameState::pushActiveFrame(JSScript *script, uint32 argc,
+                            analyze::Script *analysis, analyze::LifetimeScript *liveness)
 {
+    uint32 depth = a ? totalDepth() : 0;
+
     // nslots + nargs + 2 (callee, this)
-    uint32 nentries = feLimit();
-    if (!nentries) {
-        sp = spBase = locals = args = NULL;
-        return true;
-    }
-
-    size_t totalBytes = sizeof(FrameEntry) * nentries +                     // entries[], w/ callee+this
-                        sizeof(FrameEntry *) * nentries +                   // tracker.entries
-                        sizeof(types::TypeSet *) * script->nslots;          // typeSets
+    uint32 nentries = feLimit(script);
+
+    size_t totalBytes = sizeof(ActiveFrame) +
+                        sizeof(FrameEntry) * nentries +              // entries[]
+                        sizeof(FrameEntry *) * nentries +            // tracker.entries
+                        sizeof(types::TypeSet *) * script->nslots;   // typeSets
 
     uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
     if (!cursor)
         return false;
 
+    ActiveFrame *newa = (ActiveFrame *) cursor;
+    cursor += sizeof(ActiveFrame);
+
 #if defined JS_NUNBOX32
-    if (!reifier.init(nentries))
+    if (!newa->reifier.init(cx, *this, nentries)) {
+        cx->free(newa);
         return false;
+    }
 #endif
 
-    entries = (FrameEntry *)cursor;
+    newa->parent = a;
+    newa->parentPC = PC;
+    newa->parentSP = sp;
+    newa->parentArgc = argc;
+    newa->script = script;
+    newa->freeRegs = Registers(Registers::AvailAnyRegs);
+
+    newa->analysis = analysis;
+    newa->liveness = liveness;
+
+    newa->entries = (FrameEntry *)cursor;
     cursor += sizeof(FrameEntry) * nentries;
 
-    callee_ = entries;
-    this_ = entries + 1;
-    args = entries + 2;
-    locals = args + nargs;
-    spBase = locals + script->nfixed;
-    sp = spBase;
-
-    tracker.entries = (FrameEntry **)cursor;
+    newa->callee_ = newa->entries;
+    newa->this_ = newa->entries + 1;
+    newa->args = newa->entries + 2;
+    newa->locals = newa->args + (script->fun ? script->fun->nargs : 0);
+
+    newa->tracker.entries = (FrameEntry **)cursor;
     cursor += sizeof(FrameEntry *) * nentries;
 
-    typeSets = (types::TypeSet **)cursor;
+    newa->typeSets = (types::TypeSet **)cursor;
     cursor += sizeof(types::TypeSet *) * script->nslots;
 
-    JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
+    JS_ASSERT(reinterpret_cast<uint8 *>(newa) + totalBytes == cursor);
+
+    this->a = newa;
+    updateActiveFrame();
+
+    if (a->parent && a->analysis->inlineable(argc)) {
+        a->depth = depth + VALUES_PER_STACK_FRAME;
+
+        /* Mark all registers which are in use by the parent or its own parent. */
+        a->parentRegs = 0;
+        Registers regs(Registers::AvailAnyRegs);
+        while (!regs.empty()) {
+            AnyRegisterID reg = regs.takeAnyReg();
+            if (a->parent->parentRegs.hasReg(reg) || !a->parent->freeRegs.hasReg(reg))
+                a->parentRegs.putReg(reg);
+        }
+
+        JS_ASSERT(argc == script->fun->nargs);
+
+        syncInlinedEntry(getCallee(), a->parentSP - (argc + 2));
+        syncInlinedEntry(getThis(), a->parentSP - (argc + 1));
+        for (unsigned i = 0; i < argc; i++)
+            syncInlinedEntry(getArg(i), a->parentSP - (argc - i));
+    }
 
     return true;
 }
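
The loop at the end of pushActiveFrame computes parentRegs: a register belongs to an ancestor if the parent is holding it (not free) or inherited it from its own parent. On a bitmask representation that whole loop collapses to two mask operations, as in this invented sketch:

    #include <cstdint>
    #include <cstdio>

    typedef uint32_t RegMask; // one bit per register; invented encoding

    static RegMask computeParentRegs(RegMask availRegs, RegMask parentFree,
                                     RegMask parentParentRegs) {
        // In use by an ancestor: marked by the grandparent chain, or simply
        // not free in the parent.
        return availRegs & (parentParentRegs | ~parentFree);
    }

    int main() {
        RegMask avail = 0xff;       // eight registers
        RegMask parentFree = 0x0f;  // parent holds the top four
        RegMask grandparent = 0x01; // one register pinned further up
        printf("%#x\n", computeParentRegs(avail, parentFree, grandparent)); // 0xf1
        return 0;
    }
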
 
 void
+FrameState::syncInlinedEntry(FrameEntry *fe, const FrameEntry *parent)
+{
+    /*
+     * Fill in the initial state of an entry in this inlined frame that
+     * corresponds to an entry in the caller's frame.
+     */
+
+    /*
+     * Make sure the initial sync state of the inlined entries matches the
+     * parent. These inlined entries will never unsync (since they are never
+     * modified) and will be marked as synced as necessary. Note that this
+     * follows any copies in the parent to get the eventual backing of the
+     * argument --- the slot we compute using getAddress. Syncing of the
+     * argument slots themselves is handled by the parent's unsyncedSlots.
+     */
+    JS_ASSERT(fe->type.synced() && fe->data.synced());
+    parent = parent->backing();
+    if (!parent->type.synced())
+        fe->type.unsync();
+    if (!parent->data.synced())
+        fe->data.unsync();
+
+    fe->inlined = true;
+
+    if (parent->isConstant()) {
+        fe->setConstant(Jsvalify(parent->getValue()));
+        return;
+    }
+
+    if (parent->isCopy())
+        parent = parent->copyOf();
+
+    if (parent->isTypeKnown())
+        fe->setType(parent->getKnownType());
+
+    if (parent->type.inRegister())
+        associateReg(fe, RematInfo::TYPE, parent->type.reg());
+    if (parent->data.inRegister())
+        associateReg(fe, RematInfo::DATA, parent->data.reg());
+    if (parent->data.inFPRegister())
+        associateReg(fe, RematInfo::DATA, parent->data.fpreg());
+}
+
+void
+FrameState::associateReg(FrameEntry *fe, RematInfo::RematType type, AnyRegisterID reg)
+{
+    /* :XXX: handle args/this copying each other. */
+    a->freeRegs.takeReg(reg);
+
+    if (type == RematInfo::TYPE)
+        fe->type.setRegister(reg.reg());
+    else if (reg.isReg())
+        fe->data.setRegister(reg.reg());
+    else
+        fe->data.setFPRegister(reg.fpreg());
+    regstate(reg).associate(fe, type);
+}
+
+void
+FrameState::popActiveFrame()
+{
+    jsbytecode *parentPC = a->parentPC;
+    FrameEntry *parentSP = a->parentSP;
+    ActiveFrame *parent = a->parent;
+
+#if defined JS_NUNBOX32
+    a->reifier.~ImmutableSync();
+#endif
+    cx->free(a);
+
+    a = parent;
+    updateActiveFrame();
+    PC = parentPC;
+    sp = parentSP;
+}
+
+void
+FrameState::updateActiveFrame()
+{
+    script = a->script;
+    entries = a->entries;
+    callee_ = a->callee_;
+    this_ = a->this_;
+    args = a->args;
+    locals = a->locals;
+    spBase = locals + script->nfixed;
+    sp = spBase;
+}
+
+void
+FrameState::discardLocalRegisters()
+{
+    /* Discard all local registers, without syncing. Must be followed by a discardFrame. */
+    a->freeRegs = Registers::AvailAnyRegs;
+}
+
+void
+FrameState::evictInlineModifiedRegisters(Registers regs)
+{
+    a->parentRegs.freeMask &= ~regs.freeMask;
+
+    while (!regs.empty()) {
+        AnyRegisterID reg = regs.takeAnyReg();
+        if (a->freeRegs.hasReg(reg))
+            continue;
+
+        FrameEntry *fe = regstate(reg).fe();
+        JS_ASSERT(fe);
+        if (regstate(reg).type() == RematInfo::TYPE) {
+            if (!fe->type.synced())
+                fe->type.sync();
+            fe->type.setMemory();
+        } else {
+            if (!fe->data.synced())
+                fe->data.sync();
+            if (fe->isType(JSVAL_TYPE_DOUBLE) && !fe->type.synced())
+                fe->type.sync();
+            fe->data.setMemory();
+        }
+
+        regstate(reg).forget();
+        a->freeRegs.putReg(reg);
+    }
+}
+
+void
+FrameState::tryCopyRegister(FrameEntry *fe, FrameEntry *callStart)
+{
+    JS_ASSERT(!fe->isCopied() || !isEntryCopied(fe));
+
+    if (!fe->isCopy())
+        return;
+
+    /*
+     * Uncopy the entry if it shares a backing with any other entry used
+     * in the impending call. We want to ensure that within inline calls each
+     * entry has its own set of registers.
+     */
+
+    FrameEntry *uncopyfe = NULL;
+    for (FrameEntry *nfe = callStart; !uncopyfe && nfe < fe; nfe++) {
+        if (!nfe->isTracked())
+            continue;
+        if (nfe->backing() == fe->copyOf())
+            uncopyfe = nfe;
+    }
+
+    if (uncopyfe) {
+        JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
+        if (type == JSVAL_TYPE_UNKNOWN)
+            syncType(fe);
+        fe->resetUnsynced();
+        if (type == JSVAL_TYPE_UNKNOWN) {
+            fe->type.sync();
+            fe->type.setMemory();
+        } else {
+            fe->setType(type);
+        }
+        if (type == JSVAL_TYPE_DOUBLE) {
+            FPRegisterID fpreg = allocFPReg();
+            masm.moveDouble(tempFPRegForData(uncopyfe), fpreg);
+            fe->data.setFPRegister(fpreg);
+            regstate(fpreg).associate(fe, RematInfo::DATA);
+        } else {
+            RegisterID reg = allocReg();
+            masm.move(tempRegForData(uncopyfe), reg);
+            fe->data.setRegister(reg);
+            regstate(reg).associate(fe, RematInfo::DATA);
+        }
+    } else {
+        /* Try to put the entry in a register. */
+        fe = fe->copyOf();
+        if (fe->isType(JSVAL_TYPE_DOUBLE))
+            tempFPRegForData(fe);
+        else
+            tempRegForData(fe);
+    }
+}
+
+void
 FrameState::takeReg(AnyRegisterID reg)
 {
-    if (freeRegs.hasReg(reg)) {
-        freeRegs.takeReg(reg);
+    modifyReg(reg);
+    if (a->freeRegs.hasReg(reg)) {
+        a->freeRegs.takeReg(reg);
         clearLoopReg(reg);
         JS_ASSERT(!regstate(reg).usedBy());
     } else {
         JS_ASSERT(regstate(reg).fe());
         evictReg(reg);
         regstate(reg).forget();
     }
 }
@@ -167,42 +429,42 @@ FrameState::evictReg(AnyRegisterID reg)
     }
 }
 
 inline Lifetime *
 FrameState::variableLive(FrameEntry *fe, jsbytecode *pc) const
 {
     uint32 offset = pc - script->code;
     if (fe == this_)
-        return liveness.thisLive(offset);
+        return a->liveness->thisLive(offset);
     if (isArg(fe)) {
-        JS_ASSERT(!analysis->argEscapes(fe - args));
-        return liveness.argLive(fe - args, offset);
+        JS_ASSERT(!a->analysis->argEscapes(fe - args));
+        return a->liveness->argLive(fe - args, offset);
     }
     if (isLocal(fe)) {
-        JS_ASSERT(!analysis->localEscapes(fe - locals));
-        return liveness.localLive(fe - locals, offset);
+        JS_ASSERT(!a->analysis->localEscapes(fe - locals));
+        return a->liveness->localLive(fe - locals, offset);
     }
 
     /* Liveness not computed for stack and callee entries. */
     JS_NOT_REACHED("Stack/callee entry");
     return NULL;
 }
 
 bool
 FrameState::isEntryCopied(FrameEntry *fe) const
 {
     /*
      * :TODO: It would be better for fe->isCopied() to mean 'is actually copied'
      * rather than 'might have copies', removing the need for this walk.
      */
     JS_ASSERT(fe->isCopied());
 
-    for (uint32 i = fe->trackerIndex() + 1; i < tracker.nentries; i++) {
-        FrameEntry *nfe = tracker[i];
+    for (uint32 i = fe->trackerIndex() + 1; i < a->tracker.nentries; i++) {
+        FrameEntry *nfe = a->tracker[i];
         if (nfe < sp && nfe->isCopy() && nfe->copyOf() == fe)
             return true;
     }
 
     return false;
 }
 
 AnyRegisterID
@@ -325,28 +587,28 @@ FrameState::evictSomeReg(uint32 mask)
     AnyRegisterID reg = bestEvictReg(mask, false);
     evictReg(reg);
     return reg;
 }
 
 void
 FrameState::resetInternalState()
 {
-    for (uint32 i = 0; i < tracker.nentries; i++)
-        tracker[i]->untrack();
-
-    tracker.reset();
-    freeRegs = Registers(Registers::AvailAnyRegs);
+    for (uint32 i = 0; i < a->tracker.nentries; i++)
+        a->tracker[i]->untrack();
+
+    a->tracker.reset();
+    a->freeRegs = Registers(Registers::AvailAnyRegs);
 }
 
 void
 FrameState::discardFrame()
 {
     resetInternalState();
-    PodArrayZero(regstate_);
+    PodArrayZero(a->regstate_);
 }
 
 void
 FrameState::forgetEverything()
 {
     resetInternalState();
 
 #ifdef DEBUG
@@ -387,20 +649,20 @@ FrameState::pushLoop(jsbytecode *head, J
         return false;
 
     loop->outer = activeLoop;
     loop->head = head;
     loop->entry = entry;
     loop->entryTarget = entryTarget;
     activeLoop = loop;
 
-    RegisterAllocation *&alloc = liveness.getCode(head).allocation;
+    RegisterAllocation *&alloc = a->liveness->getCode(head).allocation;
     JS_ASSERT(!alloc);
 
-    alloc = ArenaNew<RegisterAllocation>(liveness.pool, true);
+    alloc = ArenaNew<RegisterAllocation>(a->liveness->pool, true);
     if (!alloc)
         return false;
 
     loop->alloc = alloc;
     loopRegs = Registers::AvailAnyRegs;
     return true;
 }
 
@@ -463,58 +725,71 @@ FrameState::setLoopReg(AnyRegisterID reg
     if (activeLoop->entryTarget &&
         activeLoop->entryTarget != activeLoop->head &&
         PC >= activeLoop->entryTarget) {
         /*
          * We've advanced past the entry point of the loop (we're analyzing the condition),
          * so need to update the register state at that entry point so that the right
          * things get loaded when we enter the loop.
          */
-        RegisterAllocation *entry = liveness.getCode(activeLoop->entryTarget).allocation;
+        RegisterAllocation *entry = a->liveness->getCode(activeLoop->entryTarget).allocation;
         JS_ASSERT(entry && !entry->assigned(reg));
         entry->set(reg, slot, true);
     }
 }
 
 #ifdef DEBUG
 void
 FrameState::dumpAllocation(RegisterAllocation *alloc)
 {
     for (unsigned i = 0; i < Registers::TotalAnyRegisters; i++) {
         AnyRegisterID reg = AnyRegisterID::fromRaw(i);
         if (alloc->assigned(reg)) {
             printf(" (%s: %s%s)", reg.name(), entryName(entries + alloc->slot(reg)),
                    alloc->synced(reg) ? "" : " unsynced");
         }
     }
+    Registers regs = alloc->getParentRegs();
+    while (!regs.empty()) {
+        AnyRegisterID reg = regs.takeAnyReg();
+        printf(" (%s: parent)", reg.name());
+    }
     printf("\n");
 }
 #endif
 
 RegisterAllocation *
 FrameState::computeAllocation(jsbytecode *target)
 {
-    RegisterAllocation *alloc = ArenaNew<RegisterAllocation>(liveness.pool, false);
+    RegisterAllocation *alloc = ArenaNew<RegisterAllocation>(a->liveness->pool, false);
     if (!alloc)
         return NULL;
 
-    if (analysis->getCode(target).exceptionEntry || analysis->getCode(target).switchTarget ||
+    if (a->analysis->getCode(target).exceptionEntry || a->analysis->getCode(target).switchTarget ||
         JSOp(*target) == JSOP_TRAP) {
         /* State must be synced at exception and switch targets, and at traps. */
+#ifdef DEBUG
+        if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
+            JaegerSpew(JSpew_Regalloc, "allocation at %u:", unsigned(target - script->code));
+            dumpAllocation(alloc);
+        }
+#endif
         return alloc;
     }
 
+    alloc->setParentRegs(a->parentRegs);
+
     /*
      * The allocation to use at the target consists of all variables currently
      * in registers which are live at the target.
      */
     Registers regs = Registers::AvailRegs;
     while (!regs.empty()) {
         AnyRegisterID reg = regs.takeAnyReg();
-        if (freeRegs.hasReg(reg) || regstate(reg).type() == RematInfo::TYPE)
+        if (a->freeRegs.hasReg(reg) || regstate(reg).type() == RematInfo::TYPE)
             continue;
         FrameEntry *fe = regstate(reg).fe();
         if (fe == callee_ || fe >= spBase || !variableLive(fe, target))
             continue;
         alloc->set(reg, indexOfFe(fe), fe->data.synced());
     }
 
 #ifdef DEBUG
@@ -532,70 +807,72 @@ FrameState::relocateReg(AnyRegisterID re
 {
     /*
      * The reg needs to be freed to make room for a variable carried across
      * a branch. Either evict its entry, or try to move it to a different
      * register if it is needed to test the branch condition. :XXX: could also
      * watch for variables which are carried across the branch but are in the
      * register for a different carried entry; we just spill these for now.
      */
-    JS_ASSERT(alloc->assigned(reg) && !freeRegs.hasReg(reg));
+    JS_ASSERT(alloc->assigned(reg) && !a->freeRegs.hasReg(reg));
 
     for (unsigned i = 0; i < uses.nuses; i++) {
         FrameEntry *fe = peek(-1 - i);
         if (fe->isCopy())
             fe = fe->copyOf();
         if (reg.isReg() && fe->data.inRegister() && fe->data.reg() == reg.reg()) {
             pinReg(reg);
             RegisterID nreg = allocReg();
             unpinReg(reg);
 
             JaegerSpew(JSpew_Regalloc, "relocating %s\n", reg.name());
 
             masm.move(reg.reg(), nreg);
             regstate(reg).forget();
             regstate(nreg).associate(fe, RematInfo::DATA);
             fe->data.setRegister(nreg);
-            freeRegs.putReg(reg);
+            a->freeRegs.putReg(reg);
             return;
         }
     }
 
     JaegerSpew(JSpew_Regalloc, "could not relocate %s\n", reg.name());
 
     takeReg(reg);
-    freeRegs.putReg(reg);
+    a->freeRegs.putReg(reg);
 }
 
 bool
 FrameState::syncForBranch(jsbytecode *target, Uses uses)
 {
     /* There should be no unowned or pinned registers. */
 #ifdef DEBUG
     Registers checkRegs(Registers::AvailAnyRegs);
     while (!checkRegs.empty()) {
         AnyRegisterID reg = checkRegs.takeAnyReg();
-        JS_ASSERT_IF(!freeRegs.hasReg(reg), regstate(reg).fe());
+        JS_ASSERT_IF(!a->freeRegs.hasReg(reg), regstate(reg).fe());
     }
 #endif
 
-    RegisterAllocation *&alloc = liveness.getCode(target).allocation;
+    RegisterAllocation *&alloc = a->liveness->getCode(target).allocation;
     if (!alloc) {
         alloc = computeAllocation(target);
         if (!alloc)
             return false;
     }
 
     /*
      * First pass. Sync all entries which will not be carried in a register,
      * and uncopy everything except values used in the branch.
      */
 
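     /*
      * Annotation: this loop visits the tracker back to front; it terminates
      * because 'i' is unsigned, so decrementing past zero wraps around and
      * fails the 'i < nentries' test.
      */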
-    for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
-        FrameEntry *fe = tracker[i];
+    for (uint32 i = a->tracker.nentries - 1; i < a->tracker.nentries; i--) {
+        FrameEntry *fe = a->tracker[i];
 
         if (fe >= sp - uses.nuses) {
             /* No need to sync, this will get popped before branching. */
             continue;
         }
 
         unsigned index = indexOfFe(fe);
         if (!fe->isCopy() && alloc->hasAnyReg(index)) {
@@ -604,119 +881,121 @@ FrameState::syncForBranch(jsbytecode *ta
                 syncType(fe);
         } else {
             syncFe(fe);
             if (fe->isCopy())
                 fe->resetSynced();
         }
     }
 
+    syncParentRegistersInMask(masm, a->parentRegs.freeMask & ~alloc->getParentRegs().freeMask, true);
+
     /*
      * Second pass. Move entries carried in registers to the right register
      * provided no value used in the branch is evicted. After this pass,
      * everything will either be in the right register or will be in memory.
      */
 
     Registers regs(Registers::AvailAnyRegs);
     while (!regs.empty()) {
         AnyRegisterID reg = regs.takeAnyReg();
         if (!alloc->assigned(reg))
             continue;
         FrameEntry *fe = getOrTrack(alloc->slot(reg));
         JS_ASSERT(!fe->isCopy());
 
         JS_ASSERT_IF(!fe->isType(JSVAL_TYPE_DOUBLE), fe->type.synced());
         if (!fe->data.synced() && alloc->synced(reg))
             syncFe(fe);
 
         if (fe->dataInRegister(reg))
             continue;
 
-        if (!freeRegs.hasReg(reg))
+        if (!a->freeRegs.hasReg(reg))
             relocateReg(reg, alloc, uses);
 
         /*
          * It is possible that the fe is known to be a double currently but is not
          * known to be a double at the join point (it may have non-double values
          * assigned elsewhere in the script). It is *not* possible for the fe to
          * be a non-double currently but a double at the join point --- the Compiler
          * must have called fixDoubleTypes before branching.
          */
+        if (reg.isReg() && fe->isType(JSVAL_TYPE_DOUBLE)) {
+            syncFe(fe);
+            forgetAllRegs(fe);
+            fe->resetSynced();
+        }
+        JS_ASSERT_IF(!reg.isReg(), fe->isType(JSVAL_TYPE_DOUBLE));
 
         if (reg.isReg()) {
-            if (fe->isType(JSVAL_TYPE_DOUBLE)) {
-                syncFe(fe);
-                forgetAllRegs(fe);
-                fe->resetSynced();
-            }
-
             RegisterID nreg = reg.reg();
             if (fe->data.inMemory()) {
                 masm.loadPayload(addressOf(fe), nreg);
             } else if (fe->isConstant()) {
                 masm.loadValuePayload(fe->getValue(), nreg);
             } else {
                 JS_ASSERT(fe->data.inRegister() && fe->data.reg() != nreg);
                 masm.move(fe->data.reg(), nreg);
-                freeRegs.putReg(fe->data.reg());
+                a->freeRegs.putReg(fe->data.reg());
                 regstate(fe->data.reg()).forget();
             }
-
             fe->data.setRegister(nreg);
         } else {
-            JS_ASSERT(fe->isType(JSVAL_TYPE_DOUBLE));
-
             FPRegisterID nreg = reg.fpreg();
             if (fe->data.inMemory()) {
                 masm.loadDouble(addressOf(fe), nreg);
             } else if (fe->isConstant()) {
                 masm.slowLoadConstantDouble(fe->getValue().toDouble(), nreg);
             } else {
                 JS_ASSERT(fe->data.inFPRegister() && fe->data.fpreg() != nreg);
                 masm.moveDouble(fe->data.fpreg(), nreg);
-                freeRegs.putReg(fe->data.fpreg());
+                a->freeRegs.putReg(fe->data.fpreg());
                 regstate(fe->data.fpreg()).forget();
             }
-
             fe->data.setFPRegister(nreg);
         }
 
-        freeRegs.takeReg(reg);
+        a->freeRegs.takeReg(reg);
         regstate(reg).associate(fe, RematInfo::DATA);
     }
 
+    restoreParentRegistersInMask(masm, alloc->getParentRegs().freeMask & ~a->parentRegs.freeMask, true);
+
     return true;
 }
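 
 /*
  * Annotation: the two parent-register walks in syncForBranch are duals.
  * On entry we sync exactly the parent registers the target does not expect
  * to still be live:
  *
  *     a->parentRegs.freeMask & ~alloc->getParentRegs().freeMask
  *
  * and after shuffling we reload the parent registers the target expects
  * but which this frame no longer holds:
  *
  *     alloc->getParentRegs().freeMask & ~a->parentRegs.freeMask
  */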
 
 bool
 FrameState::discardForJoin(jsbytecode *target, uint32 stackDepth)
 {
-    RegisterAllocation *&alloc = liveness.getCode(target).allocation;
+    RegisterAllocation *&alloc = a->liveness->getCode(target).allocation;
 
     if (!alloc) {
         /*
          * This shows up for loop entries which are not reachable from the
          * loop head, and for exception, switch target and trap safe points.
          */
-        alloc = ArenaNew<RegisterAllocation>(liveness.pool, false);
+        alloc = ArenaNew<RegisterAllocation>(a->liveness->pool, false);
         if (!alloc)
             return false;
     }
 
     resetInternalState();
-    PodArrayZero(regstate_);
+    PodArrayZero(a->regstate_);
+
+    a->parentRegs = alloc->getParentRegs();
 
     Registers regs(Registers::AvailAnyRegs);
     while (!regs.empty()) {
         AnyRegisterID reg = regs.takeAnyReg();
         if (!alloc->assigned(reg))
             continue;
         FrameEntry *fe = getOrTrack(alloc->slot(reg));
 
-        freeRegs.takeReg(reg);
+        a->freeRegs.takeReg(reg);
 
         /*
          * We can't look at the type of the fe as we haven't restored analysis types yet,
          * but if this is an FP reg it will be set to double type.
          */
         if (reg.isReg()) {
             fe->data.setRegister(reg.reg());
         } else {
@@ -726,89 +1005,101 @@ FrameState::discardForJoin(jsbytecode *t
 
         regstate(reg).associate(fe, RematInfo::DATA);
         if (!alloc->synced(reg))
             fe->data.unsync();
     }
 
     sp = spBase + stackDepth;
 
-    PodZero(typeSets, stackDepth);
+    PodZero(a->typeSets, stackDepth);
 
     return true;
 }
 
 bool
 FrameState::consistentRegisters(jsbytecode *target)
 {
     /*
      * Before calling this, either the entire state should have been synced or
      * syncForBranch should have been called. These will ensure that any FE
      * which is not consistent with the target's register state has already
      * been synced, and no stores will need to be issued by prepareForJump.
      */
-    RegisterAllocation *alloc = liveness.getCode(target).allocation;
+    RegisterAllocation *alloc = a->liveness->getCode(target).allocation;
     JS_ASSERT(alloc);
 
     Registers regs(Registers::AvailAnyRegs);
     while (!regs.empty()) {
         AnyRegisterID reg = regs.takeAnyReg();
         if (alloc->assigned(reg)) {
             FrameEntry *needed = getOrTrack(alloc->slot(reg));
-            if (!freeRegs.hasReg(reg)) {
+            if (!a->freeRegs.hasReg(reg)) {
                 FrameEntry *fe = regstate(reg).fe();
                 if (fe != needed)
                     return false;
             } else {
                 return false;
             }
         }
     }
 
+    if (!a->parentRegs.hasAllRegs(alloc->getParentRegs().freeMask))
+        return false;
+
     return true;
 }
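 
 /*
  * Annotation: a hedged sketch of the expected caller protocol (the compiler
  * snippet below is hypothetical, not taken from this patch). For an
  * unconditional jump, fixup loads can be emitted inline before the jump:
  *
  *     if (!frame.syncForBranch(target, Uses(0)))
  *         return false;
  *     if (!frame.consistentRegisters(target))
  *         frame.prepareForJump(target, masm, false);  // emit fixup loads
  *     Jump j = masm.jump();                           // then take the edge
  */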
 
 void
 FrameState::prepareForJump(jsbytecode *target, Assembler &masm, bool synced)
 {
     JS_ASSERT_IF(!synced, !consistentRegisters(target));
 
-    RegisterAllocation *alloc = liveness.getCode(target).allocation;
+    RegisterAllocation *alloc = a->liveness->getCode(target).allocation;
     JS_ASSERT(alloc);
 
     Registers regs(Registers::AvailAnyRegs);
     while (!regs.empty()) {
         AnyRegisterID reg = regs.takeAnyReg();
         if (!alloc->assigned(reg))
             continue;
 
         const FrameEntry *fe = getOrTrack(alloc->slot(reg));
         if (synced || !fe->backing()->dataInRegister(reg)) {
             JS_ASSERT_IF(!synced, fe->data.synced());
             if (reg.isReg())
                 masm.loadPayload(addressOf(fe), reg.reg());
             else
                 masm.loadDouble(addressOf(fe), reg.fpreg());
         }
     }
+
+    regs = Registers(alloc->getParentRegs());
+    while (!regs.empty()) {
+        AnyRegisterID reg = regs.takeAnyReg();
+        if (synced || !a->parentRegs.hasReg(reg))
+            restoreParentRegister(masm, reg);
+    }
 }
 
 void
 FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
 {
     if (fe->isConstant()) {
         masm.storeValue(fe->getValue(), address);
         return;
     }
 
     if (fe->isCopy())
         fe = fe->copyOf();
 
     /* Cannot clobber the address's register. */
-    JS_ASSERT(!freeRegs.hasReg(address.base));
+    JS_ASSERT(!a->freeRegs.hasReg(address.base));
 
     /* If loading from memory, ensure destination differs. */
     JS_ASSERT_IF((fe->type.inMemory() || fe->data.inMemory()),
                  addressOf(fe).base != address.base ||
                  addressOf(fe).offset != address.offset);
 
     if (fe->data.inFPRegister()) {
         masm.storeDouble(fe->data.fpreg(), address);
@@ -1023,18 +1314,18 @@ void FrameState::loadForReturn(FrameEntr
 }
 
 #ifdef DEBUG
 void
 FrameState::assertValidRegisterState() const
 {
     Registers checkedFreeRegs(Registers::AvailAnyRegs);
 
-    for (uint32 i = 0; i < tracker.nentries; i++) {
-        FrameEntry *fe = tracker[i];
+    for (uint32 i = 0; i < a->tracker.nentries; i++) {
+        FrameEntry *fe = a->tracker[i];
         if (fe >= sp)
             continue;
 
         JS_ASSERT(i == fe->trackerIndex());
         JS_ASSERT_IF(fe->isCopy(),
                      fe->trackerIndex() > fe->copyOf()->trackerIndex());
         JS_ASSERT_IF(fe->isCopy(), fe > fe->copyOf());
         JS_ASSERT_IF(fe->isCopy(),
@@ -1054,54 +1345,94 @@ FrameState::assertValidRegisterState() c
         }
         if (fe->data.inFPRegister()) {
             JS_ASSERT(fe->isType(JSVAL_TYPE_DOUBLE));
             checkedFreeRegs.takeReg(fe->data.fpreg());
             JS_ASSERT(regstate(fe->data.fpreg()).fe() == fe);
         }
     }
 
-    JS_ASSERT(checkedFreeRegs == freeRegs);
+    JS_ASSERT(checkedFreeRegs == a->freeRegs);
 
     for (uint32 i = 0; i < Registers::TotalRegisters; i++) {
         AnyRegisterID reg = (RegisterID) i;
         JS_ASSERT(!regstate(reg).isPinned());
-        JS_ASSERT_IF(regstate(reg).fe(), !freeRegs.hasReg(reg));
+        JS_ASSERT_IF(regstate(reg).fe(), !a->freeRegs.hasReg(reg));
         JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).fe()->isTracked());
     }
 
     for (uint32 i = 0; i < Registers::TotalFPRegisters; i++) {
         AnyRegisterID reg = (FPRegisterID) i;
         JS_ASSERT(!regstate(reg).isPinned());
-        JS_ASSERT_IF(regstate(reg).fe(), !freeRegs.hasReg(reg));
+        JS_ASSERT_IF(regstate(reg).fe(), !a->freeRegs.hasReg(reg));
         JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).fe()->isTracked());
         JS_ASSERT_IF(regstate(reg).fe(), regstate(reg).type() == RematInfo::DATA);
     }
 }
 #endif
 
 #if defined JS_NUNBOX32
 void
 FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
                       FrameEntry *bottom) const
 {
-    reifier.reset(&masm, avail, resumeAt, bottom);
+    a->reifier.reset(&masm, avail, resumeAt, bottom);
 
     for (FrameEntry *fe = resumeAt; fe >= bottom; fe--) {
         if (!fe->isTracked())
             continue;
 
-        reifier.sync(fe);
+        a->reifier.sync(fe);
     }
 }
 #endif
 
 void
+FrameState::syncParentRegister(Assembler &masm, AnyRegisterID reg) const
+{
+    ActiveFrame *which = a->parent;
+    while (which->freeRegs.hasReg(reg))
+        which = which->parent;
+
+    FrameEntry *fe = which->regstate(reg).usedBy();
+    Address address = addressOf(fe, which);
+
+    if (reg.isReg() && fe->type.inRegister() && fe->type.reg() == reg.reg()) {
+        if (!fe->type.synced())
+            masm.storeTypeTag(reg.reg(), address);
+    } else if (reg.isReg()) {
+        JS_ASSERT(fe->data.inRegister() && fe->data.reg() == reg.reg());
+        if (!fe->data.synced())
+            masm.storePayload(reg.reg(), address);
+    } else {
+        JS_ASSERT(fe->data.inFPRegister() && fe->data.fpreg() == reg.fpreg());
+        if (!fe->data.synced())
+            masm.storeDouble(reg.fpreg(), address);
+    }
+}
+
+void
+FrameState::syncParentRegistersInMask(Assembler &masm, uint32 mask, bool update) const
+{
+    JS_ASSERT((a->parentRegs.freeMask & mask) == mask);
+
+    Registers parents(mask);
+    while (!parents.empty()) {
+        AnyRegisterID reg = parents.takeAnyReg();
+        if (update)
+            a->parentRegs.takeReg(reg);
+        syncParentRegister(masm, reg);
+    }
+}
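+
+/*
+ * Annotation: callers pass a subset of a->parentRegs. sync() below passes
+ * update == false so the set survives (stub paths may still rejoin), while
+ * syncAndKill() passes update == true to both sync and empty it:
+ *
+ *     syncParentRegistersInMask(masm, a->parentRegs.freeMask, false);
+ *     syncParentRegistersInMask(masm, a->parentRegs.freeMask, true);
+ */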
+
+void
 FrameState::sync(Assembler &masm, Uses uses) const
 {
+    syncParentRegistersInMask(masm, a->parentRegs.freeMask, false);
+
     if (!entries)
         return;
 
     /* Sync all registers up-front. */
     Registers allRegs(Registers::AvailAnyRegs);
     while (!allRegs.empty()) {
         AnyRegisterID reg = allRegs.takeAnyReg();
         FrameEntry *fe = regstate(reg).usedBy();
@@ -1132,17 +1463,17 @@ FrameState::sync(Assembler &masm, Uses u
         }
 #endif
     }
 
     /*
      * Keep track of free registers using a bitmask. If we have to drop into
      * syncFancy(), then this mask will help avoid eviction.
      */
-    Registers avail(freeRegs.freeMask & Registers::AvailRegs);
+    Registers avail(a->freeRegs.freeMask & Registers::AvailRegs);
     Registers temp(Registers::TempAnyRegs);
 
     for (FrameEntry *fe = sp - 1; fe >= entries; fe--) {
         if (!fe->isTracked())
             continue;
 
         if (fe->isType(JSVAL_TYPE_DOUBLE)) {
             /* Copies of in-memory doubles can be synced without spilling. */
@@ -1208,29 +1539,32 @@ FrameState::sync(Assembler &masm, Uses u
             ensureTypeSynced(fe, masm);
 #endif
     }
 }
 
 void
 FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
 {
+    syncParentRegistersInMask(masm, a->parentRegs.freeMask, true);
+    JS_ASSERT(a->parentRegs.empty());
+
     if (activeLoop) {
         /*
          * Drop any remaining loop registers so we don't do any more after-the-fact
          * allocation of the initial register state.
          */
         activeLoop->alloc->clearLoops();
         loopRegs = 0;
     }
 
     FrameEntry *spStop = sp - ignore.nuses;
 
     /* Sync all kill-registers up-front. */
-    Registers search(kill.freeMask & ~freeRegs.freeMask);
+    Registers search(kill.freeMask & ~a->freeRegs.freeMask);
     while (!search.empty()) {
         AnyRegisterID reg = search.takeAnyReg();
         FrameEntry *fe = regstate(reg).usedBy();
         if (!fe || fe >= spStop)
             continue;
 
         JS_ASSERT(fe->isTracked());
 
@@ -1264,17 +1598,17 @@ FrameState::syncAndKill(Registers kill, 
             syncData(fe);
         } else {
             JS_ASSERT(fe->type.reg() == reg.reg());
             syncType(fe);
         }
 #endif
     }
 
-    uint32 maxvisits = tracker.nentries;
+    uint32 maxvisits = a->tracker.nentries;
 
     for (FrameEntry *fe = sp - 1; fe >= entries && maxvisits; fe--) {
         if (!fe->isTracked())
             continue;
 
         maxvisits--;
 
         if (fe >= spStop)
@@ -1296,17 +1630,17 @@ FrameState::syncAndKill(Registers kill, 
             fe->type.setMemory();
         }
     }
 
     /*
      * Anything still alive at this point is guaranteed to be synced. However,
      * it is necessary to evict temporary registers.
      */
-    search = Registers(kill.freeMask & ~freeRegs.freeMask);
+    search = Registers(kill.freeMask & ~a->freeRegs.freeMask);
     while (!search.empty()) {
         AnyRegisterID reg = search.takeAnyReg();
         FrameEntry *fe = regstate(reg).usedBy();
         if (!fe || fe >= spStop)
             continue;
 
         JS_ASSERT(fe->isTracked() && !fe->isType(JSVAL_TYPE_DOUBLE));
 
@@ -1320,37 +1654,76 @@ FrameState::syncAndKill(Registers kill, 
             fe->type.setMemory();
         }
 
         forgetReg(reg);
     }
 }
 
 void
+FrameState::restoreParentRegister(Assembler &masm, AnyRegisterID reg) const
+{
+    ActiveFrame *which = a->parent;
+    while (which->freeRegs.hasReg(reg))
+        which = which->parent;
+
+    FrameEntry *fe = which->regstate(reg).usedBy();
+    Address address = addressOf(fe, which);
+
+    if (reg.isReg() && fe->type.inRegister() && fe->type.reg() == reg.reg()) {
+        masm.loadTypeTag(address, reg.reg());
+    } else if (reg.isReg()) {
+        JS_ASSERT(fe->data.inRegister() && fe->data.reg() == reg.reg());
+        masm.loadPayload(address, reg.reg());
+    } else {
+        JS_ASSERT(fe->data.inFPRegister() && fe->data.fpreg() == reg.fpreg());
+        masm.loadDouble(address, reg.fpreg());
+    }
+}
+
+void
+FrameState::restoreParentRegistersInMask(Assembler &masm, uint32 mask, bool update) const
+{
+    JS_ASSERT_IF(update, (a->parentRegs.freeMask & mask) == 0);
+
+    Registers parents(mask);
+    while (!parents.empty()) {
+        AnyRegisterID reg = parents.takeAnyReg();
+        if (update) {
+            JS_ASSERT(a->freeRegs.hasReg(reg));
+            a->parentRegs.putReg(reg);
+        }
+        restoreParentRegister(masm, reg);
+    }
+}
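+
+/*
+ * Annotation: the dual of syncParentRegistersInMask. merge() below restores
+ * every parent register with update == false, since the stub path only
+ * clobbered the machine registers, not this frame's bookkeeping:
+ *
+ *     restoreParentRegistersInMask(masm, a->parentRegs.freeMask, false);
+ */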
+
+void
 FrameState::merge(Assembler &masm, Changes changes) const
 {
     /*
      * Note: this should only be called by StubCompiler::rejoin, which will notify
      * this FrameState about the jump to patch up in case a new loop register is
      * allocated later.
      */
 
+    restoreParentRegistersInMask(masm, a->parentRegs.freeMask, false);
+
     /*
      * For any changed values we are merging back which we consider to be doubles,
      * ensure they actually are doubles.  They must be doubles or ints, but we
      * do not require stub paths to always generate a double when needed.
      * :FIXME: we check this on OOL stub calls, but not inline stub calls.
      */
     for (unsigned i = 0; i < changes.nchanges; i++) {
         FrameEntry *fe = sp - 1 - i;
         if (fe->isType(JSVAL_TYPE_DOUBLE))
             masm.ensureInMemoryDouble(addressOf(fe));
     }
 
-    uint32 mask = Registers::AvailAnyRegs & ~freeRegs.freeMask;
+    uint32 mask = Registers::AvailAnyRegs & ~a->freeRegs.freeMask;
     Registers search(mask);
 
     while (!search.empty(mask)) {
         AnyRegisterID reg = search.peekReg(mask);
         FrameEntry *fe = regstate(reg).usedBy();
 
         if (!fe) {
             search.takeReg(reg);
@@ -1393,59 +1766,62 @@ FrameState::copyDataIntoReg(FrameEntry *
     if (fe->isCopy())
         fe = fe->copyOf();
 
     if (!fe->data.inRegister())
         tempRegForData(fe);
 
     RegisterID reg = fe->data.reg();
     if (reg == hint) {
-        if (freeRegs.empty(Registers::AvailRegs)) {
+        if (a->freeRegs.empty(Registers::AvailRegs)) {
             ensureDataSynced(fe, masm);
             fe->data.setMemory();
         } else {
             reg = allocReg();
             masm.move(hint, reg);
             fe->data.setRegister(reg);
             regstate(reg).associate(regstate(hint).fe(), RematInfo::DATA);
         }
         regstate(hint).forget();
     } else {
         pinReg(reg);
         takeReg(hint);
         unpinReg(reg);
         masm.move(reg, hint);
     }
+
+    modifyReg(hint);
 }
 
 JSC::MacroAssembler::RegisterID
 FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
 {
     JS_ASSERT(!fe->data.isConstant());
 
     if (fe->isCopy())
         fe = fe->copyOf();
 
     if (fe->data.inRegister()) {
         RegisterID reg = fe->data.reg();
-        if (freeRegs.empty(Registers::AvailRegs)) {
+        if (a->freeRegs.empty(Registers::AvailRegs)) {
             ensureDataSynced(fe, masm);
             fe->data.setMemory();
             regstate(reg).forget();
+            modifyReg(reg);
         } else {
             RegisterID newReg = allocReg();
             masm.move(reg, newReg);
             reg = newReg;
         }
         return reg;
     }
 
     RegisterID reg = allocReg();
 
-    if (!freeRegs.empty(Registers::AvailRegs))
+    if (!a->freeRegs.empty(Registers::AvailRegs))
         masm.move(tempRegForData(fe), reg);
     else
         masm.loadPayload(addressOf(fe), reg);
 
     return reg;
 }
 
 JSC::MacroAssembler::RegisterID
@@ -1453,31 +1829,32 @@ FrameState::copyTypeIntoReg(FrameEntry *
 {
     JS_ASSERT(!fe->type.isConstant());
 
     if (fe->isCopy())
         fe = fe->copyOf();
 
     if (fe->type.inRegister()) {
         RegisterID reg = fe->type.reg();
-        if (freeRegs.empty(Registers::AvailRegs)) {
+        if (a->freeRegs.empty(Registers::AvailRegs)) {
             ensureTypeSynced(fe, masm);
             fe->type.setMemory();
             regstate(reg).forget();
+            modifyReg(reg);
         } else {
             RegisterID newReg = allocReg();
             masm.move(reg, newReg);
             reg = newReg;
         }
         return reg;
     }
 
     RegisterID reg = allocReg();
 
-    if (!freeRegs.empty(Registers::AvailRegs))
+    if (!a->freeRegs.empty(Registers::AvailRegs))
         masm.move(tempRegForType(fe), reg);
     else
         masm.loadTypeTag(addressOf(fe), reg);
 
     return reg;
 }
 
 JSC::MacroAssembler::RegisterID
@@ -1508,37 +1885,39 @@ FrameState::ownRegForType(FrameEntry *fe
     if (fe->isCopy()) {
         /* For now, just do an extra move. The reg must be mutable. */
         FrameEntry *backing = fe->copyOf();
         if (!backing->type.inRegister()) {
             JS_ASSERT(backing->type.inMemory());
             tempRegForType(backing);
         }
 
-        if (freeRegs.empty(Registers::AvailRegs)) {
+        if (a->freeRegs.empty(Registers::AvailRegs)) {
             /* For now... just steal the register that already exists. */
             ensureTypeSynced(backing, masm);
             reg = backing->type.reg();
             backing->type.setMemory();
             regstate(reg).forget();
+            modifyReg(reg);
         } else {
             reg = allocReg();
             masm.move(backing->type.reg(), reg);
         }
         return reg;
     }
 
     if (fe->type.inRegister()) {
         reg = fe->type.reg();
 
         /* Remove ownership of this register. */
         JS_ASSERT(regstate(reg).fe() == fe);
         JS_ASSERT(regstate(reg).type() == RematInfo::TYPE);
         regstate(reg).forget();
         fe->type.invalidate();
+        modifyReg(reg);
     } else {
         JS_ASSERT(fe->type.inMemory());
         reg = allocReg();
         masm.loadTypeTag(addressOf(fe), reg);
     }
     return reg;
 }
 
@@ -1552,22 +1931,23 @@ FrameState::ownRegForData(FrameEntry *fe
     if (fe->isCopy()) {
         /* For now, just do an extra move. The reg must be mutable. */
         FrameEntry *backing = fe->copyOf();
         if (!backing->data.inRegister()) {
             JS_ASSERT(backing->data.inMemory());
             tempRegForData(backing);
         }
 
-        if (freeRegs.empty(Registers::AvailRegs)) {
+        if (a->freeRegs.empty(Registers::AvailRegs)) {
             /* For now... just steal the register that already exists. */
             ensureDataSynced(backing, masm);
             reg = backing->data.reg();
             backing->data.setMemory();
             regstate(reg).forget();
+            modifyReg(reg);
         } else {
             reg = allocReg();
             masm.move(backing->data.reg(), reg);
         }
         return reg;
     }
 
     if (fe->isCopied()) {
@@ -1581,16 +1961,17 @@ FrameState::ownRegForData(FrameEntry *fe
     
     if (fe->data.inRegister()) {
         reg = fe->data.reg();
         /* Remove ownership of this register. */
         JS_ASSERT(regstate(reg).fe() == fe);
         JS_ASSERT(regstate(reg).type() == RematInfo::DATA);
         regstate(reg).forget();
         fe->data.invalidate();
+        modifyReg(reg);
     } else {
         JS_ASSERT(fe->data.inMemory());
         reg = allocReg();
         masm.loadPayload(addressOf(fe), reg);
     }
     return reg;
 }
 
@@ -1618,35 +1999,35 @@ FrameState::pushDouble(Address address)
     FPRegisterID fpreg = allocFPReg();
     masm.loadDouble(address, fpreg);
     pushDouble(fpreg);
 }
 
 void
 FrameState::ensureDouble(FrameEntry *fe)
 {
+    if (fe->isType(JSVAL_TYPE_DOUBLE))
+        return;
+
     if (fe->isConstant()) {
         JS_ASSERT(fe->getValue().isInt32());
         Value newValue = DoubleValue(double(fe->getValue().toInt32()));
         fe->setConstant(Jsvalify(newValue));
         return;
     }
 
-    if (fe->isType(JSVAL_TYPE_DOUBLE))
-        return;
-
     FrameEntry *backing = fe;
     if (fe->isCopy()) {
         /* Forget this entry is a copy.  We are converting this entry, not the backing. */
         backing = fe->copyOf();
         fe->clear();
     } else if (fe->isCopied()) {
         /* Sync and forget any copies of this entry. */
-        for (uint32 i = fe->trackerIndex() + 1; i < tracker.nentries; i++) {
-            FrameEntry *nfe = tracker[i];
+        for (uint32 i = fe->trackerIndex() + 1; i < a->tracker.nentries; i++) {
+            FrameEntry *nfe = a->tracker[i];
             if (nfe < sp && nfe->isCopy() && nfe->copyOf() == fe) {
                 syncFe(nfe);
                 nfe->resetSynced();
             }
         }
     }
 
     FPRegisterID fpreg = allocFPReg();
@@ -1699,18 +2080,18 @@ FrameState::pushCopyOf(uint32 index)
 }
 
 FrameEntry *
 FrameState::walkTrackerForUncopy(FrameEntry *original)
 {
     uint32 firstCopy = InvalidIndex;
     FrameEntry *bestFe = NULL;
     uint32 ncopies = 0;
-    for (uint32 i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
-        FrameEntry *fe = tracker[i];
+    for (uint32 i = original->trackerIndex() + 1; i < a->tracker.nentries; i++) {
+        FrameEntry *fe = a->tracker[i];
         if (fe >= sp)
             continue;
         if (fe->isCopy() && fe->copyOf() == original) {
             if (firstCopy == InvalidIndex) {
                 firstCopy = i;
                 bestFe = fe;
             } else if (fe < bestFe) {
                 bestFe = fe;
@@ -1728,18 +2109,18 @@ FrameState::walkTrackerForUncopy(FrameEn
     JS_ASSERT(firstCopy != InvalidIndex);
     JS_ASSERT(bestFe);
     JS_ASSERT(bestFe > original);
 
     /* Mark all extra copies as copies of the new backing index. */
     bestFe->setCopyOf(NULL);
     if (ncopies > 1) {
         bestFe->setCopied();
-        for (uint32 i = firstCopy; i < tracker.nentries; i++) {
-            FrameEntry *other = tracker[i];
+        for (uint32 i = firstCopy; i < a->tracker.nentries; i++) {
+            FrameEntry *other = a->tracker[i];
             if (other >= sp || other == bestFe)
                 continue;
 
             /* The original must be tracked before copies. */
             JS_ASSERT(other != original);
 
             if (!other->isCopy() || other->copyOf() != original)
                 continue;
@@ -1765,17 +2146,17 @@ FrameState::walkTrackerForUncopy(FrameEn
 
 FrameEntry *
 FrameState::walkFrameForUncopy(FrameEntry *original)
 {
     FrameEntry *bestFe = NULL;
     uint32 ncopies = 0;
 
     /* It's only necessary to visit as many FEs as are being tracked. */
-    uint32 maxvisits = tracker.nentries;
+    uint32 maxvisits = a->tracker.nentries;
 
     for (FrameEntry *fe = original + 1; fe < sp && maxvisits; fe++) {
         if (!fe->isTracked())
             continue;
 
         maxvisits--;
 
         if (fe->isCopy() && fe->copyOf() == original) {
@@ -1821,17 +2202,17 @@ FrameState::uncopy(FrameEntry *original)
      * and select B, not D (see bug 583684).
      *
      * Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
      * sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
      * With large scripts this may be a problem worth investigating. Note that
      * the tracker is walked twice, so we multiply by 2 for pessimism.
      */
     FrameEntry *fe;
-    if ((tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
+    if ((a->tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
         fe = walkFrameForUncopy(original);
     else
         fe = walkTrackerForUncopy(original);
     if (!fe) {
         original->setNotCopied();
         return NULL;
     }
 
@@ -1872,71 +2253,71 @@ FrameState::uncopy(FrameEntry *original)
     return fe;
 }
 
 bool
 FrameState::hasOnlyCopy(FrameEntry *backing, FrameEntry *fe)
 {
     JS_ASSERT(backing->isCopied() && fe->copyOf() == backing);
 
-    for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
-        FrameEntry *nfe = tracker[i];
+    for (uint32 i = backing->trackerIndex() + 1; i < a->tracker.nentries; i++) {
+        FrameEntry *nfe = a->tracker[i];
         if (nfe != fe && nfe < sp && nfe->isCopy() && nfe->copyOf() == backing)
             return false;
     }
 
     return true;
 }
 
 void
 FrameState::storeLocal(uint32 n, JSValueType type, bool popGuaranteed, bool fixedType)
 {
     FrameEntry *local = getLocal(n);
 
-    if (analysis->localEscapes(n)) {
+    if (a->analysis->localEscapes(n)) {
         JS_ASSERT(local->data.inMemory());
         storeTo(peek(-1), addressOf(local), popGuaranteed);
         return;
     }
 
     storeTop(local, type, popGuaranteed);
 
     if (activeLoop)
         local->lastLoop = activeLoop->head;
 
     if (type != JSVAL_TYPE_UNKNOWN && type != JSVAL_TYPE_DOUBLE &&
-        fixedType && !local->type.synced()) {
-        /* Known types are always in sync for locals. */
+        fixedType && !a->parent && !local->type.synced()) {
+        /* Except when inlining, known types are always in sync for locals. */
         local->type.sync();
     }
 
     if (inTryBlock)
         syncFe(local);
 }
 
 void
 FrameState::storeArg(uint32 n, JSValueType type, bool popGuaranteed)
 {
     // Note that args are always immediately synced, because they can be
     // aliased (but not written to) via f.arguments.
     FrameEntry *arg = getArg(n);
 
-    if (analysis->argEscapes(n)) {
+    if (a->analysis->argEscapes(n)) {
         JS_ASSERT(arg->data.inMemory());
         storeTo(peek(-1), addressOf(arg), popGuaranteed);
         return;
     }
 
     storeTop(arg, type, popGuaranteed);
 
     if (activeLoop)
         arg->lastLoop = activeLoop->head;
 
     if (type != JSVAL_TYPE_UNKNOWN && type != JSVAL_TYPE_DOUBLE && !arg->type.synced()) {
-        /* Known types are always in sync for args. */
+        /* Known types are always in sync for args. (Frames which update args are not inlined.) */
         arg->type.sync();
     }
 
     syncFe(arg);
 }
 
 void
 FrameState::forgetEntry(FrameEntry *fe)
@@ -1945,17 +2326,17 @@ FrameState::forgetEntry(FrameEntry *fe)
         uncopy(fe);
         if (!fe->isCopied())
             forgetAllRegs(fe);
     } else {
         forgetAllRegs(fe);
     }
 
     if (fe >= sp)
-        typeSets[fe - spBase] = NULL;
+        a->typeSets[fe - spBase] = NULL;
 }
 
 void
 FrameState::storeTop(FrameEntry *target, JSValueType type, bool popGuaranteed)
 {
     /* Detect something like (x = x) which is a no-op. */
     FrameEntry *top = peek(-1);
     if (top->isCopy() && top->copyOf() == target) {
@@ -2021,18 +2402,18 @@ FrameState::storeTop(FrameEntry *target,
          * 
          * Because of |let| expressions, it's kind of hard to really know
          * whether a region on the stack will be popped all at once. Bleh!
          *
          * This should be rare except in browser code (and maybe even then),
          * but even so there's a quick workaround. We take all copies of the
          * backing fe, and redirect them to be copies of the destination.
          */
-        for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
-            FrameEntry *fe = tracker[i];
+        for (uint32 i = backing->trackerIndex() + 1; i < a->tracker.nentries; i++) {
+            FrameEntry *fe = a->tracker[i];
             if (fe >= sp)
                 continue;
             if (fe->isCopy() && fe->copyOf() == backing) {
                 fe->setCopyOf(target);
                 copied = true;
             }
         }
     }
@@ -2260,17 +2641,17 @@ FrameState::allocForSameBinary(FrameEntr
 
     if (!fe->isTypeKnown()) {
         alloc.lhsType = tempRegForType(fe);
         pinReg(alloc.lhsType.reg());
     }
 
     alloc.lhsData = tempRegForData(fe);
 
-    if (!freeRegs.empty(Registers::AvailRegs)) {
+    if (!a->freeRegs.empty(Registers::AvailRegs)) {
         alloc.result = allocReg();
         masm.move(alloc.lhsData.reg(), alloc.result);
         alloc.lhsNeedsRemat = false;
     } else {
         alloc.result = alloc.lhsData.reg();
         takeReg(alloc.result);
         alloc.lhsNeedsRemat = true;
     }
@@ -2480,17 +2861,17 @@ FrameState::allocForBinary(FrameEntry *l
         (op == JSOP_ADD || (op == JSOP_SUB && backingRight->isConstant())) &&
         (lhs == backingLeft || hasOnlyCopy(backingLeft, lhs))) {
         alloc.result = backingLeft->data.reg();
         alloc.undoResult = true;
         alloc.resultHasRhs = false;
         goto skip;
     }
 
-    if (!freeRegs.empty(Registers::AvailRegs)) {
+    if (!a->freeRegs.empty(Registers::AvailRegs)) {
         /* Free reg - just grab it. */
         alloc.result = allocReg();
         if (!alloc.lhsData.isSet()) {
             JS_ASSERT(alloc.rhsData.isSet());
             JS_ASSERT(commu);
             masm.move(alloc.rhsData.reg(), alloc.result);
             alloc.resultHasRhs = true;
         } else {
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -186,17 +186,16 @@ class FrameState
         }
 
         void associate(FrameEntry *fe, RematInfo::RematType type) {
             JS_ASSERT(!fe_);
             JS_ASSERT(!save_);
 
             fe_ = fe;
             type_ = type;
-            JS_ASSERT(!save_);
         }
 
         /* Change ownership. */
         void reassociate(FrameEntry *fe) {
             assertConsistency();
             JS_ASSERT(fe);
 
             fe_ = fe;
@@ -229,28 +228,27 @@ class FrameState
         }
 
       private:
         /* FrameEntry owning this register, or NULL if not owned by a frame. */
         FrameEntry *fe_;
 
         /* Hack - simplifies register allocation for pairs. */
         FrameEntry *save_;
-        
+
         /* Part of the FrameEntry that owns the FE. */
         RematInfo::RematType type_;
     };
 
+    struct ActiveFrame;
+
     FrameState *thisFromCtor() { return this; }
   public:
-    FrameState(JSContext *cx, JSScript *script, JSFunction *fun,
-               Compiler &cc, Assembler &masm, StubCompiler &stubcc,
-               analyze::LifetimeScript &liveness);
+    FrameState(JSContext *cx, Compiler &cc, Assembler &masm, StubCompiler &stubcc);
     ~FrameState();
-    bool init();
 
     /*
      * Pushes a synced slot that may have a known type.
      */
     inline void pushSynced(JSValueType knownType);
 
     /*
      * Pushes a slot that has a known, synced type and payload.
@@ -390,17 +388,17 @@ class FrameState
      * The compiler should NOT explicitly free it.
      */
     inline RegisterID tempRegForData(FrameEntry *fe);
     inline FPRegisterID tempFPRegForData(FrameEntry *fe);
 
     /*
      * Same as above, except register must match identically.
      */
-    inline RegisterID tempRegInMaskForData(FrameEntry *fe, uint32 mask);
+    inline AnyRegisterID tempRegInMaskForData(FrameEntry *fe, uint32 mask);
 
     /*
      * Same as above, except loads into reg (using masm) if the entry does not
      * already have a register, and does not change the frame state in doing so.
      */
     inline RegisterID tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const;
 
     /*
@@ -775,42 +773,49 @@ class FrameState
     /*
      * If the frame entry is a copy, give it its own registers.
      * This may only be called on the topmost fe.
      */
     inline void giveOwnRegs(FrameEntry *fe);
 
     uint32 stackDepth() const { return sp - spBase; }
 
+    /*
+     * The stack depth of the current frame plus any locals and space
+     * for inlined frames, i.e. the difference between the end of the
+     * current fp and sp.
+     */
+    uint32 totalDepth() const { return a->depth + script->nfixed + stackDepth(); }
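+
+    /*
+     * Annotation, illustrative numbers: a frame inlined where the caller had
+     * accumulated depth 8 (a->depth == 8), with 3 fixed locals and 2 pushed
+     * stack slots, has totalDepth() == 8 + 3 + 2 == 13.
+     */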
+
     // Returns the number of entries in the frame, that is:
     //   2 for callee, this +
     //   nargs +
     //   nfixed +
     //   currently pushed stack slots
     uint32 frameSlots() const { return uint32(sp - entries); }
 
-    // Returns the number of local variables and active stack slots.
-    uint32 localSlots() const { return uint32(sp - locals); }
-
 #ifdef DEBUG
     void assertValidRegisterState() const;
 #endif
 
     // Return an address, relative to the JSStackFrame, that represents where
     // this FrameEntry is stored in memory. Note that this is its canonical
     // address, not its backing store. There is no guarantee that the memory
     // is coherent.
-    Address addressOf(const FrameEntry *fe) const;
+    Address addressOf(const FrameEntry *fe) const { return addressOf(fe, a); }
 
     // Returns an address, relative to the JSStackFrame, that represents where
     // this FrameEntry is backed in memory. This is not necessarily its
     // canonical address, but the address for which the payload has been synced
     // to memory. The caller guarantees that the payload has been synced.
     Address addressForDataRemat(const FrameEntry *fe) const;
 
+    // Inside an inline frame, the address for the return value in the caller.
+    Address addressForInlineReturn() const;
+
     inline StateRemat dataRematInfo(const FrameEntry *fe) const;
 
     /*
      * This is similar to freeReg(ownRegForData(fe)) - except no movement takes place.
      * The fe is simply invalidated as if it were popped. This can be used to free
      * registers in the working area of the stack. Obviously, this can only be called
      * in infallible code that will pop these entries soon after.
      */
@@ -828,19 +833,17 @@ class FrameState
      * call shift(-2).
      */
     void shift(int32 n);
 
     inline void setInTryBlock(bool inTryBlock) {
         this->inTryBlock = inTryBlock;
     }
 
-    void setAnalysis(analyze::Script *analysis) { this->analysis = analysis; }
-
-    inline uint32 regsInUse() const { return Registers::AvailRegs & ~freeRegs.freeMask; }
+    inline uint32 regsInUse() const { return Registers::AvailRegs & ~a->freeRegs.freeMask; }
 
     bool pushLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget);
     void popLoop(jsbytecode *head, Jump *pentry, jsbytecode **pentryTarget);
 
     void setPC(jsbytecode *PC) { this->PC = PC; }
 
     struct StubJoin {
         unsigned index;
@@ -851,16 +854,30 @@ class FrameState
         if (activeLoop) {
             StubJoin r;
             r.index = index;
             r.script = script;
             loopJoins.append(r);
         }
     }
 
+    void getUnsyncedEntries(uint32 *pdepth, Vector<UnsyncedEntry> *unsyncedEntries);
+
+    bool pushActiveFrame(JSScript *script, uint32 argc,
+                         analyze::Script *analysis, analyze::LifetimeScript *liveness);
+    void popActiveFrame();
+
+    void discardLocalRegisters();
+    void evictInlineModifiedRegisters(Registers regs);
+    void syncParentRegistersInMask(Assembler &masm, uint32 mask, bool update) const;
+    void restoreParentRegistersInMask(Assembler &masm, uint32 mask, bool update) const;
+    Registers getParentRegs() const { return a->parentRegs; }
+
+    void tryCopyRegister(FrameEntry *fe, FrameEntry *callStart);
+
   private:
     inline AnyRegisterID allocAndLoadReg(FrameEntry *fe, bool fp, RematInfo::RematType type);
     inline void forgetReg(AnyRegisterID reg);
     AnyRegisterID evictSomeReg(uint32 mask);
     void evictReg(AnyRegisterID reg);
     inline FrameEntry *rawPush();
     inline void addToTracker(FrameEntry *fe);
 
@@ -913,96 +930,161 @@ class FrameState
     void forgetEntry(FrameEntry *fe);
 
     FrameEntry *entryFor(uint32 index) const {
         JS_ASSERT(entries[index].isTracked());
         return &entries[index];
     }
 
     uint32 indexOf(int32 depth) const {
-        JS_ASSERT(uint32((sp + depth) - entries) < feLimit());
+        JS_ASSERT(uint32((sp + depth) - entries) < feLimit(script));
         return uint32((sp + depth) - entries);
     }
     uint32 indexOfFe(FrameEntry *fe) const {
-        JS_ASSERT(uint32(fe - entries) < feLimit());
+        JS_ASSERT(uint32(fe - entries) < feLimit(script));
         return uint32(fe - entries);
     }
-    uint32 feLimit() const { return script->nslots + nargs + 2; }
+
+    static uint32 feLimit(JSScript *script) {
+        return script->nslots + 2 + (script->fun ? script->fun->nargs : 0);
+    }
 
     RegisterState & regstate(AnyRegisterID reg) {
         JS_ASSERT(reg.reg_ < Registers::TotalAnyRegisters);
-        return regstate_[reg.reg_];
+        return a->regstate_[reg.reg_];
     }
 
     const RegisterState & regstate(AnyRegisterID reg) const {
         JS_ASSERT(reg.reg_ < Registers::TotalAnyRegisters);
-        return regstate_[reg.reg_];
+        return a->regstate_[reg.reg_];
     }
 
     AnyRegisterID bestEvictReg(uint32 mask, bool includePinned) const;
 
     inline analyze::Lifetime * variableLive(FrameEntry *fe, jsbytecode *pc) const;
     inline bool binaryEntryLive(FrameEntry *fe) const;
     RegisterAllocation * computeAllocation(jsbytecode *target);
     void relocateReg(AnyRegisterID reg, RegisterAllocation *alloc, Uses uses);
 
-    bool isArg(FrameEntry *fe) const { return fun && fe >= args && fe - args < fun->nargs; }
-    bool isLocal(FrameEntry *fe) const { return fe >= locals && fe - locals < script->nfixed; }
+    bool isArg(FrameEntry *fe) const {
+        return script->fun && fe >= args && fe - args < script->fun->nargs;
+    }
+
+    bool isLocal(FrameEntry *fe) const {
+        return fe >= locals && fe - locals < script->nfixed;
+    }
 
+    int32 frameOffset(const FrameEntry *fe, ActiveFrame *a) const;
+    Address addressOf(const FrameEntry *fe, ActiveFrame *a) const;
+
+    void updateActiveFrame();
+    void syncInlinedEntry(FrameEntry *fe, const FrameEntry *parent);
+    void associateReg(FrameEntry *fe, RematInfo::RematType type, AnyRegisterID reg);
+
+    inline void modifyReg(AnyRegisterID reg);
     inline void clearLoopReg(AnyRegisterID reg);
     void setLoopReg(AnyRegisterID reg, FrameEntry *fe);
     void flushLoopJoins();
 
 #ifdef DEBUG
     const char * entryName(FrameEntry *fe) const;
     void dumpAllocation(RegisterAllocation *alloc);
 #else
     const char * entryName(FrameEntry *fe) const { return NULL; }
 #endif
 
+    void syncParentRegister(Assembler &masm, AnyRegisterID reg) const;
+    void restoreParentRegister(Assembler &masm, AnyRegisterID reg) const;
+
   private:
     JSContext *cx;
-    JSScript *script;
-    JSFunction *fun;
-    uint32 nargs;
     Assembler &masm;
     StubCompiler &stubcc;
 
-    /* All allocated registers. */
-    Registers freeRegs;
+    /* State for the active stack frame. */
+
+    struct ActiveFrame {
+        ActiveFrame *parent;
+        jsbytecode *parentPC;
+        FrameEntry *parentSP;
+        uint32 parentArgc;
+
+        JSScript *script;
+        uint32 depth;
+
+        /* All unallocated registers. */
+        Registers freeRegs;
+
+        /*
+         * Registers which are in use by parents and still hold their original value.
+         * These may or may not be in freeRegs: a parent register is allocated to
+         * an fe if that fe copies an entry in the parent (i.e. an argument or this).
+         */
+        Registers parentRegs;
+
+        /* Cache of FrameEntry objects. */
+        FrameEntry *entries;
+        FrameEntry *callee_;
+        FrameEntry *this_;
+        FrameEntry *args;
+        FrameEntry *locals;
 
-    /* Cache of FrameEntry objects. */
+        /* Vector of tracked slot indexes. */
+        Tracker tracker;
+
+        /* Type sets for the stack contents. */
+        types::TypeSet **typeSets;
+
+        /*
+         * Register ownership state. This can't be used alone; to find whether an
+         * entry is active, you must check the allocated registers.
+         */
+        RegisterState regstate_[Registers::TotalAnyRegisters];
+
+        const RegisterState & regstate(AnyRegisterID reg) const {
+            JS_ASSERT(reg.reg_ < Registers::TotalAnyRegisters);
+            return regstate_[reg.reg_];
+        }
+
+#if defined JS_NUNBOX32
+        mutable ImmutableSync reifier;
+#endif
+
+        analyze::Script *analysis;
+        analyze::LifetimeScript *liveness;
+    };
+    ActiveFrame *a;
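+
+    /*
+     * Annotation: active frames chain through 'parent'. Ownership of a
+     * parent register is resolved by walking outward to the innermost frame
+     * that does not have the register free, as in syncParentRegister and
+     * restoreParentRegister:
+     *
+     *     ActiveFrame *which = a->parent;
+     *     while (which->freeRegs.hasReg(reg))
+     *         which = which->parent;
+     */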
+
+    /* State derived/copied from the active frame. :XXX: remove? */
+
+    JSScript *script;
+
     FrameEntry *entries;
-
     FrameEntry *callee_;
     FrameEntry *this_;
 
     /* Base pointer for arguments. */
     FrameEntry *args;
 
     /* Base pointer for local variables. */
     FrameEntry *locals;
 
     /* Base pointer for the stack. */
     FrameEntry *spBase;
 
     /* Dynamic stack pointer. */
     FrameEntry *sp;
 
-    /* Vector of tracked slot indexes. */
-    Tracker tracker;
-
-    /* Type sets for the stack contents. */
-    types::TypeSet **typeSets;
+    /* Current PC, for managing register allocation. */
+    jsbytecode *PC;
 
     /*
-     * Register ownership state. This can't be used alone; to find whether an
-     * entry is active, you must check the allocated registers.
+     * State for managing registers within loops. Calls to functions which
+     * contain loops are not inlined.
      */
-    RegisterState regstate_[Registers::TotalAnyRegisters];
 
     struct LoopState
     {
         LoopState *outer;
         jsbytecode *head;
         RegisterAllocation *alloc;
 
         /*
@@ -1027,29 +1109,16 @@ class FrameState
         StubJoin join;
         Address address;
         AnyRegisterID reg;
     };
 
     /* Pending loads to patch for stub rejoins. */
     Vector<StubJoinPatch,16,CompilerAllocPolicy> loopPatches;
 
-    analyze::Script *analysis;
-
-    /*
-     * Liveness analysis, and current pc. The pc should only be used for decisions
-     * on register eviction.
-     */
-    analyze::LifetimeScript &liveness;
-    jsbytecode *PC;
-
-#if defined JS_NUNBOX32
-    mutable ImmutableSync reifier;
-#endif
-
     bool inTryBlock;
 };
 
 /*
  * Register allocation overview. We want to allocate registers at the same time
  * as we emit code, in a single forward pass over the script. This is good both
  * for compilation speed and for design simplicity; we allocate registers for
  * variables and temporaries as the compiler needs them. To get a good allocation,
@@ -1067,43 +1136,48 @@ class FrameState
 
 /* Register allocation to use at a join point. */
 struct RegisterAllocation {
   private:
     typedef JSC::MacroAssembler::RegisterID RegisterID;
     typedef JSC::MacroAssembler::FPRegisterID FPRegisterID;
 
     /* Entry for an unassigned register at the join point. */
-    static const uint32 UNASSIGNED_REGISTER = 0xffffffff;
+    static const uint32 UNASSIGNED_REGISTER = uint32(-1);
 
     /*
      * In the body of a loop, entry for an unassigned register that has not been
      * used since the start of the loop. We do not finalize the register state
      * at the start of a loop body until after generating code for the entire loop,
      * so we can decide on which variables to carry around the loop after seeing
      * them accessed early on in the body.
      */
-    static const uint32 LOOP_REGISTER = 0xfffffffe;
+    static const uint32 LOOP_REGISTER = uint32(-2);
 
     /*
      * Assignment of registers to payloads. Type tags are always in memory,
      * except for known doubles in FP registers.
      */
     uint32 regstate_[Registers::TotalAnyRegisters];
 
     /* Mask for regstate entries indicating if the slot is synced. */
     static const uint32 SYNCED = 0x80000000;
 
     uint32 & regstate(AnyRegisterID reg) {
         JS_ASSERT(reg.reg_ < Registers::TotalAnyRegisters);
         return regstate_[reg.reg_];
     }
 
+    /* Registers used for entries in parent frames which still hold their original value. */
+    Registers parentRegs;
+
   public:
-    RegisterAllocation(bool forLoop) {
+    RegisterAllocation(bool forLoop)
+        : parentRegs(0)
+    {
         uint32 entry = forLoop ? (uint32) LOOP_REGISTER : (uint32) UNASSIGNED_REGISTER;
         for (unsigned i = 0; i < Registers::TotalAnyRegisters; i++) {
             AnyRegisterID reg = AnyRegisterID::fromRaw(i);
             bool avail = Registers::maskReg(reg) & Registers::AvailAnyRegs;
             regstate_[i] = avail ? entry : UNASSIGNED_REGISTER;
         }
     }
 
@@ -1129,16 +1203,22 @@ struct RegisterAllocation {
         JS_ASSERT(slot != LOOP_REGISTER && slot != UNASSIGNED_REGISTER);
         regstate(reg) = slot | (synced ? SYNCED : 0);
     }
 
     void setUnassigned(AnyRegisterID reg) {
         regstate(reg) = UNASSIGNED_REGISTER;
     }
 
+    void setParentRegs(Registers regs) {
+        parentRegs = regs;
+    }
+
+    Registers getParentRegs() { return parentRegs; }
+
     bool synced() {
         for (unsigned i = 0; i < Registers::TotalAnyRegisters; i++) {
             if (assigned(AnyRegisterID::fromRaw(i)))
                 return false;
         }
         return true;
     }
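 
     /*
      * Annotation: regstate_ packs a slot index and the SYNCED bit into one
      * uint32. A standalone model of the encoding (illustrative helpers, not
      * the real accessors):
      *
      *     static const uint32_t SYNCED = 0x80000000;
      *     uint32_t encode(uint32_t slot, bool synced) {
      *         return slot | (synced ? SYNCED : 0);
      *     }
      *     uint32_t slotOf(uint32_t state)   { return state & ~SYNCED; }
      *     bool     isSynced(uint32_t state) { return (state & SYNCED) != 0; }
      *
      * e.g. encode(7, true) == 0x80000007; slotOf gives 7, isSynced true.
      */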
 
--- a/js/src/methodjit/ImmutableSync.cpp
+++ b/js/src/methodjit/ImmutableSync.cpp
@@ -42,29 +42,32 @@
 #include "FrameEntry.h"
 #include "FrameState.h"
 #include "FrameState-inl.h"
 #include "ImmutableSync.h"
 
 using namespace js;
 using namespace js::mjit;
 
-ImmutableSync::ImmutableSync(JSContext *cx, const FrameState &frame)
-  : cx(cx), entries(NULL), frame(frame), avail(Registers::AvailRegs), generation(0)
+ImmutableSync::ImmutableSync()
+  : cx(NULL), entries(NULL), frame(NULL), avail(Registers::AvailRegs), generation(0)
 {
 }
 
 ImmutableSync::~ImmutableSync()
 {
     cx->free(entries);
 }
 
 bool
-ImmutableSync::init(uint32 nentries)
+ImmutableSync::init(JSContext *cx, const FrameState &frame, uint32 nentries)
 {
+    this->cx = cx;
+    this->frame = &frame;
+
     entries = (SyncEntry *)cx->calloc(sizeof(SyncEntry) * nentries);
     return !!entries;
 }
 
 void
 ImmutableSync::reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom)
 {
     this->avail = avail;
@@ -89,36 +92,36 @@ ImmutableSync::allocReg()
         RegisterID reg = RegisterID(i);
         if (!(Registers::maskReg(reg) & Registers::AvailRegs))
             continue;
 
         lastResort = 0;
 
         if (!regs[i]) {
             /* If the frame does not own this register, take it! */
-            FrameEntry *fe = frame.regstate(reg).usedBy();
+            FrameEntry *fe = frame->regstate(reg).usedBy();
             if (!fe)
                 return reg;
 
             evictFromFrame = i;
 
             /*
              * If not copied, we can sync and not have to load again later.
              * That's about as good as it gets, so just break out now.
              */
             if (!fe->isCopied())
                 break;
         }
     }
 
     if (evictFromFrame != FrameState::InvalidIndex) {
         RegisterID evict = RegisterID(evictFromFrame);
-        FrameEntry *fe = frame.regstate(evict).usedBy();
+        FrameEntry *fe = frame->regstate(evict).usedBy();
         SyncEntry &e = entryFor(fe);
-        if (frame.regstate(evict).type() == RematInfo::TYPE) {
+        if (frame->regstate(evict).type() == RematInfo::TYPE) {
             JS_ASSERT(!e.typeClobbered);
             e.typeClobbered = true;
         } else {
             JS_ASSERT(!e.dataClobbered);
             e.dataClobbered = true;
         }
         return evict;
     }
@@ -138,17 +141,17 @@ ImmutableSync::allocReg()
 
     return reg;
 }
 
 inline ImmutableSync::SyncEntry &
 ImmutableSync::entryFor(FrameEntry *fe)
 {
     JS_ASSERT(fe <= top);
-    SyncEntry &e = entries[frame.indexOfFe(fe)];
+    SyncEntry &e = entries[frame->indexOfFe(fe)];
     if (e.generation != generation)
         e.reset(generation);
     return e;
 }
 
 void
 ImmutableSync::sync(FrameEntry *fe)
 {
@@ -181,45 +184,45 @@ ImmutableSync::ensureTypeReg(FrameEntry 
 {
     if (fe->type.inRegister() && !e.typeClobbered)
         return fe->type.reg();
     if (e.hasTypeReg)
         return e.typeReg;
     e.typeReg = allocReg();
     e.hasTypeReg = true;
     regs[e.typeReg] = &e;
-    masm->loadTypeTag(frame.addressOf(fe), e.typeReg);
+    masm->loadTypeTag(frame->addressOf(fe), e.typeReg);
     return e.typeReg;
 }
 
 JSC::MacroAssembler::RegisterID
 ImmutableSync::ensureDataReg(FrameEntry *fe, SyncEntry &e)
 {
     if (fe->data.inRegister() && !e.dataClobbered)
         return fe->data.reg();
     if (e.hasDataReg)
         return e.dataReg;
     e.dataReg = allocReg();
     e.hasDataReg = true;
     regs[e.dataReg] = &e;
-    masm->loadPayload(frame.addressOf(fe), e.dataReg);
+    masm->loadPayload(frame->addressOf(fe), e.dataReg);
     return e.dataReg;
 }
 
 void
 ImmutableSync::syncCopy(FrameEntry *fe)
 {
     JS_ASSERT(fe >= bottom);
 
     FrameEntry *backing = fe->copyOf();
     SyncEntry &e = entryFor(backing);
 
     JS_ASSERT(!backing->isConstant());
 
-    Address addr = frame.addressOf(fe);
+    Address addr = frame->addressOf(fe);
 
     if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE) && !e.learnedType) {
         e.learnedType = true;
         e.type = fe->getKnownType();
     }
 
     if (!fe->data.synced())
         masm->storePayload(ensureDataReg(backing, e), addr);
@@ -232,17 +235,17 @@ ImmutableSync::syncCopy(FrameEntry *fe)
     }
 }
 
 void
 ImmutableSync::syncNormal(FrameEntry *fe)
 {
     SyncEntry &e = entryFor(fe);
 
-    Address addr = frame.addressOf(fe);
+    Address addr = frame->addressOf(fe);
 
     if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE)) {
         e.learnedType = true;
         e.type = fe->getKnownType();
     }
 
     if (shouldSyncData(fe, e)) {
         if (fe->isConstant()) {
@@ -259,24 +262,24 @@ ImmutableSync::syncNormal(FrameEntry *fe
             masm->storeTypeTag(ensureTypeReg(fe, e), addr);
     }
 
     if (e.hasDataReg) {
         avail.putReg(e.dataReg);
         regs[e.dataReg] = NULL;
     } else if (!e.dataClobbered &&
                fe->data.inRegister() &&
-               frame.regstate(fe->data.reg()).usedBy()) {
+               frame->regstate(fe->data.reg()).usedBy()) {
         avail.putReg(fe->data.reg());
     }
 
     if (e.hasTypeReg) {
         avail.putReg(e.typeReg);
         regs[e.typeReg] = NULL;
     } else if (!e.typeClobbered &&
                fe->type.inRegister() &&
-               frame.regstate(fe->type.reg()).usedBy()) {
+               frame->regstate(fe->type.reg()).usedBy()) {
         avail.putReg(fe->type.reg());
     }
 }
 
 #endif /* JS_NUNBOX32 */
 
--- a/js/src/methodjit/ImmutableSync.h
+++ b/js/src/methodjit/ImmutableSync.h
@@ -86,19 +86,19 @@ class ImmutableSync
             hasDataReg = false;
             hasTypeReg = false;
             learnedType = false;
             generation = gen;
         }
     };
 
   public:
-    ImmutableSync(JSContext *cx, const FrameState &frame);
+    ImmutableSync();
     ~ImmutableSync();
-    bool init(uint32 nentries);
+    bool init(JSContext *cx, const FrameState &frame, uint32 nentries);
 
     void reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom);
     void sync(FrameEntry *fe);
 
   private:
     void syncCopy(FrameEntry *fe);
     void syncNormal(FrameEntry *fe);
     RegisterID ensureDataReg(FrameEntry *fe, SyncEntry &e);
@@ -108,17 +108,17 @@ class ImmutableSync
     inline SyncEntry &entryFor(FrameEntry *fe);
 
     bool shouldSyncType(FrameEntry *fe, SyncEntry &e);
     bool shouldSyncData(FrameEntry *fe, SyncEntry &e);
 
   private:
     JSContext *cx;
     SyncEntry *entries;
-    const FrameState &frame;
+    const FrameState *frame;
     uint32 nentries;
     Registers avail;
     Assembler *masm;
     SyncEntry *regs[Assembler::TotalRegisters];
     FrameEntry *top;
     FrameEntry *bottom;
     uint32 generation;
 };
--- a/js/src/methodjit/InlineFrameAssembler.h
+++ b/js/src/methodjit/InlineFrameAssembler.h
@@ -71,37 +71,36 @@ class InlineFrameAssembler {
     typedef JSC::MacroAssembler::Address Address;
     typedef JSC::MacroAssembler::Imm32 Imm32;
     typedef JSC::MacroAssembler::ImmPtr ImmPtr;
     typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
 
     Assembler &masm;
     FrameSize  frameSize;       // size of the caller's frame
     RegisterID funObjReg;       // register containing the function object (callee)
-    jsbytecode *pc;             // bytecode location at the caller call site
     uint32     flags;           // frame flags
 
   public:
     /*
      * Register state, so consumers of this class can restrict which registers
      * can and can't be clobbered.
      */
     Registers  tempRegs;
 
     InlineFrameAssembler(Assembler &masm, ic::CallICInfo &ic, uint32 flags)
-      : masm(masm), pc(ic.pc), flags(flags), tempRegs(Registers::AvailRegs)
+      : masm(masm), flags(flags), tempRegs(Registers::AvailRegs)
     {
         frameSize = ic.frameSize;
         funObjReg = ic.funObjReg;
         tempRegs.takeReg(ic.funPtrReg);
         tempRegs.takeReg(funObjReg);
     }
 
     InlineFrameAssembler(Assembler &masm, Compiler::CallGenInfo &gen, uint32 flags)
-      : masm(masm), pc(gen.pc), flags(flags), tempRegs(Registers::AvailRegs)
+      : masm(masm), flags(flags), tempRegs(Registers::AvailRegs)
     {
         frameSize = gen.frameSize;
         funObjReg = gen.funObjReg;
         tempRegs.takeReg(funObjReg);
     }
 
     DataLabelPtr assemble(void *ncode)
     {
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -268,17 +268,17 @@ stubs::FixupArity(VMFrame &f, uint32 nac
     JSStackFrame *newfp = cx->stack().getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
                                                                 fun, fun->script(), &flags,
                                                                 f.entryfp, &f.stackLimit);
     if (!newfp) {
         /*
          * The PC is not coherent with the current frame, so fix it up for
          * exception handling.
          */
-        f.regs.pc = f.jit()->nativeToPC(ncode);
+        f.regs.pc = f.jit()->nativeToPC(ncode, &f.regs.inlined);
         THROWV(NULL);
     }
 
     /* Reset the part of the stack frame set by the caller. */
     newfp->initCallFrameCallerHalf(cx, flags, ncode);
 
     /* Reset the part of the stack frame set by the prologue up to now. */
     newfp->initCallFrameEarlyPrologue(fun, nactual);
@@ -355,17 +355,17 @@ UncachedInlineCall(VMFrame &f, uint32 fl
 {
     JSContext *cx = f.cx;
     Value *vp = f.regs.sp - (argc + 2);
     JSObject &callee = vp->toObject();
     JSFunction *newfun = callee.getFunctionPrivate();
     JSScript *newscript = newfun->script();
 
     bool newType = (flags & JSFRAME_CONSTRUCTING) && cx->typeInferenceEnabled() &&
-        types::UseNewType(cx, f.regs.fp->script(), f.regs.pc);
+        types::UseNewType(cx, f.script(), f.pc());
 
     if (argTypes && argc == newfun->nargs) {
         /*
          * Use the space of all possible types being passed at this callsite if there
          * is a match between argc and nargs, so that the fastEntry can be subsequently
          * used without further type checking. If there is an argument count mismatch,
          * the callee's args will end up getting marked as unknown.
          */
@@ -563,18 +563,17 @@ js_InternalThrow(VMFrame &f)
 
     // Make sure sp is up to date.
     JS_ASSERT(cx->regs == &f.regs);
 
     // Call the throw hook if necessary
     JSThrowHook handler = f.cx->debugHooks->throwHook;
     if (handler) {
         Value rval;
-        switch (handler(cx, cx->fp()->script(), cx->regs->pc, Jsvalify(&rval),
-                        cx->debugHooks->throwHookData)) {
+        switch (handler(cx, f.script(), f.pc(), Jsvalify(&rval), cx->debugHooks->throwHookData)) {
           case JSTRAP_ERROR:
             cx->clearPendingException();
             return NULL;
 
           case JSTRAP_RETURN:
             cx->clearPendingException();
             cx->fp()->setReturnValue(rval);
             return JS_FUNC_TO_DATA_PTR(void *,
--- a/js/src/methodjit/MachineRegs.h
+++ b/js/src/methodjit/MachineRegs.h
@@ -388,16 +388,20 @@ struct Registers {
     bool hasReg(AnyRegisterID reg) const {
         return !!(freeMask & (1 << reg.reg_));
     }
 
     bool hasRegInMask(uint32 mask) const {
         return !!(freeMask & mask);
     }
 
+    bool hasAllRegs(uint32 mask) const {
+        return (freeMask & mask) == mask;
+    }
+
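hasAllRegs() complements hasRegInMask(): the latter succeeds if any register in the mask is free, the former only if every one is. A plausible use, sketched with placeholder registers reg1/reg2:

    // Sketch: claim a register pair only when both members are free.
    uint32 mask = Registers::maskReg(reg1) | Registers::maskReg(reg2);
    if (regs.hasAllRegs(mask)) {
        regs.takeReg(reg1);     // safe: both bits were set in freeMask
        regs.takeReg(reg2);
    }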
     void putRegUnchecked(AnyRegisterID reg) {
         freeMask |= (1 << reg.reg_);
     }
 
     void putReg(AnyRegisterID reg) {
         JS_ASSERT(!hasReg(reg));
         putRegUnchecked(reg);
     }
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -114,18 +114,16 @@ JSStackFrame::methodjitStaticAsserts()
  *  - Used by RunTracer()
  */
 
 #ifdef JS_METHODJIT_PROFILE_STUBS
 static const size_t STUB_CALLS_FOR_OP_COUNT = 255;
 static uint32 StubCallsForOp[STUB_CALLS_FOR_OP_COUNT];
 #endif
 
-extern "C" void JaegerTrampolineReturn();
-
 extern "C" void JS_FASTCALL
 PushActiveVMFrame(VMFrame &f)
 {
     f.entryfp->script()->compartment->jaegerCompartment->pushActiveFrame(&f);
     f.regs.fp->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
 }
 
 extern "C" void JS_FASTCALL
@@ -462,17 +460,17 @@ SYMBOL_STRING(JaegerTrampoline) ":"     
      *  [ entryfp   ]
      *  [ stkLimit  ]
      *  [ cx        ]
      *  [ regs.fp   ]
      *  [ regs.pc   ]
      *  [ regs.sp   ]
      *  [ scratch   ]
      *  [ previous  ]
-     *  [ args.ptr3 ]
+     *  [ inlined   ]
      *  [ args.ptr2 ]
      *  [ args.ptr  ]
      */
     
     /* Push callee-saved registers. */
 "   push    {r4-r11,lr}"                        "\n"
     /* Push interesting VMFrame content. */
 "   push    {r1}"                               "\n"    /* entryfp */
@@ -843,27 +841,39 @@ JITScript::nmap() const
 }
 
 char *
 JITScript::nmapSectionLimit() const
 {
     return (char *)nmap() + sizeof(NativeMapEntry) * nNmapPairs;
 }
 
+js::mjit::InlineFrame *
+JITScript::inlineFrames() const
+{
+    return (js::mjit::InlineFrame *)nmapSectionLimit();
+}
+
+js::mjit::CallSite *
+JITScript::callSites() const
+{
+    return (js::mjit::CallSite *)((char *)inlineFrames() + sizeof(js::mjit::InlineFrame) * nInlineFrames);
+}
+
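The inline frame and call site arrays are squeezed in directly after the nmap section, so every accessor below now chains off callSites() instead of nmapSectionLimit(). The layout arithmetic, condensed (as if written inside a JITScript member):

    // Sections accumulate in a fixed order; each start is the previous end.
    char *base      = nmapSectionLimit();                         // end of nmap
    char *inlines   = base;                                       // InlineFrame[nInlineFrames]
    char *callsites = inlines + sizeof(InlineFrame) * nInlineFrames;
    char *monoics   = callsites + sizeof(CallSite) * nCallSites;  // first MONOIC section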
 #ifdef JS_MONOIC
 ic::GetGlobalNameIC *
 JITScript::getGlobalNames() const
 {
-    return (ic::GetGlobalNameIC *)nmapSectionLimit();
+    return (ic::GetGlobalNameIC *)((char *)callSites() + sizeof(js::mjit::CallSite) * nCallSites);
 }
 
 ic::SetGlobalNameIC *
 JITScript::setGlobalNames() const
 {
-    return (ic::SetGlobalNameIC *)((char *)nmapSectionLimit() +
+    return (ic::SetGlobalNameIC *)((char *)getGlobalNames() +
             sizeof(ic::GetGlobalNameIC) * nGetGlobalNames);
 }
 
 ic::CallICInfo *
 JITScript::callICs() const
 {
     return (ic::CallICInfo *)((char *)setGlobalNames() +
             sizeof(ic::SetGlobalNameIC) * nSetGlobalNames);
@@ -921,22 +931,16 @@ JITScript::polyICSectionsLimit() const
 #else   // JS_POLYIC
 char *
 JITScript::polyICSectionsLimit() const
 {
     return monoICSectionsLimit();
 }
 #endif  // JS_POLYIC
 
-js::mjit::CallSite *
-JITScript::callSites() const
-{
-    return (js::mjit::CallSite *)polyICSectionsLimit();
-}
-
 template <typename T>
 static inline void Destroy(T &t)
 {
     t.~T();
 }
 
 mjit::JITScript::~JITScript()
 {
@@ -1095,17 +1099,17 @@ mjit::GetCallTargetCount(JSScript *scrip
 uintN
 mjit::GetCallTargetCount(JSScript *script, jsbytecode *pc)
 {
     return 1;
 }
 #endif
 
 jsbytecode *
-JITScript::nativeToPC(void *returnAddress) const
+JITScript::nativeToPC(void *returnAddress, CallSite **pinline) const
 {
     size_t low = 0;
     size_t high = nCallICs;
     js::mjit::ic::CallICInfo *callICs_ = callICs();
     while (high > low + 1) {
         /* Could overflow here on a script with 2 billion calls. Oh well. */
         size_t mid = (high + low) / 2;
         void *entry = callICs_[mid].funGuard.executableAddress();
@@ -1116,13 +1120,38 @@ JITScript::nativeToPC(void *returnAddres
          */
         if (entry >= returnAddress)
             high = mid;
         else
             low = mid;
     }
 
     js::mjit::ic::CallICInfo &ic = callICs_[low];
+    JS_ASSERT((uint8*)ic.funGuard.executableAddress() + ic.joinPointOffset == returnAddress);
 
-    JS_ASSERT((uint8*)ic.funGuard.executableAddress() + ic.joinPointOffset == returnAddress);
-    return ic.pc;
+    if (ic.call->inlineIndex != uint32(-1)) {
+        if (pinline)
+            *pinline = ic.call;
+        InlineFrame *frame = &inlineFrames()[ic.call->inlineIndex];
+        while (frame && frame->parent)
+            frame = frame->parent;
+        return frame->parentpc;
+    }
+
+    if (pinline)
+        *pinline = NULL;
+    return script->code + ic.call->pcOffset;
 }
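With inlining, a return address no longer maps to a unique PC in this script: when the call IC sits in inlined code, nativeToPC() reports the PC of the outermost frame and hands the CallSite back through pinline so the caller can recover the inline chain. Hedged caller-side sketch:

    // Sketch: resolving a native return address with inlining in play.
    CallSite *inlined;
    jsbytecode *pc = jit->nativeToPC(returnAddress, &inlined);
    if (inlined) {
        // The address is inside inlined code; this entry describes the
        // erased frame, including its function and parent chain.
        InlineFrame *frame = &jit->inlineFrames()[inlined->inlineIndex];
    }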
 
+void
+JITScript::trace(JSTracer *trc)
+{
+    /*
+     * MICs and PICs attached to the JITScript are weak references, and either
+     * entirely purged or selectively purged on each GC. We do, however, need
+     * to maintain references to any scripts whose code was inlined into this.
+     */
+    InlineFrame *inlineFrames_ = inlineFrames();
+    for (unsigned i = 0; i < nInlineFrames; i++) {
+        JS_SET_TRACING_NAME(trc, "jitscript_fun");
+        Mark(trc, inlineFrames_[i].fun);
+    }
+}
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -57,37 +57,33 @@
 namespace js {
 
 namespace mjit { struct JITScript; }
 
 struct VMFrame
 {
     union Arguments {
         struct {
-            void *ptr;
-            void *ptr2;
-            void *ptr3;
-        } x;
-        struct {
             uint32 lazyArgsObj;
             uint32 dynamicArgc;
         } call;
     } u;
 
     VMFrame      *previous;
     void         *scratch;
     JSFrameRegs  regs;
     JSContext    *cx;
     Value        *stackLimit;
     JSStackFrame *entryfp;
 
 /*
  * Value stored in the 'scratch' field when making a native call. This is used
  * by the recompiler and this value must not be written in other cases
 * (i.e. scratch must be used to store a pointer, not an integer).
+ * :XXX: remove horrible hack.
  */
 #define NATIVE_CALL_SCRATCH_VALUE (void *) 0x1
 
 #if defined(JS_CPU_X86)
     void *savedEBX;
     void *savedEDI;
     void *savedESI;
     void *savedEBP;
@@ -141,18 +137,21 @@ struct VMFrame
     }
 #else
 # error "The VMFrame layout isn't defined for your processor architecture!"
 #endif
 
     JSRuntime *runtime() { return cx->runtime; }
 
     JSStackFrame *&fp() { return regs.fp; }
-    JSScript *script() { return fp()->script(); }
     mjit::JITScript *jit() { return fp()->jit(); }
+
+    /* Get the inner script/PC in case of inlining. */
+    inline JSScript *script();
+    inline jsbytecode *pc();
 };
 
 #ifdef JS_CPU_ARM
 // WARNING: Do not call this function directly from C(++) code because it is not ABI-compliant.
 extern "C" void JaegerStubVeneer(void);
 #endif
 
 namespace mjit {
@@ -295,56 +294,60 @@ typedef void * (JS_FASTCALL *VoidPtrStub
 #ifdef JS_POLYIC
 typedef void (JS_FASTCALL *VoidStubPIC)(VMFrame &, js::mjit::ic::PICInfo *);
 typedef void (JS_FASTCALL *VoidStubGetElemIC)(VMFrame &, js::mjit::ic::GetElementIC *);
 typedef void (JS_FASTCALL *VoidStubSetElemIC)(VMFrame &f, js::mjit::ic::SetElementIC *);
 #endif
 
 namespace mjit {
 
+struct InlineFrame;
 struct CallSite;
 
 struct NativeMapEntry {
     size_t          bcOff;  /* bytecode offset in script */
     void            *ncode; /* pointer to native code */
 };
 
 struct JITScript {
     typedef JSC::MacroAssemblerCodeRef CodeRef;
     CodeRef         code;       /* pool & code addresses */
 
+    JSScript        *script;
 
     void            *invokeEntry;       /* invoke address */
     void            *fastEntry;         /* cached entry, fastest */
     void            *arityCheckEntry;   /* arity check address */
 
     /*
     * This struct has several variable-length sections that are allocated at
      * the end:  nmaps, MICs, callICs, etc.  To save space -- worthwhile
      * because JITScripts are common -- we only record their lengths.  We can
      * find any of the sections from the lengths because we know their order.
      * Therefore, do not change the section ordering in finishThisUp() without
      * changing nMICs() et al as well.
      */
-    uint32          nNmapPairs:31;      /* The NativeMapEntrys are sorted by .bcOff.
+    uint32          nNmapPairs:30;      /* The NativeMapEntrys are sorted by .bcOff.
                                            .ncode values may not be NULL. */
     bool            singleStepMode:1;   /* compiled in "single step mode" */
+    bool            rejoinPoints:1;     /* compiled with recompilation rejoin points */
+    uint32          nInlineFrames;
+    uint32          nCallSites;
 #ifdef JS_MONOIC
     uint32          nGetGlobalNames;
     uint32          nSetGlobalNames;
     uint32          nCallICs;
     uint32          nEqualityICs;
     uint32          nTraceICs;
 #endif
 #ifdef JS_POLYIC
     uint32          nGetElems;
     uint32          nSetElems;
     uint32          nPICs;
 #endif
-    uint32          nCallSites;
 
     /*
      * Number of on-stack recompilations of this JIT script. Reset to zero if
      * the JIT script is destroyed if marked for recompilation with no active
      * frame on the stack.
      */
     uint32          recompilations;
 
@@ -362,29 +365,30 @@ struct JITScript {
 
 #ifdef JS_MONOIC
     // Additional ExecutablePools that IC stubs were generated into.
     typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
     ExecPoolVector execPools;
 #endif
 
     NativeMapEntry *nmap() const;
+    js::mjit::InlineFrame *inlineFrames() const;
+    js::mjit::CallSite *callSites() const;
 #ifdef JS_MONOIC
     ic::GetGlobalNameIC *getGlobalNames() const;
     ic::SetGlobalNameIC *setGlobalNames() const;
     ic::CallICInfo *callICs() const;
     ic::EqualityICInfo *equalityICs() const;
     ic::TraceICInfo *traceICs() const;
 #endif
 #ifdef JS_POLYIC
     ic::GetElementIC *getElems() const;
     ic::SetElementIC *setElems() const;
     ic::PICInfo     *pics() const;
 #endif
-    js::mjit::CallSite *callSites() const;
 
     ~JITScript();
 
     bool isValidCode(void *ptr) {
         char *jitcode = (char *)code.m_code.executableAddress();
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }
@@ -393,17 +397,19 @@ struct JITScript {
     void sweepCallICs(JSContext *cx, bool purgeAll);
     void purgeMICs();
     void purgePICs();
 
     size_t scriptDataSize();
 
     size_t mainCodeSize() { return code.m_size; } /* doesn't account for fragmentation */
 
-    jsbytecode *nativeToPC(void *returnAddress) const;
+    jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
+
+    void trace(JSTracer *trc);
 
   private:
     /* Helpers used to navigate the variable-length sections. */
     char *nmapSectionLimit() const;
     char *monoICSectionsLimit() const;
     char *polyICSectionsLimit() const;
 };
 
@@ -417,51 +423,96 @@ JSBool EnterMethodJIT(JSContext *cx, JSS
 JSBool JaegerShot(JSContext *cx);
 
 /* Drop into the middle of a method at an arbitrary point, and execute. */
 JSBool JaegerShotAtSafePoint(JSContext *cx, void *safePoint);
 
 enum CompileStatus
 {
     Compile_Okay,
-    Compile_Abort,
-    Compile_Overflow,
-    Compile_Error,
+    Compile_Abort,        // abort compilation
+    Compile_InlineAbort,  // inlining attempt failed, continue compilation
+    Compile_Retry,        // static overflow or failed inline, try to recompile
+    Compile_Error,        // OOM
     Compile_Skipped
 };
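Compile_Retry tells the caller that this attempt was abandoned for a reason a fresh pass can avoid (a static overflow, or an inlining decision that must be revisited), so compilation is driven by a retry loop rather than a single call. This is exactly the shape the recompiler uses in Retcon.cpp below:

    // Retry loop: Compile_Retry discards the attempt and compiles again.
    CompileStatus status = Compile_Retry;
    while (status == Compile_Retry) {
        mjit::Compiler cc(cx, fp, NULL, true);
        status = cc.compile();
    }
    if (status != Compile_Okay)
        return false;       // Compile_Error (OOM), Compile_Abort, etc.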
 
 void JS_FASTCALL
 ProfileStubCall(VMFrame &f);
 
 CompileStatus JS_NEVER_INLINE
 TryCompile(JSContext *cx, JSStackFrame *fp);
 
 void
 ReleaseScriptCode(JSContext *cx, JSScript *script);
 
+// Expand either the topmost stack frame or all stack frames inlined by the JIT.
+void
+ExpandInlineFrames(JSContext *cx, bool all);
+
+// Information about an unsynced slot within a frame.
+struct UnsyncedEntry
+{
+    // Slot being updated, in bytes from the start of the outer JSStackFrame.
+    uint32 offset;
+
+    bool copy : 1;
+    bool constant : 1;
+    bool knownType : 1;
+    union {
+        uint32 copiedOffset;
+        Value value;
+        JSValueType type;
+    } u;
+};
+
+// Information about a frame inlined during compilation.
+struct InlineFrame
+{
+    InlineFrame *parent;
+    jsbytecode *parentpc;
+    JSFunction *fun;
+
+    // Total distance between the start of the outer JSStackFrame and the start
+    // of this frame, in multiples of sizeof(Value).
+    uint32 depth;
+
+    // When making a call from an inline frame, only the slots owned by that
+    // frame are guaranteed to be synced. Slots owned by parents (including the
+    // this/callee/args of the call) may not be synced, and if they are
+    // unsynced the entries here describe how to remat them in case of
+    // recompilation. Note that since the arguments cannot be modified within
+    // the call without triggering recompilation, the contents of these parent
+    // slots are invariant within the call.
+    uint32 nUnsyncedEntries;
+    UnsyncedEntry *unsyncedEntries;
+};
+
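Because an inlined callee has no JSStackFrame of its own, everything needed to rebuild one lives here: depth gives the frame's position as a fixed offset from the outer frame, which is how expandInlineFrameChain() in Retcon.cpp places the expanded frame:

    // Derived from the expansion code below: locating an erased frame.
    JSStackFrame *
    inlineFrameAddress(JSStackFrame *outer, InlineFrame *inner)
    {
        return (JSStackFrame *) ((uint8 *) outer + sizeof(Value) * inner->depth);
    }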
 struct CallSite
 {
     uint32 codeOffset;
+    uint32 inlineIndex;
     uint32 pcOffset;
     size_t id;
 
     // The identifier is either the address of the stub function being called,
     // or one of the below magic identifiers. Each of these can appear at most
     // once per opcode.
 
     // Identifier for traps. Since traps can be removed, we make sure they carry over
     // from each compilation, and identify them with a single, canonical
     // ID. Hopefully a SpiderMonkey file won't have two billion source lines.
     static const size_t MAGIC_TRAP_ID = 0;
 
     // Identifier for the return site from a scripted call.
     static const size_t NCODE_RETURN_ID = 1;
 
-    void initialize(uint32 codeOffset, uint32 pcOffset, size_t id) {
+    void initialize(uint32 codeOffset, uint32 inlineIndex, uint32 pcOffset, size_t id) {
         this->codeOffset = codeOffset;
+        this->inlineIndex = inlineIndex;
         this->pcOffset = pcOffset;
         this->id = id;
     }
 
     bool isTrap() const {
         return id == MAGIC_TRAP_ID;
     }
 };
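The new inlineIndex field ties each call site to the inline frame it was compiled in, with uint32(-1) reserved for sites in the outermost script; the recompiler keys its patch lookups on that sentinel. The two cases, sketched with placeholder offsets and stub id:

    // A site in the outer script: no inline frame.
    site.initialize(codeOffset, uint32(-1), pcOffset, CallSite::NCODE_RETURN_ID);

    // A site compiled inside the i'th inlined frame of this JITScript.
    site.initialize(codeOffset, i, pcOffset, stubIdentifier);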
@@ -494,16 +545,32 @@ inline void * bsearch_nmap(NativeMapEntr
             continue;
         }
         return nmap[mid-1].ncode;
     }
 }
 
 } /* namespace mjit */
 
+inline JSScript *
+VMFrame::script()
+{
+    if (regs.inlined)
+        return jit()->inlineFrames()[regs.inlined->inlineIndex].fun->script();
+    return fp()->script();
+}
+
+inline jsbytecode *
+VMFrame::pc()
+{
+    if (regs.inlined)
+        return script()->code + regs.inlined->pcOffset;
+    return regs.pc;
+}
+
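Most of the mechanical churn in this patch is the switch from f.fp()->script() / f.regs.pc to these accessors, since a stub invoked from inlined code must observe the innermost script and PC rather than those of the physical frame. The before/after shape, for reference:

    // Before: correct only when nothing is inlined at this point.
    JSAtom *atom = f.fp()->script()->getAtom(GET_INDEX(f.regs.pc));

    // After: resolves through regs.inlined to the innermost script/pc.
    JSAtom *atom = f.script()->getAtom(GET_INDEX(f.pc()));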
 } /* namespace js */
 
 inline void *
 JSScript::maybeNativeCodeForPC(bool constructing, jsbytecode *pc)
 {
     js::mjit::JITScript *jit = getJIT(constructing);
     if (!jit)
         return NULL;
@@ -516,16 +583,18 @@ JSScript::nativeCodeForPC(bool construct
 {
     js::mjit::JITScript *jit = getJIT(constructing);
     JS_ASSERT(pc >= code && pc < code + length);
     void* native = bsearch_nmap(jit->nmap(), jit->nNmapPairs, (size_t)(pc - code));
     JS_ASSERT(native);
     return native;
 }
 
+extern "C" void JaegerTrampolineReturn();
+
 #if defined(_MSC_VER) || defined(_WIN64)
 extern "C" void *JaegerThrowpoline(js::VMFrame *vmFrame);
 #else
 extern "C" void JaegerThrowpoline();
 #endif
 extern "C" void InjectJaegerReturn();
 
 #endif /* jsjaeger_h__ */
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -79,17 +79,17 @@ PatchGetFallback(VMFrame &f, ic::GetGlob
     JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::GetGlobalName));
     repatch.relink(ic->slowPathCall, fptr);
 }
 
 void JS_FASTCALL
 ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
 {
     JSObject *obj = f.fp()->scopeChain().getGlobal();
-    JSAtom *atom = f.fp()->script()->getAtom(GET_INDEX(f.regs.pc));
+    JSAtom *atom = f.script()->getAtom(GET_INDEX(f.pc()));
     jsid id = ATOM_TO_JSID(atom);
 
     const Shape *shape = obj->nativeLookup(id);
     if (!shape ||
         !shape->hasDefaultGetterOrIsMethod() ||
         !shape->hasSlot())
     {
         if (shape)
@@ -110,41 +110,40 @@ ic::GetGlobalName(VMFrame &f, ic::GetGlo
     /* Do load anyway... this time. */
     stubs::GetGlobalName(f);
 }
 
 template <JSBool strict>
 static void JS_FASTCALL
 DisabledSetGlobal(VMFrame &f, ic::SetGlobalNameIC *ic)
 {
-    JSScript *script = f.fp()->script();
-    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
+    JSScript *script = f.script();
+    JSAtom *atom = script->getAtom(GET_INDEX(f.pc()));
     stubs::SetGlobalName<strict>(f, atom);
 }
 
 template void JS_FASTCALL DisabledSetGlobal<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
 template void JS_FASTCALL DisabledSetGlobal<false>(VMFrame &f, ic::SetGlobalNameIC *ic);
 
 template <JSBool strict>
 static void JS_FASTCALL
 DisabledSetGlobalNoCache(VMFrame &f, ic::SetGlobalNameIC *ic)
 {
-    JSScript *script = f.fp()->script();
-    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
+    JSScript *script = f.script();
+    JSAtom *atom = script->getAtom(GET_INDEX(f.pc()));
     stubs::SetGlobalNameNoCache<strict>(f, atom);
 }
 
 template void JS_FASTCALL DisabledSetGlobalNoCache<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
 template void JS_FASTCALL DisabledSetGlobalNoCache<false>(VMFrame &f, ic::SetGlobalNameIC *ic);
 
 static void
 PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic)
 {
-    JSScript *script = f.fp()->script();
-
+    JSScript *script = f.script();
     Repatcher repatch(f.jit());
     VoidStubSetGlobal stub = ic->usePropertyCache
                              ? STRICT_VARIANT(DisabledSetGlobal)
                              : STRICT_VARIANT(DisabledSetGlobalNoCache);
     JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
     repatch.relink(ic->slowPathCall, fptr);
 }
 
@@ -311,18 +310,18 @@ UpdateSetGlobalName(VMFrame &f, ic::SetG
 
     return Lookup_Cacheable;
 }
 
 void JS_FASTCALL
 ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
 {
     JSObject *obj = f.fp()->scopeChain().getGlobal();
-    JSScript *script = f.fp()->script();
-    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
+    JSScript *script = f.script();
+    JSAtom *atom = script->getAtom(GET_INDEX(f.pc()));
     const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
 
     LookupStatus status = UpdateSetGlobalName(f, ic, obj, shape);
     if (status == Lookup_Error)
         THROW();
 
     if (ic->usePropertyCache)
         STRICT_VARIANT(stubs::SetGlobalName)(f, atom);
@@ -338,16 +337,17 @@ class EqualityICLinker : public LinkerHe
     EqualityICLinker(Assembler &masm, VMFrame &f)
         : LinkerHelper(masm), f(f)
     { }
 
     bool init(JSContext *cx) {
         JSC::ExecutablePool *pool = LinkerHelper::init(cx);
         if (!pool)
             return false;
+        JS_ASSERT(!f.regs.inlined);
         JSScript *script = f.fp()->script();
         JITScript *jit = script->getJIT(f.fp()->isConstructing());
         if (!jit->execPools.append(pool)) {
             pool->release();
             js_ReportOutOfMemory(cx);
             return false;
         }
         return true;
@@ -647,26 +647,24 @@ class CallCompiler : public BaseCompiler
          */
         size_t offset = callingNew
                         ? offsetof(JSScript, jitArityCheckCtor)
                         : offsetof(JSScript, jitArityCheckNormal);
         masm.loadPtr(Address(t0, offset), t0);
         Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));
 
         /* Try and compile. On success we get back the nmap pointer. */
-        masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
         void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
         if (ic.frameSize.isStatic()) {
             masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
-            masm.fallibleVMCall(compilePtr, script->code, ic.frameSize.staticLocalSlots());
+            masm.fallibleVMCall(compilePtr, NULL, NULL, ic.frameSize.staticLocalSlots());
         } else {
             masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
-            masm.fallibleVMCall(compilePtr, script->code, -1);
+            masm.fallibleVMCall(compilePtr, NULL, NULL, -1);
         }
-        masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
 
         Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                               Registers::ReturnReg);
 
         masm.jump(Registers::ReturnReg);
 
         hasCode.linkTo(masm.label(), &masm);
 
@@ -784,16 +782,17 @@ class CallCompiler : public BaseCompiler
          * SplatApplyArgs has not been called, so we call it here before
          * potentially touching f.u.call.dynamicArgc.
          */
         Value *vp;
         if (ic.frameSize.isStatic()) {
             JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticLocalSlots());
             vp = f.regs.sp - (2 + ic.frameSize.staticArgc());
         } else {
+            JS_ASSERT(!f.regs.inlined);
             JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
             if (!ic::SplatApplyArgs(f))       /* updates regs.sp */
                 THROWV(true);
             vp = f.regs.sp - (2 + f.u.call.dynamicArgc);
         }
 
         JSObject *obj;
         if (!IsFunctionObject(*vp, &obj))
@@ -814,54 +813,62 @@ class CallCompiler : public BaseCompiler
         /* Don't touch the IC if the call triggered a recompilation. */
         if (f.jit()->recompilations != recompilations)
             return true;
 
         /* Right now, take slow-path for IC misses or multiple stubs. */
         if (ic.fastGuardedNative || ic.hasJsFunCheck)
             return true;
 
+        /* Don't generate native MICs within inlined frames; we can't recompile them yet. */
+        if (f.regs.inlined != NULL)
+            return true;
+
         /* Native MIC needs to warm up first. */
         if (!ic.hit) {
             ic.hit = true;
             return true;
         }
 
         /* Generate fast-path for calling this native. */
         Assembler masm;
 
         /* Guard on the function object identity, for now. */
         Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj));
 
         /* N.B. After this call, the frame will have a dynamic frame size. */
         if (ic.frameSize.isDynamic()) {
             masm.fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
-                                f.regs.pc, initialFrameDepth);
+                                f.regs.pc, NULL, initialFrameDepth);
         }
 
         Registers tempRegs(Registers::AvailRegs);
 #ifndef JS_CPU_X86
         tempRegs.takeReg(Registers::ArgReg0);
         tempRegs.takeReg(Registers::ArgReg1);
         tempRegs.takeReg(Registers::ArgReg2);
 #endif
         RegisterID t0 = tempRegs.takeAnyReg().reg();
 
         /* Store pc. */
         masm.storePtr(ImmPtr(cx->regs->pc),
-                       FrameAddress(offsetof(VMFrame, regs.pc)));
+                      FrameAddress(offsetof(VMFrame, regs.pc)));
+
+        /* Store inlined (NULL). */
+        masm.storePtr(ImmPtr(NULL),
+                      FrameAddress(offsetof(VMFrame, regs.inlined)));
 
         /* Store sp (if not already set by ic::SplatApplyArgs). */
         if (ic.frameSize.isStatic()) {
             uint32 spOffset = sizeof(JSStackFrame) + initialFrameDepth * sizeof(Value);
             masm.addPtr(Imm32(spOffset), JSFrameReg, t0);
             masm.storePtr(t0, FrameAddress(offsetof(VMFrame, regs.sp)));
         }
 
-        /* Store fp. */
+        /* Store fp. Note this doesn't need restoring afterwards, as we aren't inlining. */
         masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
 
         /* Grab cx. */
 #ifdef JS_CPU_X86
         RegisterID cxReg = tempRegs.takeAnyReg().reg();
 #else
         RegisterID cxReg = Registers::ArgReg0;
 #endif
@@ -1109,16 +1116,17 @@ BumpStack(VMFrame &f, uintN inc)
  * Additionally, the callee has already been checked to be the native apply.
  * All successful paths through SplatApplyArgs must set f.u.call.dynamicArgc
  * and f.regs.sp.
  */
 JSBool JS_FASTCALL
 ic::SplatApplyArgs(VMFrame &f)
 {
     JSContext *cx = f.cx;
+    JS_ASSERT(!f.regs.inlined);
     JS_ASSERT(GET_ARGC(f.regs.pc) == 2);
 
     /*
      * The lazyArgsObj flag indicates an optimized call |f.apply(x, arguments)|
      * where the args obj has not been created or pushed on the stack. Thus,
      * if lazyArgsObj is set, the stack for |f.apply(x, arguments)| is:
      *
      *  | Function.prototype.apply | f | x |
@@ -1302,22 +1310,16 @@ ic::GenerateArgumentCheckStub(VMFrame &f
     }
 
     for (unsigned i = 0; i < fun->nargs; i++) {
         Address address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, i));
         if (!GenerateTypeCheck(f.cx, masm, address, script->argTypes(i), &mismatches))
             return;
     }
 
-#ifdef DEBUG
-    void *ptr = JS_FUNC_TO_DATA_PTR(void *, stubs::AssertArgumentTypes);
-    masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
-    masm.fallibleVMCall(ptr, script->code, script->nfixed);
-#endif
-
     Jump done = masm.jump();
 
     LinkerHelper linker(masm);
     JSC::ExecutablePool *ep = linker.init(f.cx);
     if (!ep)
         return;
     jit->argsCheckPool = ep;
 
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -203,18 +203,18 @@ struct CallICInfo {
     };
 
     JSC::ExecutablePool *pools[Total_Pools];
 
     /* Used for rooting and reification. */
     JSObject *fastGuardedObject;
     JSObject *fastGuardedNative;
 
-    /* PC at the call site. */
-    jsbytecode *pc;
+    /* Return site for scripted calls made here, with PC and inlining state. */
+    CallSite *call;
 
     FrameSize frameSize;
 
     /* Function object identity guard. */
     JSC::CodeLocationDataLabelPtr funGuard;
 
     /* Starting point for all slow call paths. */
     JSC::CodeLocationLabel slowPathStart;
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -1502,17 +1502,17 @@ class ScopeNameCompiler : public PICStub
         JSObject *obj = getprop.obj;
         JSObject *holder = getprop.holder;
         const JSProperty *prop = getprop.prop;
 
         if (!prop) {
             /* Kludge to allow (typeof foo == "undefined") tests. */
             disable("property not found");
             if (pic.kind == ic::PICInfo::NAME) {
-                JSOp op2 = js_GetOpcode(cx, script, cx->regs->pc + JSOP_NAME_LENGTH);
+                JSOp op2 = js_GetOpcode(cx, f.script(), f.pc() + JSOP_NAME_LENGTH);
                 if (op2 == JSOP_TYPEOF) {
                     vp->setUndefined();
                     return true;
                 }
             }
             ReportAtomNotDefined(cx, atom);
             return false;
         }
@@ -1550,25 +1550,25 @@ class ScopeNameCompiler : public PICStub
                 return cx->compartment->types.checkPendingRecompiles(cx);
             if (shape->getterOp() == GetCallArg)
                 types = newscript->argTypes(slot);
             else if (shape->getterOp() == GetCallVar)
                 types = newscript->localTypes(slot);
         } else {
             JS_ASSERT(!getprop.obj->getParent());
             if (getprop.obj->getType()->unknownProperties) {
-                script->typeMonitorResult(cx, f.regs.pc, types::TYPE_UNKNOWN);
+                f.script()->typeMonitorResult(cx, f.pc(), types::TYPE_UNKNOWN);
                 return cx->compartment->types.checkPendingRecompiles(cx);
             }
             types = getprop.obj->getType()->getProperty(cx, shape->id, false);
             if (!types)
                 return cx->compartment->types.checkPendingRecompiles(cx);
         }
 
-        types->pushAllTypes(cx, script, f.regs.pc);
+        types->pushAllTypes(cx, f.script(), f.pc());
         return cx->compartment->types.checkPendingRecompiles(cx);
     }
 };
  
 class BindNameCompiler : public PICStubCompiler
 {
     JSObject *scopeChain;
     JSAtom *atom;
@@ -1781,17 +1781,17 @@ ic::GetProp(VMFrame &f, ic::PICInfo *pic
 
     /*
      * Ignore undefined reads for the 'prototype' property in constructors,
      * which will be at the start of the script and are never holes due to fun_resolve.
      * Any undefined value was explicitly stored here, and is known by inference.
      * :FIXME: looking under the usePropCache abstraction, which is only unset for
      * reads of the prototype.
      */
-    if (v.isUndefined() && usePropCache && !f.script()->typeMonitorUndefined(f.cx, f.regs.pc))
+    if (v.isUndefined() && usePropCache && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
         THROW();
 
     f.regs.sp[-1] = v;
 }
 
 void JS_FASTCALL
 ic::GetPropNoCache(VMFrame &f, ic::PICInfo *pic)
 {
@@ -1899,17 +1899,17 @@ ic::CallProp(VMFrame &f, ic::PICInfo *pi
     jsid id = ATOM_TO_JSID(pic->atom);
 
     JSObject *aobj = js_GetProtoIfDenseArray(&objv.toObject());
     Value rval;
 
     PropertyCacheEntry *entry;
     JSObject *obj2;
     JSAtom *atom;
-    JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
+    JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
             rval = obj2->nativeGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
@@ -1952,17 +1952,17 @@ ic::CallProp(VMFrame &f, ic::PICInfo *pi
 #if JS_HAS_NO_SUCH_METHOD
     if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) {
         regs.sp[-2].setString(pic->atom);
         if (!js_OnUnknownMethod(cx, regs.sp - 2))
             THROW();
     }
 #endif
 
-    if (regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, regs.pc))
+    if (regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
         THROW();
 
     if (f.jit()->recompilations != recompilations)
         return;
 
     GetPropCompiler cc(f, script, &objv.toObject(), *pic, pic->atom, DisabledCallPropIC);
     if (lval.isObject()) {
         if (pic->shouldUpdate(cx)) {
@@ -2005,17 +2005,17 @@ ic::XName(VMFrame &f, ic::PICInfo *pic)
     if (status == Lookup_Error)
         THROW();
 
     Value rval;
     if (!cc.retrieve(&rval))
         THROW();
     f.regs.sp[-1] = rval;
 
-    if (rval.isUndefined() && !script->typeMonitorUndefined(f.cx, f.regs.pc))
+    if (rval.isUndefined() && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
         THROW();
 }
 
 void JS_FASTCALL
 ic::Name(VMFrame &f, ic::PICInfo *pic)
 {
     JSScript *script = f.fp()->script();
 
@@ -2027,17 +2027,17 @@ ic::Name(VMFrame &f, ic::PICInfo *pic)
 
     Value rval;
     if (!cc.retrieve(&rval))
         THROW();
     f.regs.sp[0] = rval;
 
     if (status == Lookup_Cacheable && !cc.updateTypes())
         THROW();
-    if (!script->typeMonitorResult(f.cx, f.regs.pc, rval))
+    if (!f.script()->typeMonitorResult(f.cx, f.pc(), rval))
         THROW();
 }
 
 static void JS_FASTCALL
 DisabledBindNameIC(VMFrame &f, ic::PICInfo *pic)
 {
     stubs::BindName(f);
 }
@@ -2490,17 +2490,17 @@ ic::CallElement(VMFrame &f, ic::GetEleme
         LookupStatus status = ic->update(f, cx, thisObj, idval, id, &f.regs.sp[-2]);
         if (status != Lookup_Uncacheable) {
             if (status == Lookup_Error)
                 THROW();
 
             // If the result can be cached, the value was already retrieved.
             JS_ASSERT(!f.regs.sp[-2].isMagic());
             f.regs.sp[-1].setObject(*thisObj);
-            if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.regs.pc))
+            if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
                 THROW();
             return;
         }
     }
 
     /* Get or set the element. */
     if (!js_GetMethod(cx, thisObj, id, JSGET_NO_METHOD_BARRIER, &f.regs.sp[-2]))
         THROW();
@@ -2511,19 +2511,19 @@ ic::CallElement(VMFrame &f, ic::GetEleme
         f.regs.sp[-1].setObject(*thisObj);
         if (!js_OnUnknownMethod(cx, f.regs.sp - 2))
             THROW();
     } else
 #endif
     {
         f.regs.sp[-1] = thisv;
     }
-    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.regs.pc))
+    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
         THROW();
-    if (f.regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, f.regs.pc))
+    if (f.regs.sp[-2].isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
         THROW();
 }
 
 void JS_FASTCALL
 ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
 {
     JSContext *cx = f.cx;
 
@@ -2554,30 +2554,30 @@ ic::GetElement(VMFrame &f, ic::GetElemen
 #endif
         LookupStatus status = ic->update(f, cx, obj, idval, id, &f.regs.sp[-2]);
         if (status != Lookup_Uncacheable) {
             if (status == Lookup_Error)
                 THROW();
 
             // If the result can be cached, the value was already retrieved.
             JS_ASSERT(!f.regs.sp[-2].isMagic());
-            if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.regs.pc))
+            if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
                 THROW();
             return;
         }
     }
 
     if (!obj->getProperty(cx, id, &f.regs.sp[-2]))
         THROW();
-    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.regs.pc))
+    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
         THROW();
     if (f.regs.sp[-2].isUndefined()) {
         if (idval.isInt32())
             cx->addTypeProperty(obj->getType(), NULL, types::TYPE_UNDEFINED);
-        if (!f.script()->typeMonitorUndefined(cx, f.regs.pc))
+        if (!f.script()->typeMonitorUndefined(cx, f.pc()))
             THROW();
     }
 }
 
 #define APPLY_STRICTNESS(f, s)                          \
     (FunctionTemplateConditional(s, f<true>, f<false>))
 
 LookupStatus
--- a/js/src/methodjit/Retcon.cpp
+++ b/js/src/methodjit/Retcon.cpp
@@ -44,16 +44,17 @@
 #include "MethodJIT.h"
 #include "Compiler.h"
 #include "jsdbgapi.h"
 #include "jsnum.h"
 #include "assembler/assembler/LinkBuffer.h"
 #include "assembler/assembler/RepatchBuffer.h"
 
 #include "jscntxtinlines.h"
+#include "jsinterpinlines.h"
 
 using namespace js;
 using namespace js::mjit;
 
 namespace js {
 namespace mjit {
 
 AutoScriptRetrapper::~AutoScriptRetrapper()
@@ -87,20 +88,40 @@ Recompiler::findPatch(JITScript *jit, vo
             return result;
         }
     }
 
     JS_NOT_REACHED("failed to find call site");
     return PatchableAddress();
 }
 
+void *
+Recompiler::findCallSite(JITScript *jit, const CallSite &callSite)
+{
+    JS_ASSERT(callSite.inlineIndex == uint32(-1));
+
+    CallSite *callSites_ = jit->callSites();
+    for (uint32 i = 0; i < jit->nCallSites; i++) {
+        CallSite &cs = callSites_[i];
+        if (cs.inlineIndex == uint32(-1) &&
+            cs.pcOffset == callSite.pcOffset && cs.id == callSite.id) {
+            uint8* codeStart = (uint8 *)jit->code.m_code.executableAddress();
+            return codeStart + cs.codeOffset;
+        }
+    }
+
+    /* We have no idea where to patch up to. */
+    JS_NOT_REACHED("Call site vanished.");
+    return NULL;
+}
+
 void
-Recompiler::applyPatch(Compiler& c, PatchableAddress& toPatch)
+Recompiler::applyPatch(JITScript *jit, PatchableAddress& toPatch)
 {
-    void *result = c.findCallSite(toPatch.callSite);
+    void *result = findCallSite(jit, toPatch.callSite);
     JS_ASSERT(result);
     *toPatch.location = result;
 }
 
 Recompiler::PatchableNative
 Recompiler::stealNative(JITScript *jit, jsbytecode *pc)
 {
     /*
@@ -109,17 +130,18 @@ Recompiler::stealNative(JITScript *jit, 
      * preparing for the native call. Either way, we don't want to patch up the call,
      * but will instead steal the pool for the native IC so it doesn't get freed
      * with the old script, and patch up the jump at the end to point to the slow join
      * point in the new script.
      */
     unsigned i;
     ic::CallICInfo *callICs = jit->callICs();
     for (i = 0; i < jit->nCallICs; i++) {
-        if (callICs[i].pc == pc)
+        CallSite *call = callICs[i].call;
+        if (call->inlineIndex == uint32(-1) && call->pcOffset == uint32(pc - jit->script->code))
             break;
     }
     JS_ASSERT(i < jit->nCallICs);
     ic::CallICInfo &ic = callICs[i];
     JS_ASSERT(ic.fastGuardedNative);
 
     JSC::ExecutablePool *&pool = ic.pools[ic::CallICInfo::Pool_NativeStub];
 
@@ -153,17 +175,18 @@ void
 Recompiler::patchNative(JITScript *jit, PatchableNative &native)
 {
     if (!native.pc)
         return;
 
     unsigned i;
     ic::CallICInfo *callICs = jit->callICs();
     for (i = 0; i < jit->nCallICs; i++) {
-        if (callICs[i].pc == native.pc)
+        CallSite *call = callICs[i].call;
+        if (call->inlineIndex == uint32(-1) && call->pcOffset == uint32(native.pc - jit->script->code))
             break;
     }
     JS_ASSERT(i < jit->nCallICs);
     ic::CallICInfo &ic = callICs[i];
 
     ic.fastGuardedNative = native.guardedNative;
     ic.pools[ic::CallICInfo::Pool_NativeStub] = native.pool;
     ic.nativeStart = native.nativeStart;
@@ -188,16 +211,166 @@ Recompiler::patchNative(JITScript *jit, 
     {
         JSC::CodeLocationLabel joinPoint = ic.slowPathStart.labelAtOffset(ic.slowJoinOffset);
         uint8 *start = (uint8 *)native.nativeJump.executableAddress();
         JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
         repatch.relink(native.nativeJump, joinPoint);
     }
 }
 
+JSStackFrame *
+Recompiler::expandInlineFrameChain(JSContext *cx, JSStackFrame *outer, InlineFrame *inner)
+{
+    JSStackFrame *parent;
+    if (inner->parent)
+        parent = expandInlineFrameChain(cx, outer, inner->parent);
+    else
+        parent = outer;
+
+    JaegerSpew(JSpew_Recompile, "Expanding inline frame, %u unsynced entries\n",
+               inner->nUnsyncedEntries);
+
+    /*
+     * Remat any slots in the parent frame which may not be fully synced.
+     * Note that we need to do this *after* fixing the slots in parent frames,
+     * as the parent's own parents may need to be coherent so that, e.g.,
+     * copies of arguments pick up the correct value.
+     */
+    for (unsigned i = 0; i < inner->nUnsyncedEntries; i++) {
+        const UnsyncedEntry &e = inner->unsyncedEntries[i];
+        Value *slot = (Value *) ((uint8 *)outer + e.offset);
+        if (e.copy) {
+            Value *copied = (Value *) ((uint8 *)outer + e.u.copiedOffset);
+            *slot = *copied;
+        } else if (e.constant) {
+            *slot = e.u.value;
+        } else if (e.knownType) {
+            slot->boxNonDoubleFrom(e.u.type, (uint64 *) slot);
+        }
+    }
+
+    JSStackFrame *fp = (JSStackFrame *) ((uint8 *)outer + sizeof(Value) * inner->depth);
+    fp->initInlineFrame(inner->fun, parent, inner->parentpc);
+    uint32 pcOffset = inner->parentpc - parent->script()->code;
+
+    /*
+     * The erased frame needs JIT code with rejoin points added. Note that the
+     * outer frame does not need to have rejoin points, as it is definitely at
+     * an inline call and rejoin points are always added for such calls.
+     */
+    if (fp->jit() && !fp->jit()->rejoinPoints) {
+        mjit::Recompiler recompiler(cx, fp->script());
+        if (!recompiler.recompile())
+            return NULL; // FIXME
+    }
+    if (!fp->jit()) {
+        CompileStatus status = Compile_Retry;
+        while (status == Compile_Retry) {
+            mjit::Compiler cc(cx, fp, NULL, true);
+            status = cc.compile();
+        }
+        if (status != Compile_Okay)
+            return NULL; // FIXME
+    }
+
+    PatchableAddress patch;
+    patch.location = fp->addressOfNativeReturnAddress();
+    patch.callSite.initialize(0, uint32(-1), pcOffset, CallSite::NCODE_RETURN_ID);
+    applyPatch(parent->jit(), patch);
+
+    return fp;
+}
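The remat loop above applies three kinds of entries, matching the UnsyncedEntry union; factored out for readability (a restatement of the loop, not part of the patch):

    // Apply one UnsyncedEntry to the expanded frame's memory image.
    static void
    ApplyUnsyncedEntry(JSStackFrame *outer, const UnsyncedEntry &e)
    {
        Value *slot = (Value *) ((uint8 *) outer + e.offset);
        if (e.copy)
            *slot = *(Value *) ((uint8 *) outer + e.u.copiedOffset); // copy of another slot
        else if (e.constant)
            *slot = e.u.value;                                       // compile-time constant
        else if (e.knownType)
            slot->boxNonDoubleFrom(e.u.type, (uint64 *) slot);       // payload already in place
    }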
+
+/*
+ * Expand all inlined frames within fp per 'inlined' and update next and regs
+ * to refer to the new innermost frame.
+ */
+void
+Recompiler::expandInlineFrames(JSContext *cx, JSStackFrame *fp, mjit::CallSite *inlined,
+                               JSStackFrame *next, VMFrame *f)
+{
+    JS_ASSERT_IF(next, next->prev() == fp && next->prevInline() == inlined);
+
+    void **frameAddr = f->returnAddressLocation();
+    bool patchFrameReturn = (f->scratch != NATIVE_CALL_SCRATCH_VALUE && fp->jit()->isValidCode(*frameAddr));
+
+    InlineFrame *inner = &fp->jit()->inlineFrames()[inlined->inlineIndex];
+    jsbytecode *innerpc = inner->fun->script()->code + inlined->pcOffset;
+
+    JSStackFrame *innerfp = expandInlineFrameChain(cx, fp, inner);
+    JITScript *jit = innerfp->jit();
+
+    if (f->regs.fp == fp) {
+        JS_ASSERT(f->regs.inlined == inlined);
+        f->regs.fp = innerfp;
+        f->regs.pc = innerpc;
+        f->regs.inlined = NULL;
+    }
+
+    if (patchFrameReturn) {
+        PatchableAddress patch;
+        patch.location = frameAddr;
+        patch.callSite.initialize(0, uint32(-1), inlined->pcOffset, inlined->id);
+        applyPatch(jit, patch);
+    }
+
+    if (next) {
+        next->resetInlinePrev(innerfp, innerpc);
+        void **addr = next->addressOfNativeReturnAddress();
+        if (*addr != NULL && *addr != JaegerTrampolineReturn) {
+            PatchableAddress patch;
+            patch.location = addr;
+            patch.callSite.initialize(0, uint32(-1), inlined->pcOffset, CallSite::NCODE_RETURN_ID);
+            applyPatch(jit, patch);
+        }
+    }
+}
+
+void
+ExpandInlineFrames(JSContext *cx, bool all)
+{
+    if (!all) {
+        VMFrame *f = cx->compartment->jaegerCompartment->activeFrame();
+        if (f && f->regs.inlined && cx->fp() == f->fp())
+            mjit::Recompiler::expandInlineFrames(cx, f->fp(), f->regs.inlined, NULL, f);
+        return;
+    }
+
+    for (VMFrame *f = cx->compartment->jaegerCompartment->activeFrame();
+         f != NULL;
+         f = f->previous) {
+
+        if (f->regs.inlined) {
+            StackSegment *seg = cx->containingSegment(f->fp());
+            JSFrameRegs *regs = seg->getCurrentRegs();
+            if (regs->fp == f->fp()) {
+                JS_ASSERT(regs == &f->regs);
+                mjit::Recompiler::expandInlineFrames(cx, f->fp(), f->regs.inlined, NULL, f);
+            } else {
+                JSStackFrame *nnext = seg->computeNextFrame(f->fp());
+                mjit::Recompiler::expandInlineFrames(cx, f->fp(), f->regs.inlined, nnext, f);
+            }
+        }
+
+        JSStackFrame *end = f->entryfp->prev();
+        JSStackFrame *next = NULL;
+        for (JSStackFrame *fp = f->fp(); fp != end; fp = fp->prev()) {
+            mjit::CallSite *inlined;
+            fp->pc(cx, next, &inlined);
+            if (next && inlined) {
+                mjit::Recompiler::expandInlineFrames(cx, fp, inlined, next, f);
+                fp = next;
+                next = NULL;
+            } else {
+                next = fp;
+            }
+        }
+    }
+}
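ExpandInlineFrames() is the entry point other subsystems call before any operation that must see real JSStackFrames; given the files touched by this patch, the debugger API and iterator code are the likely callers. Passing all=false expands only the innermost active frame. Hedged usage sketch:

    // Sketch: materialize every inlined frame before walking the stack,
    // so that fp->prev() chains are complete for the debugger.
    mjit::ExpandInlineFrames(cx, /* all = */ true);
    for (JSStackFrame *fp = cx->fp(); fp; fp = fp->prev()) {
        // inspect fp ...
    }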
+
 Recompiler::Recompiler(JSContext *cx, JSScript *script)
   : cx(cx), script(script)
 {    
 }
 
 /*
  * Recompilation can be triggered either by the debugger (turning debug mode on for
  * a script or setting/clearing a trap), or by dynamic changes in type information
@@ -261,48 +434,64 @@ Recompiler::recompile()
     for (VMFrame *f = script->compartment->jaegerCompartment->activeFrame();
          f != NULL;
          f = f->previous) {
 
         // Scan all frames owned by this VMFrame.
         JSStackFrame *end = f->entryfp->prev();
         JSStackFrame *next = NULL;
         for (JSStackFrame *fp = f->fp(); fp != end; fp = fp->prev()) {
-            if (fp->script() == script) {
-                // Remember every frame for each type of JIT'd code.
-                PatchableFrame frame;
-                frame.fp = fp;
-                frame.pc = fp->pc(cx, next);
-                if (fp->isConstructing() && !ctorFrames.append(frame))
-                    return false;
-                if (!fp->isConstructing() && !normalFrames.append(frame))
-                    return false;
+            if (fp->script() != script) {
+                next = fp;
+                continue;
             }
 
-            // check for a scripted call returning into the recompiled script.
-            void **addr = fp->addressOfNativeReturnAddress();
-            if (script->jitCtor && script->jitCtor->isValidCode(*addr)) {
-                if (!ctorPatches.append(findPatch(script->jitCtor, addr)))
-                    return false;
-            } else if (script->jitNormal && script->jitNormal->isValidCode(*addr)) {
-                if (!normalPatches.append(findPatch(script->jitNormal, addr)))
-                    return false;
+            // Remember every frame for each type of JIT'd code.
+            PatchableFrame frame;
+            frame.fp = fp;
+            frame.pc = fp->pc(cx, next);
+            if (fp->isConstructing() && !ctorFrames.append(frame))
+                return false;
+            if (!fp->isConstructing() && !normalFrames.append(frame))
+                return false;
+
+            if (next) {
+                // Check for a scripted call returning into the recompiled script.
+                // This misses scanning the entry fp, which cannot return directly
+                // into JIT code.
+                void **addr = next->addressOfNativeReturnAddress();
+
+                if (!*addr) {
+                    // next is an interpreted frame.
+                } else if (*addr == JaegerTrampolineReturn) {
+                    // next entered from the interpreter.
+                } else if (fp->isConstructing()) {
+                    JS_ASSERT(script->jitCtor && script->jitCtor->isValidCode(*addr));
+                    if (!ctorPatches.append(findPatch(script->jitCtor, addr)))
+                        return false;
+                } else {
+                    JS_ASSERT(script->jitNormal && script->jitNormal->isValidCode(*addr));
+                    if (!normalPatches.append(findPatch(script->jitNormal, addr)))
+                        return false;
+                }
             }
 
             next = fp;
         }
 
+        /* Check if the VMFrame returns directly into the recompiled script. */
+        JSStackFrame *fp = f->fp();
         void **addr = f->returnAddressLocation();
-        if (f->fp()->script() == script && f->scratch == NATIVE_CALL_SCRATCH_VALUE) {
+        if (f->scratch == NATIVE_CALL_SCRATCH_VALUE) {
             // Native call.
-            if (f->fp()->isConstructing()) {
-                if (!ctorNatives.append(stealNative(script->jitCtor, f->fp()->pc(cx, NULL))))
+            if (fp->script() == script && fp->isConstructing()) {
+                if (!ctorNatives.append(stealNative(script->jitCtor, fp->pc(cx, NULL))))
                     return false;
-            } else {
-                if (!normalNatives.append(stealNative(script->jitNormal, f->fp()->pc(cx, NULL))))
+            } else if (fp->script() == script) {
+                if (!normalNatives.append(stealNative(script->jitNormal, fp->pc(cx, NULL))))
                     return false;
             }
         } else if (script->jitCtor && script->jitCtor->isValidCode(*addr)) {
             if (!ctorPatches.append(findPatch(script->jitCtor, addr)))
                 return false;
         } else if (script->jitNormal && script->jitNormal->isValidCode(*addr)) {
             if (!normalPatches.append(findPatch(script->jitNormal, addr)))
                 return false;
@@ -370,29 +559,34 @@ Recompiler::recompile(Vector<PatchableFr
                       Vector<PatchableNative> &natives,
                       uint32 recompilations)
 {
     JSStackFrame *fp = frames[0].fp;
 
     JaegerSpew(JSpew_Recompile, "On stack recompilation, %u patches, %u natives\n",
                patches.length(), natives.length());
 
-    Compiler c(cx, fp, &frames);
-    if (!c.loadOldTraps(sites))
-        return false;
-    if (c.compile() != Compile_Okay)
+    CompileStatus status = Compile_Retry;
+    while (status == Compile_Retry) {
+        Compiler cc(cx, fp, &frames, true);
+        if (!cc.loadOldTraps(sites))
+            return false;
+        status = cc.compile();
+    }
+    if (status != Compile_Okay)
         return false;
 
-    script->getJIT(fp->isConstructing())->recompilations = recompilations + 1;
+    JITScript *jit = script->getJIT(fp->isConstructing());
+    jit->recompilations = recompilations + 1;
 
     /* Perform the earlier scanned patches */
     for (uint32 i = 0; i < patches.length(); i++)
-        applyPatch(c, patches[i]);
+        applyPatch(jit, patches[i]);
     for (uint32 i = 0; i < natives.length(); i++)
-        patchNative(script->getJIT(fp->isConstructing()), natives[i]);
+        patchNative(jit, natives[i]);
 
     return true;
 }
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* JS_METHODJIT */
--- a/js/src/methodjit/Retcon.h
+++ b/js/src/methodjit/Retcon.h
@@ -97,29 +97,38 @@ class Recompiler {
         JSC::CodeLocationJump nativeJump;
     };
 
 public:
     Recompiler(JSContext *cx, JSScript *script);
     
     bool recompile();
 
+    static void
+    expandInlineFrames(JSContext *cx, JSStackFrame *fp, mjit::CallSite *inlined,
+                       JSStackFrame *next, VMFrame *f);
+
 private:
     JSContext *cx;
     JSScript *script;
-    
-    PatchableAddress findPatch(JITScript *jit, void **location);
-    void applyPatch(Compiler& c, PatchableAddress& toPatch);
+
+    static PatchableAddress findPatch(JITScript *jit, void **location);
+    static void * findCallSite(JITScript *jit, const CallSite &callSite);
+
+    static void applyPatch(JITScript *jit, PatchableAddress& toPatch);
     PatchableNative stealNative(JITScript *jit, jsbytecode *pc);
     void patchNative(JITScript *jit, PatchableNative &native);
     bool recompile(Vector<PatchableFrame> &frames,
                    Vector<PatchableAddress> &patches, Vector<CallSite> &sites,
                    Vector<PatchableNative> &natives,
                    uint32 recompilations);
 
+    static JSStackFrame *
+    expandInlineFrameChain(JSContext *cx, JSStackFrame *outer, InlineFrame *inner);
+
     /* Detach jit from any IC callers and save any traps to sites. */
     bool cleanup(JITScript *jit, Vector<CallSite> *sites, uint32 *recompilations);
 };
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -84,17 +84,17 @@ stubs::BindName(VMFrame &f)
 
     /* The fast path should have caught this. See the comment in the interpreter. */
     JS_ASSERT(f.fp()->scopeChain().getParent());
 
     JSAtom *atom;
     JSObject *obj2;
     JSContext *cx = f.cx;
     JSObject *obj = &f.fp()->scopeChain();
-    JS_PROPERTY_CACHE(cx).test(cx, f.regs.pc, obj, obj2, entry, atom);
+    JS_PROPERTY_CACHE(cx).test(cx, f.pc(), obj, obj2, entry, atom);
     if (atom) {
         jsid id = ATOM_TO_JSID(atom);
         obj = js_FindIdentifierBase(cx, &f.fp()->scopeChain(), id);
         if (!obj)
             THROW();
     }
     f.regs.sp++;
     f.regs.sp[-1].setObject(*obj);
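The f.regs.pc to f.pc() changes throughout this file follow from call inlining: when a stub is entered from inlined code, f.regs describes the outermost machine frame, and f.pc() (presumably derived from f.regs.inlined) recovers the pc of the innermost inlined frame. A hedged sketch of a call whose body a stub may execute inside after inlining; names are illustrative:

function getX(o) { return o.x; }  // small enough to inline into hot callers
function hot(o) {
  var a = 0;
  for (var i = 0; i < 1000; i++)
    a += getX(o);                 // a stub taken here must report getX's pc
  return a;
}
assertEq(hot({x: 2}), 2000);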
@@ -150,17 +150,17 @@ stubs::SetName(VMFrame &f, JSAtom *origA
          *
          * in a frequently executed method or loop body, where p will
          * (possibly after the first iteration) always exist in native
          * object o.
          */
         PropertyCacheEntry *entry;
         JSObject *obj2;
         JSAtom *atom;
-        if (cache->testForSet(cx, f.regs.pc, obj, &entry, &obj2, &atom)) {
+        if (cache->testForSet(cx, f.pc(), obj, &entry, &obj2, &atom)) {
             /*
              * Property cache hit, only partially confirmed by testForSet. We
             * know that the entry applies to the current pc and that obj's shape
              * matches.
              *
              * The entry predicts either a new property to be added directly to
              * obj by this set, or on an existing "own" property, or on a
              * prototype property that has a setter.
@@ -249,17 +249,17 @@ stubs::SetName(VMFrame &f, JSAtom *origA
             atom = origAtom;
         } else {
             JS_ASSERT(atom);
         }
 
         jsid id = ATOM_TO_JSID(atom);
         if (entry && JS_LIKELY(!obj->getOps()->setProperty)) {
             uintN defineHow;
-            JSOp op = JSOp(*f.regs.pc);
+            JSOp op = JSOp(*f.pc());
             if (op == JSOP_SETMETHOD)
                 defineHow = JSDNP_CACHE_RESULT | JSDNP_SET_METHOD;
             else if (op == JSOP_SETNAME)
                 defineHow = JSDNP_CACHE_RESULT | JSDNP_UNQUALIFIED;
             else
                 defineHow = JSDNP_CACHE_RESULT;
             if (!js_SetPropertyHelper(cx, obj, id, defineHow, &rval, strict))
                 THROW();
@@ -346,17 +346,17 @@ NameOp(VMFrame &f, JSObject *obj, bool m
     JSContext *cx = f.cx;
 
     const Shape *shape;
     Value rval;
 
     PropertyCacheEntry *entry;
     JSObject *obj2;
     JSAtom *atom;
-    JS_PROPERTY_CACHE(cx).test(cx, f.regs.pc, obj, obj2, entry, atom);
+    JS_PROPERTY_CACHE(cx).test(cx, f.pc(), obj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uintN slot = entry->vword.toSlot();
             rval = obj2->nativeGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
@@ -368,21 +368,21 @@ NameOp(VMFrame &f, JSObject *obj, bool m
     } else {
         jsid id;
         id = ATOM_TO_JSID(atom);
         JSProperty *prop;
         if (!js_FindPropertyHelper(cx, id, true, &obj, &obj2, &prop))
             return NULL;
         if (!prop) {
             /* Kludge to allow (typeof foo == "undefined") tests. */
-            JSOp op2 = js_GetOpcode(cx, f.fp()->script(), f.regs.pc + JSOP_NAME_LENGTH);
+            JSOp op2 = js_GetOpcode(cx, f.script(), f.pc() + JSOP_NAME_LENGTH);
             if (op2 == JSOP_TYPEOF) {
                 f.regs.sp++;
                 f.regs.sp[-1].setUndefined();
-                if (!f.script()->typeMonitorUndefined(cx, f.regs.pc))
+                if (!f.script()->typeMonitorUndefined(cx, f.pc()))
                     return NULL;
                 return obj;
             }
             ReportAtomNotDefined(cx, atom);
             return NULL;
         }
 
         /* Take the slow path if prop was not found in a native object. */
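The kludge above peeks at the next opcode so that reading an unbound name stays legal when typeof immediately consumes the result, as in:

// Reading an unbound name under typeof must not throw a ReferenceError.
assertEq(typeof someNameThatIsNeverDefined, "undefined");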
@@ -394,20 +394,20 @@ NameOp(VMFrame &f, JSObject *obj, bool m
             JSObject *normalized = obj;
             if (normalized->getClass() == &js_WithClass && !shape->hasDefaultGetter())
                 normalized = js_UnwrapWithObject(cx, normalized);
             NATIVE_GET(cx, normalized, obj2, shape, JSGET_METHOD_BARRIER, &rval, return NULL);
         }
     }
 
     if (markresult) {
-        if (!f.script()->typeMonitorResult(cx, f.regs.pc, rval))
+        if (!f.script()->typeMonitorResult(cx, f.pc(), rval))
             return NULL;
     } else if (rval.isUndefined()) {
-        if (!f.script()->typeMonitorUndefined(cx, f.regs.pc))
+        if (!f.script()->typeMonitorUndefined(cx, f.pc()))
             return NULL;
     }
 
     *f.regs.sp++ = rval;
 
     if (callname)
         PushImplicitThis(f, obj, rval);
 
@@ -493,25 +493,25 @@ stubs::GetElem(VMFrame &f)
                 THROW();
         }
     }
 
     if (!obj->getProperty(cx, id, &rval))
         THROW();
     copyFrom = &rval;
 
-    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, regs.pc))
+    if (!JSID_IS_INT(id) && !f.script()->typeMonitorUnknown(cx, f.pc()))
         THROW();
 
   end_getelem:
     f.regs.sp[-2] = *copyFrom;
 
     if (copyFrom->isUndefined()) {
         cx->addTypeProperty(obj->getType(), NULL, TYPE_UNDEFINED);
-        if (!f.script()->typeMonitorUndefined(cx, regs.pc))
+        if (!f.script()->typeMonitorUndefined(cx, f.pc()))
             THROW();
     }
 }
 
 static inline bool
 FetchElementId(VMFrame &f, JSObject *obj, const Value &idval, jsid &id, Value *vp)
 {
     int32_t i_;
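The monitoring calls in GetElem above cover the two imprecise cases: non-integer ids, whose result type is unpredictable, and undefined results from holes or missing properties. For example:

var o = {foo: 3};
assertEq(o["foo"], 3);      // string id: monitored as unknown
var a = [1];
assertEq(a[5], undefined);  // missing element: monitored as undefined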
@@ -550,17 +550,17 @@ stubs::CallElem(VMFrame &f)
         if (!js_OnUnknownMethod(cx, regs.sp - 2))
             THROW();
     } else
 #endif
     {
         regs.sp[-1] = thisv;
     }
     if ((regs.sp[-2].isUndefined() || !JSID_IS_INT(id)) &&
-        !f.script()->typeMonitorUnknown(cx, regs.pc)) {
+        !f.script()->typeMonitorUnknown(cx, f.pc())) {
         THROW();
     }
 }
 
 template<JSBool strict>
 void JS_FASTCALL
 stubs::SetElem(VMFrame &f)
 {
@@ -713,17 +713,17 @@ stubs::Ursh(VMFrame &f)
     if (!ValueToECMAUint32(f.cx, f.regs.sp[-2], &u))
         THROW();
     int32_t j;
     if (!ValueToECMAInt32(f.cx, f.regs.sp[-1], &j))
         THROW();
 
     u >>= (j & 31);
 
-	if (!f.regs.sp[-2].setNumber(uint32(u)) && !f.script()->typeMonitorOverflow(f.cx, f.regs.pc))
+    if (!f.regs.sp[-2].setNumber(uint32(u)) && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
         THROW();
 }
 
 template<JSBool strict>
 void JS_FASTCALL
 stubs::DefFun(VMFrame &f, JSFunction *fun)
 {
     JSObject *obj2;
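In Ursh above, setNumber reports whether the value fit in an int32; when it did not (the value was stored as a double), the overflow is reported to type inference. The >>> operator produces an unsigned 32-bit result, so this can happen even for integer inputs:

assertEq(8 >>> 1, 4);            // fits in int32
assertEq(-1 >>> 0, 4294967295);  // unsigned result too big for int32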
@@ -1044,47 +1044,47 @@ DefaultValue(VMFrame &f, JSType hint, Va
 }
 
 static inline bool
 MonitorArithmeticOverflow(VMFrame &f, const Value &v)
 {
     JSContext *cx = f.cx;
 
     JS_ASSERT(v.isDouble());
-    if (!f.script()->typeMonitorOverflow(cx, f.regs.pc))
+    if (!f.script()->typeMonitorOverflow(cx, f.pc()))
         return false;
 
     /*
     * Monitoring the overflow is not enough for fused INC operations on
     * NAME/PROP, as modifying the pushed stack types does not affect the
     * object itself. The method JIT fuses these opcodes (unlike the
     * interpreter, which has a case to modify the object directly on
     * overflow), so we have to detect that the current operation is fused
     * and determine the object to update; it must be synced and at a
     * particular slot. This is a gross hack.
      */
 
     Value ov;
 
-    switch (JSOp(*f.regs.pc)) {
+    switch (JSOp(*f.pc())) {
       case JSOP_INCPROP:
       case JSOP_DECPROP:
       case JSOP_PROPINC:
       case JSOP_PROPDEC:
         ov = f.regs.sp[-4];
         break;
 
       default:
         return true;
     }
 
     JSObject *obj = ValueToObject(cx, &ov);
     if (!obj)
         return true;
     JSAtom *atom;
-    GET_ATOM_FROM_BYTECODE(f.script(), f.regs.pc, 0, atom);
+    GET_ATOM_FROM_BYTECODE(f.script(), f.pc(), 0, atom);
 
     return cx->addTypePropertyId(obj->getType(), ATOM_TO_JSID(atom), TYPE_DOUBLE);
 }
 
 void JS_FASTCALL
 stubs::Add(VMFrame &f)
 {
     JSContext *cx = f.cx;
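A concrete case of the fused-INC hack described above: a PROPINC whose result leaves int32 range must widen the property's type on the object itself, not just the type of the pushed value:

var o = {x: 2147483647};    // INT32_MAX
o.x++;
assertEq(o.x, 2147483648);  // o.x's type set must now include double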
@@ -1104,17 +1104,17 @@ stubs::Add(VMFrame &f)
     } else
 #if JS_HAS_XML_SUPPORT
     if (lval.isObject() && lval.toObject().isXML() &&
         rval.isObject() && rval.toObject().isXML()) {
         if (!js_ConcatenateXML(cx, &lval.toObject(), &rval.toObject(), &rval))
             THROW();
         regs.sp--;
         regs.sp[-1] = rval;
-        if (!f.script()->typeMonitorUnknown(cx, regs.pc))
+        if (!f.script()->typeMonitorUnknown(cx, f.pc()))
             THROW();
     } else
 #endif
     {
         /* These can convert lval/rval to strings. */
         bool lIsObject, rIsObject;
         if ((lIsObject = lval.isObject()) && !DefaultValue(f, JSTYPE_VOID, lval, -2))
             THROW();
@@ -1132,17 +1132,17 @@ stubs::Add(VMFrame &f)
             if (rIsString) {
                 rstr = rval.toString();
             } else {
                 rstr = js_ValueToString(cx, rval);
                 if (!rstr)
                     THROW();
                 regs.sp[-1].setString(rstr);
             }
-            if ((lIsObject || rIsObject) && !f.script()->typeMonitorString(cx, regs.pc))
+            if ((lIsObject || rIsObject) && !f.script()->typeMonitorString(cx, f.pc()))
                 THROW();
             goto string_concat;
 
         } else {
             double l, r;
             if (!ValueToNumber(cx, lval, &l) || !ValueToNumber(cx, rval, &r))
                 THROW();
             l += r;
@@ -1184,17 +1184,17 @@ stubs::Mul(VMFrame &f)
     JSContext *cx = f.cx;
     JSFrameRegs &regs = f.regs;
     double d1, d2;
     if (!ValueToNumber(cx, regs.sp[-2], &d1) ||
         !ValueToNumber(cx, regs.sp[-1], &d2)) {
         THROW();
     }
     double d = d1 * d2;
-    if (!regs.sp[-2].setNumber(d) && !f.script()->typeMonitorOverflow(cx, f.regs.pc))
+    if (!regs.sp[-2].setNumber(d) && !f.script()->typeMonitorOverflow(cx, f.pc()))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::Div(VMFrame &f)
 {
     JSContext *cx = f.cx;
     JSRuntime *rt = cx->runtime;
@@ -1215,21 +1215,21 @@ stubs::Div(VMFrame &f)
 #endif
         if (d1 == 0 || JSDOUBLE_IS_NaN(d1))
             vp = &rt->NaNValue;
         else if (JSDOUBLE_IS_NEG(d1) != JSDOUBLE_IS_NEG(d2))
             vp = &rt->negativeInfinityValue;
         else
             vp = &rt->positiveInfinityValue;
         regs.sp[-2] = *vp;
-        if (!f.script()->typeMonitorOverflow(cx, f.regs.pc))
+        if (!f.script()->typeMonitorOverflow(cx, f.pc()))
             THROW();
     } else {
         d1 /= d2;
-        if (!regs.sp[-2].setNumber(d1) && !f.script()->typeMonitorOverflow(cx, f.regs.pc))
+        if (!regs.sp[-2].setNumber(d1) && !f.script()->typeMonitorOverflow(cx, f.pc()))
             THROW();
     }
 }
 
 void JS_FASTCALL
 stubs::Mod(VMFrame &f)
 {
     JSContext *cx = f.cx;
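The special cases in stubs::Div above, and the zero-divisor case in stubs::Mod below, implement the JS semantics for degenerate divisors; every such result is a double, so each is reported via typeMonitorOverflow:

assertEq(1 / 0, Infinity);
assertEq(-1 / 0, -Infinity);
assertEq(0 / 0, NaN);  // assertEq uses SameValue, so NaN matches NaN
assertEq(5 % 0, NaN);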
@@ -1249,28 +1249,28 @@ stubs::Mod(VMFrame &f)
             THROW();
         }
         if (d2 == 0) {
             regs.sp[-2].setDouble(js_NaN);
         } else {
             d1 = js_fmod(d1, d2);
             regs.sp[-2].setDouble(d1);
         }
-        if (!f.script()->typeMonitorOverflow(cx, f.regs.pc))
+        if (!f.script()->typeMonitorOverflow(cx, f.pc()))
             THROW();
     }
 }
 
 void JS_FASTCALL
 stubs::Debugger(VMFrame &f, jsbytecode *pc)
 {
     JSDebuggerHandler handler = f.cx->debugHooks->debuggerHandler;
     if (handler) {
         Value rval;
-        switch (handler(f.cx, f.cx->fp()->script(), pc, Jsvalify(&rval),
+        switch (handler(f.cx, f.script(), f.pc(), Jsvalify(&rval),
                         f.cx->debugHooks->debuggerHandlerData)) {
           case JSTRAP_THROW:
             f.cx->setPendingException(rval);
             THROW();
 
           case JSTRAP_RETURN:
             f.cx->clearPendingException();
             f.cx->fp()->setReturnValue(rval);
@@ -1299,37 +1299,36 @@ stubs::Interrupt(VMFrame &f, jsbytecode 
     if (!js_HandleExecutionInterrupt(f.cx))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::Trap(VMFrame &f, uint32 trapTypes)
 {
     Value rval;
-    jsbytecode *pc = f.cx->regs->pc;
 
     /*
      * Trap may be called for a single-step interrupt trap and/or a
      * regular trap. Try the single-step first, and if it lets control
      * flow through or does not exist, do the regular trap.
      */
     JSTrapStatus result = JSTRAP_CONTINUE;
     if (trapTypes & JSTRAP_SINGLESTEP) {
         /*
         * Single-step mode may be paused without recompiling by
         * setting the interruptHook to NULL.
          */
         JSInterruptHook hook = f.cx->debugHooks->interruptHook;
         if (hook)
-            result = hook(f.cx, f.cx->fp()->script(), pc, Jsvalify(&rval),
+            result = hook(f.cx, f.script(), f.pc(), Jsvalify(&rval),
                           f.cx->debugHooks->interruptHookData);
     }
 
     if (result == JSTRAP_CONTINUE && (trapTypes & JSTRAP_TRAP))
-        result = JS_HandleTrap(f.cx, f.cx->fp()->script(), pc, Jsvalify(&rval));
+        result = JS_HandleTrap(f.cx, f.script(), f.pc(), Jsvalify(&rval));
 
     switch (result) {
       case JSTRAP_THROW:
         f.cx->setPendingException(rval);
         THROW();
 
       case JSTRAP_RETURN:
         f.cx->clearPendingException();
@@ -1350,29 +1349,39 @@ stubs::Trap(VMFrame &f, uint32 trapTypes
       default:
         break;
     }
 }
 
 void JS_FASTCALL
 stubs::This(VMFrame &f)
 {
+    /*
+     * We can't yet inline scripts which need to compute their 'this' object
+     * from a primitive; the frame we are computing 'this' for does not exist yet.
+     */
+    if (f.regs.inlined) {
+        JSFunction *fun = f.jit()->inlineFrames()[f.regs.inlined->inlineIndex].fun;
+        if (!f.cx->markTypeFunctionUninlineable(fun->getType()))
+            THROW();
+    }
+
     if (!f.fp()->computeThis(f.cx))
         THROW();
     f.regs.sp[-1] = f.fp()->thisValue();
 }
 
 void JS_FASTCALL
 stubs::Neg(VMFrame &f)
 {
     double d;
     if (!ValueToNumber(f.cx, f.regs.sp[-1], &d))
         THROW();
     d = -d;
-    if (!f.regs.sp[-1].setNumber(d) && !f.script()->typeMonitorOverflow(f.cx, f.regs.pc))
+    if (!f.regs.sp[-1].setNumber(d) && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
         THROW();
 }
 
 JSObject * JS_FASTCALL
 stubs::NewInitArray(VMFrame &f, uint32 count)
 {
     JSObject *obj = NewDenseAllocatedArray(f.cx, count);
     if (!obj)
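stubs::This above marks a function uninlineable when its 'this' is a primitive that still needs boxing, since the inlined frame that would hold the boxed value does not exist yet. A sketch of such a call, assuming the inliner would otherwise consider the callee:

String.prototype.self = function () { return this; };
var t = "abc".self();          // 'this' must be boxed to a String object
assertEq(typeof t, "object");
assertEq(String(t), "abc");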
@@ -1449,22 +1458,22 @@ stubs::InitElem(VMFrame &f, uint32 last)
             THROW();
     }
 }
 
 void JS_FASTCALL
 stubs::GetUpvar(VMFrame &f, uint32 ck)
 {
     /* :FIXME: We can do better; this stub isn't needed. */
-    uint32 staticLevel = f.fp()->script()->staticLevel;
+    uint32 staticLevel = f.script()->staticLevel;
     UpvarCookie cookie;
     cookie.fromInteger(ck);
     f.regs.sp[0] = GetUpvar(f.cx, staticLevel, cookie);
 
-    if (f.regs.sp[0].isUndefined() && !f.script()->typeMonitorUndefined(f.cx, f.regs.pc))
+    if (f.regs.sp[0].isUndefined() && !f.script()->typeMonitorUndefined(f.cx, f.pc()))
         THROW();
 }
 
 JSObject * JS_FASTCALL
 stubs::DefLocalFun(VMFrame &f, JSFunction *fun)
 {
     /*
      * Define a local function (i.e., one nested at the top level of another
@@ -1527,30 +1536,30 @@ stubs::RegExp(VMFrame &f, JSObject *rege
     return obj;
 }
 
 JSObject * JS_FASTCALL
 stubs::LambdaForInit(VMFrame &f, JSFunction *fun)
 {
     JSObject *obj = FUN_OBJECT(fun);
     if (FUN_NULL_CLOSURE(fun) && obj->getParent() == &f.fp()->scopeChain()) {
-        fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(f.regs.pc)));
+        fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(f.pc())));
         return obj;
     }
     return Lambda(f, fun);
 }
 
 JSObject * JS_FASTCALL
 stubs::LambdaForSet(VMFrame &f, JSFunction *fun)
 {
     JSObject *obj = FUN_OBJECT(fun);
     if (FUN_NULL_CLOSURE(fun) && obj->getParent() == &f.fp()->scopeChain()) {
         const Value &lref = f.regs.sp[-1];
         if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) {
-            fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(f.regs.pc)));
+            fun->setMethodAtom(f.script()->getAtom(GET_SLOTNO(f.pc())));
             return obj;
         }
     }
     return Lambda(f, fun);
 }
 
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun)
@@ -1559,17 +1568,17 @@ stubs::LambdaJoinableForCall(VMFrame &f,
     if (FUN_NULL_CLOSURE(fun) && obj->getParent() == &f.fp()->scopeChain()) {
         /*
         * Array.prototype.sort and String.prototype.replace are
         * optimized as special forms. We know that they
         * won't leak the joined function object in obj, so
         * we don't need to clone that compiler-created function
         * object for identity/mutation reasons.
          */
-        int iargc = GET_ARGC(f.regs.pc);
+        int iargc = GET_ARGC(f.pc());
 
         /*
          * Note that we have not yet pushed obj as the final argument,
          * so regs.sp[1 - (iargc + 2)], and not regs.sp[-(iargc + 2)],
          * is the callee for this JSOP_CALL.
          */
         const Value &cref = f.regs.sp[1 - (iargc + 2)];
         JSObject *callee;
@@ -1589,17 +1598,17 @@ stubs::LambdaJoinableForCall(VMFrame &f,
     return Lambda(f, fun);
 }
 
 JSObject * JS_FASTCALL
 stubs::LambdaJoinableForNull(VMFrame &f, JSFunction *fun)
 {
     JSObject *obj = FUN_OBJECT(fun);
     if (FUN_NULL_CLOSURE(fun) && obj->getParent() == &f.fp()->scopeChain()) {
-        jsbytecode *pc2 = f.regs.pc + JSOP_NULL_LENGTH;
+        jsbytecode *pc2 = f.pc() + JSOP_NULL_LENGTH;
         JSOp op2 = JSOp(*pc2);
 
         if (op2 == JSOP_CALL && GET_ARGC(pc2) == 0)
             return obj;
     }
     return Lambda(f, fun);
 }
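The joinable-lambda fast paths above rely on the callee never escaping; the canonical case from the comment is a sort comparator, which can reuse the compiler-created function object without cloning:

var sorted = [3, 1, 2].sort(function (x, y) { return x - y; });
assertEq(sorted.join(","), "1,2,3");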
 
@@ -1668,17 +1677,17 @@ ObjIncOp(VMFrame &f, JSObject *obj, jsid
             return false;
         if (POST) {
             ref.setNumber(d);
             d += N;
         } else {
             d += N;
             ref.setNumber(d);
         }
-        if (!v.setNumber(d) && !f.script()->typeMonitorOverflow(cx, f.regs.pc))
+        if (!v.setNumber(d) && !f.script()->typeMonitorOverflow(cx, f.pc()))
             return false;
         if (!cx->typeMonitorAssign(obj, id, v))
             return false;
         fp->setAssigning();
         JSBool ok = obj->setProperty(cx, id, &v, strict);
         fp->clearAssigning();
         if (!ok)
             return false;
@@ -1692,17 +1701,17 @@ static inline bool
 NameIncDec(VMFrame &f, JSObject *obj, JSAtom *origAtom)
 {
     JSContext *cx = f.cx;
 
     JSAtom *atom;
     JSObject *obj2;
     JSProperty *prop;
     PropertyCacheEntry *entry;
-    JS_PROPERTY_CACHE(cx).test(cx, f.regs.pc, obj, obj2, entry, atom);
+    JS_PROPERTY_CACHE(cx).test(cx, f.pc(), obj, obj2, entry, atom);
     if (!atom) {
         if (obj == obj2 && entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
             Value &rref = obj->nativeGetSlotRef(slot);
             int32_t tmp;
             if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
                 int32_t inc = tmp + N;
                 if (!POST)
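The NameIncDec fast path above stays on the int32 representation only while CanIncDecWithoutOverflow holds; once an increment would leave int32 range, the slow path produces a double and monitors the overflow:

var n = 2147483646;
n++;
assertEq(n, 2147483647);  // still within int32 range
n++;
assertEq(n, 2147483648);  // overflows to a double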
@@ -1971,17 +1980,17 @@ InlineGetProp(VMFrame &f)
          * assuming any property gets it does (e.g., for 'toString'
          * from JSOP_NEW) will not be leaked to the calling script.
          */
         JSObject *aobj = js_GetProtoIfDenseArray(obj);
 
         PropertyCacheEntry *entry;
         JSObject *obj2;
         JSAtom *atom;
-        JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
+        JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom);
         if (!atom) {
             if (entry->vword.isFunObj()) {
                 rval.setObject(entry->vword.toFunObj());
             } else if (entry->vword.isSlot()) {
                 uint32 slot = entry->vword.toSlot();
                 rval = obj2->nativeGetSlot(slot);
             } else {
                 JS_ASSERT(entry->vword.isShape());
@@ -2000,17 +2009,17 @@ InlineGetProp(VMFrame &f)
                     ? JSGET_CACHE_RESULT | JSGET_NO_METHOD_BARRIER
                     : JSGET_CACHE_RESULT | JSGET_METHOD_BARRIER,
                     &rval)
                 : !obj->getProperty(cx, id, &rval)) {
             return false;
         }
     } while(0);
 
-    if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, regs.pc))
+    if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
         return false;
 
     regs.sp[-1] = rval;
     return true;
 }
 
 void JS_FASTCALL
 stubs::GetProp(VMFrame &f)
@@ -2067,17 +2076,17 @@ stubs::CallProp(VMFrame &f, JSAtom *orig
     }
 
     JSObject *aobj = js_GetProtoIfDenseArray(&objv.toObject());
     Value rval;
 
     PropertyCacheEntry *entry;
     JSObject *obj2;
     JSAtom *atom;
-    JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
+    JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
             rval = obj2->nativeGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
@@ -2121,17 +2130,17 @@ stubs::CallProp(VMFrame &f, JSAtom *orig
     }
 #if JS_HAS_NO_SUCH_METHOD
     if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) {
         regs.sp[-2].setString(origAtom);
         if (!js_OnUnknownMethod(cx, regs.sp - 2))
             THROW();
     }
 #endif
-    if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, regs.pc))
+    if (rval.isUndefined() && !f.script()->typeMonitorUndefined(cx, f.pc()))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::Length(VMFrame &f)
 {
     JSFrameRegs &regs = f.regs;
     Value *vp = &regs.sp[-1];
@@ -2192,17 +2201,17 @@ InitPropOrMethod(VMFrame &f, JSAtom *ato
      * So check first.
      *
      * On a hit, if the cached shape has a non-default setter, it must be
      * __proto__. If shape->previous() != obj->lastProperty(), there must be a
      * repeated property name. The fast path does not handle these two cases.
      */
     PropertyCacheEntry *entry;
     const Shape *shape;
-    if (JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) &&
+    if (JS_PROPERTY_CACHE(cx).testForInit(rt, f.pc(), obj, &shape, &entry) &&
         shape->hasDefaultSetter() &&
         shape->previous() == obj->lastProperty())
     {
         /* Fast path. Property cache hit. */
         uint32 slot = shape->slot;
 
         JS_ASSERT(slot == obj->slotSpan());
         JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
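Per the comment above, the initializer fast path requires a default setter and no repeated property names; a sketch of forms that do and do not qualify:

var fast = {a: 1, b: 2};        // fresh names, default setters: fast path
var dup = {a: 1, a: 2};         // repeated property name: slow path
var proto = {__proto__: null};  // non-default (__proto__) setter: slow path
assertEq(fast.b, 2);
assertEq(dup.a, 2);
assertEq(Object.getPrototypeOf(proto), null);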
@@ -2405,16 +2414,17 @@ stubs::ArgCnt(VMFrame &f)
 void JS_FASTCALL
 stubs::EnterBlock(VMFrame &f, JSObject *obj)
 {
     JSFrameRegs &regs = f.regs;
 #ifdef DEBUG
     JSStackFrame *fp = f.fp();
 #endif
 
+    JS_ASSERT(!f.regs.inlined);
     JS_ASSERT(obj->isStaticBlock());
     JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
     Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
     JS_ASSERT(regs.sp < vp);
     JS_ASSERT(vp <= fp->slots() + fp->script()->nslots);
     SetValueRangeToUndefined(regs.sp, vp);
     regs.sp = vp;
 
@@ -2603,17 +2613,17 @@ stubs::Unbrand(VMFrame &f)
         obj->unbrand(f.cx);
 }
 
 void JS_FASTCALL
 stubs::Pos(VMFrame &f)
 {
     if (!ValueToNumber(f.cx, &f.regs.sp[-1]))
         THROW();
-    if (!f.regs.sp[-1].isInt32() && !f.script()->typeMonitorOverflow(f.cx, f.regs.pc))
+    if (!f.regs.sp[-1].isInt32() && !f.script()->typeMonitorOverflow(f.cx, f.pc()))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::ArgSub(VMFrame &f, uint32 n)
 {
     jsid id = INT_TO_JSID(n);
     Value rval;
@@ -2627,17 +2637,17 @@ stubs::DelName(VMFrame &f, JSAtom *atom)
 {
     jsid id = ATOM_TO_JSID(atom);
     JSObject *obj, *obj2;
     JSProperty *prop;
     if (!js_FindProperty(f.cx, id, &obj, &obj2, &prop))
         THROW();
 
     /* Strict mode code should never contain JSOP_DELNAME opcodes. */
-    JS_ASSERT(!f.fp()->script()->strictModeCode);
+    JS_ASSERT(!f.script()->strictModeCode);
 
     /* ECMA says to return true if name is undefined or inherited. */
     f.regs.sp++;
     f.regs.sp[-1] = BooleanValue(true);
     if (prop) {
         if (!obj->deleteProperty(f.cx, id, &f.regs.sp[-1], false))
             THROW();
     }
@@ -2691,28 +2701,28 @@ stubs::DefVarOrConst(VMFrame &f, JSAtom 
     JS_ASSERT(!obj->getOps()->defineProperty);
     uintN attrs = JSPROP_ENUMERATE;
     if (!fp->isEvalFrame())
         attrs |= JSPROP_PERMANENT;
 
     /* Lookup id in order to check for redeclaration problems. */
     jsid id = ATOM_TO_JSID(atom);
     bool shouldDefine;
-    if (JSOp(*f.regs.pc) == JSOP_DEFVAR) {
+    if (JSOp(*f.pc()) == JSOP_DEFVAR) {
         /*
          * Redundant declaration of a |var|, even one for a non-writable
          * property like |undefined| in ES5, does nothing.
          */
         JSProperty *prop;
         JSObject *obj2;
         if (!obj->lookupProperty(cx, id, &obj2, &prop))
             THROW();
         shouldDefine = (!prop || obj2 != obj);
     } else {
-        JS_ASSERT(JSOp(*f.regs.pc) == JSOP_DEFCONST);
+        JS_ASSERT(JSOp(*f.pc()) == JSOP_DEFCONST);
         attrs |= JSPROP_READONLY;
         if (!CheckRedeclaration(cx, obj, id, attrs))
             THROW();
 
         /*
          * As attrs includes readonly, CheckRedeclaration can succeed only
          * if prop does not exist.
          */
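The JSOP_DEFVAR branch above implements the ES5 rule that re-declaring an existing binding with |var| is a no-op, even for non-writable globals:

var undefined;  // redundant declaration of a non-writable global: no-op
assertEq(typeof undefined, "undefined");
function f() { return 1; }
var f;          // does not clobber the existing function binding
assertEq(f(), 1);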
@@ -2771,25 +2781,25 @@ stubs::In(VMFrame &f)
 }
 
 template void JS_FASTCALL stubs::DelElem<true>(VMFrame &f);
 template void JS_FASTCALL stubs::DelElem<false>(VMFrame &f);
 
 void JS_FASTCALL
 stubs::UndefinedHelper(VMFrame &f)
 {
-    if (!f.script()->typeMonitorUndefined(f.cx, f.regs.pc))
+    if (!f.script()->typeMonitorUndefined(f.cx, f.pc()))
         THROW();
     f.regs.sp[-1].setUndefined();
 }
 
 void JS_FASTCALL
 stubs::NegZeroHelper(VMFrame &f)
 {
-    if (!f.script()->typeMonitorOverflow(f.cx, f.regs.pc))
+    if (!f.script()->typeMonitorOverflow(f.cx, f.pc()))
         THROW();
     f.regs.sp[-1].setDouble(-0.0);
 }
 
 void JS_FASTCALL
 stubs::CheckArgumentTypes(VMFrame &f)
 {
     JSStackFrame *fp = f.fp();
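UndefinedHelper and NegZeroHelper above exist because undefined and -0 are exactly the values a fast path cannot account for in its type assumptions; -0 in particular only exists as a double:

var z = -0;
assertEq(z, -0);             // assertEq distinguishes -0 from +0
assertEq(1 / z, -Infinity);  // -0 behaves as a (negative) double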
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -42,21 +42,20 @@
 #include "StubCompiler.h"
 #include "Compiler.h"
 #include "assembler/assembler/LinkBuffer.h"
 #include "FrameState-inl.h"