Merge tracemonkey to mozilla-central. a=blockers
author: Robert Sayre <sayrer@gmail.com>
date: Wed, 20 Oct 2010 09:13:20 -0500
changeset: 56233:571ddddc7e13f9d7565d3523631d18bb2e3df9f5
parent: 56168:877a1f5be4f421a31b6814f7bc1b2a5ebda1fc5d
parent: 56232:449fea69593061a4ff52620eb8be5ad06ac5bdda
child: 56234:b003e775737b5aebcd9c6e99caf7064d02d49427
child: 59309:ccef12b1fe7eec108b2c0d744e84a5f460969136
push id: 16433
push user: rsayre@mozilla.com
push date: Wed, 20 Oct 2010 14:14:01 +0000
reviewers: blockers
milestone: 2.0b8pre
first release with:
nightly linux32: 571ddddc7e13 / 4.0b8pre / 20101020104642
nightly linux64: 571ddddc7e13 / 4.0b8pre / 20101020105126
nightly mac: 571ddddc7e13 / 4.0b8pre / 20101020104951
nightly win32: 571ddddc7e13 / 4.0b8pre / 20101020120347
nightly win64: 571ddddc7e13 / 4.0b8pre / 20101020074657
files:
dom/base/nsDOMClassInfo.cpp
dom/base/nsJSEnvironment.cpp
js/jsd/test/test_bug602003.html
js/src/configure.in
modules/libpref/src/init/all.js
--- a/dom/base/nsJSEnvironment.cpp
+++ b/dom/base/nsJSEnvironment.cpp
@@ -3963,17 +3963,17 @@ SetMemoryHighWaterMarkPrefChangedCallbac
                       highwatermark * 1024L * 1024L);
   }
   return 0;
 }
 
 static int
 SetMemoryGCFrequencyPrefChangedCallback(const char* aPrefName, void* aClosure)
 {
-  PRInt32 triggerFactor = nsContentUtils::GetIntPref(aPrefName, 1600);
+  PRInt32 triggerFactor = nsContentUtils::GetIntPref(aPrefName, 300);
   JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_TRIGGER_FACTOR, triggerFactor);
   return 0;
 }
 
 static JSPrincipals *
 ObjectPrincipalFinder(JSContext *cx, JSObject *obj)
 {
   if (!sSecurityManager)
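
The only functional change in this hunk lowers the default for the javascript.options.mem.gc_frequency pref from 1600 to 300. JSGC_TRIGGER_FACTOR is a percentage of the heap size measured after the last GC, so the new default asks for a collection once the heap roughly triples instead of growing sixteen-fold. A minimal sketch of applying the same tuning through the public JSAPI, assuming an already-created runtime:

    #include "jsapi.h"

    /* Sketch: mirror the new pref default through the public API.
     * JSGC_TRIGGER_FACTOR is interpreted as a percentage, so 300 means
     * "trigger a GC once the heap reaches 3x its post-GC size". */
    static void
    TuneGCFrequency(JSRuntime *rt)
    {
        JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, 300);
    }
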
--- a/js/jsd/idl/jsdIDebuggerService.idl
+++ b/js/jsd/idl/jsdIDebuggerService.idl
@@ -850,17 +850,17 @@ interface jsdIStackFrame : jsdIEphemeral
                   in unsigned long line, out jsdIValue result);
     
 };
 
 /**
  * Script object.  In JavaScript engine terms, there's a single script for each
  * function, and one for the top level script.
  */
-[scriptable, uuid(53dadd96-69f6-4846-8958-cc8eaa3f9f09)]
+[scriptable, uuid(7e6fb9ed-4382-421d-9a14-c80a486e983b)]
 interface jsdIScript : jsdIEphemeral
 {
     /** Internal use only. */
     [noscript] readonly attribute JSDContext JSDContext;
     /** Internal use only. */
     [noscript] readonly attribute JSDScript  JSDScript;
     
     /**
@@ -997,27 +997,16 @@ interface jsdIScript : jsdIEphemeral
     unsigned long lineToPc (in unsigned long line, in unsigned long pcmap);
     /**
     * Determine if a particular line is executable, like checking that
      * lineToPc == pcToLine, except in one call.
      * The |pcmap| argument specifies which pc to source line map to use.
      */
     boolean isLineExecutable (in unsigned long line, in unsigned long pcmap);
     /**
-     * Get the first valid PC in the script. This will be either
-     * (a) the first bytecode in the script, or (b) the next bytecode
-     * in the script, iff the first bytecode is a JSOP_BEGIN.
-     */
-    unsigned long getFirstValidPC ();
-    /**
-     * Return the last valid PC in the script (i.e., the PC just after 
-     * the last bytecode).
-     */
-    unsigned long getEndValidPC ();
-    /**
      * Set a breakpoint at a PC in this script.
      */
     void setBreakpoint (in unsigned long pc);
     /**
      * Clear a breakpoint at a PC in this script.
      */
     void clearBreakpoint (in unsigned long pc);
     /**
--- a/js/jsd/jsd.h
+++ b/js/jsd/jsd.h
@@ -473,22 +473,16 @@ extern JSBool
 jsd_SetScriptHook(JSDContext* jsdc, JSD_ScriptHookProc hook, void* callerdata);
 
 extern JSBool
 jsd_GetScriptHook(JSDContext* jsdc, JSD_ScriptHookProc* hook, void** callerdata);
 
 extern jsuword
 jsd_GetClosestPC(JSDContext* jsdc, JSDScript* jsdscript, uintN line);
 
-extern jsuword
-jsd_GetFirstValidPC(JSDContext* jsdc, JSDScript* jsdscript);
-
-extern jsuword
-jsd_GetEndPC(JSDContext* jsdc, JSDScript* jsdscript);
-
 extern uintN
 jsd_GetClosestLine(JSDContext* jsdc, JSDScript* jsdscript, jsuword pc);
 
 extern void
 jsd_NewScriptHookProc(
                 JSContext   *cx,
                 const char  *filename,      /* URL this script loads from */
                 uintN       lineno,         /* line where this script starts */
--- a/js/jsd/jsd_scpt.c
+++ b/js/jsd/jsd_scpt.c
@@ -495,32 +495,30 @@ jsd_GetScriptLineExtent(JSDContext* jsdc
     if( NOT_SET_YET == (int)jsdscript->lineExtent )
         jsdscript->lineExtent = JS_GetScriptLineExtent(jsdc->dumbContext, jsdscript->script);
     return jsdscript->lineExtent;
 }
 
 jsuword
 jsd_GetClosestPC(JSDContext* jsdc, JSDScript* jsdscript, uintN line)
 {
+#ifdef LIVEWIRE
+    if( jsdscript && jsdscript->lwscript )
+    {
+        uintN newline;
+        jsdlw_RawToProcessedLineNumber(jsdc, jsdscript, line, &newline);
+        if( line != newline )
+            line = newline;
+    }
+#endif
+
     return (jsuword) JS_LineNumberToPC(jsdc->dumbContext, 
                                        jsdscript->script, line );
 }
 
-jsuword
-jsd_GetFirstValidPC(JSDContext* jsdc, JSDScript* jsdscript)
-{
-    return (jsuword) JS_FirstValidPC(jsdc->dumbContext, jsdscript->script );
-}
-
-jsuword
-jsd_GetEndPC(JSDContext* jsdc, JSDScript* jsdscript)
-{
-    return (jsuword) JS_EndPC(jsdc->dumbContext, jsdscript->script );
-}
-
 uintN
 jsd_GetClosestLine(JSDContext* jsdc, JSDScript* jsdscript, jsuword pc)
 {
     uintN first = jsdscript->lineBase;
     uintN last = first + jsd_GetScriptLineExtent(jsdc, jsdscript) - 1;
     uintN line = pc
         ? JS_PCToLineNumber(jsdc->dumbContext, 
                             jsdscript->script,
--- a/js/jsd/jsd_xpc.cpp
+++ b/js/jsd/jsd_xpc.cpp
@@ -953,34 +953,30 @@ jsdScript::jsdScript (JSDContext *aCx, J
                                                              mTag(0),
                                                              mCx(aCx),
                                                              mScript(aScript),
                                                              mFileName(0), 
                                                              mFunctionName(0),
                                                              mBaseLineNumber(0),
                                                              mLineExtent(0),
                                                              mPPLineMap(0),
-                                                             mFirstValidPC(0),
-                                                             mFirstPC(0),
-                                                             mEndPC(0)
+                                                             mFirstPC(0)
 {
     DEBUG_CREATE ("jsdScript", gScriptCount);
 
     if (mScript) {
         /* copy the script's information now, so we have it later, when it
          * gets destroyed. */
         JSD_LockScriptSubsystem(mCx);
         mFileName = new nsCString(JSD_GetScriptFilename(mCx, mScript));
         mFunctionName =
             new nsCString(JSD_GetScriptFunctionName(mCx, mScript));
         mBaseLineNumber = JSD_GetScriptBaseLineNumber(mCx, mScript);
         mLineExtent = JSD_GetScriptLineExtent(mCx, mScript);
         mFirstPC = JSD_GetClosestPC(mCx, mScript, 0);
-        mFirstValidPC = JSD_GetFirstValidPC(mCx, mScript);
-        mEndPC = JSD_GetEndPC(mCx, mScript);
         JSD_UnlockScriptSubsystem(mCx);
         
         mValid = PR_TRUE;
     }
 }
 
 jsdScript::~jsdScript () 
 {
@@ -1475,32 +1471,16 @@ jsdScript::IsLineExecutable(PRUint32 aLi
     } else {
         return NS_ERROR_INVALID_ARG;
     }
     
     return NS_OK;
 }
 
 NS_IMETHODIMP
-jsdScript::GetFirstValidPC(PRUint32 *_rval)
-{
-    ASSERT_VALID_EPHEMERAL;
-    *_rval = PRUint32(mFirstValidPC - mFirstPC);
-    return NS_OK;
-}
-
-NS_IMETHODIMP
-jsdScript::GetEndValidPC(PRUint32 *_rval)
-{
-    ASSERT_VALID_EPHEMERAL;
-    *_rval = PRUint32(mEndPC - mFirstPC);
-    return NS_OK;
-}
-
-NS_IMETHODIMP
 jsdScript::SetBreakpoint(PRUint32 aPC)
 {
     ASSERT_VALID_EPHEMERAL;
     jsuword pc = mFirstPC + aPC;
     JSD_SetExecutionHook (mCx, mScript, pc, jsds_ExecutionHookProc, NULL);
     return NS_OK;
 }
 
--- a/js/jsd/jsd_xpc.h
+++ b/js/jsd/jsd_xpc.h
@@ -177,19 +177,17 @@ class jsdScript : public jsdIScript
     PRUint32    mTag;
     JSDContext *mCx;
     JSDScript  *mScript;
     nsCString  *mFileName;
     nsCString  *mFunctionName;
     PRUint32    mBaseLineNumber, mLineExtent;
     PCMapEntry *mPPLineMap;
     PRUint32    mPCMapSize;
-    jsuword     mFirstPC;        /* address of first PC in script */
-    jsuword     mFirstValidPC;   /* address of first valid bkpt PC */
-    jsuword     mEndPC;          /* address of end of script code */
+    jsuword     mFirstPC;
 };
 
 PRUint32 jsdScript::LastTag = 0;
 
 class jsdContext : public jsdIContext
 {
   public:
     NS_DECL_ISUPPORTS
--- a/js/jsd/jsdebug.c
+++ b/js/jsd/jsdebug.c
@@ -343,32 +343,16 @@ JSD_GetScriptHook(JSDContext* jsdc, JSD_
 JSD_PUBLIC_API(jsuword)
 JSD_GetClosestPC(JSDContext* jsdc, JSDScript* jsdscript, uintN line)
 {
     JSD_ASSERT_VALID_CONTEXT(jsdc);
     JSD_ASSERT_VALID_SCRIPT(jsdscript);
     return jsd_GetClosestPC(jsdc, jsdscript, line);
 }
 
-JSD_PUBLIC_API(jsuword)
-JSD_GetFirstValidPC(JSDContext* jsdc, JSDScript* jsdscript)
-{
-    JSD_ASSERT_VALID_CONTEXT(jsdc);
-    JSD_ASSERT_VALID_SCRIPT(jsdscript);
-    return jsd_GetFirstValidPC(jsdc, jsdscript);
-}
-
-JSD_PUBLIC_API(jsuword)
-JSD_GetEndPC(JSDContext* jsdc, JSDScript* jsdscript)
-{
-    JSD_ASSERT_VALID_CONTEXT(jsdc);
-    JSD_ASSERT_VALID_SCRIPT(jsdscript);
-    return jsd_GetEndPC(jsdc, jsdscript);
-}
-
 JSD_PUBLIC_API(uintN)
 JSD_GetClosestLine(JSDContext* jsdc, JSDScript* jsdscript, jsuword pc)
 {
     JSD_ASSERT_VALID_CONTEXT(jsdc);
     JSD_ASSERT_VALID_SCRIPT(jsdscript);
     return jsd_GetClosestLine(jsdc, jsdscript, pc);
 }
 
--- a/js/jsd/jsdebug.h
+++ b/js/jsd/jsdebug.h
@@ -482,29 +482,16 @@ JSD_GetScriptHook(JSDContext* jsdc, JSD_
 * If no code is on the given line, then the returned pc represents the first
 * code within the script (if any) after the given line.
 * This function can be used to set breakpoints -- see JSD_SetExecutionHook
 */
 extern JSD_PUBLIC_API(jsuword)
 JSD_GetClosestPC(JSDContext* jsdc, JSDScript* jsdscript, uintN line);
 
 /*
-* Get the first 'Program Counter' value where a breakpoint can be set.
-*/
-extern JSD_PUBLIC_API(jsuword)
-JSD_GetFirstValidPC(JSDContext* jsdc, JSDScript* jsdscript);
-
-/*
-* Get the 'Program Counter' value just after the last byte of the script.
-* 0 is returned for invalid scripts.
-*/
-extern JSD_PUBLIC_API(jsuword)
-JSD_GetEndPC(JSDContext* jsdc, JSDScript* jsdscript);
-
-/*
 * Get the source line number for a given 'Program Counter' location.
 * Returns 0 if no source line information is appropriate (or available) for
 * the given pc.
 */
 extern JSD_PUBLIC_API(uintN)
 JSD_GetClosestLine(JSDContext* jsdc, JSDScript* jsdscript, jsuword pc);
 
 /* these are only used in cases where scripts are created outside of JS*/
--- a/js/jsd/test/Makefile.in
+++ b/js/jsd/test/Makefile.in
@@ -43,13 +43,12 @@ relativesrcdir  = js/jsd/test
 
 include $(DEPTH)/config/autoconf.mk
 
 MODULE = jsdebug
 
 include $(topsrcdir)/config/rules.mk
 
 _TEST_FILES = 	test_bug507448.html \
-                test_bug602003.html \
 		$(NULL)
 
 libs:: $(_TEST_FILES)
 	$(INSTALL) $(foreach f,$^,"$f") $(DEPTH)/_tests/testing/mochitest/tests/$(relativesrcdir)
deleted file mode 100644
--- a/js/jsd/test/test_bug602003.html
+++ /dev/null
@@ -1,62 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<!--
-https://bugzilla.mozilla.org/show_bug.cgi?id=507448
--->
-<head>
-  <title>Test for Bug 602003</title>
-  <script type="application/javascript" src="/MochiKit/packed.js"></script>
-  <script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
-  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
-</head>
-<body>
-<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=507448">Mozilla Bug 507448</a>
-<p id="display"></p>
-<div id="content" style="display: none">
-  
-</div>
-<pre id="test">
-<script type="application/javascript">
-
-/** Test for Bug 602003 **/
-
-// This is somewhat unfortunate: jsd only deals with scripts that have a
-// nonzero line number, so we can't just createElement a script here.
-// So break the test up into three <script>s, of which the middle one has our test functions.
-
-netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
-var jsdIDebuggerService = Components.interfaces.jsdIDebuggerService;
-var jsd = Components.classes['@mozilla.org/js/jsd/debugger-service;1']
-                    .getService(jsdIDebuggerService);
-var jsdOn = jsd.isOn;
-if (!jsdOn) {
-  jsd.on();
-  ok(jsd.isOn, "JSD should be running.");
-}
-</script>
-<script>
-  function g(a,b) { return a + b }
-</script>
-<script>
-  netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
-  var script = jsd.wrapValue(g).script;
-
-  // Test the script start/end PC APIs.
-  var start = script.getFirstValidPC();
-  var end = script.getEndValidPC();
-
-  // Start PC should be 1 for a function because it starts with JSOP_BEGIN.
-  is(start, 1, "Start PC should be 1");
-
-  // End PC should be something greater than 1, and not huge. Changes
-  // in the bytecode will change this, so we'll just be approximate.
-  ok(1 < end && end < 100, "End PC doesn't seem sane.");
-
-  if (!jsdOn) {
-    jsd.off();
-    ok(!jsd.isOn, "JSD shouldn't be running anymore.");
-  }
-</script>
-</pre>
-</body>
-</html>
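
The mochitest removed above was the only consumer of getFirstValidPC/getEndValidPC: with JSOP_BEGIN gone from the bytecode, a script's first opcode is always a valid breakpoint site and the two accessors have nothing left to report. Breakpoints keep working through the surviving path, sketched below against the JSD C API this patch retains (jsdc, jsdscript, and hook are assumed to come from an existing JSD session):

    /* Sketch: the retained breakpoint path. PC values exposed through
     * jsdIScript are offsets relative to the script's first PC. */
    jsuword firstPC = JSD_GetClosestPC(jsdc, jsdscript, 0);
    jsuword pc      = JSD_GetClosestPC(jsdc, jsdscript, line);
    JSD_SetExecutionHook(jsdc, jsdscript, pc, hook, NULL);
    PRUint32 offset = PRUint32(pc - firstPC);  /* what jsdIScript::setBreakpoint takes */
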
--- a/js/src/assembler/assembler/X86Assembler.h
+++ b/js/src/assembler/assembler/X86Assembler.h
@@ -847,17 +847,17 @@ public:
         }
     }
 
 #if WTF_CPU_X86_64
     void xorq_rr(RegisterID src, RegisterID dst)
     {
         js::JaegerSpew(js::JSpew_Insns,
                        IPFX "xorq       %s, %s\n", MAYBE_PAD,
-                       nameIReg(4,src), nameIReg(4, dst));
+                       nameIReg(8,src), nameIReg(8, dst));
         m_formatter.oneByteOp64(OP_XOR_EvGv, src, dst);
     }
 
     void xorq_ir(int imm, RegisterID dst)
     {
         js::JaegerSpew(js::JSpew_Insns,
                        IPFX "xorq       %d, %s\n", MAYBE_PAD,
                        imm, nameIReg(8,dst));
--- a/js/src/configure.in
+++ b/js/src/configure.in
@@ -2629,17 +2629,16 @@ x86_64*-*)
     ENABLE_POLYIC=1
     AC_DEFINE(JS_CPU_X64)
     AC_DEFINE(JS_PUNBOX64)
     ;;
 arm*-*)
     ENABLE_TRACEJIT=1
     NANOJIT_ARCH=ARM
     ENABLE_METHODJIT=1
-    ENABLE_MONOIC=1
     AC_DEFINE(JS_CPU_ARM)
     AC_DEFINE(JS_NUNBOX32)
     ;;
 sparc*-*)
     ENABLE_TRACEJIT=1
     NANOJIT_ARCH=Sparc
     AC_DEFINE(JS_CPU_SPARC)
     ;;
--- a/js/src/jsapi-tests/Makefile.in
+++ b/js/src/jsapi-tests/Makefile.in
@@ -55,27 +55,29 @@ CPPSRCS = \
   testContexts.cpp \
   testDebugger.cpp \
   testDeepFreeze.cpp \
   testDefineGetterSetterNonEnumerable.cpp \
   testDefineProperty.cpp \
   testExtendedEq.cpp \
   testFuncCallback.cpp \
   testGCChunkAlloc.cpp \
+  testGetPropertyDefault.cpp \
   testIntString.cpp \
   testIsAboutToBeFinalized.cpp \
   testLookup.cpp \
   testNewObject.cpp \
   testOps.cpp \
   testPropCache.cpp \
   testSameValue.cpp \
   testScriptObject.cpp \
   testSetPropertyWithNativeGetterStubSetter.cpp \
   testBug604087.cpp \
   testTrap.cpp \
+  testUTF8.cpp \
   testXDR.cpp \
   $(NULL)
 
 DEFINES         += -DEXPORT_JS_API
 
 LIBS      = $(NSPR_LIBS) $(DEPTH)/$(LIB_PREFIX)js_static.$(LIB_SUFFIX)
 
 LOCAL_INCLUDES += -I$(topsrcdir) -I..
new file mode 100644
--- /dev/null
+++ b/js/src/jsapi-tests/testGetPropertyDefault.cpp
@@ -0,0 +1,70 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=99:
+ */
+
+#include "tests.h"
+
+#define JSVAL_IS_FALSE(x) ((JSVAL_IS_BOOLEAN(x)) && !(JSVAL_TO_BOOLEAN(x)))
+#define JSVAL_IS_TRUE(x)  ((JSVAL_IS_BOOLEAN(x)) && (JSVAL_TO_BOOLEAN(x)))
+
+static JSBool
+stringToId(JSContext *cx, const char *s, jsid *idp)
+{
+    char *buf = JS_strdup(cx, s);
+    if (!buf)
+        return false;
+
+    JSString *str = JS_NewString(cx, buf, strlen(s));
+    if (!str)
+        return false;
+
+    return JS_ValueToId(cx, STRING_TO_JSVAL(str), idp);
+}
+
+BEGIN_TEST(testGetPropertyDefault_bug594060)
+{
+    {
+        // Check JS_GetPropertyDefault
+
+        JSObject *obj = JS_NewObject(cx, NULL, NULL, NULL);
+        CHECK(obj);
+
+        jsval v0 = JSVAL_TRUE;
+        CHECK(JS_SetProperty(cx, obj, "here", &v0));
+
+        jsval v1;
+        CHECK(JS_GetPropertyDefault(cx, obj, "here", JSVAL_FALSE, &v1));
+        CHECK(JSVAL_IS_TRUE(v1));
+
+        jsval v2;
+        CHECK(JS_GetPropertyDefault(cx, obj, "nothere", JSVAL_FALSE, &v2));
+        CHECK(JSVAL_IS_FALSE(v2));
+    }
+
+    {
+        // Check JS_GetPropertyByIdDefault
+
+        JSObject *obj = JS_NewObject(cx, NULL, NULL, NULL);
+        CHECK(obj);
+
+        jsid hereid;
+        CHECK(stringToId(cx, "here", &hereid));
+
+        jsid nothereid;
+        CHECK(stringToId(cx, "nothere", &nothereid));
+
+        jsval v0 = JSVAL_TRUE;
+        CHECK(JS_SetPropertyById(cx, obj, hereid, &v0));
+
+        jsval v1;
+        CHECK(JS_GetPropertyByIdDefault(cx, obj, hereid, JSVAL_FALSE, &v1));
+        CHECK(JSVAL_IS_TRUE(v1));
+
+        jsval v2;
+        CHECK(JS_GetPropertyByIdDefault(cx, obj, nothereid, JSVAL_FALSE, &v2));
+        CHECK(JSVAL_IS_FALSE(v2));
+    }
+
+    return true;
+}
+END_TEST(testGetPropertyDefault_bug594060)
new file mode 100644
--- /dev/null
+++ b/js/src/jsapi-tests/testUTF8.cpp
@@ -0,0 +1,21 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=99:
+ */
+
+#include "tests.h"
+
+BEGIN_TEST(testUTF8_bug589917)
+{
+    const jschar surrogate_pair[] = { 0xd800, 0xdc00 };
+    char output_buffer[10];
+    size_t utf8_len = sizeof(output_buffer);
+
+    CHECK(JS_EncodeCharacters(cx, surrogate_pair, 2, output_buffer, &utf8_len));
+    CHECK(utf8_len == 4);
+
+    CHECK(JS_EncodeCharacters(cx, surrogate_pair, 2, NULL, &utf8_len));
+    CHECK(utf8_len == 4);
+
+    return true;
+}
+END_TEST(testUTF8_bug589917)
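
The new test pins down the bug 589917 behavior: the surrogate pair U+D800 U+DC00 (together encoding U+10000) must deflate to four UTF-8 bytes, and a NULL destination buffer must report the required length rather than fail. A sketch of the resulting measure-then-encode idiom, assuming a live JSContext with UTF-8 C strings enabled:

    /* Sketch: measure first (NULL buffer), then encode into an exact-size
     * buffer. For { 0xd800, 0xdc00 } the UTF-8 output is F0 90 80 80. */
    const jschar pair[] = { 0xd800, 0xdc00 };
    size_t len = 0;
    if (JS_EncodeCharacters(cx, pair, 2, NULL, &len)) {  /* len becomes 4 */
        char *buf = (char *) JS_malloc(cx, len);
        if (buf) {
            if (JS_EncodeCharacters(cx, pair, 2, buf, &len)) {
                /* buf[0..3] now holds the UTF-8 sequence for U+10000 */
            }
            JS_free(cx, buf);
        }
    }
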
--- a/js/src/jsapi-tests/tests.cpp
+++ b/js/src/jsapi-tests/tests.cpp
@@ -43,16 +43,18 @@
 
 JSAPITest *JSAPITest::list;
 
 int main(int argc, char *argv[])
 {
     int failures = 0;
     const char *filter = (argc == 2) ? argv[1] : NULL;
 
+    JS_SetCStringsAreUTF8();
+
     for (JSAPITest *test = JSAPITest::list; test; test = test->next) {
         const char *name = test->name();
         if (filter && strcmp(filter, name) != 0)
             continue;
 
         printf("%s\n", name);
         if (!test->init()) {
             printf("TEST-UNEXPECTED-FAIL | %s | Failed to initialize.\n", name);
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -808,69 +808,55 @@ JS_SetRuntimePrivate(JSRuntime *rt, void
 
 #ifdef JS_THREADSAFE
 static void
 StartRequest(JSContext *cx)
 {
     JSThread *t = cx->thread;
     JS_ASSERT(CURRENT_THREAD_IS_ME(t));
    
-    if (t->data.requestDepth) {
-        t->data.requestDepth++;
+    if (t->requestDepth) {
+        t->requestDepth++;
     } else {
         JSRuntime *rt = cx->runtime;
         AutoLockGC lock(rt);
 
         /* Wait until the GC is finished. */
         if (rt->gcThread != cx->thread) {
             while (rt->gcThread)
                 JS_AWAIT_GC_DONE(rt);
         }
 
         /* Indicate that a request is running. */
         rt->requestCount++;
-        t->data.requestDepth = 1;
-
-        /*
-         * Adjust rt->interruptCounter to reflect any interrupts added while the
-         * thread was suspended.
-         */
-        if (t->data.interruptFlags)
-            JS_ATOMIC_INCREMENT(&rt->interruptCounter);
+        t->requestDepth = 1;
 
         if (rt->requestCount == 1 && rt->activityCallback)
             rt->activityCallback(rt->activityCallbackArg, true);
     }
 }
 
 static void
 StopRequest(JSContext *cx)
 {
     JSThread *t = cx->thread;
     JS_ASSERT(CURRENT_THREAD_IS_ME(t));
-    JS_ASSERT(t->data.requestDepth != 0);
-    if (t->data.requestDepth != 1) {
-        t->data.requestDepth--;
+    JS_ASSERT(t->requestDepth != 0);
+    if (t->requestDepth != 1) {
+        t->requestDepth--;
     } else {
         LeaveTrace(cx);  /* for GC safety */
 
         t->data.conservativeGC.updateForRequestEnd(t->suspendCount);
 
         /* Lock before clearing to interlock with ClaimScope, in jslock.c. */
         JSRuntime *rt = cx->runtime;
         AutoLockGC lock(rt);
 
-        t->data.requestDepth = 0;
-
-        /*
-         * Adjust rt->interruptCounter to reflect any interrupts added while the
-         * thread still had active requests.
-         */
-        if (t->data.interruptFlags)
-            JS_ATOMIC_DECREMENT(&rt->interruptCounter);
+        t->requestDepth = 0;
 
         js_ShareWaitingTitles(cx);
 
         /* Give the GC a chance to run if this was the last request running. */
         JS_ASSERT(rt->requestCount > 0);
         rt->requestCount--;
         if (rt->requestCount == 0) {
             JS_NOTIFY_REQUEST_DONE(rt);
@@ -912,42 +898,42 @@ JS_YieldRequest(JSContext *cx)
 
 JS_PUBLIC_API(jsrefcount)
 JS_SuspendRequest(JSContext *cx)
 {
 #ifdef JS_THREADSAFE
     JSThread *t = cx->thread;
     JS_ASSERT(CURRENT_THREAD_IS_ME(t));
 
-    jsrefcount saveDepth = t->data.requestDepth;
+    jsrefcount saveDepth = t->requestDepth;
     if (!saveDepth)
         return 0;
 
     t->suspendCount++;
-    t->data.requestDepth = 1;
+    t->requestDepth = 1;
     StopRequest(cx);
     return saveDepth;
 #else
     return 0;
 #endif
 }
 
 JS_PUBLIC_API(void)
 JS_ResumeRequest(JSContext *cx, jsrefcount saveDepth)
 {
 #ifdef JS_THREADSAFE
     JSThread *t = cx->thread;
     JS_ASSERT(CURRENT_THREAD_IS_ME(t));
     if (saveDepth == 0)
         return;
     JS_ASSERT(saveDepth >= 1);
-    JS_ASSERT(!t->data.requestDepth);
+    JS_ASSERT(!t->requestDepth);
     JS_ASSERT(t->suspendCount);
     StartRequest(cx);
-    t->data.requestDepth = saveDepth;
+    t->requestDepth = saveDepth;
     t->suspendCount--;
 #endif
 }
 
 JS_PUBLIC_API(void)
 JS_Lock(JSRuntime *rt)
 {
     JS_LOCK_RUNTIME(rt);
@@ -3748,29 +3734,42 @@ JS_GetPropertyById(JSContext *cx, JSObje
 {
     CHECK_REQUEST(cx);
     assertSameCompartment(cx, obj, id);
     JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED);
     return obj->getProperty(cx, id, Valueify(vp));
 }
 
 JS_PUBLIC_API(JSBool)
+JS_GetPropertyByIdDefault(JSContext *cx, JSObject *obj, jsid id, jsval def, jsval *vp)
+{
+    return GetPropertyDefault(cx, obj, id, Valueify(def), Valueify(vp));
+}
+
+JS_PUBLIC_API(JSBool)
 JS_GetElement(JSContext *cx, JSObject *obj, jsint index, jsval *vp)
 {
     return JS_GetPropertyById(cx, obj, INT_TO_JSID(index), vp);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_GetProperty(JSContext *cx, JSObject *obj, const char *name, jsval *vp)
 {
     JSAtom *atom = js_Atomize(cx, name, strlen(name), 0);
     return atom && JS_GetPropertyById(cx, obj, ATOM_TO_JSID(atom), vp);
 }
 
 JS_PUBLIC_API(JSBool)
+JS_GetPropertyDefault(JSContext *cx, JSObject *obj, const char *name, jsval def, jsval *vp)
+{
+    JSAtom *atom = js_Atomize(cx, name, strlen(name), 0);
+    return atom && JS_GetPropertyByIdDefault(cx, obj, ATOM_TO_JSID(atom), def, vp);
+}
+
+JS_PUBLIC_API(JSBool)
 JS_GetUCProperty(JSContext *cx, JSObject *obj, const jschar *name, size_t namelen, jsval *vp)
 {
     JSAtom *atom = js_AtomizeChars(cx, name, AUTO_NAMELEN(name, namelen), 0);
     return atom && JS_GetPropertyById(cx, obj, ATOM_TO_JSID(atom), vp);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_GetMethodById(JSContext *cx, JSObject *obj, jsid id, JSObject **objp, jsval *vp)
@@ -5021,20 +5020,32 @@ JS_PUBLIC_API(JSOperationCallback)
 JS_GetOperationCallback(JSContext *cx)
 {
     return cx->operationCallback;
 }
 
 JS_PUBLIC_API(void)
 JS_TriggerOperationCallback(JSContext *cx)
 {
+    /*
+     * We allow for cx to come from another thread. Thus we must deal with
+     * possible JS_ClearContextThread calls when accessing cx->thread. But we
+     * assume that the calling thread is in a request so JSThread cannot be
+     * GC-ed.
+     */
+    JSThreadData *td;
 #ifdef JS_THREADSAFE
-    AutoLockGC lock(cx->runtime);
+    JSThread *thread = cx->thread;
+    if (!thread)
+        return;
+    td = &thread->data;
+#else
+    td = JS_THREAD_DATA(cx);
 #endif
-    TriggerOperationCallback(cx);
+    td->triggerOperationCallback();
 }
 
 JS_PUBLIC_API(void)
 JS_TriggerAllOperationCallbacks(JSRuntime *rt)
 {
 #ifdef JS_THREADSAFE
     AutoLockGC lock(rt);
 #endif
@@ -5852,17 +5863,17 @@ JS_ClearContextThread(JSContext *cx)
     /*
      * We must not race with a GC that accesses cx->thread for all threads,
      * see bug 476934.
      */
     JSRuntime *rt = cx->runtime;
     AutoLockGC lock(rt);
     js_WaitForGC(rt);
     js_ClearContextThread(cx);
-    JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->data.requestDepth);
+    JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->requestDepth);
    
     /*
      * We can access t->id as long as the GC lock is held and we cannot race
      * with the GC that may delete t.
      */
     return reinterpret_cast<jsword>(t->id);
 #else
     return 0;
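
JS_TriggerOperationCallback is now lock-free: instead of taking the GC lock and bumping rt->interruptCounter, it atomically sets a flag in the target thread's JSThreadData, which is why it may safely be called from a thread other than the one running cx. The classic use is a watchdog thread; a minimal sketch, assuming the embedder owns both threads:

    /* Sketch: abort a long-running script from a watchdog thread. */
    static JSBool
    StopCallback(JSContext *cx)
    {
        JS_SetOperationCallback(cx, NULL);  /* don't fire repeatedly */
        return JS_FALSE;                    /* terminate the running script */
    }

    /* Script thread, before executing:  JS_SetOperationCallback(cx, StopCallback);
     * Watchdog thread, on timeout:      JS_TriggerOperationCallback(cx);
     * The interpreter notices the flag at its next check and calls StopCallback. */
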
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -2055,19 +2055,25 @@ JS_GetPropertyDescriptorById(JSContext *
 
 extern JS_PUBLIC_API(JSBool)
 JS_GetOwnPropertyDescriptor(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
 
 extern JS_PUBLIC_API(JSBool)
 JS_GetProperty(JSContext *cx, JSObject *obj, const char *name, jsval *vp);
 
 extern JS_PUBLIC_API(JSBool)
+JS_GetPropertyDefault(JSContext *cx, JSObject *obj, const char *name, jsval def, jsval *vp);
+
+extern JS_PUBLIC_API(JSBool)
 JS_GetPropertyById(JSContext *cx, JSObject *obj, jsid id, jsval *vp);
 
 extern JS_PUBLIC_API(JSBool)
+JS_GetPropertyByIdDefault(JSContext *cx, JSObject *obj, jsid id, jsval def, jsval *vp);
+
+extern JS_PUBLIC_API(JSBool)
 JS_GetMethodById(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
                  jsval *vp);
 
 extern JS_PUBLIC_API(JSBool)
 JS_GetMethod(JSContext *cx, JSObject *obj, const char *name, JSObject **objp,
              jsval *vp);
 
 extern JS_PUBLIC_API(JSBool)
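
The two declarations added here pair with the jsapi.cpp definitions above: they behave like JS_GetProperty/JS_GetPropertyById except that a missing property yields the caller-supplied default instead of undefined (bug 594060). A minimal usage sketch, assuming cx and obj exist and that any stored value is a boolean:

    jsval v;
    if (!JS_GetPropertyDefault(cx, obj, "verbose", JSVAL_FALSE, &v))
        return JS_FALSE;                    /* property lookup itself failed */
    JSBool verbose = JSVAL_TO_BOOLEAN(v);   /* JSVAL_FALSE when "verbose" is absent */
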
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -428,27 +428,35 @@ SetArrayElement(JSContext *cx, JSObject 
     Value tmp = v;
     return obj->setProperty(cx, idr.id(), &tmp, true);
 }
 
 #ifdef JS_TRACER
 JSBool JS_FASTCALL
 js_EnsureDenseArrayCapacity(JSContext *cx, JSObject *obj, jsint i)
 {
+#ifdef DEBUG
+    Class *origObjClasp = obj->clasp; 
+#endif
     jsuint u = jsuint(i);
     jsuint capacity = obj->getDenseArrayCapacity();
     if (u < capacity)
         return true;
     if (INDEX_TOO_SPARSE(obj, u))
         return false;
 
-    return obj->ensureDenseArrayElements(cx, u + 1);
+    JSBool ret = obj->ensureDenseArrayElements(cx, u + 1);
+
+    /* Partially check the CallInfo's storeAccSet is correct. */
+    JS_ASSERT(obj->clasp == origObjClasp);
+    return ret;
 }
-JS_DEFINE_CALLINFO_3(extern, BOOL, js_EnsureDenseArrayCapacity, CONTEXT, OBJECT, INT32, 0,
-                     nanojit::ACCSET_STORE_ANY)
+/* This function and its callees do not touch any object's .clasp field. */
+JS_DEFINE_CALLINFO_3(extern, BOOL, js_EnsureDenseArrayCapacity, CONTEXT, OBJECT, INT32,
+                     0, nanojit::ACCSET_STORE_ANY & ~ACCSET_OBJ_CLASP)
 #endif
 
 static JSBool
 DeleteArrayElement(JSContext *cx, JSObject *obj, jsdouble index, JSBool strict)
 {
     JS_ASSERT(index >= 0);
     if (obj->isDenseArray()) {
         if (index <= jsuint(-1)) {
@@ -814,18 +822,19 @@ js_Array_dense_setelem_hole(JSContext* c
     if (js_PrototypeHasIndexedProperties(cx, obj))
         return false;
 
     jsuint u = jsuint(i);
     if (u >= obj->getArrayLength())
         obj->setArrayLength(u + 1);
     return true;
 }
+/* storeAccSet == ACCSET_OBJ_PRIVATE: because it can set 'length'. */
 JS_DEFINE_CALLINFO_3(extern, BOOL, js_Array_dense_setelem_hole, CONTEXT, OBJECT, INT32,
-                     0, nanojit::ACCSET_STORE_ANY)
+                     0, ACCSET_OBJ_PRIVATE)
 #endif
 
 static JSBool
 array_defineProperty(JSContext *cx, JSObject *obj, jsid id, const Value *value,
                      PropertyOp getter, PropertyOp setter, uintN attrs)
 {
     uint32 i = 0;       // init to shut GCC up
     JSBool isIndex;
@@ -1900,17 +1909,17 @@ js::array_sort(JSContext *cx, uintN argc
                  */
                 i = 0;
                 do {
                     vec[i] = vec[2 * i + 1];
                 } while (++i != newlen);
             }
         } else {
             CompareArgs ca(cx);
-            if (!ca.session.start(cx, fval, NullValue(), 2))
+            if (!ca.session.start(cx, fval, UndefinedValue(), 2))
                 return false;
 
             if (!js_MergeSort(vec, size_t(newlen), sizeof(Value),
                               comparator_stack_cast(sort_compare),
                               &ca, mergesort_tmp,
                               JS_SORTING_VALUES)) {
                 return false;
             }
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -174,16 +174,18 @@ const char *const js_common_atom_names[]
     js_toJSON_str,              /* toJSONAtom                   */
     "(void 0)",                 /* void0Atom                    */
     js_enumerable_str,          /* enumerableAtom               */
     js_configurable_str,        /* configurableAtom             */
     js_writable_str,            /* writableAtom                 */
     js_value_str,               /* valueAtom                    */
     js_test_str,                /* testAtom                     */
     "use strict",               /* useStrictAtom                */
+    "loc",                      /* locAtom                      */
+    "line",                     /* lineAtom                     */
 
 #if JS_HAS_XML_SUPPORT
     js_etago_str,               /* etagoAtom                    */
     js_namespace_str,           /* namespaceAtom                */
     js_ptagc_str,               /* ptagcAtom                    */
     js_qualifier_str,           /* qualifierAtom                */
     js_space_str,               /* spaceAtom                    */
     js_stago_str,               /* stagoAtom                    */
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -367,16 +367,18 @@ struct JSAtomState
     JSAtom              *toJSONAtom;
     JSAtom              *void0Atom;
     JSAtom              *enumerableAtom;
     JSAtom              *configurableAtom;
     JSAtom              *writableAtom;
     JSAtom              *valueAtom;
     JSAtom              *testAtom;
     JSAtom              *useStrictAtom;
+    JSAtom              *locAtom;
+    JSAtom              *lineAtom;
 
 #if JS_HAS_XML_SUPPORT
     JSAtom              *etagoAtom;
     JSAtom              *namespaceAtom;
     JSAtom              *ptagcAtom;
     JSAtom              *qualifierAtom;
     JSAtom              *spaceAtom;
     JSAtom              *stagoAtom;
--- a/js/src/jsbuiltins.h
+++ b/js/src/jsbuiltins.h
@@ -44,16 +44,107 @@
 
 #include "nanojit/nanojit.h"
 #include "jsvalue.h"
 
 #ifdef THIS
 #undef THIS
 #endif
 
+namespace js {
+
+/*
+ * See ValidateWriter::checkAccSet() for what each of these access regions
+ * mean.
+ *
+ * *** WARNING WARNING WARNING ***
+ *
+ * Any incorrect access region annotations on loads/stores/calls could lead to
+ * subtle bugs that manifest rarely, eg. when two loads are CSE'd that
+ * shouldn't be.
+ *
+ * If you add a new access region you will need to add some sanity checking to
+ * ValidateWriter::checkAccSet().  Do not skimp on this checking!  Make it as
+ * strong as you can.  Look at the existing cases for inspiration.  This
+ * checking helps prevent these subtle bugs.
+ *
+ * Furthermore, do not add a "catch-all" region such as "ACCSET_OTHER".  There
+ * are two reasons for this.  First, no checking could be done on loads/stores
+ * bearing it.  Second, it would be too easy for someone in the future who
+ * doesn't understand how AccSets work to use it inappropriately.  Only
+ * ACCSET_ALL (the union of all access regions) should be used as a catch-all,
+ * it can always be used safely, but it reduces optimization possibilities.
+ *
+ * Most of the access regions are type-based, ie. all structs of a particular
+ * type combined together form a region.  This is less precise than
+ * considering each struct separately, but also much simpler.
+ *
+ * - ACCSET_STATE:         The TracerState struct.
+ * - ACCSET_STACK:         The stack.
+ * - ACCSET_RSTACK:        The return stack.
+ * - ACCSET_CX:            All JSContext structs.
+ * - ACCSET_EOS:           The globals area.
+ * - ACCSET_ALLOC:         All memory blocks allocated with LIR_allocp (in
+ *                         other words, this region is the AR space).
+ * - ACCSET_FRAMEREGS:     All JSFrameRegs structs.
+ * - ACCSET_STACKFRAME:    All JSStackFrame objects.
+ * - ACCSET_RUNTIME:       The JSRuntime object.
+ * - ACCSET_OBJ_CLASP:     The 'clasp'    field of all JSObjects.
+ * - ACCSET_OBJ_FLAGS:     The 'flags'    field of all JSObjects.
+ * - ACCSET_OBJ_SHAPE:     The 'shape'    field of all JSObjects.
+ * - ACCSET_OBJ_PROTO:     The 'proto'    field of all JSObjects.
+ * - ACCSET_OBJ_PARENT:    The 'parent'   field of all JSObjects.
+ * - ACCSET_OBJ_PRIVATE:   The 'private'  field of all JSObjects.
+ * - ACCSET_OBJ_CAPACITY:  The 'capacity' field of all JSObjects.
+ * - ACCSET_OBJ_SLOTS:     The 'slots'    field of all JSObjects.
+ * - ACCSET_SLOTS:         The slots (be they fixed or dynamic) of all JSObjects.
+ * - ACCSET_TARRAY:        All TypedArray structs.
+ * - ACCSET_TARRAY_DATA:   All TypedArray data arrays.
+ * - ACCSET_ITER:          All NativeIterator structs.
+ * - ACCSET_ITER_PROPS:    The props_arrays of all NativeIterator structs.
+ * - ACCSET_STRING:        All JSString structs.
+ * - ACCSET_STRING_MCHARS: All JSString mchars arrays.
+ * - ACCSET_TYPEMAP:       All typemaps form a single region.
+ * - ACCSET_FCSLOTS:       All fcslots arrays form a single region.
+ * - ACCSET_ARGS_DATA:     All Arguments data arrays form a single region.
+ */
+static const nanojit::AccSet ACCSET_STATE         = (1 <<  0);
+static const nanojit::AccSet ACCSET_STACK         = (1 <<  1);
+static const nanojit::AccSet ACCSET_RSTACK        = (1 <<  2);
+static const nanojit::AccSet ACCSET_CX            = (1 <<  3);
+static const nanojit::AccSet ACCSET_EOS           = (1 <<  4);
+static const nanojit::AccSet ACCSET_ALLOC         = (1 <<  5);
+static const nanojit::AccSet ACCSET_FRAMEREGS     = (1 <<  6);
+static const nanojit::AccSet ACCSET_STACKFRAME    = (1 <<  7);
+static const nanojit::AccSet ACCSET_RUNTIME       = (1 <<  8);
+
+// Nb: JSObject::{lastProp,map} don't have an AccSet because they are never accessed on trace
+static const nanojit::AccSet ACCSET_OBJ_CLASP     = (1 <<  9);
+static const nanojit::AccSet ACCSET_OBJ_FLAGS     = (1 << 10);
+static const nanojit::AccSet ACCSET_OBJ_SHAPE     = (1 << 11);
+static const nanojit::AccSet ACCSET_OBJ_PROTO     = (1 << 12);
+static const nanojit::AccSet ACCSET_OBJ_PARENT    = (1 << 13);
+static const nanojit::AccSet ACCSET_OBJ_PRIVATE   = (1 << 14);
+static const nanojit::AccSet ACCSET_OBJ_CAPACITY  = (1 << 15);
+static const nanojit::AccSet ACCSET_OBJ_SLOTS     = (1 << 16);  // the pointer to the slots
+
+static const nanojit::AccSet ACCSET_SLOTS         = (1 << 17);  // the slots themselves
+static const nanojit::AccSet ACCSET_TARRAY        = (1 << 18);
+static const nanojit::AccSet ACCSET_TARRAY_DATA   = (1 << 19);
+static const nanojit::AccSet ACCSET_ITER          = (1 << 20);
+static const nanojit::AccSet ACCSET_ITER_PROPS    = (1 << 21);
+static const nanojit::AccSet ACCSET_STRING        = (1 << 22);
+static const nanojit::AccSet ACCSET_STRING_MCHARS = (1 << 23);
+static const nanojit::AccSet ACCSET_TYPEMAP       = (1 << 24);
+static const nanojit::AccSet ACCSET_FCSLOTS       = (1 << 25);
+static const nanojit::AccSet ACCSET_ARGS_DATA     = (1 << 26);
+}
+
+static const uint8_t TM_NUM_USED_ACCS = 27; // number of access regions used by TraceMonkey
+
 enum JSTNErrType { INFALLIBLE, FAIL_STATUS, FAIL_NULL, FAIL_NEG, FAIL_NEITHER };
 enum { 
     JSTN_ERRTYPE_MASK        = 0x07,
     JSTN_UNBOX_AFTER         = 0x08,
     JSTN_MORE                = 0x10,
     JSTN_CONSTRUCTOR         = 0x20,
     JSTN_RETURN_NULLABLE_STR = 0x40,
     JSTN_RETURN_NULLABLE_OBJ = 0x80
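
The region table above is what lets individual builtins advertise precise effects. Instead of the old catch-all ACCSET_STORE_ANY on every CALLINFO, a native that provably never writes some region can subtract it, as js_EnsureDenseArrayCapacity now does for clasp, and ValidateWriter::checkAccSet() enforces the claim in debug builds. A sketch for a hypothetical builtin (MyNative is illustrative, not part of the patch):

    /* Sketch: neither MyNative nor anything it calls stores to any object's
     * clasp field, so loads of clasp may be CSE'd across the call. */
    JS_DEFINE_CALLINFO_3(extern, BOOL, MyNative, CONTEXT, OBJECT, INT32,
                         0, nanojit::ACCSET_STORE_ANY & ~ACCSET_OBJ_CLASP)
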
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -470,19 +470,17 @@ AllFramesIter::AllFramesIter(JSContext *
 {
 }
 
 AllFramesIter&
 AllFramesIter::operator++()
 {
     JS_ASSERT(!done());
     if (curfp == curcs->getInitialFrame()) {
-        do {
-            curcs = curcs->getPreviousInMemory();
-        } while (curcs && !curcs->inContext());
+        curcs = curcs->getPreviousInMemory();
         curfp = curcs ? curcs->getCurrentFrame() : NULL;
     } else {
         curfp = curfp->prev();
     }
     return *this;
 }
 
 bool
@@ -1024,20 +1022,20 @@ js_DestroyContext(JSContext *cx, JSDestr
      */
     JS_ASSERT(cx->thread && CURRENT_THREAD_IS_ME(cx->thread));
     if (!cx->thread)
         JS_SetContextThread(cx);
 
     /*
      * For API compatibility we support destroying contexts with non-zero
      * cx->outstandingRequests but we assume that all JS_BeginRequest calls
-     * on this cx contributes to cx->thread->data.requestDepth and there is no
+     * on this cx contribute to cx->thread->requestDepth and there are no
      * JS_SuspendRequest calls that set aside the counter.
      */
-    JS_ASSERT(cx->outstandingRequests <= cx->thread->data.requestDepth);
+    JS_ASSERT(cx->outstandingRequests <= cx->thread->requestDepth);
 #endif
 
     if (mode != JSDCM_NEW_FAILED) {
         cxCallback = rt->cxCallback;
         if (cxCallback) {
             /*
              * JSCONTEXT_DESTROY callback is not allowed to fail and must
              * return true.
@@ -1052,17 +1050,17 @@ js_DestroyContext(JSContext *cx, JSDestr
 
     JS_LOCK_GC(rt);
     JS_ASSERT(rt->state == JSRTS_UP || rt->state == JSRTS_LAUNCHING);
 #ifdef JS_THREADSAFE
     /*
      * Typically we are called outside a request, so ensure that the GC is not
      * running before removing the context from rt->contextList, see bug 477021.
      */
-    if (cx->thread->data.requestDepth == 0)
+    if (cx->thread->requestDepth == 0)
         js_WaitForGC(rt);
 #endif
     JS_REMOVE_LINK(&cx->link);
     last = (rt->contextList.next == &rt->contextList);
     if (last)
         rt->state = JSRTS_LANDING;
     if (last || mode == JSDCM_FORCE_GC || mode == JSDCM_MAYBE_GC
 #ifdef JS_THREADSAFE
@@ -1080,17 +1078,17 @@ js_DestroyContext(JSContext *cx, JSDestr
              * that we wait for any racing GC started on a not-last context to
              * finish, before we plow ahead and unpin atoms. Note that even
              * though we begin a request here if necessary, we end all
              * thread's requests before forcing a final GC. This lets any
              * not-last context destruction racing in another thread try to
              * force or maybe run the GC, but by that point, rt->state will
              * not be JSRTS_UP, and that GC attempt will return early.
              */
-            if (cx->thread->data.requestDepth == 0)
+            if (cx->thread->requestDepth == 0)
                 JS_BeginRequest(cx);
 #endif
 
             Shape::finishRuntimeState(cx);
             js_FinishRuntimeNumberState(cx);
 
             /* Unpin all common atoms before final GC. */
             js_FinishCommonAtoms(cx);
@@ -1130,17 +1128,17 @@ js_DestroyContext(JSContext *cx, JSDestr
             js_WaitForGC(rt);
         }
     }
 #ifdef JS_THREADSAFE
 #ifdef DEBUG
     JSThread *t = cx->thread;
 #endif
     js_ClearContextThread(cx);
-    JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->data.requestDepth);
+    JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->requestDepth);
 #endif
 #ifdef JS_METER_DST_OFFSET_CACHING
     cx->dstOffsetCache.dumpStats();
 #endif
     JS_UNLOCK_GC(rt);
     FreeContext(cx);
 }
 
@@ -1205,17 +1203,17 @@ js_ContextIterator(JSRuntime *rt, JSBool
 }
 
 JS_FRIEND_API(JSContext *)
 js_NextActiveContext(JSRuntime *rt, JSContext *cx)
 {
     JSContext *iter = cx;
 #ifdef JS_THREADSAFE
     while ((cx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {
-        if (cx->outstandingRequests && cx->thread->data.requestDepth)
+        if (cx->outstandingRequests && cx->thread->requestDepth)
             break;
     }
     return cx;
 #else
     return js_ContextIterator(rt, JS_FALSE, &iter);
 #endif
 }
 
@@ -1851,41 +1849,35 @@ js_GetErrorMessage(void *userRef, const 
     if ((errorNumber > 0) && (errorNumber < JSErr_Limit))
         return &js_ErrorFormatString[errorNumber];
     return NULL;
 }
 
 JSBool
 js_InvokeOperationCallback(JSContext *cx)
 {
-    JSRuntime *rt = cx->runtime;
-    JSThreadData *td = JS_THREAD_DATA(cx);
-
     JS_ASSERT_REQUEST_DEPTH(cx);
-    JS_ASSERT(td->interruptFlags != 0);
+    JS_ASSERT(JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK);
 
     /*
-     * Reset the callback counter first, then yield. If another thread is racing
+     * Reset the callback flag first, then yield. If another thread is racing
      * us here we will accumulate another callback request which will be
      * serviced at the next opportunity.
      */
-    JS_LOCK_GC(rt);
-    td->interruptFlags = 0;
-#ifdef JS_THREADSAFE
-    JS_ATOMIC_DECREMENT(&rt->interruptCounter);
-#endif
-    JS_UNLOCK_GC(rt);
+    JS_ATOMIC_CLEAR_MASK(&JS_THREAD_DATA(cx)->interruptFlags,
+                         JSThreadData::INTERRUPT_OPERATION_CALLBACK);
 
     /*
      * Unless we are going to run the GC, we automatically yield the current
      * context every time the operation callback is hit since we might be
      * called as a result of an impending GC, which would deadlock if we do
      * not yield. Operation callbacks are supposed to happen rarely (seconds,
      * not milliseconds) so it is acceptable to yield at every callback.
      */
+    JSRuntime *rt = cx->runtime;
     if (rt->gcIsNeeded) {
         js_GC(cx, GC_NORMAL);
 
         /*
          * On trace we can exceed the GC quota, see comments in NewGCArena. So
          * we check the quota and report OOM here when we are off trace.
          */
         bool delayedOutOfMemory;
@@ -1913,49 +1905,28 @@ js_InvokeOperationCallback(JSContext *cx
 
     return !cb || cb(cx);
 }
 
 JSBool
 js_HandleExecutionInterrupt(JSContext *cx)
 {
     JSBool result = JS_TRUE;
-    if (JS_THREAD_DATA(cx)->interruptFlags)
+    if (JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK)
         result = js_InvokeOperationCallback(cx) && result;
     return result;
 }
 
 namespace js {
 
 void
-TriggerOperationCallback(JSContext *cx)
-{
-    /*
-     * We allow for cx to come from another thread. Thus we must deal with
-     * possible JS_ClearContextThread calls when accessing cx->thread. But we
-     * assume that the calling thread is in a request so JSThread cannot be
-     * GC-ed.
-     */
-    JSThreadData *td;
-#ifdef JS_THREADSAFE
-    JSThread *thread = cx->thread;
-    if (!thread)
-        return;
-    td = &thread->data;
-#else
-    td = JS_THREAD_DATA(cx);
-#endif
-    td->triggerOperationCallback(cx->runtime);
-}
-
-void
 TriggerAllOperationCallbacks(JSRuntime *rt)
 {
     for (ThreadDataIter i(rt); !i.empty(); i.popFront())
-        i.threadData()->triggerOperationCallback(rt);
+        i.threadData()->triggerOperationCallback();
 }
 
 } /* namespace js */
 
 JSStackFrame *
 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
 {
     if (!fp)
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -216,20 +216,16 @@ struct TracerState
 
     // Used to communicate the location of the return value in case of a deep bail.
     double*        deepBailSp;
 
     // Used when calling natives from trace to root the vp vector.
     uintN          nativeVpLen;
     js::Value*     nativeVp;
 
-    // The regs pointed to by cx->regs while a deep-bailed slow native
-    // completes execution.
-    JSFrameRegs    bailedSlowNativeRegs;
-
     TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti,
                 uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
     ~TracerState();
 };
 
 #ifdef JS_METHODJIT
 namespace mjit {
     struct Trampolines
@@ -1103,27 +1099,21 @@ struct JSFunctionMeter {
 #define NATIVE_ITER_CACHE_SIZE  JS_BIT(NATIVE_ITER_CACHE_LOG2)
 
 struct JSPendingProxyOperation {
     JSPendingProxyOperation *next;
     JSObject *object;
 };
 
 struct JSThreadData {
-#ifdef JS_THREADSAFE
-    /* The request depth for this thread. */
-    unsigned            requestDepth;
-#endif
-
     /*
-     * If non-zero, we were been asked to call the operation callback as soon
-     * as possible.  If the thread has an active request, this contributes
-     * towards rt->interruptCounter.
+     * If this flag is set, we were asked to call back the operation callback
+     * as soon as possible.
      */
-    volatile int32      interruptFlags;
+    volatile jsword     interruptFlags;
 
     /* Keeper of the contiguous stack used by all contexts in this thread. */
     js::StackSpace      stackSpace;
 
     /*
      * Flag indicating that we are waiving any soft limits on the GC heap
      * because we want allocations to be infallible (except when we hit OOM).
      */
@@ -1198,18 +1188,27 @@ struct JSThreadData {
         return mathCache ? mathCache : allocMathCache(cx);
     }
 
     bool init();
     void finish();
     void mark(JSTracer *trc);
     void purge(JSContext *cx);
 
-    /* This must be called with the GC lock held. */
-    inline void triggerOperationCallback(JSRuntime *rt);
+    static const jsword INTERRUPT_OPERATION_CALLBACK = 0x1;
+
+    void triggerOperationCallback() {
+        /*
+         * Use JS_ATOMIC_SET in the hope that it will make sure the write will
+         * become immediately visible to other processors polling the flag.
+         * Note that we only care about visibility here, not read/write
+         * ordering.
+         */
+        JS_ATOMIC_SET_MASK(&interruptFlags, INTERRUPT_OPERATION_CALLBACK);
+    }
 };
 
 #ifdef JS_THREADSAFE
 
 /*
  * Structure uniquely representing a thread.  It holds thread-private data
  * that can be accessed without a global lock.
  */
@@ -1232,16 +1231,19 @@ struct JSThread {
      * This thread is inside js_GC, either waiting until it can start GC, or
      * waiting for GC to finish on another thread. This thread holds no locks;
      * other threads may steal titles from it.
      *
      * Protected by rt->gcLock.
      */
     bool                gcWaiting;
 
+    /* The request depth for this thread. */
+    unsigned            requestDepth;
+
     /* Number of JS_SuspendRequest calls without JS_ResumeRequest. */
     unsigned            suspendCount;
 
 # ifdef DEBUG
     unsigned            checkRequestDepth;
 # endif
 
     /* Weak ref, for low-cost sealed title locking */
@@ -1519,20 +1521,27 @@ struct JSRuntime {
     PRLock              *scriptFilenameTableLock;
 #endif
 
     /* Number localization, used by jsnum.c */
     const char          *thousandsSeparator;
     const char          *decimalSeparator;
     const char          *numGrouping;
 
-#ifdef JS_THREADSAFE
-    /* Number of threads with active requests and unhandled interrupts. */
-    volatile int32      interruptCounter;
-#else
+    /*
+     * Weak references to lazily-created, well-known XML singletons.
+     *
+     * NB: Singleton objects must be carefully disconnected from the rest of
+     * the object graph usually associated with a JSContext's global object,
+     * including the set of standard class objects.  See jsxml.c for details.
+     */
+    JSObject            *anynameObject;
+    JSObject            *functionNamespaceObject;
+
+#ifndef JS_THREADSAFE
     JSThreadData        threadData;
 
 #define JS_THREAD_DATA(cx)      (&(cx)->runtime->threadData)
 #endif
 
     /*
      * Object shape (property cache structural type) identifier generator.
      *
@@ -2003,17 +2012,17 @@ struct JSContext
 
     bool hasfp() {
         JS_ASSERT_IF(regs, regs->fp);
         return !!regs;
     }
 
   public:
     friend class js::StackSpace;
-    friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, uintN);
+    friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, JSInterpMode);
 
     void resetCompartment();
 
     /* 'regs' must only be changed by calling this function. */
     void setCurrentRegs(JSFrameRegs *regs) {
         JS_ASSERT_IF(regs, regs->fp);
         this->regs = regs;
         if (!regs)
@@ -2406,17 +2415,17 @@ class AutoCheckRequestDepth {
         cx->thread->checkRequestDepth--;
     }
 };
 
 }
 
 # define CHECK_REQUEST(cx)                                                    \
     JS_ASSERT((cx)->thread);                                                  \
-    JS_ASSERT((cx)->thread->data.requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
+    JS_ASSERT((cx)->thread->requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
     AutoCheckRequestDepth _autoCheckRequestDepth(cx);
 
 #else
 # define CHECK_REQUEST(cx)          ((void) 0)
 # define CHECK_REQUEST_THREAD(cx)   ((void) 0)
 #endif
 
 static inline uintN
@@ -2902,16 +2911,33 @@ class AutoReleasePtr {
     JSContext   *cx;
     void        *ptr;
     AutoReleasePtr operator=(const AutoReleasePtr &other);
   public:
     explicit AutoReleasePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
     ~AutoReleasePtr() { cx->free(ptr); }
 };
 
+/*
+ * FIXME: bug 602774: cleaner API for AutoReleaseNullablePtr
+ */
+class AutoReleaseNullablePtr {
+    JSContext   *cx;
+    void        *ptr;
+    AutoReleaseNullablePtr operator=(const AutoReleaseNullablePtr &other);
+  public:
+    explicit AutoReleaseNullablePtr(JSContext *cx, void *ptr) : cx(cx), ptr(ptr) {}
+    void reset(void *ptr2) {
+        if (ptr)
+            cx->free(ptr);
+        ptr = ptr2;
+    }
+    ~AutoReleaseNullablePtr() { if (ptr) cx->free(ptr); }
+};
+
 class AutoLocalNameArray {
   public:
     explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun
                                 JS_GUARD_OBJECT_NOTIFIER_PARAM)
       : context(cx),
         mark(JS_ARENA_MARK(&cx->tempPool)),
         names(fun->getLocalNameArray(cx, &cx->tempPool)),
         count(fun->countLocalNames())
@@ -3169,67 +3195,43 @@ js_ReportValueErrorFlags(JSContext *cx, 
 #define js_ReportValueError3(cx,errorNumber,spindex,v,fallback,arg1,arg2)     \
     ((void)js_ReportValueErrorFlags(cx, JSREPORT_ERROR, errorNumber,          \
                                     spindex, v, fallback, arg1, arg2))
 
 extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
 
 #ifdef JS_THREADSAFE
 # define JS_ASSERT_REQUEST_DEPTH(cx)  (JS_ASSERT((cx)->thread),               \
-                                       JS_ASSERT((cx)->thread->data.requestDepth >= 1))
+                                       JS_ASSERT((cx)->thread->requestDepth >= 1))
 #else
 # define JS_ASSERT_REQUEST_DEPTH(cx)  ((void) 0)
 #endif
 
 /*
  * If the operation callback flag was set, call the operation callback.
  * This macro can run the full GC. Return true if it is OK to continue and
  * false otherwise.
  */
 #define JS_CHECK_OPERATION_LIMIT(cx)                                          \
     (JS_ASSERT_REQUEST_DEPTH(cx),                                             \
-     (!JS_THREAD_DATA(cx)->interruptFlags || js_InvokeOperationCallback(cx)))
-
-JS_ALWAYS_INLINE void
-JSThreadData::triggerOperationCallback(JSRuntime *rt)
-{
-    /*
-     * Use JS_ATOMIC_SET and JS_ATOMIC_INCREMENT in the hope that it ensures
-     * the write will become immediately visible to other processors polling
-     * the flag.  Note that we only care about visibility here, not read/write
-     * ordering: this field can only be written with the GC lock held.
-     */
-    if (interruptFlags)
-        return;
-    JS_ATOMIC_SET(&interruptFlags, 1);
-
-#ifdef JS_THREADSAFE
-    /* rt->interruptCounter does not reflect suspended threads. */
-    if (requestDepth != 0)
-        JS_ATOMIC_INCREMENT(&rt->interruptCounter);
-#endif
-}
+     (!(JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK) || js_InvokeOperationCallback(cx)))
 
 /*
  * Invoke the operation callback and return false if the current execution
  * is to be terminated.
  */
 extern JSBool
 js_InvokeOperationCallback(JSContext *cx);
 
 extern JSBool
 js_HandleExecutionInterrupt(JSContext *cx);
 
 namespace js {
 
-/* These must be called with GC lock taken. */
-
-JS_FRIEND_API(void)
-TriggerOperationCallback(JSContext *cx);
-
+/* Must be called with GC lock taken. */
 void
 TriggerAllOperationCallbacks(JSRuntime *rt);
 
 } /* namespace js */
 
 extern JSStackFrame *
 js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
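
Taken together, the jscntxt changes replace the old protocol (a per-thread counter mirrored into rt->interruptCounter under the GC lock) with one atomic bit per thread: triggerOperationCallback() sets INTERRUPT_OPERATION_CALLBACK via JS_ATOMIC_SET_MASK, JS_CHECK_OPERATION_LIMIT polls it on the hot path, and js_InvokeOperationCallback clears it with JS_ATOMIC_CLEAR_MASK before servicing, so a racing trigger is never lost. The protocol reduced to its essentials, as a sketch in portable C++ (the real code uses the JS_ATOMIC_* macros on a jsword):

    #include <atomic>

    struct ThreadDataSketch {
        static const unsigned INTERRUPT_OPERATION_CALLBACK = 0x1;
        std::atomic<unsigned> interruptFlags;

        /* Any thread may request an interrupt; only visibility matters. */
        void triggerOperationCallback() {
            interruptFlags.fetch_or(INTERRUPT_OPERATION_CALLBACK);
        }

        /* The owning thread polls this in its interpreter loop. */
        bool checkOperationLimit() {
            if (!(interruptFlags.load() & INTERRUPT_OPERATION_CALLBACK))
                return true;                                      /* fast path */
            interruptFlags.fetch_and(~INTERRUPT_OPERATION_CALLBACK); /* clear first */
            return invokeOperationCallback();  /* racing triggers re-set the bit */
        }
        bool invokeOperationCallback();        /* runs GC / embedder callback */
    };
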
 
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -230,22 +230,16 @@ JS_SetTrap(JSContext *cx, JSScript *scri
         return JS_FALSE;
 
     if (script == JSScript::emptyScript()) {
         JS_ReportErrorFlagsAndNumber(cx, JSREPORT_ERROR, js_GetErrorMessage,
                                      NULL, JSMSG_READ_ONLY, "empty script");
         return JS_FALSE;
     }
 
-    if (JSOp(*pc) == JSOP_BEGIN) {
-        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_ERROR, js_GetErrorMessage,
-                                     NULL, JSMSG_READ_ONLY, "trap invalid on BEGIN opcode");
-        return JS_FALSE;
-    }
-
     JS_ASSERT((JSOp) *pc != JSOP_TRAP);
     junk = NULL;
     rt = cx->runtime;
     DBG_LOCK(rt);
     trap = FindTrap(rt, script, pc);
     if (trap) {
         JS_ASSERT(trap->script == script && trap->pc == pc);
         JS_ASSERT(*pc == JSOP_TRAP);
@@ -1017,23 +1011,16 @@ JS_PCToLineNumber(JSContext *cx, JSScrip
 
 JS_PUBLIC_API(jsbytecode *)
 JS_LineNumberToPC(JSContext *cx, JSScript *script, uintN lineno)
 {
     return js_LineNumberToPC(script, lineno);
 }
 
 JS_PUBLIC_API(jsbytecode *)
-JS_FirstValidPC(JSContext *cx, JSScript *script)
-{
-    jsbytecode *pc = script->code;
-    return *pc == JSOP_BEGIN ? pc + JSOP_BEGIN_LENGTH : pc;
-}
-
-JS_PUBLIC_API(jsbytecode *)
 JS_EndPC(JSContext *cx, JSScript *script)
 {
     return script->code + script->length;
 }
 
 JS_PUBLIC_API(uintN)
 JS_GetFunctionArgumentCount(JSContext *cx, JSFunction *fun)
 {
--- a/js/src/jsdbgapi.h
+++ b/js/src/jsdbgapi.h
@@ -155,19 +155,16 @@ js_WrapWatchedSetter(JSContext *cx, jsid
 
 extern JS_PUBLIC_API(uintN)
 JS_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc);
 
 extern JS_PUBLIC_API(jsbytecode *)
 JS_LineNumberToPC(JSContext *cx, JSScript *script, uintN lineno);
 
 extern JS_PUBLIC_API(jsbytecode *)
-JS_FirstValidPC(JSContext *cx, JSScript *script);
-
-extern JS_PUBLIC_API(jsbytecode *)
 JS_EndPC(JSContext *cx, JSScript *script);
 
 extern JS_PUBLIC_API(uintN)
 JS_GetFunctionArgumentCount(JSContext *cx, JSFunction *fun);
 
 extern JS_PUBLIC_API(JSBool)
 JS_FunctionHasLocalNames(JSContext *cx, JSFunction *fun);
 
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -106,17 +106,18 @@ JSCodeGenerator::JSCodeGenerator(Parser 
     spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
     numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
     arrayCompDepth(0),
     emitLevel(0),
     constMap(parser->context),
     constList(parser->context),
     globalUses(ContextAllocPolicy(parser->context)),
     closedArgs(ContextAllocPolicy(parser->context)),
-    closedVars(ContextAllocPolicy(parser->context))
+    closedVars(ContextAllocPolicy(parser->context)),
+    traceIndex(0)
 {
     flags = TCF_COMPILING;
     memset(&prolog, 0, sizeof prolog);
     memset(&main, 0, sizeof main);
     current = &main;
     firstLine = prolog.currentLine = main.currentLine = lineno;
     prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
     memset(&upvarMap, 0, sizeof upvarMap);
@@ -1373,16 +1374,17 @@ js_PushStatement(JSTreeContext *tc, JSSt
 }
 
 void
 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObjectBox *blockBox,
                   ptrdiff_t top)
 {
     js_PushStatement(tc, stmt, STMT_BLOCK, top);
     stmt->flags |= SIF_SCOPE;
+    blockBox->parent = tc->blockChainBox;
     blockBox->object->setParent(tc->blockChain());
     stmt->downScope = tc->topScopeStmt;
     tc->topScopeStmt = stmt;
     tc->blockChainBox = blockBox;
     stmt->blockBox = blockBox;
 }
 
 /*
@@ -1396,16 +1398,25 @@ EmitBackPatchOp(JSContext *cx, JSCodeGen
 
     offset = CG_OFFSET(cg);
     delta = offset - *lastp;
     *lastp = offset;
     JS_ASSERT(delta > 0);
     return EmitJump(cx, cg, op, delta);
 }
 
+static ptrdiff_t
+EmitTraceOp(JSContext *cx, JSCodeGenerator *cg)
+{
+    uint32 index = cg->traceIndex;
+    if (index < UINT16_MAX)
+        cg->traceIndex++;
+    return js_Emit3(cx, cg, JSOP_TRACE, UINT16_HI(index), UINT16_LO(index));
+}
+
 /*
  * Macro to emit a bytecode followed by a uint16 immediate operand stored in
  * big-endian order, used for arg and var numbers as well as for atomIndexes.
  * NB: We use cx and cg from our caller's lexical environment, and return
  * false on error.
  */
 #define EMIT_UINT16_IMM_OP(op, i)                                             \
     JS_BEGIN_MACRO                                                            \
@@ -1586,21 +1597,17 @@ js_PopStatement(JSTreeContext *tc)
 {
     JSStmtInfo *stmt;
 
     stmt = tc->topStmt;
     tc->topStmt = stmt->down;
     if (STMT_LINKS_SCOPE(stmt)) {
         tc->topScopeStmt = stmt->downScope;
         if (stmt->flags & SIF_SCOPE) {
-            if (stmt->downScope) {
-                tc->blockChainBox = stmt->downScope->blockBox;
-            } else {
-                tc->blockChainBox = NULL;
-            }
+            tc->blockChainBox = stmt->blockBox->parent;
             JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
         }
     }
 }
 
 JSBool
 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
 {
@@ -3711,25 +3718,20 @@ out:
 bad:
     ok = JS_FALSE;
     goto out;
 }
 
 JSBool
 js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
 {
-    CG_SWITCH_TO_PROLOG(cg);
-    JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
-    if (js_Emit1(cx, cg, JSOP_BEGIN) < 0)
-        return false;
-    CG_SWITCH_TO_MAIN(cg);
-
     if (cg->flags & TCF_FUN_IS_GENERATOR) {
-        /* JSOP_GENERATOR must be the first real instruction. */
+        /* JSOP_GENERATOR must be the first instruction. */
         CG_SWITCH_TO_PROLOG(cg);
+        JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
         if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
             return false;
         CG_SWITCH_TO_MAIN(cg);
     }
 
     if (cg->needsEagerArguments()) {
         CG_SWITCH_TO_PROLOG(cg);
         if (js_Emit1(cx, cg, JSOP_ARGUMENTS) < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
@@ -4356,17 +4358,17 @@ EmitVariables(JSContext *cx, JSCodeGener
                 if (!js_EmitTree(cx, cg, pn3))
                     return JS_FALSE;
                 cg->flags |= oldflags & TCF_IN_FOR_INIT;
 
 #if JS_HAS_BLOCK_SCOPE
                 if (popScope) {
                     cg->topStmt = stmt;
                     cg->topScopeStmt = scopeStmt;
-                    cg->blockChainBox = scopeStmt->blockBox;
+                    JS_ASSERT(cg->blockChainBox == scopeStmt->blockBox);
                 }
 #endif
             }
         }
 
         /*
          * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
          * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
@@ -4824,17 +4826,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
          */
         js_PushStatement(cg, &stmtInfo, STMT_WHILE_LOOP, top);
         noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
         if (noteIndex < 0)
             return JS_FALSE;
         jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
         if (jmp < 0)
             return JS_FALSE;
-        top = js_Emit1(cx, cg, JSOP_TRACE);
+        top = EmitTraceOp(cx, cg);
         if (top < 0)
             return JS_FALSE;
         if (!js_EmitTree(cx, cg, pn->pn_right))
             return JS_FALSE;
         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
         if (!js_EmitTree(cx, cg, pn->pn_left))
             return JS_FALSE;
         beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
@@ -4847,17 +4849,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
 
       case TOK_DO:
         /* Emit an annotated nop so we know to decompile a 'do' keyword. */
         noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
         if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
             return JS_FALSE;
 
         /* Compile the loop body. */
-        top = js_Emit1(cx, cg, JSOP_TRACE);
+        top = EmitTraceOp(cx, cg);
         if (top < 0)
             return JS_FALSE;
         js_PushStatement(cg, &stmtInfo, STMT_DO_LOOP, top);
         if (!js_EmitTree(cx, cg, pn->pn_left))
             return JS_FALSE;
 
         /* Set loop and enclosing label update offsets, for continue. */
         stmt = &stmtInfo;
@@ -4947,17 +4949,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
              * least one iteration, as the other loop forms do.
              */
             jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
             if (jmp < 0)
                 return JS_FALSE;
 
             top = CG_OFFSET(cg);
             SET_STATEMENT_TOP(&stmtInfo, top);
-            if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
+            if (EmitTraceOp(cx, cg) < 0)
                 return JS_FALSE;
 
 #ifdef DEBUG
             intN loopDepth = cg->stackDepth;
 #endif
 
             /*
              * Compile a JSOP_FOR* bytecode based on the left hand side.
@@ -5174,17 +5176,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                 if (jmp < 0)
                     return JS_FALSE;
             }
 
             top = CG_OFFSET(cg);
             SET_STATEMENT_TOP(&stmtInfo, top);
 
             /* Emit code for the loop body. */
-            if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
+            if (EmitTraceOp(cx, cg) < 0)
                 return JS_FALSE;
             if (!js_EmitTree(cx, cg, pn->pn_right))
                 return JS_FALSE;
 
             /* Set the second note offset so we can find the update part. */
             JS_ASSERT(noteIndex != -1);
             if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
                                      CG_OFFSET(cg) - tmp)) {
@@ -6474,17 +6476,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
 
 #if JS_HAS_XML_SUPPORT
       case TOK_FILTER:
         if (!js_EmitTree(cx, cg, pn->pn_left))
             return JS_FALSE;
         jmp = EmitJump(cx, cg, JSOP_FILTER, 0);
         if (jmp < 0)
             return JS_FALSE;
-        top = js_Emit1(cx, cg, JSOP_TRACE);
+        top = EmitTraceOp(cx, cg);
         if (top < 0)
             return JS_FALSE;
         if (!js_EmitTree(cx, cg, pn->pn_right))
             return JS_FALSE;
         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
         if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
             return JS_FALSE;
 
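
The jsemit.cpp changes above replace the downScope walk in js_PopStatement
with an explicit parent link saved by js_PushBlockScope. A minimal
standalone sketch of that bookkeeping, assuming illustrative names
(BlockBox, TreeContext, pushBlock, popBlock are not engine identifiers):

    #include <assert.h>
    #include <stddef.h>

    struct BlockBox { BlockBox *parent; };
    struct TreeContext { BlockBox *blockChainBox; };

    /* Like js_PushBlockScope: remember the enclosing box on the way in. */
    static void pushBlock(TreeContext *tc, BlockBox *box) {
        box->parent = tc->blockChainBox;
        tc->blockChainBox = box;
    }

    /* Like the patched js_PopStatement: restore from the saved parent. */
    static void popBlock(TreeContext *tc, BlockBox *box) {
        tc->blockChainBox = box->parent;
    }

    int main() {
        TreeContext tc = { NULL };
        BlockBox outer, inner;
        pushBlock(&tc, &outer);
        pushBlock(&tc, &inner);
        popBlock(&tc, &inner);
        assert(tc.blockChainBox == &outer);  /* O(1), no downScope walk */
        popBlock(&tc, &outer);
        assert(tc.blockChainBox == NULL);
        return 0;
    }
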
--- a/js/src/jsemit.h
+++ b/js/src/jsemit.h
@@ -552,16 +552,18 @@ struct JSCodeGenerator : public JSTreeCo
     GlobalUseVector globalUses;     /* per-script global uses */
     JSAtomList      globalMap;      /* per-script map of global name to globalUses vector */
 
     /* Vectors of pn_cookie slot values. */
     typedef js::Vector<uint32, 8, js::ContextAllocPolicy> SlotVector;
     SlotVector      closedArgs;
     SlotVector      closedVars;
 
+    uint16          traceIndex;     /* index for the next JSOP_TRACE instruction */
+
     /*
      * Initialize cg to allocate bytecode space from codePool, source note
      * space from notePool, and all other arena-allocated temporaries from
      * parser->context->tempPool.
      */
     JSCodeGenerator(js::Parser *parser,
                     JSArenaPool *codePool, JSArenaPool *notePool,
                     uintN lineno);
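
The new traceIndex member is a uint16 because EmitTraceOp (in jsemit.cpp
above) emits it as a two-byte big-endian immediate via UINT16_HI/UINT16_LO
and stops incrementing at UINT16_MAX, so scripts with very many loops
saturate rather than wrap. A standalone sketch of that encoding, assuming
the helper names encode16/decode16 (they are not engine macros):

    #include <assert.h>
    #include <stdint.h>

    static void encode16(uint8_t *pc, uint16_t index) {
        pc[0] = (uint8_t)(index >> 8);    /* like UINT16_HI: high byte first */
        pc[1] = (uint8_t)(index & 0xff);  /* like UINT16_LO */
    }

    static uint16_t decode16(const uint8_t *pc) {
        return (uint16_t)((pc[0] << 8) | pc[1]);
    }

    int main() {
        uint8_t operand[2];
        uint32_t traceIndex = 0x1234;

        /* EmitTraceOp only increments while below UINT16_MAX, so the
           counter saturates instead of wrapping around. */
        if (traceIndex < UINT16_MAX)
            traceIndex++;

        encode16(operand, (uint16_t)traceIndex);
        assert(decode16(operand) == 0x1235);
        return 0;
    }
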
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -752,21 +752,21 @@ MarkStackRangeConservatively(JSTracer *t
 void
 MarkConservativeStackRoots(JSTracer *trc)
 {
 #ifdef JS_THREADSAFE
     for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
         JSThread *thread = r.front().value;
         ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
         if (ctd->hasStackToScan()) {
-            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
+            JS_ASSERT_IF(!thread->requestDepth, thread->suspendCount);
             MarkThreadDataConservatively(trc, &thread->data);
         } else {
             JS_ASSERT(!thread->suspendCount);
-            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
+            JS_ASSERT(thread->requestDepth <= ctd->requestThreshold);
         }
     }
 #else
     MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
 #endif
 }
 
 JS_NEVER_INLINE void
@@ -790,18 +790,18 @@ ConservativeGCThreadData::recordStackTop
 
 static inline void
 RecordNativeStackTopForGC(JSContext *cx)
 {
     ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;
 
 #ifdef JS_THREADSAFE
     /* Record the stack top here only if we are called from a request. */
-    JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
-    if (cx->thread->data.requestDepth == ctd->requestThreshold)
+    JS_ASSERT(cx->thread->requestDepth >= ctd->requestThreshold);
+    if (cx->thread->requestDepth == ctd->requestThreshold)
         return;
 #endif
     ctd->recordStackTop();
 }
 
 } /* namespace js */
 
 #ifdef DEBUG
@@ -1625,17 +1625,17 @@ MarkRuntime(JSTracer *trc)
      * builds for now, see bug 574313.
      */
     JSContext *iter;
 #if 1
     iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
         for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
 #ifdef JS_THREADSAFE
-            JS_ASSERT_IF(!acx->thread->data.requestDepth, acx->thread->suspendCount);
+            JS_ASSERT_IF(!acx->thread->requestDepth, acx->thread->suspendCount);
 #endif
             JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
             void *thing;
             switch (gcr->tag) {
               default:
                 continue;
               case AutoGCRooter::JSVAL: {
                 const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
@@ -2332,17 +2332,17 @@ js_WaitForGC(JSRuntime *rt)
  */
 static void
 LetOtherGCFinish(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(rt->gcThread);
     JS_ASSERT(cx->thread != rt->gcThread);
 
-    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
+    size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
     JS_ASSERT(requestDebit <= rt->requestCount);
 #ifdef JS_TRACER
     JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
 #endif
     if (requestDebit != 0) {
 #ifdef JS_TRACER
         if (JS_ON_TRACE(cx)) {
             /*
@@ -2426,26 +2426,26 @@ AutoGCSession::AutoGCSession(JSContext *
      * Notify operation callbacks on other threads, giving them a chance to
      * yield their requests. Threads without requests will run the callback
      * at some later point, when it will be unnecessary but harmless.
      */
     for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
         JSThread *thread = r.front().value;
         if (thread != cx->thread)
-            thread->data.triggerOperationCallback(rt);
+            thread->data.triggerOperationCallback();
     }
 
     /*
      * Discount the request on the current thread from contributing to
      * rt->requestCount before we wait for all other requests to finish.
      * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called when
      * rt->requestCount transitions to 0.
      */
-    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
+    size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
     JS_ASSERT(requestDebit <= rt->requestCount);
     if (requestDebit != rt->requestCount) {
         rt->requestCount -= requestDebit;
 
         /*
          * Share any title that is owned by the GC thread before we wait, to
          * avoid a deadlock with ClaimTitle. We also set the gcWaiting flag so
          * that ClaimTitle can claim the title ownership from the GC thread if
@@ -2558,17 +2558,24 @@ js_GC(JSContext *cx, JSGCInvocationKind 
      */
     if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
         return;
 
     RecordNativeStackTopForGC(cx);
 
 #ifdef DEBUG
     int stackDummy;
-    JS_ASSERT(JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy));
+# if JS_STACK_GROWTH_DIRECTION > 0
+    /* cx->stackLimit is set to jsuword(-1) by default. */
+    JS_ASSERT_IF(cx->stackLimit != jsuword(-1),
+                 JS_CHECK_STACK_SIZE(cx->stackLimit + 4096, &stackDummy));
+# else
+    /* -4k because it is possible to perform a GC during an overrecursion report. */
+    JS_ASSERT_IF(cx->stackLimit, JS_CHECK_STACK_SIZE(cx->stackLimit - 4096, &stackDummy));
+# endif
 #endif
 
     GCTIMER_BEGIN();
 
     do {
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context).  Invoke the callback regardless. Sample the
@@ -2608,17 +2615,17 @@ namespace gc {
 bool
 SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
 {
     /*
      * This function cannot be called during the GC and always requires a
      * request.
      */
 #ifdef JS_THREADSAFE
-    JS_ASSERT(cx->thread->data.requestDepth);
+    JS_ASSERT(cx->thread->requestDepth);
 
     /*
      * This is only necessary if AutoGCSession below would wait for GC to
      * finish on another thread, but to capture the minimal stack space and
      * for code simplicity we do it here unconditionally.
      */
     RecordNativeStackTopForGC(cx);
 #endif
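
The reworked debug check in js_GC above makes the assertion aware of stack
growth direction and allows 4096 bytes of slack, since a GC may run while an
over-recursion error is being reported. A rough standalone model of the
grows-down case (checkStackSize is an illustrative stand-in for
JS_CHECK_STACK_SIZE, not the real macro):

    #include <assert.h>
    #include <stdint.h>

    /* Grows-down stack: the current stack pointer must stay above limit. */
    static bool checkStackSize(uintptr_t limit, void *sp) {
        return (uintptr_t)sp > limit;
    }

    int main() {
        char dummy;
        uintptr_t sp = (uintptr_t)&dummy;
        uintptr_t limit = sp - 0x10000;  /* pretend 64 KiB of headroom */

        assert(checkStackSize(limit, &dummy));
        /* Subtracting 4096 from the limit, as the patched assertion does,
           tolerates a GC entered from an over-recursion report where sp may
           sit slightly below the nominal limit. */
        assert(checkStackSize(limit - 4096, &dummy));
        return 0;
    }
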
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -77,17 +77,16 @@
 #include "jsvector.h"
 #include "methodjit/MethodJIT.h"
 #include "methodjit/Logging.h"
 
 #include "jsatominlines.h"
 #include "jscntxtinlines.h"
 #include "jsinterpinlines.h"
 #include "jsobjinlines.h"
-#include "jsprobes.h"
 #include "jspropertycacheinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 #include "jsstrinlines.h"
 #include "jsopcodeinlines.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
@@ -729,37 +728,22 @@ Invoke(JSContext *cx, const CallArgs &ar
             JSObject *thisp = thisv.toObject().thisObject(cx);
             if (!thisp)
                  return false;
             JS_ASSERT(IsSaneThisObject(*thisp));
             thisv.setObject(*thisp);
         }
     }
 
-    JSInterpreterHook hook = cx->debugHooks->callHook;
-    void *hookData = NULL;
-    if (JS_UNLIKELY(hook != NULL))
-        hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData);
-
     /* Run function until JSOP_STOP, JSOP_RETURN or error. */
     JSBool ok;
     {
         AutoPreserveEnumerators preserve(cx);
-        Probes::enterJSFun(cx, fun);
         ok = RunScript(cx, script, fp);
-        Probes::exitJSFun(cx, fun);
-    }
-
-    if (JS_UNLIKELY(hookData != NULL)) {
-        hook = cx->debugHooks->callHook;
-        if (hook)
-            hook(cx, fp, JS_FALSE, &ok, hookData);
-    }
-
-    PutActivationObjects(cx, fp);
+    }
 
     args.rval() = fp->returnValue();
     JS_ASSERT_IF(ok && (flags & JSINVOKE_CONSTRUCT), !args.rval().isPrimitive());
 
     return ok;
 }
 
 bool
@@ -2156,21 +2140,38 @@ IteratorNext(JSContext *cx, JSObject *it
             *rval = *ni->currentValue();
             ni->incValueCursor();
             return true;
         }
     }
     return js_IteratorNext(cx, iterobj, rval);
 }
 
+static inline bool
+ScriptPrologue(JSContext *cx, JSStackFrame *fp)
+{
+    if (fp->isConstructing()) {
+        JSObject *obj = js_CreateThisForFunction(cx, &fp->callee());
+        if (!obj)
+            return false;
+        fp->functionThis().setObject(*obj);
+    }
+    JSInterpreterHook hook = cx->debugHooks->callHook;
+    if (JS_UNLIKELY(hook != NULL) && !fp->isExecuteFrame())
+        fp->setHookData(hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData));
+
+    Probes::enterJSFun(cx, fp->maybeFun());
+
+    return true;
+}
 
 namespace js {
 
 JS_REQUIRES_STACK JS_NEVER_INLINE bool
-Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, uintN interpFlags)
+Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount, JSInterpMode interpMode)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_INTERP);
 #endif
     JSAutoResolveFlags rf(cx, JSRESOLVE_INFER);
 
 # ifdef DEBUG
     /*
@@ -2391,30 +2392,30 @@ Interpret(JSContext *cx, JSStackFrame *e
 #if defined(JS_TRACER) && defined(JS_METHODJIT)
 # define LEAVE_ON_SAFE_POINT()                                                \
     do {                                                                      \
         JS_ASSERT_IF(leaveOnSafePoint, !TRACE_RECORDER(cx));                  \
         if (leaveOnSafePoint && !regs.fp->hasImacropc() &&                    \
             script->maybeNativeCodeForPC(regs.fp->isConstructing(), regs.pc)) { \
             JS_ASSERT(!TRACE_RECORDER(cx));                                   \
             interpReturnOK = true;                                            \
-            goto stop_recording;                                              \
+            goto leave_on_safe_point;                                         \
         }                                                                     \
     } while (0)
 #else
 # define LEAVE_ON_SAFE_POINT() /* nop */
 #endif
 
 #define BRANCH(n)                                                             \
     JS_BEGIN_MACRO                                                            \
         regs.pc += (n);                                                       \
         op = (JSOp) *regs.pc;                                                 \
         if ((n) <= 0) {                                                       \
             CHECK_BRANCH();                                                   \
-            if (op == JSOP_NOP) {                                             \
+            if (op == JSOP_NOTRACE) {                                         \
                 if (TRACE_RECORDER(cx)) {                                     \
                     MONITOR_BRANCH();                                         \
                     op = (JSOp) *regs.pc;                                     \
                 }                                                             \
             } else if (op == JSOP_TRACE) {                                    \
                 MONITOR_BRANCH();                                             \
                 op = (JSOp) *regs.pc;                                         \
             }                                                                 \
@@ -2427,35 +2428,47 @@ Interpret(JSContext *cx, JSStackFrame *e
     JS_BEGIN_MACRO                                                            \
         if (cx->debugHooks->interruptHook)                                    \
             ENABLE_INTERRUPTS();                                              \
     JS_END_MACRO
 
     /* Check for an overly deep native thread stack. */
     JS_CHECK_RECURSION(cx, return JS_FALSE);
 
-    MUST_FLOW_THROUGH("exit");
-    ++cx->interpLevel;
+    JSFrameRegs regs = *cx->regs;
 
     /* Repoint cx->regs to a local variable for faster access. */
-    JSFrameRegs *const prevContextRegs = cx->regs;
-    JSFrameRegs regs = *cx->regs;
-    cx->setCurrentRegs(&regs);
+    struct InterpExitGuard {
+        JSContext *cx;
+        const JSFrameRegs &regs;
+        JSFrameRegs *prevContextRegs;
+        InterpExitGuard(JSContext *cx, JSFrameRegs &regs)
+          : cx(cx), regs(regs), prevContextRegs(cx->regs) {
+            cx->setCurrentRegs(&regs);
+            ++cx->interpLevel;
+        }
+        ~InterpExitGuard() {
+            --cx->interpLevel;
+            JS_ASSERT(cx->regs == &regs);
+            *prevContextRegs = regs;
+            cx->setCurrentRegs(prevContextRegs);
+        }
+    } interpGuard(cx, regs);
 
     /* Copy in hot values that change infrequently. */
     JSRuntime *const rt = cx->runtime;
     JSScript *script = regs.fp->script();
     Value *argv = regs.fp->maybeFormalArgs();
     CHECK_INTERRUPT_HANDLER();
 
     JS_ASSERT(!script->isEmpty());
-    JS_ASSERT(script->length > 1);
+    JS_ASSERT(script->length >= 1);
 
 #if defined(JS_TRACER) && defined(JS_METHODJIT)
-    bool leaveOnSafePoint = !!(interpFlags & JSINTERP_SAFEPOINT);
+    bool leaveOnSafePoint = (interpMode == JSINTERP_SAFEPOINT);
 # define CLEAR_LEAVE_ON_TRACE_POINT() ((void) (leaveOnSafePoint = false))
 #else
 # define CLEAR_LEAVE_ON_TRACE_POINT() ((void) 0)
 #endif
 
     if (!entryFrame)
         entryFrame = regs.fp;
 
@@ -2465,17 +2478,17 @@ Interpret(JSContext *cx, JSStackFrame *e
      * the atom map to turn frequently executed LOAD_ATOM into simple array
      * access. For less frequent object and regexp loads we have to recover
      * the segment from atoms pointer first.
      */
     JSAtom **atoms = script->atomMap.vector;
 
 #if JS_HAS_GENERATORS
     if (JS_UNLIKELY(regs.fp->isGeneratorFrame())) {
-        JS_ASSERT(prevContextRegs == &cx->generatorFor(regs.fp)->regs);
+        JS_ASSERT(interpGuard.prevContextRegs == &cx->generatorFor(regs.fp)->regs);
         JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
         JS_ASSERT((size_t) (regs.sp - regs.fp->base()) <= StackDepth(script));
 
         /*
          * To support generator_throw and to catch ignored exceptions,
          * fail if cx->throwing is set.
          */
         if (cx->throwing)
@@ -2484,27 +2497,36 @@ Interpret(JSContext *cx, JSStackFrame *e
 #endif
 
 #ifdef JS_TRACER
     /*
      * The method JIT may have already initiated a recording, in which case
      * there should already be a valid recorder. Otherwise...
      * we cannot reenter the interpreter while recording.
      */
-    if (interpFlags & JSINTERP_RECORD) {
+    if (interpMode == JSINTERP_RECORD) {
         JS_ASSERT(TRACE_RECORDER(cx));
         ENABLE_INTERRUPTS();
     } else if (TRACE_RECORDER(cx)) {
         AbortRecording(cx, "attempt to reenter interpreter while recording");
     }
 
     if (regs.fp->hasImacropc())
         atoms = COMMON_ATOMS_START(&rt->atomState);
 #endif
 
+    /* Don't run the script prologue when executing between the method JIT and the trace JIT. */
+    if (interpMode == JSINTERP_NORMAL) {
+        JS_ASSERT_IF(!regs.fp->isGeneratorFrame(), regs.pc == script->code);
+        if (!ScriptPrologue(cx, regs.fp))
+            goto error;
+    }
+
+    CHECK_INTERRUPT_HANDLER();
+
     /* State communicated between non-local jumps: */
     JSBool interpReturnOK;
     JSAtom *atomNotDefined;
 
     /*
      * It is important that "op" be initialized before calling DO_OP because
      * it is possible for "op" to be specially assigned during the normal
      * processing of an opcode while looping. We rely on DO_NEXT_OP to manage
@@ -2580,17 +2602,18 @@ Interpret(JSContext *cx, JSStackFrame *e
             moreInterrupts = true;
         }
 
 #ifdef JS_TRACER
         if (TraceRecorder* tr = TRACE_RECORDER(cx)) {
             AbortableRecordingStatus status = tr->monitorRecording(op);
             JS_ASSERT_IF(cx->throwing, status == ARECORD_ERROR);
 
-            if (interpFlags & (JSINTERP_RECORD | JSINTERP_SAFEPOINT)) {
+            if (interpMode != JSINTERP_NORMAL) {
+                JS_ASSERT(interpMode == JSINTERP_RECORD || interpMode == JSINTERP_SAFEPOINT);
                 switch (status) {
                   case ARECORD_IMACRO_ABORTED:
                   case ARECORD_ABORTED:
                   case ARECORD_COMPLETED:
                   case ARECORD_STOP:
 #ifdef JS_METHODJIT
                     leaveOnSafePoint = true;
                     LEAVE_ON_SAFE_POINT();
@@ -2644,21 +2667,21 @@ Interpret(JSContext *cx, JSStackFrame *e
 /* No-ops for ease of decompilation. */
 ADD_EMPTY_CASE(JSOP_NOP)
 ADD_EMPTY_CASE(JSOP_CONDSWITCH)
 ADD_EMPTY_CASE(JSOP_TRY)
 #if JS_HAS_XML_SUPPORT
 ADD_EMPTY_CASE(JSOP_STARTXML)
 ADD_EMPTY_CASE(JSOP_STARTXMLEXPR)
 #endif
-ADD_EMPTY_CASE(JSOP_UNUSED180)
 ADD_EMPTY_CASE(JSOP_NULLBLOCKCHAIN)
 END_EMPTY_CASES
 
 BEGIN_CASE(JSOP_TRACE)
+BEGIN_CASE(JSOP_NOTRACE)
     LEAVE_ON_SAFE_POINT();
 END_CASE(JSOP_TRACE)
 
 /* ADD_EMPTY_CASE is not used here as JSOP_LINENO_LENGTH == 3. */
 BEGIN_CASE(JSOP_LINENO)
 END_CASE(JSOP_LINENO)
 
 BEGIN_CASE(JSOP_BLOCKCHAIN)
@@ -2755,37 +2778,21 @@ BEGIN_CASE(JSOP_STOP)
     }
 #endif
 
     interpReturnOK = true;
     if (entryFrame != regs.fp)
   inline_return:
     {
         JS_ASSERT(!js_IsActiveWithOrBlock(cx, &regs.fp->scopeChain(), 0));
-        if (JS_UNLIKELY(regs.fp->hasHookData())) {
-            if (JSInterpreterHook hook = cx->debugHooks->callHook) {
-                hook(cx, regs.fp, JS_FALSE, &interpReturnOK, regs.fp->hookData());
-                CHECK_INTERRUPT_HANDLER();
-            }
-        }
-
-        PutActivationObjects(cx, regs.fp);
-
-        Probes::exitJSFun(cx, regs.fp->maybeFun());
-
-        /*
-         * If inline-constructing, replace primitive rval with the new object
-         * passed in via |this|, and instrument this constructor invocation.
-         */
-        if (regs.fp->isConstructing()) {
-            if (regs.fp->returnValue().isPrimitive())
-                regs.fp->setReturnValue(ObjectValue(regs.fp->constructorThis()));
-            JS_RUNTIME_METER(cx->runtime, constructs);
-        }
-
+        interpReturnOK = ScriptEpilogue(cx, regs.fp, interpReturnOK);
+        CHECK_INTERRUPT_HANDLER();
+
+        /* The JIT inlines ScriptEpilogue. */
+  jit_return:
         Value *newsp = regs.fp->actualArgs() - 1;
         newsp[-1] = regs.fp->returnValue();
         cx->stack().popInlineFrame(cx, regs.fp->prev(), newsp);
 
         /* Sync interpreter registers. */
         script = regs.fp->script();
         argv = regs.fp->maybeFormalArgs();
         atoms = FrameAtomBase(cx, regs.fp);
@@ -2798,28 +2805,16 @@ BEGIN_CASE(JSOP_STOP)
                       == JSOP_CALL_LENGTH);
             TRACE_0(LeaveFrame);
             len = JSOP_CALL_LENGTH;
             DO_NEXT_OP(len);
         }
         goto error;
     } else {
         JS_ASSERT(regs.sp == regs.fp->base());
-        if (regs.fp->isConstructing() && regs.fp->returnValue().isPrimitive())
-            regs.fp->setReturnValue(ObjectValue(regs.fp->constructorThis()));
-
-#if defined(JS_TRACER) && defined(JS_METHODJIT)
-        /* Hack: re-push rval so either JIT will read it properly. */
-        regs.fp->setBailedAtReturn();
-        if (TRACE_RECORDER(cx)) {
-            AbortRecording(cx, "recording out of Interpret");
-            interpReturnOK = true;
-            goto stop_recording;
-        }
-#endif
     }
     interpReturnOK = true;
     goto exit;
 }
 
 BEGIN_CASE(JSOP_DEFAULT)
     regs.sp--;
     /* FALL THROUGH */
@@ -4564,51 +4559,16 @@ BEGIN_CASE(JSOP_ENUMELEM)
     FETCH_ELEMENT_ID(obj, -1, id);
     Value rval = regs.sp[-3];
     if (!obj->setProperty(cx, id, &rval, script->strictModeCode))
         goto error;
     regs.sp -= 3;
 }
 END_CASE(JSOP_ENUMELEM)
 
-BEGIN_CASE(JSOP_BEGIN)
-{
-    if (regs.fp->isConstructing()) {
-        JSObject *obj2 = js_CreateThisForFunction(cx, &regs.fp->callee());
-        if (!obj2)
-            goto error;
-        regs.fp->functionThis().setObject(*obj2);
-    }
-
-    /* Call the debugger hook if present. */
-    if (JSInterpreterHook hook = cx->debugHooks->callHook) {
-        regs.fp->setHookData(hook(cx, regs.fp, JS_TRUE, 0,
-                                  cx->debugHooks->callHookData));
-        CHECK_INTERRUPT_HANDLER();
-    }
-
-    JS_RUNTIME_METER(rt, inlineCalls);
-
-    Probes::enterJSFun(cx, regs.fp->fun());
-
-#ifdef JS_METHODJIT
-    /* Try to ensure methods are method JIT'd.  */
-    mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, regs.fp);
-    if (status == mjit::Compile_Error)
-        goto error;
-    if (!TRACE_RECORDER(cx) && status == mjit::Compile_Okay) {
-        if (!mjit::JaegerShot(cx))
-            goto error;
-        interpReturnOK = true;
-        goto inline_return;
-    }
-#endif
-}
-END_CASE(JSOP_BEGIN)
-
 {
     JSFunction *newfun;
     JSObject *callee;
     uint32 flags;
     uintN argc;
     Value *vp;
 
 BEGIN_CASE(JSOP_NEW)
@@ -4698,22 +4658,41 @@ BEGIN_CASE(JSOP_APPLY)
             argv = regs.fp->formalArgsEnd() - newfun->nargs;
             atoms = script->atomMap.vector;
 
             /* Now that the new frame is rooted, maybe create a call object. */
             if (newfun->isHeavyweight() && !js_GetCallObject(cx, regs.fp))
                 goto error;
 
             inlineCallCount++;
+            JS_RUNTIME_METER(rt, inlineCalls);
 
             TRACE_0(EnterFrame);
 
+            CHECK_INTERRUPT_HANDLER();
+
+#ifdef JS_METHODJIT
+            /* Try to ensure methods are method JIT'd.  */
+            mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, regs.fp);
+            if (status == mjit::Compile_Error)
+                goto error;
+            if (!TRACE_RECORDER(cx) && status == mjit::Compile_Okay) {
+                interpReturnOK = mjit::JaegerShot(cx);
+                CHECK_INTERRUPT_HANDLER();
+                goto jit_return;
+            }
+#endif
+
+            if (!ScriptPrologue(cx, regs.fp))
+                goto error;
+
+            CHECK_INTERRUPT_HANDLER();
+
             /* Load first op and dispatch it (safe since JSOP_STOP). */
             op = (JSOp) *regs.pc;
-            JS_ASSERT(op == JSOP_BEGIN);
             DO_OP();
         }
 
         Probes::enterJSFun(cx, newfun);
         JSBool ok = CallJSNative(cx, newfun->u.n.native, argc, vp);
         Probes::exitJSFun(cx, newfun);
         regs.sp = vp + 1;
         if (!ok)
@@ -6928,58 +6907,57 @@ END_CASE(JSOP_ARRAYPUSH)
 #ifdef DEBUG
     cx->tracePrevPc = NULL;
 #endif
 
     if (entryFrame != regs.fp)
         goto inline_return;
 
   exit:
+    interpReturnOK = ScriptEpilogue(cx, regs.fp, interpReturnOK);
+    regs.fp->setFinishedInInterpreter();
+
     /*
      * At this point we are inevitably leaving an interpreted function or a
      * top-level script, and returning to one of:
      * (a) an "out of line" call made through js_Invoke;
      * (b) a js_Execute activation;
     * (c) a generator (SendToGenerator, jsiter.cpp).
      *
      * We must not be in an inline frame. The check above ensures that for the
      * error case and for a normal return, the code jumps directly to parent's
      * frame pc.
      */
     JS_ASSERT(entryFrame == regs.fp);
-    JS_ASSERT(cx->regs == &regs);
-    *prevContextRegs = regs;
-    cx->setCurrentRegs(prevContextRegs);
 
 #ifdef JS_TRACER
-    JS_ASSERT_IF(interpReturnOK && (interpFlags & JSINTERP_RECORD), !TRACE_RECORDER(cx));
+    JS_ASSERT_IF(interpReturnOK && interpMode == JSINTERP_RECORD, !TRACE_RECORDER(cx));
     if (TRACE_RECORDER(cx))
         AbortRecording(cx, "recording out of Interpret");
 #endif
 
     JS_ASSERT_IF(!regs.fp->isGeneratorFrame(), !js_IsActiveWithOrBlock(cx, &regs.fp->scopeChain(), 0));
 
-    --cx->interpLevel;
-
     return interpReturnOK;
 
   atom_not_defined:
     {
         const char *printable;
 
         printable = js_AtomToPrintableString(cx, atomNotDefined);
         if (printable)
             js_ReportIsNotDefined(cx, printable);
         goto error;
     }
 
+    /*
+     * This path is taken when the method is guaranteed to finish inside
+     * the JIT.
+     */
 #if defined(JS_TRACER) && defined(JS_METHODJIT)
-  stop_recording:
+  leave_on_safe_point:
 #endif
-    JS_ASSERT(cx->regs == &regs);
-    *prevContextRegs = regs;
-    cx->setCurrentRegs(prevContextRegs);
     return interpReturnOK;
 }
 
 } /* namespace js */
 
 #endif /* !defined jsinvoke_cpp___ */
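
The InterpExitGuard introduced above replaces the manual save/restore of
cx->regs that previously had to be duplicated at the exit and
stop_recording paths. A minimal sketch of the same RAII pattern, assuming
illustrative types (Regs, Context, ExitGuard are not engine names):

    #include <assert.h>
    #include <stddef.h>

    struct Regs { int pc; };
    struct Context { Regs *regs; int level; };

    struct ExitGuard {
        Context *cx;
        Regs &local;
        Regs *prev;
        ExitGuard(Context *cx, Regs &local)
          : cx(cx), local(local), prev(cx->regs) {
            cx->regs = &local;   /* repoint at the fast local copy */
            ++cx->level;
        }
        ~ExitGuard() {           /* runs on every exit path */
            --cx->level;
            assert(cx->regs == &local);
            *prev = local;       /* publish final state to the caller */
            cx->regs = prev;
        }
    };

    int main() {
        Regs outer = { 0 };
        Context cx = { &outer, 0 };
        {
            Regs inner = *cx.regs;
            ExitGuard guard(&cx, inner);
            inner.pc = 42;       /* work happens on the local copy */
        }
        assert(cx.regs == &outer && outer.pc == 42 && cx.level == 0);
        return 0;
    }
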
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -54,20 +54,21 @@ struct JSFrameRegs
 {
     STATIC_SKIP_INFERENCE
     js::Value       *sp;                  /* stack pointer */
     jsbytecode      *pc;                  /* program counter */
     JSStackFrame    *fp;                  /* active frame */
 };
 
 /* Flags to toggle js::Interpret() execution. */
-enum JSInterpFlags
+enum JSInterpMode
 {
-    JSINTERP_RECORD            =     0x1, /* interpreter has been started to record/run traces */
-    JSINTERP_SAFEPOINT         =     0x2  /* interpreter should leave on a method JIT safe point */
+    JSINTERP_NORMAL            =     0, /* Interpreter is running normally. */
+    JSINTERP_RECORD            =     1, /* interpreter has been started to record/run traces */
+    JSINTERP_SAFEPOINT         =     2  /* interpreter should leave on a method JIT safe point */
 };
 
 /* Flags used in JSStackFrame::flags_ */
 enum JSFrameFlags
 {
     /* Primary frame type */
     JSFRAME_GLOBAL             =     0x1, /* frame pushed for a global script */
     JSFRAME_FUNCTION           =     0x2, /* frame pushed for a scripted call */
@@ -78,17 +79,17 @@ enum JSFrameFlags
     JSFRAME_DEBUGGER           =    0x10, /* frame pushed by JS_EvaluateInStackFrame */
     JSFRAME_GENERATOR          =    0x20, /* frame is associated with a generator */
     JSFRAME_FLOATING_GENERATOR =    0x40, /* frame is in generator obj, not on stack */
     JSFRAME_CONSTRUCTING       =    0x80, /* frame is for a constructor invocation */
 
     /* Temporary frame states */
     JSFRAME_ASSIGNING          =   0x100, /* not-JOF_ASSIGNING op is assigning */
     JSFRAME_YIELDING           =   0x200, /* js::Interpret dispatched JSOP_YIELD */
-    JSFRAME_BAILED_AT_RETURN   =   0x400, /* bailed at JSOP_RETURN */
+    JSFRAME_FINISHED_IN_INTERPRETER = 0x400, /* set if frame finished in Interpret() */
 
     /* Concerning function arguments */
     JSFRAME_OVERRIDE_ARGS      =  0x1000, /* overridden arguments local variable */
     JSFRAME_OVERFLOW_ARGS      =  0x2000, /* numActualArgs > numFormalArgs */
     JSFRAME_UNDERFLOW_ARGS     =  0x4000, /* numActualArgs < numFormalArgs */
 
     /* Lazy frame initialization */
     JSFRAME_HAS_IMACRO_PC      =   0x8000, /* frame has imacpc value available */
@@ -168,16 +169,20 @@ struct JSStackFrame
         return !!(flags_ & (JSFRAME_FUNCTION | JSFRAME_GLOBAL));
     }
 
     bool isEvalFrame() const {
         JS_ASSERT_IF(flags_ & JSFRAME_EVAL, isScriptFrame());
         return flags_ & JSFRAME_EVAL;
     }
 
+    bool isExecuteFrame() const {
+        return !!(flags_ & (JSFRAME_GLOBAL | JSFRAME_EVAL));
+    }
+
     /*
      * Frame initialization
      *
      * After acquiring a pointer to an uninitialized stack frame on the VM
      * stack from js::StackSpace, these members are used to initialize the
      * stack frame before officially pushing the frame into the context.
      * Collecting frame initialization into a set of inline helpers allows
      * simpler reasoning and makes call-optimization easier.
@@ -675,22 +680,22 @@ struct JSStackFrame
     void setYielding() {
         flags_ |= JSFRAME_YIELDING;
     }
 
     void clearYielding() {
         flags_ &= ~JSFRAME_YIELDING;
     }
 
-    bool isBailedAtReturn() const {
-        return flags_ & JSFRAME_BAILED_AT_RETURN;
+    void setFinishedInInterpreter() {
+        flags_ |= JSFRAME_FINISHED_IN_INTERPRETER;
     }
 
-    void setBailedAtReturn() {
-        flags_ |= JSFRAME_BAILED_AT_RETURN;
+    bool finishedInInterpreter() const {
+        return !!(flags_ & JSFRAME_FINISHED_IN_INTERPRETER);
     }
 
     /*
      * Variables object accessors
      *
      * A stack frame's 'varobj' refers to the 'variables object' (ES3 term)
      * associated with the Execution Context's VariableEnvironment (ES5 10.3).
      *
@@ -977,17 +982,17 @@ extern JS_FORCES_STACK bool
 Execute(JSContext *cx, JSObject *chain, JSScript *script,
         JSStackFrame *prev, uintN flags, Value *result);
 
 /*
  * Execute the caller-initialized frame for a user-defined script or function
  * pointed to by cx->fp until completion or error.
  */
 extern JS_REQUIRES_STACK JS_NEVER_INLINE bool
-Interpret(JSContext *cx, JSStackFrame *stopFp, uintN inlineCallCount = 0, uintN interpFlags = 0);
+Interpret(JSContext *cx, JSStackFrame *stopFp, uintN inlineCallCount = 0, JSInterpMode mode = JSINTERP_NORMAL);
 
 extern JS_REQUIRES_STACK bool
 RunScript(JSContext *cx, JSScript *script, JSStackFrame *fp);
 
 #define JSPROP_INITIALIZER 0x100   /* NB: Not a valid property attribute. */
 
 extern bool
 CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
--- a/js/src/jsinterpinlines.h
+++ b/js/src/jsinterpinlines.h
@@ -1,9 +1,10 @@
 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
  * http://www.mozilla.org/MPL/
@@ -106,17 +107,17 @@ JSStackFrame::resetInvokeCallFrame()
                            JSFRAME_UNDERFLOW_ARGS |
                            JSFRAME_HAS_CALL_OBJ |
                            JSFRAME_HAS_ARGS_OBJ |
                            JSFRAME_OVERRIDE_ARGS |
                            JSFRAME_HAS_PREVPC |
                            JSFRAME_HAS_RVAL |
                            JSFRAME_HAS_SCOPECHAIN |
                            JSFRAME_HAS_ANNOTATION |
-                           JSFRAME_BAILED_AT_RETURN)));
+                           JSFRAME_FINISHED_IN_INTERPRETER)));
     flags_ &= JSFRAME_FUNCTION |
               JSFRAME_OVERFLOW_ARGS |
               JSFRAME_HAS_PREVPC |
               JSFRAME_UNDERFLOW_ARGS;
 
     JS_ASSERT_IF(!hasCallObj(), scopeChain_ == calleeValue().toObject().getParent());
     JS_ASSERT_IF(hasCallObj(), scopeChain_ == callObj().getParent());
     if (hasCallObj())
@@ -257,16 +258,21 @@ JSStackFrame::stealFrameAndSlots(js::Val
      * Repoint Call, Arguments, Block and With objects to the new live frame.
      * Call and Arguments are done directly because we have pointers to them.
      * Block and With objects are done indirectly through 'liveFrame'. See
      * js_LiveFrameToFloating comment in jsiter.h.
      */
     if (hasCallObj()) {
         callObj().setPrivate(this);
         otherfp->flags_ &= ~JSFRAME_HAS_CALL_OBJ;
+        if (js_IsNamedLambda(fun())) {
+            JSObject *env = callObj().getParent();
+            JS_ASSERT(env->getClass() == &js_DeclEnvClass);
+            env->setPrivate(this);
+        }
     }
     if (hasArgsObj()) {
         argsObj().setPrivate(this);
         otherfp->flags_ &= ~JSFRAME_HAS_ARGS_OBJ;
     }
 }
 
 inline js::Value &
@@ -668,11 +674,40 @@ ValuePropertyBearer(JSContext *cx, const
     }
 
     JSObject *pobj;
     if (!js_GetClassPrototype(cx, NULL, protoKey, &pobj))
         return NULL;
     return pobj;
 }
 
+static inline bool
+ScriptEpilogue(JSContext *cx, JSStackFrame *fp, JSBool ok)
+{
+    Probes::exitJSFun(cx, fp->maybeFun());
+    JSInterpreterHook hook = cx->debugHooks->callHook;
+    if (hook && fp->hasHookData() && !fp->isExecuteFrame())
+        hook(cx, fp, JS_FALSE, &ok, fp->hookData());
+
+    /*
+     * An eval frame's parent owns its activation objects. A yielding frame's
+     * activation objects are transferred to the floating frame, stored in the
+     * generator.
+     */
+    if (fp->isFunctionFrame() && !fp->isEvalFrame() && !fp->isYielding())
+        PutActivationObjects(cx, fp);
+
+    /*
+     * If inline-constructing, replace primitive rval with the new object
+     * passed in via |this|, and instrument this constructor invocation.
+     */
+    if (fp->isConstructing()) {
+        if (fp->returnValue().isPrimitive())
+            fp->setReturnValue(ObjectValue(fp->constructorThis()));
+        JS_RUNTIME_METER(cx->runtime, constructs);
+    }
+
+    return ok;
+}
+
 }
 
 #endif /* jsinterpinlines_h__ */
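
One concrete rule that ScriptEpilogue now centralizes (previously duplicated
in Invoke and the JSOP_STOP cases): a constructor returning a primitive
yields the object created for |this| instead. A small illustrative model --
Value and Frame here are stand-ins, not engine types:

    #include <assert.h>

    struct Value { bool primitive; int objId; };

    struct Frame {
        bool constructing;
        Value rval;     /* the function's return value */
        Value thisObj;  /* the object the prologue created for |new| */
    };

    /* Mirrors the epilogue rule: primitive rval => use constructorThis. */
    static Value constructorResult(const Frame &fp) {
        if (fp.constructing && fp.rval.primitive)
            return fp.thisObj;
        return fp.rval;
    }

    int main() {
        Frame fp = { true, { true, 0 }, { false, 7 } };
        assert(constructorResult(fp).objId == 7);
        return 0;
    }
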
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1267,17 +1267,17 @@ SendToGenerator(JSContext *cx, JSGenerat
                             JS_GetFunctionId(gen->floatingFrame()->fun()));
         return JS_FALSE;
     }
 
     /* Check for OOM errors here, where we can fail easily. */
     if (!cx->ensureGeneratorStackSpace())
         return JS_FALSE;
 
-    JS_ASSERT(gen->state ==  JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
+    JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
     switch (op) {
       case JSGENOP_NEXT:
       case JSGENOP_SEND:
         if (gen->state == JSGEN_OPEN) {
             /*
              * Store the argument to send as the result of the yield
              * expression.
              */
--- a/js/src/jslock.cpp
+++ b/js/src/jslock.cpp
@@ -426,17 +426,17 @@ js_unlog_title(JSTitle *title)
 static bool
 WillDeadlock(JSContext *ownercx, JSThread *thread)
 {
     JS_ASSERT(CURRENT_THREAD_IS_ME(thread));
     JS_ASSERT(ownercx->thread != thread);
 
      for (;;) {
         JS_ASSERT(ownercx->thread);
-        JS_ASSERT(ownercx->thread->data.requestDepth);
+        JS_ASSERT(ownercx->thread->requestDepth);
         JSTitle *title = ownercx->thread->titleToShare;
         if (!title || !title->ownercx) {
             /*
              * ownercx->thread doesn't wait or has just been notified that the
              * title became shared.
              */
             return false;
         }
@@ -533,17 +533,17 @@ FinishSharingTitle(JSContext *cx, JSTitl
  * set to null (indicating that title is multi-threaded); or if waiting would
  * deadlock, we set ownercx to null ourselves via ShareTitle.  In any case,
  * once ownercx is null we return false.
  */
 static JSBool
 ClaimTitle(JSTitle *title, JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
-    JS_ASSERT_IF(!cx->thread->data.requestDepth, cx->thread == rt->gcThread && rt->gcRunning);
+    JS_ASSERT_IF(!cx->thread->requestDepth, cx->thread == rt->gcThread && rt->gcRunning);
 
     JS_RUNTIME_METER(rt, claimAttempts);
     AutoLockGC lock(rt);
 
     /* Reload in case ownercx went away while we blocked on the lock. */
     while (JSContext *ownercx = title->ownercx) {
         /*
          * Avoid selflock if ownercx is dead, or is not running a request, or
@@ -560,23 +560,23 @@ ClaimTitle(JSTitle *title, JSContext *cx
          * the GC can not run at this moment as it must wait until all the
          * titles are shared and the threads that want to lock them finish
          * their requests. Thus we can claim the title if its thread matches
          * ours.
          */
         bool canClaim;
         if (title->u.link) {
             JS_ASSERT(js_ValidContextPointer(rt, ownercx));
-            JS_ASSERT(ownercx->thread->data.requestDepth);
+            JS_ASSERT(ownercx->thread->requestDepth);
             JS_ASSERT(!rt->gcRunning);
             canClaim = (ownercx->thread == cx->thread);
         } else {
             canClaim = (!js_ValidContextPointer(rt, ownercx) ||
                         !ownercx->thread ||
-                        !ownercx->thread->data.requestDepth ||
+                        !ownercx->thread->requestDepth ||
                         cx->thread == ownercx->thread  ||
                         cx->thread == rt->gcThread ||
                         ownercx->thread->gcWaiting);
         }
         if (canClaim) {
             title->ownercx = cx;
             JS_RUNTIME_METER(rt, claimedTitles);
             return JS_TRUE;
@@ -612,17 +612,17 @@ ClaimTitle(JSTitle *title, JSContext *cx
 
         /*
          * We know that some other thread's context owns title, which is now
          * linked onto rt->titleSharingTodo, awaiting the end of that other
          * thread's request. So it is safe to wait on rt->titleSharingDone.
          * But before waiting, we force the operation callback for that other
          * thread so it can quickly suspend.
          */
-        JS_THREAD_DATA(ownercx)->triggerOperationCallback(rt);
+        JS_THREAD_DATA(ownercx)->triggerOperationCallback();
 
         JS_ASSERT(!cx->thread->titleToShare);
         cx->thread->titleToShare = title;
 #ifdef DEBUG
         PRStatus stat =
 #endif
             PR_WaitCondVar(rt->titleSharingDone, PR_INTERVAL_NO_TIMEOUT);
         JS_ASSERT(stat != PR_FAILURE);
--- a/js/src/jsmath.cpp
+++ b/js/src/jsmath.cpp
@@ -243,18 +243,18 @@ math_atan2(JSContext *cx, uintN argc, Va
         return JS_FALSE;
     if (!ValueToNumber(cx, vp[3], &y))
         return JS_FALSE;
     z = math_atan2_kernel(x, y);
     vp->setDouble(z);
     return JS_TRUE;
 }
 
-static inline jsdouble JS_FASTCALL
-math_ceil_kernel(jsdouble x)
+jsdouble
+js_math_ceil_impl(jsdouble x)
 {
 #ifdef __APPLE__
     if (x < 0 && x > -1.0)
         return js_copysign(0, -1);
 #endif
     return ceil(x);
 }
 
@@ -264,17 +264,17 @@ js_math_ceil(JSContext *cx, uintN argc, 
     jsdouble x, z;
 
     if (argc == 0) {
         vp->setDouble(js_NaN);
         return JS_TRUE;
     }
     if (!ValueToNumber(cx, vp[2], &x))
         return JS_FALSE;
-    z = math_ceil_kernel(x);
+    z = js_math_ceil_impl(x);
     vp->setNumber(z);
     return JS_TRUE;
 }
 
 static JSBool
 math_cos(JSContext *cx, uintN argc, Value *vp)
 {
     jsdouble x, z;
@@ -321,28 +321,34 @@ math_exp(JSContext *cx, uintN argc, Valu
     MathCache *mathCache = JS_THREAD_DATA(cx)->getMathCache(cx);
     if (!mathCache)
         return JS_FALSE;
     z = mathCache->lookup(math_exp_body, x);
     vp->setNumber(z);
     return JS_TRUE;
 }
 
+jsdouble
+js_math_floor_impl(jsdouble x)
+{
+    return floor(x);
+}
+
 JSBool
 js_math_floor(JSContext *cx, uintN argc, Value *vp)
 {
     jsdouble x, z;
 
     if (argc == 0) {
         vp->setDouble(js_NaN);
         return JS_TRUE;
     }
     if (!ValueToNumber(cx, vp[2], &x))
         return JS_FALSE;
-    z = floor(x);
+    z = js_math_floor_impl(x);
     vp->setNumber(z);
     return JS_TRUE;
 }
 
 static JSBool
 math_log(JSContext *cx, uintN argc, Value *vp)
 {
     jsdouble x, z;
@@ -568,16 +574,22 @@ js_copysign(double x, double y)
     xu.d = x;
     yu.d = y;
     xu.s.hi &= ~JSDOUBLE_HI32_SIGNBIT;
     xu.s.hi |= yu.s.hi & JSDOUBLE_HI32_SIGNBIT;
     return xu.d;
 }
 #endif
 
+jsdouble
+js_math_round_impl(jsdouble x)
+{
+    return js_copysign(floor(x + 0.5), x);
+}
+
 JSBool
 js_math_round(JSContext *cx, uintN argc, Value *vp)
 {
     jsdouble x, z;
 
     if (argc == 0) {
         vp->setDouble(js_NaN);
         return JS_TRUE;
@@ -772,29 +784,29 @@ static jsdouble FASTCALL
 math_random_tn(JSContext *cx)
 {
     return random_nextDouble(cx);
 }
 
 static jsdouble FASTCALL
 math_round_tn(jsdouble x)
 {
-    return js_copysign(floor(x + 0.5), x);
+    return js_math_round_impl(x);
 }
 
 static jsdouble FASTCALL
 math_ceil_tn(jsdouble x)
 {
-    return math_ceil_kernel(x);
+    return js_math_ceil_impl(x);
 }
 
 static jsdouble FASTCALL
 math_floor_tn(jsdouble x)
 {
-    return floor(x);
+    return js_math_floor_impl(x);
 }
 
 JS_DEFINE_TRCINFO_1(math_acos,
     (2, (static, DOUBLE, math_acos_tn, MATHCACHE, DOUBLE, 1, nanojit::ACCSET_NONE)))
 JS_DEFINE_TRCINFO_1(math_asin,
     (2, (static, DOUBLE, math_asin_tn, MATHCACHE, DOUBLE, 1, nanojit::ACCSET_NONE)))
 JS_DEFINE_TRCINFO_1(math_atan2,
     (2, (static, DOUBLE, math_atan2_kernel, DOUBLE, DOUBLE, 1, nanojit::ACCSET_NONE)))
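
The shared kernels factored out above are small enough to check by hand: for
example js_math_round_impl(-0.3) computes copysign(floor(0.2), -0.3), which
is -0.0, preserving the sign bit that a plain floor-based round would lose.
A standalone sketch using the C99 copysign in place of js_copysign:

    #include <assert.h>
    #include <math.h>

    static double round_impl(double x) { return copysign(floor(x + 0.5), x); }

    static double ceil_impl(double x) {
    #ifdef __APPLE__
        if (x < 0 && x > -1.0)       /* keep the -0.0 that ceil() can lose */
            return copysign(0.0, -1.0);
    #endif
        return ceil(x);
    }

    int main() {
        assert(round_impl(2.5) == 3.0);
        assert(round_impl(-2.5) == -2.0);   /* halfway rounds toward +inf */
        assert(signbit(round_impl(-0.3)));  /* result is -0.0, not +0.0 */
        assert(ceil_impl(1.2) == 2.0);
        return 0;
    }
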
--- a/js/src/jsmath.h
+++ b/js/src/jsmath.h
@@ -100,9 +100,18 @@ extern JSBool
 js_math_max(JSContext *cx, uintN argc, js::Value *vp);
 
 extern JSBool
 js_math_min(JSContext *cx, uintN argc, js::Value *vp);
 
 extern JSBool
 js_math_round(JSContext *cx, uintN argc, js::Value *vp);
 
+extern jsdouble
+js_math_ceil_impl(jsdouble x);
+
+extern jsdouble
+js_math_floor_impl(jsdouble x);
+
+extern jsdouble
+js_math_round_impl(jsdouble x);
+
 #endif /* jsmath_h___ */
--- a/js/src/jsnum.cpp
+++ b/js/src/jsnum.cpp
@@ -774,18 +774,18 @@ num_toLocaleString(JSContext *cx, uintN 
         cx->free(buf);
         return JS_FALSE;
     }
 
     vp->setString(str);
     return JS_TRUE;
 }
 
-static JSBool
-num_valueOf(JSContext *cx, uintN argc, Value *vp)
+JSBool
+js_num_valueOf(JSContext *cx, uintN argc, Value *vp)
 {
     double d;
     if (!GetPrimitiveThis(cx, vp, &d))
         return false;
 
     vp->setNumber(d);
     return true;
 }
@@ -874,18 +874,18 @@ JS_DEFINE_TRCINFO_2(num_toString,
 #endif /* JS_TRACER */
 
 static JSFunctionSpec number_methods[] = {
 #if JS_HAS_TOSOURCE
     JS_FN(js_toSource_str,       num_toSource,          0, JSFUN_PRIMITIVE_THIS),
 #endif
     JS_TN(js_toString_str,       num_toString,          1, JSFUN_PRIMITIVE_THIS, &num_toString_trcinfo),
     JS_FN(js_toLocaleString_str, num_toLocaleString,    0, JSFUN_PRIMITIVE_THIS),
-    JS_FN(js_valueOf_str,        num_valueOf,           0, JSFUN_PRIMITIVE_THIS),
-    JS_FN(js_toJSON_str,         num_valueOf,           0, JSFUN_PRIMITIVE_THIS),
+    JS_FN(js_valueOf_str,        js_num_valueOf,        0, JSFUN_PRIMITIVE_THIS),
+    JS_FN(js_toJSON_str,         js_num_valueOf,        0, JSFUN_PRIMITIVE_THIS),
     JS_FN("toFixed",             num_toFixed,           1, JSFUN_PRIMITIVE_THIS),
     JS_FN("toExponential",       num_toExponential,     1, JSFUN_PRIMITIVE_THIS),
     JS_FN("toPrecision",         num_toPrecision,       1, JSFUN_PRIMITIVE_THIS),
     JS_FS_END
 };
 
 /* NB: Keep this in synch with number_constants[]. */
 enum nc_slot {
--- a/js/src/jsnum.h
+++ b/js/src/jsnum.h
@@ -610,16 +610,19 @@ js_DoubleToInteger(jsdouble d)
  * the appropriate sign.  The case of the "Infinity" string must match exactly.
  * If the string does not contain a number, set *ep to s and return 0.0 in dp.
  * Return false if out of memory.
  */
 extern JSBool
 js_strtod(JSContext *cx, const jschar *s, const jschar *send,
           const jschar **ep, jsdouble *dp);
 
+extern JSBool
+js_num_valueOf(JSContext *cx, uintN argc, js::Value *vp);
+
 namespace js {
 
 static JS_ALWAYS_INLINE bool
 ValueFitsInInt32(const Value &v, int32_t *pi)
 {
     if (v.isInt32()) {
         *pi = v.toInt32();
         return true;
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -5147,16 +5147,33 @@ js_GetPropertyHelper(JSContext *cx, JSOb
 JSBool
 js_GetProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
 {
     /* This call site is hot -- use the always-inlined variant of js_GetPropertyHelper(). */
     return js_GetPropertyHelperInline(cx, obj, id, JSGET_METHOD_BARRIER, vp);
 }
 
 JSBool
+js::GetPropertyDefault(JSContext *cx, JSObject *obj, jsid id, Value def, Value *vp)
+{
+    JSProperty *prop;
+    JSObject *obj2;
+    if (js_LookupPropertyWithFlags(cx, obj, id, JSRESOLVE_QUALIFIED, &obj2, &prop) < 0)
+        return false;
+
+    if (!prop) {
+        *vp = def;
+        return true;
+    }
+
+    obj2->dropProperty(cx, prop);
+    return js_GetProperty(cx, obj2, id, vp);
+}
+
+JSBool
 js_GetMethod(JSContext *cx, JSObject *obj, jsid id, uintN getHow, Value *vp)
 {
     JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED);
 
     PropertyIdOp op = obj->getOps()->getProperty;
     if (!op) {
 #if JS_HAS_XML_SUPPORT
         JS_ASSERT(!obj->isXML());
@@ -5621,29 +5638,30 @@ namespace js {
  * When we have an object of a builtin class, we don't quite know what its
  * valueOf/toString methods are, since these methods may have been overwritten
  * or shadowed. However, we can still do better than js_TryMethod by
  * hard-coding the necessary properties for us to find the native we expect.
  *
  * TODO: a per-thread shape-based cache would be faster and simpler.
  */
 static JS_ALWAYS_INLINE bool
-StringMethodIsNative(JSContext *cx, JSObject *obj, jsid methodid, Native native)
-{
-    JS_ASSERT(obj->getClass() == &js_StringClass);
+ClassMethodIsNative(JSContext *cx, JSObject *obj, Class *classp, jsid methodid,
+                    Native native)
+{
+    JS_ASSERT(obj->getClass() == classp);
 
     JS_LOCK_OBJ(cx, obj);
     JSObject *lockedobj = obj;
     const Shape *shape = obj->nativeLookup(methodid);
     JSObject *pobj = obj;
 
     if (!shape) {
         pobj = obj->getProto();
 
-        if (pobj && pobj->getClass() == &js_StringClass) {
+        if (pobj && pobj->getClass() == classp) {
             JS_UNLOCK_OBJ(cx, obj);
             JS_LOCK_OBJ(cx, pobj);
             lockedobj = pobj;
             shape = pobj->nativeLookup(methodid);
         }
     }
 
     if (shape && shape->hasDefaultGetter() && pobj->containsSlot(shape->slot)) {
@@ -5666,35 +5684,41 @@ JSBool
 DefaultValue(JSContext *cx, JSObject *obj, JSType hint, Value *vp)
 {
     JS_ASSERT(hint != JSTYPE_OBJECT && hint != JSTYPE_FUNCTION);
 
     Value v = ObjectValue(*obj);
     if (hint == JSTYPE_STRING) {
         /* Optimize (new String(...)).toString(). */
         if (obj->getClass() == &js_StringClass &&
-            StringMethodIsNative(cx, obj,
+            ClassMethodIsNative(cx, obj,
+                                 &js_StringClass,
                                  ATOM_TO_JSID(cx->runtime->atomState.toStringAtom),
                                  js_str_toString)) {
             *vp = obj->getPrimitiveThis();
             return true;
         }
 
         if (!js_TryMethod(cx, obj, cx->runtime->atomState.toStringAtom, 0, NULL, &v))
             return false;
         if (!v.isPrimitive()) {
             if (!obj->getClass()->convert(cx, obj, hint, &v))
                 return false;
         }
     } else {
         /* Optimize (new String(...)).valueOf(). */
-        if (obj->getClass() == &js_StringClass &&
-            StringMethodIsNative(cx, obj,
+        Class *clasp = obj->getClass();
+        if ((clasp == &js_StringClass &&
+             ClassMethodIsNative(cx, obj, &js_StringClass,
                                  ATOM_TO_JSID(cx->runtime->atomState.valueOfAtom),
-                                 js_str_toString)) {
+                                 js_str_toString)) ||
+            (clasp == &js_NumberClass &&
+             ClassMethodIsNative(cx, obj, &js_NumberClass,
+                                 ATOM_TO_JSID(cx->runtime->atomState.valueOfAtom),
+                                 js_num_valueOf))) {
             *vp = obj->getPrimitiveThis();
             return true;
         }
 
         if (!obj->getClass()->convert(cx, obj, hint, &v))
             return false;
         if (v.isObject()) {
             JS_ASSERT(hint != TypeOfValue(cx, v));
@@ -6565,16 +6589,17 @@ js_DumpObject(JSObject *obj)
     if (flags & JSObject::DELEGATE) fprintf(stderr, " delegate");
     if (flags & JSObject::SYSTEM) fprintf(stderr, " system");
     if (flags & JSObject::NOT_EXTENSIBLE) fprintf(stderr, " not extensible");
     if (flags & JSObject::BRANDED) fprintf(stderr, " branded");
     if (flags & JSObject::GENERIC) fprintf(stderr, " generic");
     if (flags & JSObject::METHOD_BARRIER) fprintf(stderr, " method_barrier");
     if (flags & JSObject::INDEXED) fprintf(stderr, " indexed");
     if (flags & JSObject::OWN_SHAPE) fprintf(stderr, " own_shape");
+    if (flags & JSObject::HAS_EQUALITY) fprintf(stderr, " has_equality");
 
     bool anyFlags = flags != 0;
     if (obj->isNative()) {
         if (obj->inDictionaryMode()) {
             fprintf(stderr, " inDictionaryMode");
             anyFlags = true;
         }
         if (obj->hasPropertyTable()) {
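
The generalization of StringMethodIsNative into ClassMethodIsNative lets
DefaultValue take the same shortcut for (new Number(...)).valueOf() that it
already took for strings: if the method slot still holds the original
native, the wrapped primitive can be read directly. A toy model of that
identity test -- Native, Obj and tryPrimitiveFastPath are illustrative, not
engine code:

    #include <assert.h>

    typedef int (*Native)(int);
    static int num_valueOf(int prim) { return prim; }
    static int shadowedValueOf(int) { return -1; }

    struct Obj { Native valueOfMethod; int primitiveThis; };

    /* Fast path only if the slot still holds the expected native;
       otherwise fall back to a real method call. */
    static bool tryPrimitiveFastPath(const Obj &obj, int *out) {
        if (obj.valueOfMethod != num_valueOf)
            return false;
        *out = obj.primitiveThis;
        return true;
    }

    int main() {
        Obj plain = { num_valueOf, 42 };
        Obj hacked = { shadowedValueOf, 42 };
        int v;
        assert(tryPrimitiveFastPath(plain, &v) && v == 42);
        assert(!tryPrimitiveFastPath(hacked, &v));   /* shadowed: slow path */
        return 0;
    }
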
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -207,16 +207,23 @@ js_LookupProperty(JSContext *cx, JSObjec
 
 extern JSBool
 js_DefineProperty(JSContext *cx, JSObject *obj, jsid id, const js::Value *value,
                   js::PropertyOp getter, js::PropertyOp setter, uintN attrs);
 
 extern JSBool
 js_GetProperty(JSContext *cx, JSObject *obj, jsid id, js::Value *vp);
 
+namespace js {
+
+extern JSBool
+GetPropertyDefault(JSContext *cx, JSObject *obj, jsid id, Value def, Value *vp);
+
+} /* namespace js */
+
 extern JSBool
 js_SetProperty(JSContext *cx, JSObject *obj, jsid id, js::Value *vp, JSBool strict);
 
 extern JSBool
 js_GetAttributes(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp);
 
 extern JSBool
 js_SetAttributes(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp);
@@ -234,16 +241,20 @@ js_TypeOf(JSContext *cx, JSObject *obj);
 namespace js {
 
 struct NativeIterator;
 
 }
 
 struct JSFunction;
 
+namespace nanojit {
+class ValidateWriter;
+}
+
 /*
  * JSObject struct, with members sized to fit in 32 bytes on 32-bit targets,
  * 64 bytes on 64-bit systems. The JSFunction struct is an extension of this
  * struct allocated from a larger GC size-class.
  *
  * The clasp member stores the js::Class pointer for this object. We do *not*
  * synchronize updates of clasp or flags -- API clients must take care.
  *
@@ -266,23 +277,29 @@ struct JSFunction;
  * the (is|set)(Delegate|System) inline methods.
  *
  * The slots member is a pointer to the slot vector for the object.
  * This can be either a fixed array allocated immediately after the object,
  * or a dynamically allocated array.  A dynamic array can be tested for with
  * hasSlotsArray().  In all cases, capacity gives the number of usable slots.
  * Two objects with the same shape have the same number of fixed slots,
  * and either both have or neither have dynamically allocated slot arrays.
+ *
+ * If you change this struct, you'll probably need to change the AccSet values
+ * in jsbuiltins.h.
  */
 struct JSObject : js::gc::Cell {
     /*
      * TraceRecorder must be a friend because it generates code that
      * manipulates JSObjects, which requires peeking under any encapsulation.
+     * ValidateWriter must be a friend because it works in tandem with
+     * TraceRecorder.
      */
     friend class js::TraceRecorder;
+    friend class nanojit::ValidateWriter;
 
     /*
      * Private pointer to the last added property and methods to manipulate the
      * list it links among properties in this scope. The {remove,insert} pair
      * for DictionaryProperties assert that the scope is in dictionary mode and
      * any reachable properties are flagged as dictionary properties.
      *
      * NB: these private methods do *not* update this scope's shape to track
@@ -316,19 +333,20 @@ struct JSObject : js::gc::Cell {
 
     enum {
         DELEGATE        = 0x01,
         SYSTEM          = 0x02,
         NOT_EXTENSIBLE  = 0x04,
         BRANDED         = 0x08,
         GENERIC         = 0x10,
         METHOD_BARRIER  = 0x20,
-        INDEXED         =  0x40,
-        OWN_SHAPE       =  0x80,
-        BOUND_FUNCTION  = 0x100
+        INDEXED         = 0x40,
+        OWN_SHAPE       = 0x80,
+        BOUND_FUNCTION  = 0x100,
+        HAS_EQUALITY    = 0x200
     };
 
     /*
      * Impose a sane upper bound, originally checked only for dense arrays, on
      * number of slots in an object.
      */
     enum {
         NSLOTS_BITS     = 29,
@@ -404,16 +422,18 @@ struct JSObject : js::gc::Cell {
     bool branded()              { return !!(flags & BRANDED); }
 
     bool brand(JSContext *cx, uint32 slot, js::Value v);
     bool unbrand(JSContext *cx);
 
     bool generic()              { return !!(flags & GENERIC); }
     void setGeneric()           { flags |= GENERIC; }
 
+    bool hasSpecialEquality()   { return !!(flags & HAS_EQUALITY); }
+    
   private:
     void generateOwnShape(JSContext *cx);
 
     void setOwnShape(uint32 s)  { flags |= OWN_SHAPE; objShape = s; }
     void clearOwnShape()        { flags &= ~OWN_SHAPE; objShape = map->shape; }
 
   public:
     inline bool nativeEmpty() const;
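
The new HAS_EQUALITY flag, surfaced through hasSpecialEquality(), marks objects whose class supplies its own equality hook. Such objects cannot be compared by pointer identity on trace, so the recorder can reject them cheaply (an illustrative call site, not one from this patch):

    if (l->hasSpecialEquality() || r->hasSpecialEquality())
        RETURN_STOP_A("can't trace identity compare with special equality");
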
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -5155,27 +5155,24 @@ js_DecompileValueGenerator(JSContext *cx
                     pcdepth = -1;
                     goto release_pcstack;
                 }
             } while (*--sp != v);
 
             /*
              * The value may have come from beyond stackBase + pcdepth, meaning
              * that it came from a temporary slot pushed by the interpreter or
-             * arguments pushed for an InvokeFromEngine call. Only update pc if
-             * beneath stackBase + pcdepth. If above, we don't know whether the
-             * value is associated with the current pc or from a fast native
-             * whose arguments have been pushed, so just print the value.
+             * arguments pushed for an Invoke call. Only update pc if beneath
+             * stackBase + pcdepth. If above, the value may or may not be
+             * produced by the current pc. Since it takes a fairly contrived
+             * combination of calls to produce a situation where this is not
+             * what we want, we just use the current pc.
              */
-            if (sp >= stackBase + pcdepth) {
-                pcdepth = -1;
-                goto release_pcstack;
-            }
-
-            pc = pcstack[sp - stackBase];
+            if (sp < stackBase + pcdepth)
+                pc = pcstack[sp - stackBase];
         }
 
       release_pcstack:
         cx->free(pcstack);
         if (pcdepth < 0)
             goto do_fallback;
     }
 
--- a/js/src/jsopcode.tbl
+++ b/js/src/jsopcode.tbl
@@ -425,17 +425,17 @@ OPDEF(JSOP_SETXMLNAME,    171,"setxmlnam
 OPDEF(JSOP_XMLNAME,       172,"xmlname",    NULL,     1,  1,  1, 19,  JOF_BYTE)
 OPDEF(JSOP_DESCENDANTS,   173,"descendants",NULL,     1,  2,  1, 18,  JOF_BYTE)
 OPDEF(JSOP_FILTER,        174,"filter",     NULL,     3,  1,  1,  0,  JOF_JUMP)
 OPDEF(JSOP_ENDFILTER,     175,"endfilter",  NULL,     3,  2,  1, 18,  JOF_JUMP)
 OPDEF(JSOP_TOXML,         176,"toxml",      NULL,     1,  1,  1, 19,  JOF_BYTE)
 OPDEF(JSOP_TOXMLLIST,     177,"toxmllist",  NULL,     1,  1,  1, 19,  JOF_BYTE)
 OPDEF(JSOP_XMLTAGEXPR,    178,"xmltagexpr", NULL,     1,  1,  1,  0,  JOF_BYTE)
 OPDEF(JSOP_XMLELTEXPR,    179,"xmleltexpr", NULL,     1,  1,  1,  0,  JOF_BYTE)
-OPDEF(JSOP_UNUSED180,     180,"unused180",  NULL,     1,  0,  0,  0,  JOF_BYTE)
+OPDEF(JSOP_NOTRACE,       180,"notrace",    NULL,     3,  0,  0,  0,  JOF_UINT16)
 OPDEF(JSOP_XMLCDATA,      181,"xmlcdata",   NULL,     3,  0,  1, 19,  JOF_ATOM)
 OPDEF(JSOP_XMLCOMMENT,    182,"xmlcomment", NULL,     3,  0,  1, 19,  JOF_ATOM)
 OPDEF(JSOP_XMLPI,         183,"xmlpi",      NULL,     3,  1,  1, 19,  JOF_ATOM)
 OPDEF(JSOP_CALLPROP,      184,"callprop",   NULL,     3,  1,  2, 18,  JOF_ATOM|JOF_PROP|JOF_CALLOP|JOF_TMPSLOT3)
 
 /*
  * Get a display (free) variable from the closure's reserved slots.
  */
@@ -577,17 +577,18 @@ OPDEF(JSOP_LAMBDA_FC,     226,"lambda_fc
 
 /*
  * Ensure that the value on the top of the stack is an object. The one
  * argument is an error message, defined in js.msg, that takes one parameter
  * (the decompilation of the primitive value).
  */
 OPDEF(JSOP_OBJTOP,        227,"objtop",        NULL,  3,  0,  0,  0,  JOF_UINT16)
 
-OPDEF(JSOP_TRACE,         228, "trace",         NULL,  1,  0,  0,  0,  JOF_BYTE)
+/* This opcode stores an index that is unique to the given loop. */
+OPDEF(JSOP_TRACE,         228, "trace",         NULL,  3,  0,  0,  0,  JOF_UINT16)
 
 /*
  * Debugger versions of JSOP_{GET,CALL}UPVAR and the flat closure (_FC) ops.
  */
 OPDEF(JSOP_GETUPVAR_DBG,  229,"getupvar_dbg",  NULL,  3,  0,  1, 19,  JOF_UINT16|JOF_NAME)
 OPDEF(JSOP_CALLUPVAR_DBG, 230,"callupvar_dbg", NULL,  3,  0,  2, 19,  JOF_UINT16|JOF_NAME|JOF_CALLOP)
 OPDEF(JSOP_DEFFUN_DBGFC,     231,"deffun_dbgfc",     NULL,  3,  0,  0,  0,  JOF_OBJECT|JOF_DECLARING)
 OPDEF(JSOP_DEFLOCALFUN_DBGFC,232,"deflocalfun_dbgfc",NULL,  5,  0,  0,  0,  JOF_SLOTOBJECT|JOF_DECLARING|JOF_TMPSLOT)
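
JSOP_NOTRACE takes over the unused180 slot with the same 3-byte JOF_UINT16 layout that JSOP_TRACE now has, so (un)blacklisting a loop header is a one-byte opcode flip that leaves the loop-index operand intact. A sketch of the invariant:

    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
    uint16 loopIndex = GET_UINT16(pc);  // readable before and after the toggle
    *pc = JSOP_NOTRACE;                 // stop recording at this loop header
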
@@ -618,11 +619,9 @@ OPDEF(JSOP_FORGLOBAL,     246,"forglobal
  * They are emitted directly after instructions, such as DEFFUN, that need fast access to
  * the blockChain. The special NULLBLOCKCHAIN is needed because the JOF_OBJECT
  * does not permit NULL object references, since it stores an index into a table of
  * objects.
  */
 OPDEF(JSOP_BLOCKCHAIN,    247,"blockchain",    NULL,  3,  0,  0,  0, JOF_OBJECT)
 OPDEF(JSOP_NULLBLOCKCHAIN,248,"nullblockchain",NULL,  1,  0,  0,  0, JOF_BYTE)
 
-OPDEF(JSOP_BEGIN,         249,"begin",         NULL,  1,  0,  0,  0,  JOF_BYTE|JOF_TMPSLOT)
-
-/* When adding bytecodes, don't forget to update JSXDR_BYTECODE_VERSION. */
+/* When changing bytecodes, don't forget to update JSXDR_BYTECODE_VERSION. */
--- a/js/src/jsotypes.h
+++ b/js/src/jsotypes.h
@@ -70,17 +70,17 @@ typedef JSIntn intn;
 #ifdef XP_UNIX
 #include <sys/types.h>
 #else
 typedef JSUintn uint;
 #endif
 
 typedef JSUintn uintn;
 typedef JSUint64 uint64;
-#if !defined(_WIN32) && !defined(XP_OS2)
+#if !defined(XP_OS2)
 typedef JSUint32 uint32;
 #else
 typedef unsigned long uint32;
 #endif
 typedef JSUint16 uint16;
 typedef JSUint8 uint8;
 
 #ifndef _XP_Core_
@@ -94,17 +94,17 @@ typedef JSIntn intn;
  * the code also includes sys/types.h.
  */
 #if defined(AIX) && defined(HAVE_SYS_INTTYPES_H)
 #include <sys/inttypes.h>
 #else
 typedef JSInt64 int64;
 
 /* /usr/include/model.h on HP-UX defines int8, int16, and int32 */
-#if !defined(_WIN32) && !defined(XP_OS2)
+#if !defined(XP_OS2)
 typedef JSInt32 int32;
 #else
 typedef long int32;
 #endif
 typedef JSInt16 int16;
 typedef JSInt8 int8;
 #endif /* AIX && HAVE_SYS_INTTYPES_H */
 
--- a/js/src/jsparse.cpp
+++ b/js/src/jsparse.cpp
@@ -860,21 +860,16 @@ Compiler::compileScript(JSContext *cx, J
     cg.bodyid = bodyid;
 
 #if JS_HAS_XML_SUPPORT
     pn = NULL;
     bool onlyXML;
     onlyXML = true;
 #endif
 
-    CG_SWITCH_TO_PROLOG(&cg);
-    if (js_Emit1(cx, &cg, JSOP_TRACE) < 0)
-        goto out;
-    CG_SWITCH_TO_MAIN(&cg);
-
     inDirectivePrologue = true;
     for (;;) {
         tt = tokenStream.peekToken(TSF_OPERAND);
         if (tt <= TOK_EOF) {
             if (tt == TOK_EOF)
                 break;
             JS_ASSERT(tt == TOK_ERROR);
             goto out;
@@ -5761,16 +5756,17 @@ Parser::statement()
              */
             stmt->flags |= SIF_SCOPE;
             stmt->downScope = tc->topScopeStmt;
             tc->topScopeStmt = stmt;
             JS_SCOPE_DEPTH_METERING(++tc->scopeDepth > tc->maxScopeDepth &&
                                     (tc->maxScopeDepth = tc->scopeDepth));
 
             obj->setParent(tc->blockChain());
+            blockbox->parent = tc->blockChainBox;
             tc->blockChainBox = blockbox;
             stmt->blockBox = blockbox;
 
 #ifdef DEBUG
             pn1 = tc->blockNode;
             JS_ASSERT(!pn1 || pn1->pn_type != TOK_LEXICALSCOPE);
 #endif
 
--- a/js/src/jsparse.h
+++ b/js/src/jsparse.h
@@ -911,16 +911,17 @@ JSParseNode::setFunArg()
     pn_dflags |= PND_FUNARG;
 }
 
 struct JSObjectBox {
     JSObjectBox         *traceLink;
     JSObjectBox         *emitLink;
     JSObject            *object;
     uintN               index;
+    JSObjectBox         *parent;
 };
 
 #define JSFB_LEVEL_BITS 14
 
 struct JSFunctionBox : public JSObjectBox
 {
     JSParseNode         *node;
     JSFunctionBox       *siblings;
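
The new parent link lets a consumer walk outward from the innermost block box, mirroring the static block chain that obj->setParent(tc->blockChain()) records on the objects themselves. Illustratively:

    for (JSObjectBox *box = tc->blockChainBox; box; box = box->parent)
        inspect(box->object);   // hypothetical visitor
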
--- a/js/src/jspropertycache.cpp
+++ b/js/src/jspropertycache.cpp
@@ -303,17 +303,17 @@ PropertyCache::fill(JSContext *cx, JSObj
 static inline JSAtom *
 GetAtomFromBytecode(JSContext *cx, jsbytecode *pc, JSOp op, const JSCodeSpec &cs)
 {
     if (op == JSOP_LENGTH)
         return cx->runtime->atomState.lengthAtom;
 
     // The method JIT's implementation of instanceof contains an internal lookup
     // of the prototype property.
-    if (op == JSOP_INSTANCEOF || op == JSOP_BEGIN)
+    if (op == JSOP_INSTANCEOF)
         return cx->runtime->atomState.classPrototypeAtom;
 
     ptrdiff_t pcoff = (JOF_TYPE(cs.format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
     JSAtom *atom;
     GET_ATOM_FROM_BYTECODE(cx->fp()->script(), pc, pcoff, atom);
     return atom;
 }
 
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -44,25 +44,25 @@
  */
 #include "jstypes.h"
 #include "jscompat.h"
 #include "jsval.h"
 
 JS_BEGIN_EXTERN_C
 
 /* Scalar typedefs. */
-typedef int32     jsint;
-typedef uint32    jsuint;
+typedef JSInt32   jsint;
+typedef JSUint32  jsuint;
 typedef float64   jsdouble;
-typedef int32     jsrefcount;   /* PRInt32 if JS_THREADSAFE, see jslock.h */
+typedef JSInt32   jsrefcount;   /* PRInt32 if JS_THREADSAFE, see jslock.h */
 
 #ifdef WIN32
 typedef wchar_t   jschar;
 #else
-typedef uint16    jschar;
+typedef JSUint16  jschar;
 #endif
 
 
 /*
  * Run-time version enumeration.  See jsversion.h for compile-time counterparts
  * to these values that may be selected by the JS_VERSION macro, and tested by
  * #if expressions.
  */
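
With the jsotypes.h hunk above dropping the !defined(_WIN32) guards, Win32 now gets int32/uint32 from JSInt32/JSUint32 rather than long/unsigned long; the typedefs here go a step further and build jsint, jsuint, jsrefcount, and jschar directly on the JS-prefixed types, removing jspubtd.h's dependence on those legacy aliases.
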
--- a/js/src/jsreflect.cpp
+++ b/js/src/jsreflect.cpp
@@ -56,16 +56,17 @@
 #include "jsemit.h"
 #include "jsscan.h"
 #include "jsprf.h"
 #include "jsiter.h"
 #include "jsbool.h"
 #include "jsval.h"
 #include "jsvalue.h"
 #include "jsobjinlines.h"
+#include "jsobj.h"
 #include "jsarray.h"
 #include "jsnum.h"
 
 using namespace js;
 
 namespace js {
 
 char const *aopNames[] = {
@@ -155,22 +156,23 @@ typedef Vector<Value, 8> NodeVector;
  *
  *     https://developer.mozilla.org/en/SpiderMonkey/Parser_API
  *
  * Bug 569487: generalize builder interface
  */
 class NodeBuilder
 {
     JSContext    *cx;
-    char const   *src;   /* source filename or null          */
-    Value        srcval; /* source filename JS value or null */
+    bool         saveLoc; /* save source location information? */
+    char const   *src;    /* source filename or null           */
+    Value        srcval;  /* source filename JS value or null  */
 
   public:
-    NodeBuilder(JSContext *c, char const *s)
-        : cx(c), src(s) {
+    NodeBuilder(JSContext *c, bool l, char const *s)
+        : cx(c), saveLoc(l), src(s) {
     }
 
     bool init() {
         if (src)
             return atomValue(src, &srcval);
 
         srcval.setNull();
         return true;
@@ -528,17 +530,17 @@ NodeBuilder::newArray(NodeVector &elts, 
 
     dst->setObject(*array);
     return true;
 }
 
 bool
 NodeBuilder::setNodeLoc(JSObject *node, TokenPos *pos)
 {
-    if (!pos)
+    if (!saveLoc || !pos)
         return setProperty(node, "loc", NullValue());
 
     JSObject *loc, *to;
     Value tv;
 
     return newObject(&loc) &&
            setProperty(node, "loc", ObjectValue(*loc)) &&
            setProperty(loc, "source", srcval) &&
@@ -1197,17 +1199,17 @@ NodeBuilder::xmlPI(Value target, Value c
  *
  * All serialization methods take a non-nullable JSParseNode pointer.
  */
 
 class ASTSerializer
 {
     JSContext     *cx;
     NodeBuilder   builder;
-    uintN         lineno;
+    uint32        lineno;
 
     Value atomContents(JSAtom *atom) {
         return Valueify(ATOM_TO_JSVAL(atom ? atom : cx->runtime->atomState.emptyAtom));
     }
 
     BinaryOperator binop(TokenKind tk, JSOp op);
     UnaryOperator unop(TokenKind tk, JSOp op);
     AssignmentOperator aop(JSOp op);
@@ -1278,18 +1280,18 @@ class ASTSerializer
 
     bool comprehensionBlock(JSParseNode *pn, Value *dst);
     bool comprehension(JSParseNode *pn, Value *dst);
     bool generatorExpression(JSParseNode *pn, Value *dst);
 
     bool xml(JSParseNode *pn, Value *dst);
 
   public:
-    ASTSerializer(JSContext *c, char const *src, uintN ln)
-        : cx(c), builder(c, src), lineno(ln) {
+    ASTSerializer(JSContext *c, bool l, char const *src, uint32 ln)
+        : cx(c), builder(c, l, src), lineno(ln) {
     }
 
     bool init() {
         return builder.init();
     }
 
     bool program(JSParseNode *pn, Value *dst);
 };
@@ -2698,17 +2700,17 @@ ASTSerializer::functionArgsAndBody(JSPar
         LOCAL_NOT_REACHED("unexpected function contents");
     }
 }
 
 bool
 ASTSerializer::functionArgs(JSParseNode *pn, JSParseNode *pnargs, JSParseNode *pndestruct,
                             JSParseNode *pnbody, NodeVector &args)
 {
-    uintN i = 0;
+    uint32 i = 0;
     JSParseNode *arg = pnargs ? pnargs->pn_head : NULL;
     JSParseNode *destruct = pndestruct ? pndestruct->pn_head : NULL;
     Value node;
 
     /*
      * Arguments are found in potentially two different places: 1) the
      * argsbody sequence (which ends with the body node), or 2) a
      * destructuring initialization at the beginning of the body. Loop
@@ -2772,57 +2774,96 @@ Class js_ReflectClass = {
     PropertyStub,
     PropertyStub,
     EnumerateStub,
     ResolveStub,
     ConvertStub
 };
 
 static JSBool
-reflect_parse(JSContext *cx, uintN argc, jsval *vp)
+reflect_parse(JSContext *cx, uint32 argc, jsval *vp)
 {
     if (argc < 1) {
         JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED,
                              "Reflect.parse", "0", "s");
         return JS_FALSE;
     }
 
     JSString *src = js_ValueToString(cx, Valueify(JS_ARGV(cx, vp)[0]));
     if (!src)
         return JS_FALSE;
 
-    const char *filename = NULL;
+    char *filename = NULL;
+    AutoReleaseNullablePtr filenamep(cx, filename);
+    uint32 lineno = 1;
+    bool loc = true;
+
     if (argc > 1) {
-        JSString *str = js_ValueToString(cx, Valueify(JS_ARGV(cx, vp)[1]));
-        if (!str)
+        Value arg = Valueify(JS_ARGV(cx, vp)[1]);
+
+        if (!arg.isObject()) {
+            js_ReportValueErrorFlags(cx, JSREPORT_ERROR, JSMSG_UNEXPECTED_TYPE,
+                                     JSDVG_SEARCH_STACK, arg, NULL, "not an object", NULL);
+            return JS_FALSE;
+        }
+
+        JSObject *config = &arg.toObject();
+
+        Value prop;
+
+        /* config.loc */
+        if (!GetPropertyDefault(cx, config, ATOM_TO_JSID(cx->runtime->atomState.locAtom),
+                                BooleanValue(true), &prop)) {
             return JS_FALSE;
-        filename = js_GetStringBytes(NULL, str);
-    }
-
-    uintN lineno = 1;
-    if (argc > 2) {
-        if (!ValueToECMAUint32(cx, Valueify(JS_ARGV(cx, vp)[2]), &lineno))
-            return JS_FALSE;
+        }
+
+        loc = js_ValueToBoolean(prop);
+
+        if (loc) {
+            /* config.source */
+            if (!GetPropertyDefault(cx, config, ATOM_TO_JSID(cx->runtime->atomState.sourceAtom),
+                                    NullValue(), &prop)) {
+                return JS_FALSE;
+            }
+
+            if (!prop.isNullOrUndefined()) {
+                JSString *str = js_ValueToString(cx, prop);
+                if (!str)
+                    return JS_FALSE;
+
+                filename = js_DeflateString(cx, str->chars(), str->length());
+                if (!filename)
+                    return JS_FALSE;
+                filenamep.reset(filename);
+            }
+
+            /* config.line */
+            if (!GetPropertyDefault(cx, config, ATOM_TO_JSID(cx->runtime->atomState.lineAtom),
+                                    Int32Value(1), &prop) ||
+                !ValueToECMAUint32(cx, prop, &lineno)) {
+                return JS_FALSE;
+            }
+        }
     }
 
     const jschar *chars;
     size_t length;
 
     src->getCharsAndLength(chars, length);
 
     Parser parser(cx);
 
     if (!parser.init(chars, length, NULL, filename, lineno))
         return JS_FALSE;
 
     JSParseNode *pn = parser.parse(NULL);
     if (!pn)
         return JS_FALSE;
 
-    ASTSerializer serialize(cx, filename, lineno);
+    ASTSerializer serialize(cx, loc, filename, lineno);
     if (!serialize.init())
         return JS_FALSE;
 
     Value val;
     if (!serialize.program(pn, &val)) {
         JS_SET_RVAL(cx, vp, JSVAL_NULL);
         return JS_FALSE;
     }
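
Reflect.parse's optional second argument changes from a filename string to a config object: loc (default true) controls whether nodes carry location info, while source (default null) and line (default 1) seed it, as in Reflect.parse(src, {loc: true, source: "t.js", line: 10}). Each property is read with GetPropertyDefault, so absent keys silently take their defaults, and source/line are skipped entirely when loc is false.
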
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -1092,17 +1092,18 @@ JSScript::NewScriptFromCG(JSContext *cx,
     /* The counts of indexed things must be checked during code generation. */
     JS_ASSERT(cg->atomList.count <= INDEX_LIMIT);
     JS_ASSERT(cg->objectList.length <= INDEX_LIMIT);
     JS_ASSERT(cg->regexpList.length <= INDEX_LIMIT);
 
     mainLength = CG_OFFSET(cg);
     prologLength = CG_PROLOG_OFFSET(cg);
 
-    if (prologLength + mainLength <= 3) {
+    if (prologLength + mainLength <= 3 &&
+        !(cg->flags & TCF_IN_FUNCTION)) {
         /*
          * Check very short scripts to see whether they are "empty" and return
          * the const empty-script singleton if so.
          */
         jsbytecode *pc = prologLength ? CG_PROLOG_BASE(cg) : CG_BASE(cg);
 
         if ((cg->flags & TCF_NO_SCRIPT_RVAL) && JSOp(*pc) == JSOP_FALSE)
             ++pc;
--- a/js/src/jsscriptinlines.h
+++ b/js/src/jsscriptinlines.h
@@ -65,23 +65,28 @@ JSScript::getRegExp(size_t index)
     JSObject *obj = arr->vector[index];
     JS_ASSERT(obj->getClass() == &js_RegExpClass);
     return obj;
 }
 
 inline bool
 JSScript::isEmpty() const
 {
+    return (this == emptyScript());
+
+    // See bug 603044 comment #21.
+#if 0
     if (this == emptyScript())
         return true;
 
     if (length <= 3) {
         jsbytecode *pc = code;
 
         if (noScriptRval && JSOp(*pc) == JSOP_FALSE)
             ++pc;
         if (JSOp(*pc) == JSOP_STOP)
             return true;
     }
     return false;
+#endif
 }
 
 #endif /* jsscriptinlines_h___ */
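
Paired with the NewScriptFromCG hunk above, which stops returning the shared singleton for function scripts, isEmpty() now treats only that singleton as empty; the old bytecode-pattern heuristic is parked under #if 0 (see bug 603044 comment #21) rather than deleted.
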
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -3926,16 +3926,19 @@ js_GetDeflatedUTF8StringLength(JSContext
     nbytes = nchars;
     for (end = chars + nchars; chars != end; chars++) {
         c = *chars;
         if (c < 0x80)
             continue;
         if (0xD800 <= c && c <= 0xDFFF) {
             /* Surrogate pair. */
             chars++;
+
+            /* A pair is two jschars but one code point, so drop one byte here. */
+            nbytes--;
             if (c >= 0xDC00 || chars == end)
                 goto bad_surrogate;
             c2 = *chars;
             if (c2 < 0xDC00 || c2 > 0xDFFF)
                 goto bad_surrogate;
             c = ((c - 0xD800) << 10) + (c2 - 0xDC00) + 0x10000;
         }
         c >>= 11;
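
nbytes starts at nchars (one byte per jschar), and the counting loop that follows (not shown in this hunk) adds the multi-byte overhead per code point, so without the decrement a surrogate pair would be counted from a baseline of two. Worked through for U+1F600 (0xD83D 0xDE00): baseline 2, minus 1, plus 3 from the loop for a code point >= 0x10000, giving the correct 4 UTF-8 bytes. As a cross-check, per-code-point UTF-8 lengths are (a self-contained sketch):

    // UTF-8 length of one Unicode code point.
    static size_t
    Utf8Length(uint32 cp)
    {
        if (cp < 0x80)    return 1;
        if (cp < 0x800)   return 2;
        if (cp < 0x10000) return 3;
        return 4;         // supplementary plane: a surrogate pair in UTF-16
    }
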
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -83,16 +83,20 @@
 #include "jsinterpinlines.h"
 #include "jspropertycacheinlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 #include "jscntxtinlines.h"
 #include "jsopcodeinlines.h"
 
+#ifdef JS_METHODJIT
+#include "methodjit/MethodJIT.h"
+#endif
+
 #include "jsautooplen.h"        // generated headers last
 #include "imacros.c.out"
 
 #if defined(NANOJIT_ARM) && defined(__GNUC__) && defined(AVMPLUS_LINUX)
 #include <stdlib.h>
 #include <unistd.h>
 #include <sys/types.h>
 #include <sys/stat.h>
@@ -139,52 +143,16 @@ nanojit::Allocator::postReset() {
 
 int
 StackFilter::getTop(LIns* guard)
 {
     VMSideExit* e = (VMSideExit*)guard->record()->exit;
     return e->sp_adj;
 }
 
-#ifdef DEBUG
-void ValidateWriter::checkAccSet(LOpcode op, LIns* base, int32_t disp, AccSet accSet)
-{
-    LIns* sp = (LIns*)checkAccSetExtras[0];
-    LIns* rp = (LIns*)checkAccSetExtras[1];
-
-    bool isRstack = base == rp;
-    bool isStack =
-        base == sp ||
-        (base->isop(LIR_addp) && base->oprnd1() == sp && base->oprnd2()->isImmP());
-    bool isUnknown = !isStack && !isRstack;
-
-    bool ok;
-
-    NanoAssert(accSet != ACCSET_NONE);
-    switch (accSet) {
-    case ACCSET_STACK:  ok = isStack;       break;
-    case ACCSET_RSTACK: ok = isRstack;      break;
-    case ACCSET_OTHER:  ok = isUnknown;     break;
-    default:
-        // This assertion will fail if any single-region AccSets aren't covered
-        // by the switch -- only multi-region AccSets should be handled here.
-        JS_ASSERT(compressAccSet(accSet).val == MINI_ACCSET_MULTIPLE.val);
-        ok = true;
-        break;
-    }
-
-    if (!ok) {
-        InsBuf b1, b2;
-        printer->formatIns(&b1, base);
-        JS_snprintf(b2.buf, b2.len, "but the base pointer (%s) doesn't match", b1.buf);
-        errorAccSet(lirNames[op], accSet, b2.buf);
-     }
-}
-#endif
-
 #if defined NJ_VERBOSE
 void
 LInsPrinter::formatGuard(InsBuf *buf, LIns *ins)
 {
     RefBuf b1, b2;
     VMSideExit *x = (VMSideExit *)ins->record()->exit;
     VMPI_snprintf(buf->buf, buf->len,
             "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld (GuardID=%03d)",
@@ -213,23 +181,47 @@ LInsPrinter::formatGuardXov(InsBuf *buf,
             (void *)x->imacpc,
             (long int)x->sp_adj,
             (long int)x->rp_adj,
             ins->record()->profGuardID);
 }
 
 const char*
 nanojit::LInsPrinter::accNames[] = {
-    "s",    // (1 << 0) == ACCSET_STACK
-    "r",    // (1 << 1) == ACCSET_RSTACK
-    "o",    // (1 << 2) == ACCSET_OTHER
-              "?", "?", "?", "?", "?", "?", "?", "?",   //  3..10 (unused)
-    "?", "?", "?", "?", "?", "?", "?", "?", "?", "?",   // 11..20 (unused)
-    "?", "?", "?", "?", "?", "?", "?", "?", "?", "?",   // 21..30 (unused)
-    "?"                                                 //     31 (unused)
+    "state",        // (1 <<  0) == ACCSET_STATE
+    "sp",           // (1 <<  1) == ACCSET_STACK
+    "rp",           // (1 <<  2) == ACCSET_RSTACK
+    "cx",           // (1 <<  3) == ACCSET_CX
+    "eos",          // (1 <<  4) == ACCSET_EOS
+    "alloc",        // (1 <<  5) == ACCSET_ALLOC
+    "regs",         // (1 <<  6) == ACCSET_FRAMEREGS
+    "sf",           // (1 <<  7) == ACCSET_STACKFRAME
+    "rt",           // (1 <<  8) == ACCSET_RUNTIME
+
+    "objclasp",     // (1 <<  9) == ACCSET_OBJ_CLASP
+    "objflags",     // (1 << 10) == ACCSET_OBJ_FLAGS
+    "objshape",     // (1 << 11) == ACCSET_OBJ_SHAPE
+    "objproto",     // (1 << 12) == ACCSET_OBJ_PROTO
+    "objparent",    // (1 << 13) == ACCSET_OBJ_PARENT
+    "objprivate",   // (1 << 14) == ACCSET_OBJ_PRIVATE
+    "objcapacity",  // (1 << 15) == ACCSET_OBJ_CAPACITY
+    "objslots",     // (1 << 16) == ACCSET_OBJ_SLOTS
+
+    "slots",        // (1 << 17) == ACCSET_SLOTS
+    "tarray",       // (1 << 18) == ACCSET_TARRAY
+    "tdata",        // (1 << 19) == ACCSET_TARRAY_DATA
+    "iter",         // (1 << 20) == ACCSET_ITER
+    "iterprops",    // (1 << 21) == ACCSET_ITER_PROPS
+    "str",          // (1 << 22) == ACCSET_STRING
+    "strmchars",    // (1 << 23) == ACCSET_STRING_MCHARS
+    "typemap",      // (1 << 24) == ACCSET_TYPEMAP
+    "fcslots",      // (1 << 25) == ACCSET_FCSLOTS
+    "argsdata",     // (1 << 26) == ACCSET_ARGS_DATA
+
+    "?!"            // this entry should never be used, have it just in case
 };
 #endif
 
 } /* namespace nanojit */
 
 JS_DEFINE_CALLINFO_2(extern, STRING, js_IntToString, CONTEXT, INT32, 1, nanojit::ACCSET_NONE)
 
 namespace js {
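
The three coarse regions (stack, rstack, other) give way to a fine-grained set in which each bit names a disjoint heap region, letting nanojit's alias analysis and the CseFilter keep a load live across stores to unrelated regions. Conceptually (a sketch; the constants are the ones declared in jsbuiltins.h):

    AccSet stored = ACCSET_OBJ_SHAPE | ACCSET_OBJ_FLAGS;  // multi-region store
    bool mayAlias = (stored & ACCSET_SLOTS) != 0;         // false: a CSE'd
                                                          // ACCSET_SLOTS load
                                                          // survives the store
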
@@ -948,17 +940,18 @@ TraceRecorder::tprint(const char *format
 {
     size_t size = strlen(format) + 1;
     char* data = (char*) traceMonitor->traceAlloc->alloc(size);
     memcpy(data, format, size);
 
     double *args = (double*) traceMonitor->traceAlloc->alloc(count * sizeof(double));
     for (int i = 0; i < count; ++i) {
         JS_ASSERT(insa[i]);
-        lir->insStore(insa[i], INS_CONSTPTR(args), sizeof(double) * i, ACCSET_OTHER);
+        // The AccSet doesn't matter much here; this isn't perf-critical code.
+        lir->insStore(insa[i], INS_CONSTPTR(args), sizeof(double) * i, ACCSET_STORE_ANY);
     }
 
     LIns* args_ins[] = { INS_CONSTPTR(args), INS_CONST(count), INS_CONSTPTR(data) };
     LIns* call_ins = lir->insCall(&PrintOnTrace_ci, args_ins);
     guard(false, lir->insEqI_0(call_ins), MISMATCH_EXIT);
 }
 
 // Generate a 'printf'-type call from trace for debugging.
@@ -1273,22 +1266,37 @@ Oracle::markInstructionUndemotable(jsbyt
 
 /* Consult with the oracle whether we shouldn't demote a certain bytecode location. */
 bool
 Oracle::isInstructionUndemotable(jsbytecode* pc) const
 {
     return _pcDontDemote.get(PCHash(pc));
 }
 
+/* Tell the oracle that the instruction at the given bytecode location should use a stronger (slower) test for -0. */
+void
+Oracle::markInstructionSlowZeroTest(jsbytecode* pc)
+{
+    _pcSlowZeroTest.set(PCHash(pc));
+}
+
+/* Consult with the oracle whether we should use a stronger (slower) test for -0. */
+bool
+Oracle::isInstructionSlowZeroTest(jsbytecode* pc) const
+{
+    return _pcSlowZeroTest.get(PCHash(pc));
+}
+
 void
 Oracle::clearDemotability()
 {
     _stackDontDemote.reset();
     _globalDontDemote.reset();
     _pcDontDemote.reset();
+    _pcSlowZeroTest.reset();
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::markSlotUndemotable(LinkableFragment* f, unsigned slot)
 {
     if (slot < f->nStackTypes) {
         traceMonitor->oracle->markStackSlotUndemotable(cx, slot);
         return;
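
The two oracle entries above record, per bytecode location, that an arithmetic result must survive a stronger -0 check before it can be kept as an integer: -0.0 and 0.0 compare equal, so demotion would silently lose the sign. The stronger test has to consult the sign, for example (an illustrative helper, not the recorder's emitted LIR):

    static bool
    IsNegativeZero(double d)
    {
        return d == 0.0 && 1.0 / d < 0.0;   // only -0.0 divides to -Infinity
    }
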
@@ -1377,27 +1385,41 @@ FrameInfoCache::FrameInfoCache(VMAllocat
 }
 
 #define PC_HASH_COUNT 1024
 
 static void
 Blacklist(jsbytecode* pc)
 {
     AUDIT(blacklisted);
-    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP);
-    *pc = JSOP_NOP;
+    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
+    *pc = JSOP_NOTRACE;
+}
+
+static void
+Unblacklist(JSScript *script, jsbytecode *pc)
+{
+    JS_ASSERT(*pc == JSOP_NOTRACE || *pc == JSOP_TRACE);
+    if (*pc == JSOP_NOTRACE) {
+        *pc = JSOP_TRACE;
+
+#ifdef JS_METHODJIT
+        /* This code takes care of unblacklisting in the method JIT. */
+        js::mjit::EnableTraceHint(script, pc, GET_UINT16(pc));
+#endif
+    }
 }
 
 static bool
 IsBlacklisted(jsbytecode* pc)
 {
-    if (*pc == JSOP_NOP)
+    if (*pc == JSOP_NOTRACE)
         return true;
     if (*pc == JSOP_CALL)
-        return *(pc + JSOP_CALL_LENGTH) == JSOP_NOP;
+        return *(pc + JSOP_CALL_LENGTH) == JSOP_NOTRACE;
     return false;
 }
 
 static void
 Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree = NULL)
 {
     /* N.B. This code path cannot assume the recorder is/is not alive. */
     RecordAttemptMap &table = *JS_TRACE_MONITOR(cx).recordAttempts;
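
Blacklisting no longer stomps the loop header to JSOP_NOP: it toggles between JSOP_TRACE and JSOP_NOTRACE, preserving the operand, and Unblacklist both restores JSOP_TRACE and re-arms the method JIT's trace hint through mjit::EnableTraceHint. AttemptCompilation accordingly now needs the script, not just the pc.
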
@@ -1581,23 +1603,23 @@ AssertTreeIsUnique(TraceMonitor* tm, Tre
         if (!peer->code() || peer == f)
             continue;
         JS_ASSERT(!f->typeMap.matches(peer->typeMap));
     }
 }
 #endif
 
 static void
-AttemptCompilation(JSContext *cx, JSObject* globalObj, jsbytecode* pc, uint32 argc)
+AttemptCompilation(JSContext *cx, JSObject* globalObj,
+                   JSScript* script, jsbytecode* pc, uint32 argc)
 {
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
 
     /* If we already permanently blacklisted the location, undo that. */
-    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE);
-    *pc = JSOP_TRACE;
+    Unblacklist(script, pc);
     ResetRecordingAttempts(cx, pc);
 
     /* Breathe new life into all peer fragments at the designated loop header. */
     TreeFragment* f = LookupLoop(tm, pc, globalObj, globalObj->shape(), argc);
     if (!f) {
         /*
          * If the global object's shape changed, we can't easily find the
          * corresponding loop header via a hash table lookup. In this
@@ -2303,42 +2325,51 @@ TrashTree(TreeFragment* f);
 
 template <class T>
 static T&
 InitConst(const T &t)
 {
     return const_cast<T &>(t);
 }
 
+/* These must be macros because 'field' is a field name. */
+#define loadFromState(op, field) \
+    lir->insLoad(op, lirbuf->state, offsetof(TracerState, field), ACCSET_STATE)
+
+#define storeToState(value, field) \
+    lir->insStore(value, lirbuf->state, offsetof(TracerState, field), ACCSET_STATE)
+
 JS_REQUIRES_STACK
 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment,
                              unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
-                             VMSideExit* innermost, jsbytecode* outer, uint32 outerArgc,
-                             bool speculate)
+                             VMSideExit* innermost, JSScript* outerScript, jsbytecode* outerPC,
+                             uint32 outerArgc, bool speculate)
   : cx(cx),
     traceMonitor(&JS_TRACE_MONITOR(cx)),
     oracle(speculate ? JS_TRACE_MONITOR(cx).oracle : NULL),
     fragment(fragment),
     tree(fragment->root),
     globalObj(tree->globalObj),
-    outer(outer),
+    outerScript(outerScript),
+    outerPC(outerPC),
     outerArgc(outerArgc),
     anchor(anchor),
     lir(NULL),
+    cse_filter(NULL),
     cx_ins(NULL),
     eos_ins(NULL),
     eor_ins(NULL),
     loopLabel(NULL),
     importTypeMap(&tempAlloc()),
     lirbuf(new (tempAlloc()) LirBuffer(tempAlloc())),
     mark(*traceMonitor->traceAlloc),
     numSideExitsBefore(tree->sideExits.length()),
     tracker(),
     nativeFrameTracker(),
-    global_dslots(NULL),
+    global_slots(NULL),
     callDepth(anchor ? anchor->calldepth : 0),
     atoms(FrameAtomBase(cx, cx->fp())),
     consts(cx->fp()->script()->constOffset
            ? cx->fp()->script()->consts()->vector
            : NULL),
     strictModeCode_ins(NULL),
     cfgMerges(&tempAlloc()),
     trashSelf(false),
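
loadFromState/storeToState shrink the very common TracerState accesses and pin them all to ACCSET_STATE; they must be macros because the field name is spliced into offsetof. Usage, exactly as in the constructor body below:

    lirbuf->sp = addName(loadFromState(LIR_ldp, sp), "sp");
    storeToState(lirbuf->sp, sp);
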
@@ -2395,31 +2426,32 @@ TraceRecorder::TraceRecorder(JSContext* 
 
     debug_only_printf(LC_TMTracer, "globalObj=%p, shape=%d\n",
                       (void*)this->globalObj, this->globalObj->shape());
     debug_only_printf(LC_TMTreeVis, "TREEVIS RECORD FRAG=%p ANCHOR=%p\n", (void*)fragment,
                       (void*)anchor);
 #endif
 
     nanojit::LirWriter*& lir = InitConst(this->lir);
+    nanojit::CseFilter*& cse_filter = InitConst(this->cse_filter);
     lir = new (tempAlloc()) LirBufWriter(lirbuf, nanojit::AvmCore::config);
 #ifdef DEBUG
     ValidateWriter* validate2;
     lir = validate2 =
         new (tempAlloc()) ValidateWriter(lir, lirbuf->printer, "end of writer pipeline");
 #endif
     debug_only_stmt(
         if (LogController.lcbits & LC_TMRecorder) {
            lir = new (tempAlloc()) VerboseWriter(tempAlloc(), lir, lirbuf->printer,
                                                &LogController);
         }
     )
     // CseFilter must be downstream of SoftFloatFilter (see bug 527754 for why).
     if (avmplus::AvmCore::config.cseopt)
-        lir = new (tempAlloc()) CseFilter(lir, TM_NUM_USED_ACCS, tempAlloc());
+        lir = cse_filter = new (tempAlloc()) CseFilter(lir, TM_NUM_USED_ACCS, tempAlloc());
 #if NJ_SOFTFLOAT_SUPPORTED
     if (nanojit::AvmCore::config.soft_float)
         lir = new (tempAlloc()) SoftFloatFilter(lir);
 #endif
     lir = new (tempAlloc()) ExprFilter(lir);
     lir = new (tempAlloc()) FuncFilter(lir);
 #ifdef DEBUG
     ValidateWriter* validate1;
@@ -2451,88 +2483,71 @@ TraceRecorder::TraceRecorder(JSContext* 
         } else {
             entryLabel = lir->ins0(LIR_label);
         }
         NanoAssert(entryLabel);
         NanoAssert(!fragment->loopLabel);
         fragment->loopLabel = entryLabel;
     })
 
-#ifdef DEBUG
-    // Need to set these up before any loads/stores occur.
-    // 'extras' is heap-allocated because its lifetime matches validate[12]'s.
-    void** extras = new (tempAlloc()) void*[2];
-    extras[0] = 0;      // we'll set it shortly
-    extras[1] = 0;      // we'll set it shortly
-    validate1->setCheckAccSetExtras(extras);
-    validate2->setCheckAccSetExtras(extras);
-#endif
-
-    lirbuf->sp =
-        addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, sp), ACCSET_OTHER), "sp");
-    lirbuf->rp =
-        addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, rp), ACCSET_OTHER), "rp");
-    InitConst(cx_ins) =
-        addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, cx), ACCSET_OTHER), "cx");
-    InitConst(eos_ins) =
-        addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, eos), ACCSET_OTHER), "eos");
-    InitConst(eor_ins) =
-        addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, eor), ACCSET_OTHER), "eor");
+    lirbuf->sp = addName(loadFromState(LIR_ldp, sp), "sp");
+    lirbuf->rp = addName(loadFromState(LIR_ldp, rp), "rp");
+    InitConst(cx_ins) = addName(loadFromState(LIR_ldp, cx), "cx");
+    InitConst(eos_ins) = addName(loadFromState(LIR_ldp, eos), "eos");
+    InitConst(eor_ins) = addName(loadFromState(LIR_ldp, eor), "eor");
 
     strictModeCode_ins = addName(lir->insImmI(cx->fp()->script()->strictModeCode), "strict");
 
-#ifdef DEBUG
-    // Need to update these before any stack/rstack loads/stores occur.
-    extras[0] = lirbuf->sp;
-    extras[1] = lirbuf->rp;
-#endif
-
     /* If we came from exit, we might not have enough global types. */
     if (tree->globalSlots->length() > tree->nGlobalTypes())
         SpecializeTreesToMissingGlobals(cx, globalObj, tree);
 
     /* read into registers all values on the stack and all globals we know so far */
     import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
 
     if (fragment == fragment->root) {
         /*
          * We poll the operation callback request flag. It is updated asynchronously whenever
          * the callback is to be invoked. We can use INS_CONSTPTR here as JIT-ed code is per
          * thread and cannot outlive the corresponding JSThreadData.
          */
         LIns* flagptr = INS_CONSTPTR((void *) &JS_THREAD_DATA(cx)->interruptFlags);
-        LIns* x = lir->insLoad(LIR_ldi, flagptr, 0, ACCSET_OTHER, LOAD_VOLATILE);
+        LIns* x = lir->insLoad(LIR_ldi, flagptr, 0, ACCSET_LOAD_ANY, LOAD_VOLATILE);
         guard(true, lir->insEqI_0(x), snapshot(TIMEOUT_EXIT));
 
         /*
          * Count the number of iterations run by a trace, so that we can blacklist if
          * the trace runs too few iterations to be worthwhile. Do this only if the methodjit
          * is on--otherwise we must try to trace as much as possible.
          */
 #ifdef JS_METHODJIT
         if (cx->methodJitEnabled) {
             LIns* counterPtr = INS_CONSTPTR((void *) &JS_THREAD_DATA(cx)->iterationCounter);
-            LIns* counterValue = lir->insLoad(LIR_ldi, counterPtr, 0, ACCSET_OTHER, LOAD_VOLATILE);
+            LIns* counterValue = lir->insLoad(LIR_ldi, counterPtr, 0, ACCSET_LOAD_ANY,
+                                              LOAD_VOLATILE);
             LIns* test =  lir->ins2ImmI(LIR_lti, counterValue, MIN_LOOP_ITERS);
             LIns *branch = unoptimizableCondBranch(LIR_jf, test);
             counterValue = lir->ins2(LIR_addi, counterValue, INS_CONST(1));
-            lir->insStore(counterValue, counterPtr, 0, ACCSET_OTHER);
+            /* 
+             * ACCSET_STORE_ANY doesn't matter right now.  But if LICM is
+             * implemented (bug 545406) this counter will need its own region.
+             */
+            lir->insStore(counterValue, counterPtr, 0, ACCSET_STORE_ANY);
             labelForBranch(branch);
         }
 #endif
     }
 
     /*
      * If we are attached to a tree call guard, make sure the guard the inner
      * tree exited from is what we expect it to be.
      */
     if (anchor && anchor->exitType == NESTED_EXIT) {
-        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
-                                                offsetof(TracerState, outermostTreeExitGuard),
-                                                ACCSET_OTHER), "outermostTreeExitGuard");
+        LIns* nested_ins =
+            addName(loadFromState(LIR_ldp, outermostTreeExitGuard), "outermostTreeExitGuard");
         guard(true, lir->ins2(LIR_eqp, nested_ins, INS_CONSTPTR(innermost)), NESTED_EXIT);
     }
 }
 
 TraceRecorder::~TraceRecorder()
 {
     /* Should already have been adjusted by callers before calling delete. */
     JS_ASSERT(traceMonitor->recorder != this);
@@ -3461,17 +3476,17 @@ GetFromClosure(JSContext* cx, JSObject* 
     if (fp) {
         v = T::get_slot(fp, cv->slot);
     } else {
         /*
          * Get the value from the object. We know we have a Call object, and
          * that our slot index is fine, so don't monkey around with calling the
          * property getter (which just looks in the slot) or calling
          * js_GetReservedSlot. Just get the slot directly. Note the static
-         * asserts in jsfun.cpp which make sure Call objects use dslots.
+         * asserts in jsfun.cpp which make sure Call objects use slots.
          */
         JS_ASSERT(cv->slot < T::slot_count(call));
         v = T::get_slot(call, cv->slot);
     }
     JSValueType type = getCoercedType(v);
     ValueToNative(v, type, result);
     return type;
 }
@@ -3485,17 +3500,17 @@ struct ArgClosureTraits
         return fp->formalArg(slot);
     }
 
     // Get the right object slots to use our slot index with.
     static inline Value get_slot(JSObject* obj, unsigned slot) {
         return obj->getSlot(slot_offset(obj) + slot);
     }
 
-    // Get the offset of our object slots from the object's dslots pointer.
+    // Get the offset of our object slots from the object's slots pointer.
     static inline uint32 slot_offset(JSObject* obj) {
         return JSObject::CALL_RESERVED_SLOTS;
     }
 
     // Get the maximum slot index of this type that should be allowed
     static inline uint16 slot_count(JSObject* obj) {
         return obj->getCallObjCalleeFunction()->nargs;
     }
@@ -3562,21 +3577,20 @@ FlushNativeStackFrame(JSContext* cx, uns
     VisitStackSlots(visitor, cx, callDepth);
 
     debug_only_print0(LC_TMTracer, "\n");
     return visitor.getTypeMap() - mp;
 }
 
 /* Emit load instructions onto the trace that read the initial stack state. */
 JS_REQUIRES_STACK void
-TraceRecorder::importImpl(LIns* base, ptrdiff_t offset, const void* p, JSValueType t,
-                          const char *prefix, uintN index, JSStackFrame *fp)
+TraceRecorder::importImpl(LIns* base, ptrdiff_t offset, AccSet accSet, const void* p,
+                          JSValueType t, const char *prefix, uintN index, JSStackFrame *fp)
 {
     LIns* ins;
-    AccSet accSet = base == lirbuf->sp ? ACCSET_STACK : ACCSET_OTHER;
     if (t == JSVAL_TYPE_INT32) { /* demoted */
         JS_ASSERT(hasInt32Repr(*(const Value *)p));
 
         /*
          * Ok, we have a valid demotion attempt pending, so insert an integer
          * read and promote it to double since all arithmetic operations expect
          * to see doubles on entry. The first op to use this slot will emit a
          * d2i cast which will cancel out the i2d we insert here.
@@ -3634,20 +3648,20 @@ TraceRecorder::importImpl(LIns* base, pt
     addName(ins, name);
 
     debug_only_printf(LC_TMTracer, "import vp=%p name=%s type=%c\n",
                       p, name, TypeToChar(t));
 #endif
 }
 
 JS_REQUIRES_STACK void
-TraceRecorder::import(LIns* base, ptrdiff_t offset, const Value* p, JSValueType t,
-                          const char *prefix, uintN index, JSStackFrame *fp)
-{
-    return importImpl(base, offset, p, t, prefix, index, fp);
+TraceRecorder::import(LIns* base, ptrdiff_t offset, AccSet accSet, const Value* p, JSValueType t,
+                      const char *prefix, uintN index, JSStackFrame *fp)
+{
+    return importImpl(base, offset, accSet, p, t, prefix, index, fp);
 }
 
 class ImportBoxedStackSlotVisitor : public SlotVisitorBase
 {
     TraceRecorder &mRecorder;
     LIns *mBase;
     ptrdiff_t mStackOffset;
     JSValueType *mTypemap;
@@ -3662,19 +3676,19 @@ public:
         mStackOffset(stackOffset),
         mTypemap(typemap)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
         for (size_t i = 0; i < count; ++i) {
             if (*mTypemap == JSVAL_TYPE_BOXED) {
-                mRecorder.import(mBase, mStackOffset, vp, JSVAL_TYPE_BOXED,
+                mRecorder.import(mBase, mStackOffset, ACCSET_STACK, vp, JSVAL_TYPE_BOXED,
                                  "jsval", i, fp);
-                LIns *vp_ins = mRecorder.unbox_value(*vp, mBase, mStackOffset,
+                LIns *vp_ins = mRecorder.unbox_value(*vp, mBase, mStackOffset, ACCSET_STACK,
                                                      mRecorder.copy(mRecorder.anchor));
                 mRecorder.set(vp, vp_ins);
             }
             vp++;
             mTypemap++;
             mStackOffset += sizeof(double);
         }
         return true;
@@ -3783,17 +3797,17 @@ TraceRecorder::importGlobalSlot(unsigned
         tree->globalSlots->add(uint16(slot));
         tree->typeMap.add(type);
         SpecializeTreesToMissingGlobals(cx, globalObj, tree);
         JS_ASSERT(tree->nGlobalTypes() == tree->globalSlots->length());
     } else {
         type = importTypeMap[importStackSlots + index];
         JS_ASSERT(type != JSVAL_TYPE_UNINITIALIZED);
     }
-    import(eos_ins, slot * sizeof(double), vp, type, "global", index, NULL);
+    import(eos_ins, slot * sizeof(double), ACCSET_EOS, vp, type, "global", index, NULL);
 }
 
 /* Lazily import a global slot if we don't already have it in the tracker. */
 JS_REQUIRES_STACK bool
 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
 {
     if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
         return false;
@@ -3814,19 +3828,20 @@ TraceRecorder::lazilyImportGlobalSlot(un
 LIns*
 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset, bool shouldDemote)
 {
     /*
      * Sink all type casts targeting the stack into the side exit by simply storing the original
      * (uncasted) value. Each guard generates the side exit map based on the types of the
      * last stores to every stack location, so it's safe to not perform them on-trace.
      */
+    JS_ASSERT(base == lirbuf->sp || base == eos_ins);
     if (shouldDemote && isPromoteInt(i))
         i = demote(lir, i);
-    return lir->insStore(i, base, offset, (base == lirbuf->sp) ? ACCSET_STACK : ACCSET_OTHER);
+    return lir->insStore(i, base, offset, (base == lirbuf->sp) ? ACCSET_STACK : ACCSET_EOS);
 }
 
 /* Update the tracker, then issue a write back store. */
 JS_REQUIRES_STACK void
 TraceRecorder::setImpl(void* p, LIns* i, bool demote)
 {
     JS_ASSERT(i != NULL);
     checkForGlobalObjectReallocation();
@@ -3921,17 +3936,17 @@ TraceRecorder::getImpl(const void *p)
     if (isVoidPtrGlobal(p)) {
         unsigned slot = nativeGlobalSlot((const Value *)p);
         JS_ASSERT(tree->globalSlots->offsetOf(uint16(slot)) != -1);
         importGlobalSlot(slot);
     } else {
         unsigned slot = nativeStackSlotImpl(p);
         JSValueType type = importTypeMap[slot];
         JS_ASSERT(type != JSVAL_TYPE_UNINITIALIZED);
-        importImpl(lirbuf->sp, -tree->nativeStackBase + slot * sizeof(jsdouble),
+        importImpl(lirbuf->sp, -tree->nativeStackBase + slot * sizeof(jsdouble), ACCSET_STACK,
                    p, type, "stack", slot, cx->fp());
     }
     JS_ASSERT(knownImpl(p));
     return tracker.get(p);
 }
 
 JS_REQUIRES_STACK LIns*
 TraceRecorder::get(const Value *p)
@@ -3989,28 +4004,28 @@ TraceRecorder::known(JSObject** p)
 /*
  * The slots of the global object are sometimes reallocated by the interpreter.
  * This function checks for that condition and re-maps the entries of the tracker
  * accordingly.
  */
 JS_REQUIRES_STACK void
 TraceRecorder::checkForGlobalObjectReallocationHelper()
 {
-    debug_only_print0(LC_TMTracer, "globalObj->dslots relocated, updating tracker\n");
-    Value* src = global_dslots;
+    debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
+    Value* src = global_slots;
     Value* dst = globalObj->getSlots();
     jsuint length = globalObj->capacity;
     LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
     for (jsuint n = 0; n < length; ++n) {
         map[n] = tracker.get(src);
         tracker.set(src++, NULL);
     }
     for (jsuint n = 0; n < length; ++n)
         tracker.set(dst++, map[n]);
-    global_dslots = globalObj->getSlots();
+    global_slots = globalObj->getSlots();
 }
 
 /* Determine whether the current branch is a loop edge (taken or not taken). */
 static JS_REQUIRES_STACK bool
 IsLoopEdge(jsbytecode* pc, jsbytecode* header)
 {
     switch (*pc) {
       case JSOP_IFEQ:
@@ -4049,17 +4064,17 @@ public:
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
         LIns *ins = mRecorder.get(vp);
         bool isPromote = isPromoteInt(ins);
         if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
             mLir->insStore(mRecorder.get(vp), mRecorder.eos_ins,
-                            mRecorder.nativeGlobalOffset(vp), ACCSET_OTHER);
+                            mRecorder.nativeGlobalOffset(vp), ACCSET_EOS);
 
             /*
              * Aggressively undo speculation so the inner tree will compile
              * if this fails.
              */
             JS_TRACE_MONITOR(mCx).oracle->markGlobalSlotUndemotable(mCx, slot);
         }
         JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
@@ -4386,16 +4401,33 @@ TraceRecorder::createGuardRecord(VMSideE
         gr->profGuardID = fragment->guardNumberer++;
         gr->nextInFrag = fragment->guardsForFrag;
         fragment->guardsForFrag = gr;
     )
 
     return gr;
 }
 
+/* Test if 'ins' is in a form that can be used as a guard/branch condition. */
+static bool
+isCond(LIns* ins)
+{
+    return ins->isCmp() || ins->isImmI(0) || ins->isImmI(1);
+}
+
+/* Ensure 'ins' is in a form suitable for a guard/branch condition. */
+void
+TraceRecorder::ensureCond(LIns** ins, bool* cond)
+{
+    if (!isCond(*ins)) {
+        *cond = !*cond;
+        *ins = (*ins)->isI() ? lir->insEqI_0(*ins) : lir->insEqP_0(*ins);
+    }
+}
+
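
guard() below now asserts that its condition is already normalized; callers that may hold an arbitrary value run it through ensureCond first, which wraps non-conditions in an equals-zero test and flips the expected sense to compensate. The caller-side pattern, as emitIf uses it further down:

    ensureCond(&x, &cond);   // may rewrite x to (x == 0) and invert cond
    if (!x->isImmI())
        guard(cond, x, exitType);
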
 /*
  * Emit a guard for condition (cond), expecting to evaluate to boolean result
  * (expected) and using the supplied side exit if the condition doesn't hold.
  *
  * Callers shouldn't generate guards that always exit (which can occur due to
  * optimization of the guard condition) because it's bad for both compile-time
  * speed (all the code generated after the guard is dead) and run-time speed
  * (fragments that always exit are slow).  This function has two modes for
@@ -4418,20 +4450,17 @@ TraceRecorder::createGuardRecord(VMSideE
  */
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit,
                      bool abortIfAlwaysExits/* = false */)
 {
     if (exit->exitType == LOOP_EXIT)
         tree->sideExits.add(exit);
 
-    if (!cond->isCmp()) {
-        expected = !expected;
-        cond = cond->isI() ? lir->insEqI_0(cond) : lir->insEqP_0(cond);
-    }
+    JS_ASSERT(isCond(cond));
 
     if ((cond->isImmI(0) && expected) || (cond->isImmI(1) && !expected)) {
         if (abortIfAlwaysExits) {
             /* The guard always exits, the caller must check for an abort. */
             RETURN_STOP("Constantly false guard detected");
         }
         /*
          * If this assertion fails, first decide if you want recording to
@@ -4962,19 +4991,19 @@ TraceRecorder::closeLoop(VMSideExit* exi
  * the recorder was deleted. Outparam is always set.
  */
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
 {
     /*
      * We should have arrived back at the loop header, and hence we don't want
      * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
-     * case this loop was blacklisted in the meantime, JSOP_NOP.
-     */
-    JS_ASSERT((*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOP) &&
+     * case this loop was blacklisted in the meantime, JSOP_NOTRACE.
+     */
+    JS_ASSERT((*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOTRACE) &&
               !cx->fp()->hasImacropc());
 
     if (callDepth != 0) {
         debug_only_print0(LC_TMTracer,
                           "Blacklisted: stack depth mismatch, possible recursion.\n");
         Blacklist((jsbytecode*)tree->ip);
         trashSelf = true;
         return ARECORD_STOP;
@@ -5084,18 +5113,18 @@ TraceRecorder::closeLoop(SlotMap& slotMa
                       "updating specializations on dependent and linked trees\n");
     if (tree->code())
         SpecializeTreesToMissingGlobals(cx, globalObj, tree);
 
     /*
      * If this is a newly formed tree, and the outer tree has not been compiled yet, we
      * should try to compile the outer tree again.
      */
-    if (outer)
-        AttemptCompilation(cx, globalObj, outer, outerArgc);
+    if (outerPC)
+        AttemptCompilation(cx, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via closeLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
                       js_FramePCToLineNumber(cx, cx->fp()),
                       FramePCOffset(cx, cx->fp()),
                       fragment->profFragID);
     debug_only_print0(LC_TMMinimal, "\n");
@@ -5250,18 +5279,18 @@ TraceRecorder::endLoop(VMSideExit* exit)
                       "updating specializations on dependent and linked trees\n");
     if (tree->code())
         SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root);
 
     /*
      * If this is a newly formed tree, and the outer tree has not been compiled
      * yet, we should try to compile the outer tree again.
      */
-    if (outer)
-        AttemptCompilation(cx, globalObj, outer, outerArgc);
+    if (outerPC)
+        AttemptCompilation(cx, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via endLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
                       js_FramePCToLineNumber(cx, cx->fp()),
                       FramePCOffset(cx, cx->fp()),
                       fragment->profFragID);
     debug_only_print0(LC_TMTracer, "\n");
@@ -5315,20 +5344,18 @@ TraceRecorder::prepareTreeCall(TreeFragm
         LIns* rp_top = lir->ins2(LIR_addp, lirbuf->rp, INS_CONSTWORD(rp_offset));
         guard(true, lir->ins2(LIR_ltp, rp_top, eor_ins), exit);
 
         sp_offset =
                 - tree->nativeStackBase /* rebase sp to beginning of outer tree's stack */
                 + sp_adj /* adjust for stack in outer frame inner tree can't see */
                 + inner->nativeStackBase; /* plus the inner tree's stack base */
         /* We have enough space, so adjust sp and rp to their new level. */
-        lir->insStore(lir->ins2(LIR_addp, lirbuf->sp, INS_CONSTWORD(sp_offset)),
-                lirbuf->state, offsetof(TracerState, sp), ACCSET_OTHER);
-        lir->insStore(lir->ins2(LIR_addp, lirbuf->rp, INS_CONSTWORD(rp_adj)),
-                lirbuf->state, offsetof(TracerState, rp), ACCSET_OTHER);
+        storeToState(lir->ins2(LIR_addp, lirbuf->sp, INS_CONSTWORD(sp_offset)), sp);
+        storeToState(lir->ins2(LIR_addp, lirbuf->rp, INS_CONSTWORD(rp_adj)), rp);
     }
 
     /*
      * The inner tree will probably access stack slots. So tell nanojit not to
      * discard or defer stack writes before emitting the call tree code.
      *
      * (The ExitType of this snapshot is nugatory. The exit can't be taken.)
      */
@@ -5369,63 +5396,62 @@ TraceRecorder::emitTreeCall(TreeFragment
     ci->_typesig = CallInfo::typeSig1(ARGTYPE_P, ARGTYPE_P);
     ci->_isPure = 0;
     ci->_storeAccSet = ACCSET_STORE_ANY;
     ci->_abi = ABI_FASTCALL;
 #ifdef DEBUG
     ci->_name = "fragment";
 #endif
     LIns* rec = lir->insCall(ci, args);
-    LIns* lr = lir->insLoad(LIR_ldp, rec, offsetof(GuardRecord, exit), ACCSET_OTHER);
+    // We use ACCSET_LOAD_ANY for the GuardRecord and VMSideExit loads;
+    // they're immediately after a fragment call, and so won't be optimizable
+    // anyway.
+    LIns* lr = lir->insLoad(LIR_ldp, rec, offsetof(GuardRecord, exit), ACCSET_LOAD_ANY);
     LIns* nested =
         unoptimizableCondBranch(LIR_jt,
                                 lir->ins2ImmI(LIR_eqi,
                                               lir->insLoad(LIR_ldi, lr,
                                                            offsetof(VMSideExit, exitType),
-                                                           ACCSET_OTHER),
+                                                           ACCSET_LOAD_ANY),
                                               NESTED_EXIT));
-
     /*
      * If the tree exits on a regular (non-nested) guard, keep updating lastTreeExitGuard
      * with that guard. If we mismatch on a tree call guard, this will contain the last
      * non-nested guard we encountered, which is the innermost loop or branch guard.
      */
-    lir->insStore(lr, lirbuf->state, offsetof(TracerState, lastTreeExitGuard), ACCSET_OTHER);
+    storeToState(lr, lastTreeExitGuard);
     LIns* done1 = lir->insBranch(LIR_j, NULL, NULL);
 
     /*
      * The tree exited on a nested guard. This only occurs once a tree call guard mismatches
      * and we unwind the tree call stack. We store the first (innermost) tree call guard in state
      * and we will try to grow the outer tree the failing call was in starting at that guard.
      */
     labelForBranch(nested);
     LIns* done2 =
         unoptimizableCondBranch(LIR_jf,
-                                lir->insEqP_0(lir->insLoad(LIR_ldp,
-                                                           lirbuf->state,
-                                                           offsetof(TracerState, lastTreeCallGuard),
-                                                           ACCSET_OTHER)));
-    lir->insStore(lr, lirbuf->state, offsetof(TracerState, lastTreeCallGuard), ACCSET_OTHER);
-    lir->insStore(lir->ins2(LIR_addp,
-                             lir->insLoad(LIR_ldp, lirbuf->state, offsetof(TracerState, rp),
-                                          ACCSET_OTHER),
-                             lir->insI2P(lir->ins2ImmI(LIR_lshi,
+                                lir->insEqP_0(loadFromState(LIR_ldp, lastTreeCallGuard)));
+    storeToState(lr, lastTreeCallGuard);
+    storeToState(lir->ins2(LIR_addp,
+                           loadFromState(LIR_ldp, rp),
+                           lir->insI2P(lir->ins2ImmI(LIR_lshi,
+                                                     // See the other VMSideExit load above for
+                                                     // why ACCSET_LOAD_ANY is used here.
                                                      lir->insLoad(LIR_ldi, lr,
                                                                   offsetof(VMSideExit, calldepth),
-                                                                  ACCSET_OTHER),
+                                                                  ACCSET_LOAD_ANY),
                                                      sizeof(void*) == 4 ? 2 : 3))),
-                   lirbuf->state,
-                   offsetof(TracerState, rpAtLastTreeCall), ACCSET_OTHER);
+                 rpAtLastTreeCall);
     labelForBranches(done1, done2);
 
     /*
      * Keep updating outermostTreeExit so that TracerState always contains the most recent
      * side exit.
      */
-    lir->insStore(lr, lirbuf->state, offsetof(TracerState, outermostTreeExitGuard), ACCSET_OTHER);
+    storeToState(lr, outermostTreeExitGuard);
 
     /* Read back all registers, in case the called tree changed any of them. */
 #ifdef DEBUG
     JSValueType* map;
     size_t i;
     map = exit->globalTypeMap();
     for (i = 0; i < exit->numGlobalSlots; i++)
         JS_ASSERT(map[i] != JSVAL_TYPE_BOXED);
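
The storeToState/loadFromState helpers introduced in this hunk replace the repeated insStore/insLoad calls against lirbuf->state. Their definitions live elsewhere in the patch; a minimal sketch of what they plausibly expand to, assuming a dedicated access-set name like ACCSET_STATE for TracerState fields:

#define storeToState(value, name)                                            \
    lir->insStore((value), lirbuf->state, offsetof(TracerState, name),       \
                  ACCSET_STATE)
#define loadFromState(op, name)                                              \
    lir->insLoad((op), lirbuf->state, offsetof(TracerState, name),           \
                 ACCSET_STATE)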
@@ -5485,18 +5511,18 @@ TraceRecorder::emitTreeCall(TreeFragment
      */
     BuildGlobalTypeMapFromInnerTree(importTypeMap, exit);
 
     importGlobalSlots = importTypeMap.length() - importStackSlots;
     JS_ASSERT(importGlobalSlots == tree->globalSlots->length());
 
     /* Restore sp and rp to their original values (we still have them in a register). */
     if (callDepth > 0) {
-        lir->insStore(lirbuf->sp, lirbuf->state, offsetof(TracerState, sp), ACCSET_OTHER);
-        lir->insStore(lirbuf->rp, lirbuf->state, offsetof(TracerState, rp), ACCSET_OTHER);
+        storeToState(lirbuf->sp, sp);
+        storeToState(lirbuf->rp, rp);
     }
 
     /*
      * Guard that we come out of the inner tree along the same side exit we came out when
      * we called the inner tree at recording time.
      */
     VMSideExit* nestedExit = snapshot(NESTED_EXIT);
     JS_ASSERT(exit->exitType == LOOP_EXIT);
@@ -5556,16 +5582,22 @@ TraceRecorder::emitIf(jsbytecode* pc, bo
          */
         if (x->isImmI()) {
             pendingLoop = (x->immI() == int32(cond));
             return;
         }
     } else {
         exitType = BRANCH_EXIT;
     }
+    /*
+     * Put 'x' in a form suitable for a guard/branch condition if it isn't
+     * already.  This lets us detect if the comparison is optimized to 0 or 1,
+     * in which case we avoid the guard() call below.
+     */
+    ensureCond(&x, &cond);
     if (!x->isImmI())
         guard(cond, x, exitType);
 }
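
ensureCond is defined elsewhere in this patch; judging from how it is used here (it may rewrite both the instruction and the expected sense), a plausible sketch is:

void
TraceRecorder::ensureCond(LIns** ins, bool* cond)
{
    /* Sketch only: if *ins is not already a comparison, test it against
       zero and invert the expected condition to compensate. */
    if (!(*ins)->isCmp()) {
        *ins = lir->insEqI_0(*ins);
        *cond = !*cond;
    }
}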
 
 /* Emit code for a fused IFEQ/IFNE. */
 JS_REQUIRES_STACK void
 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
 {
@@ -5700,24 +5732,26 @@ CheckGlobalObjectShape(JSContext* cx, Tr
 /*
  * Return whether or not the recorder could be started. If 'false', the JIT has
  * been reset in response to an OOM.
  */
 bool JS_REQUIRES_STACK
 TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f,
                              unsigned stackSlots, unsigned ngslots,
                              JSValueType* typeMap, VMSideExit* expectedInnerExit,
-                             jsbytecode* outer, uint32 outerArgc, bool speculate)
+                             JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc,
+                             bool speculate)
 {
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(!tm->needFlush);
     JS_ASSERT_IF(cx->fp()->hasImacropc(), f->root != f);
 
     tm->recorder = new TraceRecorder(cx, anchor, f, stackSlots, ngslots, typeMap,
-                                     expectedInnerExit, outer, outerArgc, speculate);
+                                     expectedInnerExit, outerScript, outerPC, outerArgc,
+                                     speculate);
 
     if (!tm->recorder || tm->outOfMemory() || OverfullJITCache(tm)) {
         ResetJIT(cx, FR_OOM);
         return false;
     }
 
     return true;
 }
@@ -5802,17 +5836,17 @@ SynthesizeFrame(JSContext* cx, const Fra
     JSInterpreterHook hook = cx->debugHooks->callHook;
     if (hook) {
         newfp->setHookData(hook(cx, newfp, JS_TRUE, 0,
                                 cx->debugHooks->callHookData));
     }
 }
 
 static JS_REQUIRES_STACK bool
-RecordTree(JSContext* cx, TreeFragment* first, jsbytecode* outer,
+RecordTree(JSContext* cx, TreeFragment* first, JSScript* outerScript, jsbytecode* outerPC,
            uint32 outerArgc, SlotList* globalSlots)
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Try to find an unused peer fragment, or allocate a new one. */
     JS_ASSERT(first->first == first);
     TreeFragment* f = NULL;
     size_t count = 0;
@@ -5865,17 +5899,17 @@ RecordTree(JSContext* cx, TreeFragment* 
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[f->nStackTypes + i]));
     debug_only_print0(LC_TMTreeVis, "\"\n");
 #endif
 
     /* Recording primary trace. */
     return TraceRecorder::startRecorder(cx, NULL, f, f->nStackTypes,
                                         f->globalSlots->length(),
                                         f->typeMap.data(), NULL,
-                                        outer, outerArgc, speculate);
+                                        outerScript, outerPC, outerArgc, speculate);
 }
 
 static JS_REQUIRES_STACK TypeConsensus
 FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp)
 {
     TreeFragment* from = exit->root();
 
     JS_ASSERT(from->code());
@@ -5911,18 +5945,18 @@ FindLoopEdgeTarget(JSContext* cx, VMSide
             return consensus;
         }
     }
 
     return TypeConsensus_Bad;
 }
 
 static JS_REQUIRES_STACK bool
-AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsbytecode* outer,
-                       uint32 outerArgc)
+AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit,
+                       JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc)
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     if (tm->needFlush) {
         ResetJIT(cx, FR_DEEP_BAIL);
         return false;
     }
 
     TreeFragment* from = exit->root();
@@ -5954,20 +5988,20 @@ AttemptToStabilizeTree(JSContext* cx, JS
         return false;
     }
 
     SlotList *globalSlots = from->globalSlots;
 
     JS_ASSERT(from == from->root);
 
     /* If this tree has been blacklisted, don't try to record a new one. */
-    if (*(jsbytecode*)from->ip == JSOP_NOP)
+    if (*(jsbytecode*)from->ip == JSOP_NOTRACE)
         return false;
 
-    return RecordTree(cx, from->first, outer, outerArgc, globalSlots);
+    return RecordTree(cx, from->first, outerScript, outerPC, outerArgc, globalSlots);
 }
 
 static JS_REQUIRES_STACK VMFragment*
 CreateBranchFragment(JSContext* cx, TreeFragment* root, VMSideExit* anchor)
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     verbose_only(
@@ -5986,17 +6020,18 @@ CreateBranchFragment(JSContext* cx, Tree
 
     f->root = root;
     if (anchor)
         anchor->target = f;
     return f;
 }
 
 static JS_REQUIRES_STACK bool
-AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer
+AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom,
+                    JSScript *outerScript, jsbytecode* outerPC
 #ifdef MOZ_TRACEVIS
     , TraceVisStateObj* tvso = NULL
 #endif
     )
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(!tm->recorder);
 
@@ -6038,17 +6073,17 @@ AttemptToExtendTree(JSContext* cx, VMSid
 
     debug_only_printf(LC_TMTracer,
                       "trying to attach another branch to the tree (hits = %d)\n", c->hits());
 
     int32_t& hits = c->hits();
     int32_t maxHits = HOTEXIT + MAXEXIT;
     if (anchor->exitType == CASE_EXIT)
         maxHits *= anchor->switchInfo->count;
-    if (outer || (hits++ >= HOTEXIT && hits <= maxHits)) {
+    if (outerPC || (hits++ >= HOTEXIT && hits <= maxHits)) {
         /* start tracing secondary trace from this point */
         unsigned stackSlots;
         unsigned ngslots;
         JSValueType* typeMap;
         TypeMap fullMap(NULL);
         if (!exitedFrom) {
             /*
              * If we are coming straight from a simple side exit, just use that
@@ -6072,17 +6107,17 @@ AttemptToExtendTree(JSContext* cx, VMSid
             stackSlots = fullMap.length();
             ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
             JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
             JS_ASSERT(ngslots == fullMap.length() - stackSlots);
             typeMap = fullMap.data();
         }
         JS_ASSERT(ngslots >= anchor->numGlobalSlots);
         bool rv = TraceRecorder::startRecorder(cx, anchor, c, stackSlots, ngslots, typeMap,
-                                               exitedFrom, outer, entryFrameArgc(cx),
+                                               exitedFrom, outerScript, outerPC, entryFrameArgc(cx),
                                                hits < maxHits);
 #ifdef MOZ_TRACEVIS
         if (!rv && tvso)
             tvso->r = R_FAIL_EXTEND_START;
 #endif
         return rv;
     }
 #ifdef MOZ_TRACEVIS
@@ -6148,24 +6183,25 @@ TraceRecorder::recordLoopEdge(JSContext*
                       FramePCOffset(cx, cx->fp()));
 
     // Find a matching inner tree. If none can be found, compile one.
     TreeFragment* f = r->findNestedCompatiblePeer(first);
     if (!f || !f->code()) {
         AUDIT(noCompatInnerTrees);
 
         TreeFragment* outerFragment = root;
-        jsbytecode* outer = (jsbytecode*) outerFragment->ip;
+        JSScript* outerScript = outerFragment->script;
+        jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
         uint32 outerArgc = outerFragment->argc;
         JS_ASSERT(entryFrameArgc(cx) == first->argc);
 
         if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
             return MONITOR_NOT_RECORDING;
 
-        return RecordingIfTrue(RecordTree(cx, first, outer, outerArgc, globalSlots));
+        return RecordingIfTrue(RecordTree(cx, first, outerScript, outerPC, outerArgc, globalSlots));
     }
 
     AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
     if (status == ARECORD_CONTINUE)
         return MONITOR_RECORDING;
     if (status == ARECORD_ERROR) {
         if (TRACE_RECORDER(cx))
             AbortRecording(cx, "Error returned while recording loop edge");
@@ -6215,26 +6251,27 @@ TraceRecorder::attemptTreeCall(TreeFragm
         return ARECORD_ABORTED;
 
     if (!lr) {
         AbortRecording(cx, "Couldn't call inner tree");
         return ARECORD_ABORTED;
     }
 
     TreeFragment* outerFragment = tree;
-    jsbytecode* outer = (jsbytecode*) outerFragment->ip;
+    JSScript* outerScript = outerFragment->script;
+    jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
     switch (lr->exitType) {
       case LOOP_EXIT:
         /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
         if (innermostNestedGuard) {
             if (AbortRecording(cx, "Inner tree took different side exit, abort current "
                                    "recording and grow nesting tree") == JIT_RESET) {
                 return ARECORD_ABORTED;
             }
-            return AttemptToExtendTree(localCx, innermostNestedGuard, lr, outer)
+            return AttemptToExtendTree(localCx, innermostNestedGuard, lr, outerScript, outerPC)
                    ? ARECORD_CONTINUE
                    : ARECORD_ABORTED;
         }
 
         JS_ASSERT(oldInlineCallCount == inlineCallCount);
 
         /* Emit a call to the inner tree and continue recording the outer tree trace. */
         emitTreeCall(f, lr);
@@ -6243,32 +6280,37 @@ TraceRecorder::attemptTreeCall(TreeFragm
       case UNSTABLE_LOOP_EXIT:
       {
         /* Abort recording so the inner loop can become type stable. */
         JSObject* _globalObj = globalObj;
         if (AbortRecording(cx, "Inner tree is trying to stabilize, "
                                "abort outer recording") == JIT_RESET) {
             return ARECORD_ABORTED;
         }
-        return AttemptToStabilizeTree(localCx, _globalObj, lr, outer, outerFragment->argc)
+        return AttemptToStabilizeTree(localCx, _globalObj, lr, outerScript, outerPC,
+                                      outerFragment->argc)
                ? ARECORD_CONTINUE
                : ARECORD_ABORTED;
       }
 
+      case MUL_ZERO_EXIT:
       case OVERFLOW_EXIT:
-        traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
+        if (lr->exitType == MUL_ZERO_EXIT)
+            traceMonitor->oracle->markInstructionSlowZeroTest(cx->regs->pc);
+        else
+            traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
         /* FALL THROUGH */
       case BRANCH_EXIT:
       case CASE_EXIT:
         /* Abort recording the outer tree, extend the inner tree. */
         if (AbortRecording(cx, "Inner tree is trying to grow, "
                                "abort outer recording") == JIT_RESET) {
             return ARECORD_ABORTED;
         }
-        return AttemptToExtendTree(localCx, lr, NULL, outer)
+        return AttemptToExtendTree(localCx, lr, NULL, outerScript, outerPC)
                ? ARECORD_CONTINUE
                : ARECORD_ABORTED;
 
       case NESTED_EXIT:
         JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
       default:
         debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
         AbortRecording(cx, "Inner tree not suitable for calling");
@@ -6551,18 +6593,17 @@ TracerState::TracerState(JSContext* cx, 
     rpAtLastTreeCall(NULL),
     outermostTree(f),
     inlineCallCountp(&inlineCallCount),
     innermostNestedGuardp(innermostNestedGuardp),
 #ifdef EXECUTE_TREE_TIMER
     startTime(rdtsc()),
 #endif
     builtinStatus(0),
-    nativeVp(NULL),
-    bailedSlowNativeRegs(bailedSlowNativeRegs)
+    nativeVp(NULL)
 {
     JS_ASSERT(!tm->tracecx);
     tm->tracecx = cx;
     prev = cx->tracerState;
     cx->tracerState = this;
 
     JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
     JS_ASSERT(sp < eos);
@@ -7160,17 +7201,17 @@ MonitorLoopEdge(JSContext* cx, uintN& in
             return MONITOR_NOT_RECORDING;
         }
 
         /*
          * We can give RecordTree the root peer. If that peer is already taken,
          * it will walk the peer list and find us a free slot or allocate a new
          * tree if needed.
          */
-        bool rv = RecordTree(cx, f->first, NULL, 0, globalSlots);
+        bool rv = RecordTree(cx, f->first, NULL, 0, NULL, globalSlots);
 #ifdef MOZ_TRACEVIS
         if (!rv)
             tvso.r = R_FAIL_RECORD_TREE;
 #endif
         return RecordingIfTrue(rv);
     }
 
     debug_only_printf(LC_TMTracer,
@@ -7212,38 +7253,42 @@ MonitorLoopEdge(JSContext* cx, uintN& in
     /*
      * If we exit on a branch, or on a tree call guard, try to grow the inner
      * tree (in case of a branch exit), or the tree nested around the tree we
      * exited from (in case of the tree call guard).
      */
     bool rv;
     switch (lr->exitType) {
       case UNSTABLE_LOOP_EXIT:
-          rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0);
+        rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0, NULL);
 #ifdef MOZ_TRACEVIS
-          if (!rv)
-              tvso.r = R_FAIL_STABILIZE;
-#endif
-          return RecordingIfTrue(rv);
-
+        if (!rv)
+            tvso.r = R_FAIL_STABILIZE;
+#endif
+        return RecordingIfTrue(rv);
+
+      case MUL_ZERO_EXIT:
       case OVERFLOW_EXIT:
-          tm->oracle->markInstructionUndemotable(cx->regs->pc);
+        if (lr->exitType == MUL_ZERO_EXIT)
+            tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
+        else
+            tm->oracle->markInstructionUndemotable(cx->regs->pc);
         /* FALL THROUGH */
       case BRANCH_EXIT:
       case CASE_EXIT:
-        rv = AttemptToExtendTree(cx, lr, NULL, NULL
+        rv = AttemptToExtendTree(cx, lr, NULL, NULL, NULL
 #ifdef MOZ_TRACEVIS
                                                    , &tvso
 #endif
                                  );
         return RecordingIfTrue(rv);
 
       case LOOP_EXIT:
         if (innermostNestedGuard) {
-            rv = AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL
+            rv = AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL
 #ifdef MOZ_TRACEVIS
                                                                        , &tvso
 #endif
                                      );
             return RecordingIfTrue(rv);
         }
 #ifdef MOZ_TRACEVIS
         tvso.r = R_NO_EXTEND_OUTER;
@@ -7297,30 +7342,35 @@ TraceRecorder::monitorRecording(JSOp op)
      * Clear one-shot state used to communicate between record_JSOP_CALL and post-
      * opcode-case-guts record hook (record_NativeCallComplete).
      */
     pendingSpecializedNative = NULL;
     newobj_ins = NULL;
 
     /* Handle one-shot request from finishGetProp or INSTANCEOF to snapshot post-op state and guard. */
     if (pendingGuardCondition) {
-        guard(true, pendingGuardCondition, STATUS_EXIT);
+        LIns* cond = pendingGuardCondition;
+        bool expected = true;
+        /* Put 'cond' in a form suitable for a guard/branch condition if it's not already. */
+        ensureCond(&cond, &expected);
+        guard(expected, cond, STATUS_EXIT);
         pendingGuardCondition = NULL;
     }
 
     /* Handle one-shot request to unbox the result of a property get. */
     if (pendingUnboxSlot) {
         LIns* val_ins = get(pendingUnboxSlot);
         /*
          * We need to know from where to unbox the value. Since pendingUnboxSlot
          * is only set in finishGetProp, we can depend on LIns* tracked for
          * pendingUnboxSlot to have this information.
          */
         LIns* unboxed_ins = unbox_value(*pendingUnboxSlot,
                                         val_ins->oprnd1(), val_ins->disp(),
+                                        ACCSET_LOAD_ANY,
                                         snapshot(BRANCH_EXIT));
         set(pendingUnboxSlot, unboxed_ins);
         pendingUnboxSlot = 0;
     }
 
     debug_only_stmt(
         if (LogController.lcbits & LC_TMRecorder) {
             js_Disassemble1(cx, cx->fp()->script(), cx->regs->pc,
@@ -8034,27 +8084,27 @@ TraceRecorder::scopeChain()
  * Generate LIR to compute the scope chain on entry to the trace. This is
  * generally useful only for getting to the global object, because only
  * the global object is guaranteed to be present.
  */
 JS_REQUIRES_STACK LIns*
 TraceRecorder::entryScopeChain() const
 {
     return lir->insLoad(LIR_ldp, entryFrameIns(),
-                        JSStackFrame::offsetOfScopeChain(), ACCSET_OTHER);
+                        JSStackFrame::offsetOfScopeChain(), ACCSET_STACKFRAME);
 }
 
 /*
  * Generate LIR to compute the stack frame on entry to the trace.
  */
 JS_REQUIRES_STACK LIns*
 TraceRecorder::entryFrameIns() const
 {
-    LIns *regs_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, regs), ACCSET_OTHER);
-    return lir->insLoad(LIR_ldp, regs_ins, offsetof(JSFrameRegs, fp), ACCSET_OTHER);
+    LIns *regs_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, regs), ACCSET_CX);
+    return lir->insLoad(LIR_ldp, regs_ins, offsetof(JSFrameRegs, fp), ACCSET_FRAMEREGS);
 }
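
Replacing the catch-all ACCSET_OTHER with per-region sets (ACCSET_CX, ACCSET_FRAMEREGS, ACCSET_STACKFRAME, ...) is what gives nanojit's alias analysis something to work with. An illustrative consequence, assuming standard redundant-load elimination over disjoint access sets:

/* Two frame loads separated only by slot stores: the stores are tagged
   ACCSET_SLOTS, the loads ACCSET_CX/ACCSET_FRAMEREGS, so the second load
   can be CSE'd with the first. With ACCSET_OTHER everywhere, the stores
   would conservatively kill the load. */
LIns *fp1 = entryFrameIns();
/* ... stores tagged ACCSET_SLOTS ... */
LIns *fp2 = entryFrameIns();   /* now eliminable */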
 
 /*
  * Return the frame of a call object if that frame is part of the current
  * trace. |depthp| is an optional outparam: if it is non-null, it will be
  * filled in with the depth of the call object's frame relevant to cx->fp().
  */
 JS_REQUIRES_STACK JSStackFrame*
@@ -8278,17 +8328,17 @@ TraceRecorder::callProp(JSObject* obj, J
 
         LIns* call_ins = lir->insCall(ci, args);
 
         JSValueType type = getCoercedType(nr.v);
         guard(true,
               addName(lir->ins2(LIR_eqi, call_ins, lir->insImmI(type)),
                       "guard(type-stable name access)"),
               BRANCH_EXIT);
-        ins = stackLoad(outp, ACCSET_OTHER, type);
+        ins = stackLoad(outp, ACCSET_ALLOC, type);
     }
     nr.tracked = false;
     nr.obj = obj;
     nr.obj_ins = obj_ins;
     nr.shape = shape;
     return RECORD_CONTINUE;
 }
 
@@ -8323,16 +8373,31 @@ TraceRecorder::stack(int n)
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::stack(int n, LIns* i)
 {
     set(&stackval(n), i);
 }
 
+/* Guard that both operands are non-negative; leave trace if either is negative. */
+JS_REQUIRES_STACK void
+TraceRecorder::guardNonNeg(LIns* d0, LIns* d1, VMSideExit* exit)
+{
+    if (d0->isImmI())
+        JS_ASSERT(d0->immI() >= 0);
+    else
+        guard(false, lir->ins2ImmI(LIR_lti, d0, 0), exit);
+
+    if (d1->isImmI())
+        JS_ASSERT(d1->immI() >= 0);
+    else
+        guard(false, lir->ins2ImmI(LIR_lti, d1, 0), exit);
+}
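
guardNonNeg backs the cheap negative-zero check added to alu() below. The reason -0 must be preserved at all is IEEE-754: the sign of a zero is observable from script. In C++ terms:

#include <cassert>
#include <cmath>
#include <cstdint>

/* Sketch: why an int32-demoted multiply must side-exit on suspicious
   zeros -- the integer path cannot represent -0. */
void negativeZeroDemo() {
    double r = -1.0 * 0.0;       // IEEE-754: r is -0.0
    assert(std::signbit(r));     // observable: 1.0 / r == -Infinity
    int32_t i = -1 * 0;          // int path: plain +0, the sign is lost
    (void) i;
}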
+
 JS_REQUIRES_STACK LIns*
 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
 {
     /*
      * To even consider this operation for demotion, both operands have to be
      * integers and the oracle must not give us a negative hint for the
      * instruction.
      */
@@ -8353,17 +8418,17 @@ TraceRecorder::alu(LOpcode v, jsdouble v
     case LIR_addd:
         r = v0 + v1;
         break;
     case LIR_subd:
         r = v0 - v1;
         break;
     case LIR_muld:
         r = v0 * v1;
-        if (r == 0.0)
+        if (r == 0.0 && (v0 < 0.0 || v1 < 0.0))
             goto out;
         break;
 #if defined NANOJIT_IA32 || defined NANOJIT_X64
     case LIR_divd:
         if (v1 == 0)
             goto out;
         r = v0 / v1;
         break;
@@ -8405,19 +8470,19 @@ TraceRecorder::alu(LOpcode v, jsdouble v
          * If the divisor is greater than zero it's always safe to execute
          * the division. If not, we have to make sure we are not running
          * into -2147483648 / -1, because it can raise an overflow exception.
          */
         if (!d1->isImmI()) {
             LIns* br;
             if (condBranch(LIR_jt, lir->ins2ImmI(LIR_gti, d1, 0), &br)) {
                 guard(false, lir->insEqI_0(d1), exit);
-                guard(false, lir->ins2(LIR_andi,
-                                       lir->ins2ImmI(LIR_eqi, d0, 0x80000000),
-                                       lir->ins2ImmI(LIR_eqi, d1, -1)), exit);
+                guard(true, lir->insEqI_0(lir->ins2(LIR_andi,
+                                                    lir->ins2ImmI(LIR_eqi, d0, 0x80000000),
+                                                    lir->ins2ImmI(LIR_eqi, d1, -1))), exit);
                 labelForBranch(br);
             }
         } else {
             if (d1->immI() == -1)
                 guard(false, lir->ins2ImmI(LIR_eqi, d0, 0x80000000), exit);
         }
         result = lir->ins2(v = LIR_divi, d0, d1);
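
The non-constant-divisor guards above cover the one int32 division that traps on x86: INT32_MIN / -1, whose true quotient +2147483648 is unrepresentable, so idiv raises #DE instead of wrapping. As a predicate (sketch):

#include <cstdint>

bool divisionIsSafe(int32_t d0, int32_t d1) {
    /* Matches the guards above: non-zero divisor, and not the single
       overflowing pair 0x80000000 / -1. */
    return d1 != 0 && !(d0 == INT32_MIN && d1 == -1);
}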
 
@@ -8468,21 +8533,40 @@ TraceRecorder::alu(LOpcode v, jsdouble v
         ChecksRequired(v, d0, d1, &needsOverflowCheck, &needsNegZeroCheck);
         if (needsOverflowCheck) {
             exit = snapshot(OVERFLOW_EXIT);
             result = guard_xov(v, d0, d1, exit);
         } else {
             result = lir->ins2(v, d0, d1);
         }
         if (needsNegZeroCheck) {
-            // make sure we don't lose a -0
             JS_ASSERT(v == LIR_muli);
-            if (!exit)
-                exit = snapshot(OVERFLOW_EXIT);
-            guard(false, lir->insEqI_0(result), exit);
+
+            /*
+             * Make sure we don't lose a -0. We exit if the result is zero and
+             * either operand is negative. We start out with a weaker guard that
+             * checks only whether either operand is negative. If that guard ever
+             * fails, we recompile with the stronger, but slower, test.
+             */
+            if (v0 < 0.0 || v1 < 0.0
+                || !oracle || oracle->isInstructionSlowZeroTest(cx->regs->pc))
+            {
+                if (!exit)
+                    exit = snapshot(OVERFLOW_EXIT);
+
+                guard(true,
+                      lir->insEqI_0(lir->ins2(LIR_andi,
+                                              lir->insEqI_0(result),
+                                              lir->ins2(LIR_ori,
+                                                        lir->ins2ImmI(LIR_lti, d0, 0),
+                                                        lir->ins2ImmI(LIR_lti, d1, 0)))),
+                      exit);
+            } else {
+                guardNonNeg(d0, d1, snapshot(MUL_ZERO_EXIT));
+            }
         }
         break;
     }
     JS_ASSERT_IF(d0->isImmI() && d1->isImmI(),
                  result->isImmI() && result->immI() == jsint(r));
     return lir->ins1(LIR_i2d, result);
 }
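
The two multiply guards enforce different predicates; written as plain C (a sketch of the conditions, not of the emitted LIR):

#include <cstdint>

/* Stronger, slower test: exit only when a -0 would actually be lost. */
bool strongOk(int32_t d0, int32_t d1, int32_t result) {
    return !(result == 0 && (d0 < 0 || d1 < 0));
}

/* Weaker, cheaper test (guardNonNeg): exit on any negative operand. A
   spurious exit takes MUL_ZERO_EXIT, which makes the oracle request the
   stronger test when the trace is recompiled. */
bool weakOk(int32_t d0, int32_t d1) {
    return d0 >= 0 && d1 >= 0;
}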
 
@@ -8775,17 +8859,19 @@ TraceRecorder::tableswitch()
     /* Generate switch LIR. */
     SwitchInfo* si = new (traceAlloc()) SwitchInfo();
     si->count = count;
     si->table = 0;
     si->index = (uint32) -1;
     LIns* diff = lir->ins2(LIR_subi, v_ins, lir->insImmI(low));
     LIns* cmp = lir->ins2(LIR_ltui, diff, lir->insImmI(si->count));
     lir->insGuard(LIR_xf, cmp, createGuardRecord(snapshot(DEFAULT_EXIT)));
-    lir->insStore(diff, lir->insImmP(&si->index), 0, ACCSET_OTHER);
+    // We use ACCSET_STORE_ANY; it's imprecise, but this case is rare and not
+    // worth its own access region.
+    lir->insStore(diff, lir->insImmP(&si->index), 0, ACCSET_STORE_ANY);
     VMSideExit* exit = snapshot(CASE_EXIT);
     exit->switchInfo = si;
     LIns* guardIns = lir->insGuard(LIR_xtbl, diff, createGuardRecord(exit));
     fragment->lastIns = guardIns;
     CHECK_STATUS_A(compile());
     return finishSuccessfully();
 }
 #endif
@@ -8877,18 +8963,18 @@ TraceRecorder::incProp(jsint incr, bool 
     CHECK_STATUS_A(prop(obj, obj_ins, &slot, &v_ins, NULL));
 
     if (slot == SHAPE_INVALID_SLOT)
         RETURN_STOP_A("incProp on invalid slot");
 
     Value& v = obj->getSlotRef(slot);
     CHECK_STATUS_A(inc(v, v_ins, incr, pre));
 
-    LIns* dslots_ins = NULL;
-    stobj_set_slot(obj, obj_ins, slot, dslots_ins, v, v_ins);
+    LIns* slots_ins = NULL;
+    stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::incElem(jsint incr, bool pre)
 {
     Value& r = stackval(-1);
     Value& l = stackval(-2);
@@ -8897,17 +8983,17 @@ TraceRecorder::incElem(jsint incr, bool 
     LIns* addr_ins;
 
     if (!l.isPrimitive() && l.toObject().isDenseArray() && r.isInt32()) {
         guardDenseArray(get(&l), MISMATCH_EXIT);
         CHECK_STATUS(denseArrayElement(l, r, vp, v_ins, addr_ins, snapshot(BRANCH_EXIT)));
         if (!addr_ins) // if we read a hole, abort
             return RECORD_STOP;
         CHECK_STATUS(inc(*vp, v_ins, incr, pre));
-        box_value_into(*vp, v_ins, addr_ins, 0, ACCSET_OTHER);
+        box_value_into(*vp, v_ins, addr_ins, 0, ACCSET_SLOTS);
         return RECORD_CONTINUE;
     }
 
     return callImacro((incr == 1)
                       ? pre ? incelem_imacros.incelem : incelem_imacros.eleminc
                       : pre ? decelem_imacros.decelem : decelem_imacros.elemdec);
 }
 
@@ -9025,31 +9111,38 @@ TraceRecorder::equalityHelper(Value& l, 
     if (getPromotedType(l) == getPromotedType(r)) {
         if (l.isUndefined() || l.isNull()) {
             cond = true;
             if (l.isNull())
                 op = LIR_eqp;
         } else if (l.isObject()) {
             if (l.toObject().getClass()->ext.equality)
                 RETURN_STOP_A("Can't trace extended class equality operator");
+            LIns* flags_ins = lir->insLoad(LIR_ldi, l_ins, offsetof(JSObject, flags),
+                                           ACCSET_OBJ_FLAGS);
+            LIns* flag_ins = lir->ins2(LIR_andi, flags_ins, INS_CONSTU(JSObject::HAS_EQUALITY));
+            guard(true, lir->insEqI_0(flag_ins), BRANCH_EXIT);
+
             op = LIR_eqp;
             cond = (l == r);
         } else if (l.isBoolean()) {
             JS_ASSERT(r.isBoolean());
             cond = (l == r);
         } else if (l.isString()) {
             JSString *l_str = l.toString();
             JSString *r_str = r.toString();
             if (!l_str->isRope() && !r_str->isRope() && l_str->length() == 1 && r_str->length() == 1) {
                 VMSideExit *exit = snapshot(BRANCH_EXIT);
                 LIns *c = INS_CONSTWORD(1);
                 guard(true, lir->ins2(LIR_eqp, getStringLength(l_ins), c), exit);
                 guard(true, lir->ins2(LIR_eqp, getStringLength(r_ins), c), exit);
-                l_ins = lir->insLoad(LIR_ldus2ui, getStringChars(l_ins), 0, ACCSET_OTHER, LOAD_CONST);
-                r_ins = lir->insLoad(LIR_ldus2ui, getStringChars(r_ins), 0, ACCSET_OTHER, LOAD_CONST);
+                l_ins = lir->insLoad(LIR_ldus2ui, getStringChars(l_ins), 0, ACCSET_STRING_MCHARS,
+                                     LOAD_CONST);
+                r_ins = lir->insLoad(LIR_ldus2ui, getStringChars(r_ins), 0, ACCSET_STRING_MCHARS,
+                                     LOAD_CONST);
             } else {
                 args[0] = r_ins, args[1] = l_ins;
                 l_ins = lir->insCall(&js_EqualStrings_ci, args);
                 r_ins = lir->insImmI(1);
             }
             cond = !!js_EqualStrings(l.toString(), r.toString());
         } else {
             JS_ASSERT(l.isNumber() && r.isNumber());
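
The one-char fast path above guards both lengths to 1 and then compares a single 16-bit unit. The interpreter-side equivalent would be (sketch; the chars() accessor for flat strings is an assumption):

bool equalSingleChars(JSString *l, JSString *r) {
    /* The two length guards above pin length() == 1 on both sides, so a
       single jschar load on each side decides equality. */
    return l->chars()[0] == r->chars()[0];
}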
@@ -9442,20 +9535,27 @@ TraceRecorder::forgetGuardedShapes()
     dumpGuardedShapes("forget-all");
 #endif
     guardedShapeTable.clear();
 }
 
 inline LIns*
 TraceRecorder::shape_ins(LIns* obj_ins)
 {
-    return addName(lir->insLoad(LIR_ldi, obj_ins, int(offsetof(JSObject, objShape)), ACCSET_OTHER),
+    return addName(lir->insLoad(LIR_ldi, obj_ins, offsetof(JSObject, objShape), ACCSET_OBJ_SHAPE),
                    "objShape");
 }
 
+inline LIns*
+TraceRecorder::slots(LIns* obj_ins)
+{
+    return addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OBJ_SLOTS),
+                   "slots");
+}
+
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, PCVal& pcval)
 {
     jsbytecode* pc = cx->regs->pc;
     JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD &&
               *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD);
 
     // Mimic the interpreter's special case for dense arrays by skipping up one
@@ -9542,17 +9642,17 @@ TraceRecorder::test_property_cache(JSObj
             RETURN_STOP_A("failed to fill property cache");
     }
 
 #ifdef JS_THREADSAFE
     // There's a potential race in any JS_THREADSAFE embedding that's nuts
     // enough to share mutable objects on the scope or proto chain, but we
     // don't care about such insane embeddings. Anyway, the (scope, proto)
     // entry->vcap coordinates must reach obj2 from aobj at this point.
-    JS_ASSERT(cx->thread->data.requestDepth);
+    JS_ASSERT(cx->thread->requestDepth);
 #endif
 
     return InjectStatus(guardPropertyCacheHit(obj_ins, aobj, obj2, entry, pcval));
 }
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
                                      JSObject* aobj,
@@ -9581,19 +9681,19 @@ TraceRecorder::guardPropertyCacheHit(LIn
     } else {
         CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", exit));
     }
 
     if (entry->adding()) {
         LIns *vshape_ins = addName(
             lir->insLoad(LIR_ldi,
                          addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime),
-                                              ACCSET_OTHER, LOAD_CONST),
+                                              ACCSET_CX, LOAD_CONST),
                                  "runtime"),
-                         offsetof(JSRuntime, protoHazardShape), ACCSET_OTHER),
+                         offsetof(JSRuntime, protoHazardShape), ACCSET_RUNTIME),
             "protoHazardShape");
 
         guard(true,
               addName(lir->ins2ImmI(LIR_eqi, vshape_ins, vshape), "guard_protoHazardShape"),
               MISMATCH_EXIT);
     }
 
     // For any hit that goes up the scope and/or proto chains, we will need to
@@ -9616,80 +9716,79 @@ TraceRecorder::guardPropertyCacheHit(LIn
 
     pcval = entry->vword;
     return RECORD_CONTINUE;
 }
 
 void
 TraceRecorder::stobj_set_fslot(LIns *obj_ins, unsigned slot, const Value &v, LIns* v_ins)
 {
-    box_value_into(v, v_ins, obj_ins, JSObject::getFixedSlotOffset(slot), ACCSET_OTHER);
+    box_value_into(v, v_ins, obj_ins, JSObject::getFixedSlotOffset(slot), ACCSET_SLOTS);
 }
 
 void
-TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& dslots_ins, 
+TraceRecorder::stobj_set_dslot(LIns *obj_ins, unsigned slot, LIns*& slots_ins, 
                                const Value &v, LIns* v_ins)
 {
-    if (!dslots_ins)
-        dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER);
-    box_value_into(v, v_ins, dslots_ins, slot * sizeof(Value), ACCSET_OTHER);
+    if (!slots_ins)
+        slots_ins = slots(obj_ins);
+    box_value_into(v, v_ins, slots_ins, slot * sizeof(Value), ACCSET_SLOTS);
 }
 
 void
-TraceRecorder::stobj_set_slot(JSObject *obj, LIns* obj_ins, unsigned slot, LIns*& dslots_ins,
+TraceRecorder::stobj_set_slot(JSObject *obj, LIns* obj_ins, unsigned slot, LIns*& slots_ins,
                               const Value &v, LIns* v_ins)
 {
     /*
      * A shape guard must have already been generated for obj, which will
      * ensure that future objects have the same number of fixed slots.
      */
     if (!obj->hasSlotsArray()) {
         JS_ASSERT(slot < obj->numSlots());
         stobj_set_fslot(obj_ins, slot, v, v_ins);
     } else {
-        stobj_set_dslot(obj_ins, slot, dslots_ins, v, v_ins);
+        stobj_set_dslot(obj_ins, slot, slots_ins, v, v_ins);
     }
 }
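
stobj_set_slot mirrors JSObject's two storage modes: slots below the fixed capacity live inline in the object, the rest in the heap-allocated slots array. A hypothetical helper showing the addressing the LIR above performs:

Value *slotAddress(JSObject *obj, unsigned slot) {
    /* Sketch: fixed slots sit inline at a static offset; dynamic slots
       are reached through the 'slots' pointer. */
    if (!obj->hasSlotsArray())
        return reinterpret_cast<Value *>(reinterpret_cast<char *>(obj) +
                                         JSObject::getFixedSlotOffset(slot));
    return obj->slots + slot;
}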
 
 #if JS_BITS_PER_WORD == 32 || JS_BITS_PER_WORD == 64
 LIns*
 TraceRecorder::stobj_get_slot_uint32(LIns* obj_ins, unsigned slot)
 {
-    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER);
-    return lir->insLoad(LIR_ldi, vaddr_ins, slot * sizeof(Value) + sPayloadOffset, ACCSET_OTHER);
+    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OBJ_SLOTS);
+    return lir->insLoad(LIR_ldi, vaddr_ins, slot * sizeof(Value) + sPayloadOffset, ACCSET_SLOTS);
 }
 #endif
 
 LIns*
 TraceRecorder::unbox_slot(JSObject *obj, LIns *obj_ins, uint32 slot, VMSideExit *exit)
 {
     LIns *vaddr_ins;
     ptrdiff_t offset;
 
     /* Same guarantee about fixed slots as stobj_set_slot. */
     if (!obj->hasSlotsArray()) {
         vaddr_ins = obj_ins;
         offset = JSObject::getFixedSlotOffset(slot);
     } else {
-        vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER);
+        vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OBJ_SLOTS);
         offset = slot * sizeof(Value);
     }
 
-    const Value &v = obj->getSlot(slot);
-    return unbox_value(v, vaddr_ins, offset, exit);
+    return unbox_value(obj->getSlot(slot), vaddr_ins, offset, ACCSET_SLOTS, exit);
 }
 
 #if JS_BITS_PER_WORD == 32
 
 LIns*
 TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins, unsigned slot)
 {
-    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER);
+    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OBJ_SLOTS);
     return lir->insLoad(LIR_ldi, vaddr_ins,
-                        slot * sizeof(Value) + sPayloadOffset, ACCSET_OTHER, LOAD_CONST);
+                        slot * sizeof(Value) + sPayloadOffset, ACCSET_SLOTS, LOAD_CONST);
 }
 
 void
 TraceRecorder::box_undefined_into(LIns *vaddr_ins, ptrdiff_t offset, AccSet accSet)
 {
     lir->insStore(INS_CONSTU(JSVAL_TAG_UNDEFINED), vaddr_ins, offset + sTagOffset, accSet);
     lir->insStore(INS_CONST(0), vaddr_ins, offset + sPayloadOffset, accSet);
 }
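
On 32-bit builds box_value_into writes a Value as two machine words addressed by sPayloadOffset and sTagOffset. The implied layout (field order is an assumption based on the offsets used above):

#include <cstdint>

struct Layout32 {
    uint32_t payload;   /* at sPayloadOffset: int32, boolean, or pointer bits */
    uint32_t tag;       /* at sTagOffset: a JSVAL_TAG_* discriminator */
};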
@@ -9741,20 +9840,19 @@ TraceRecorder::unbox_object(LIns* vaddr_
     if (type == JSVAL_TYPE_FUNOBJ)
         guardClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
     else
         guardNotClass(payload_ins, &js_FunctionClass, exit, LOAD_NORMAL);
     return payload_ins;
 }
 
 LIns*
-TraceRecorder::unbox_value(const Value &v, LIns *vaddr_ins, ptrdiff_t offset, VMSideExit *exit,
-                           bool force_double)
-{
-    AccSet accSet = vaddr_ins == lirbuf->sp ? ACCSET_STACK : ACCSET_OTHER;
+TraceRecorder::unbox_value(const Value &v, LIns *vaddr_ins, ptrdiff_t offset, AccSet accSet,
+                           VMSideExit *exit, bool force_double)
+{
     LIns *tag_ins = lir->insLoad(LIR_ldi, vaddr_ins, offset + sTagOffset, accSet);
 
     if (v.isNumber() && force_double)
         return unbox_number_as_double(vaddr_ins, offset, tag_ins, exit, accSet);
 
     if (v.isInt32()) {
         guard(true, lir->ins2(LIR_eqi, tag_ins, INS_CONSTU(JSVAL_TAG_INT32)), exit);
         return i2d(lir->insLoad(LIR_ldi, vaddr_ins, offset + sPayloadOffset, accSet));
@@ -9827,38 +9925,30 @@ TraceRecorder::box_value_into(const Valu
     } else {
         JSValueTag tag = v.isObject() ? JSVAL_TAG_OBJECT : v.extractNonDoubleObjectTraceTag();
         lir->insStore(INS_CONSTU(tag), dstaddr_ins, offset + sTagOffset, accSet);
         lir->insStore(v_ins, dstaddr_ins, offset + sPayloadOffset, accSet);
     }
 }
 
 LIns*
-TraceRecorder::box_value_into_alloc(const Value &v, LIns *v_ins)
-{
-    LIns *boxed_ins = lir->insAlloc(sizeof(Value));
-    box_value_into(v, v_ins, boxed_ins, 0, ACCSET_OTHER);
-    return boxed_ins;
-}
-
-LIns*
 TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins)
 {
     return box_value_into_alloc(v, v_ins);
 }
 
 #elif JS_BITS_PER_WORD == 64
 
 LIns*
 TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins, unsigned slot)
 {
     /* N.B. On 64-bit, privates are encoded differently from other pointers. */
-    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER);
+    LIns *vaddr_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OBJ_SLOTS);
     LIns *v_ins = lir->insLoad(LIR_ldq, vaddr_ins,
-                               slot * sizeof(Value) + sPayloadOffset, ACCSET_OTHER, LOAD_CONST);
+                               slot * sizeof(Value) + sPayloadOffset, ACCSET_SLOTS, LOAD_CONST);
     return lir->ins2ImmI(LIR_lshq, v_ins, 1);
 }
 
 void
 TraceRecorder::box_undefined_into(LIns *vaddr_ins, ptrdiff_t offset, AccSet accSet)
 {
     lir->insStore(INS_CONSTQWORD(JSVAL_BITS(JSVAL_VOID)), vaddr_ins, offset, accSet);
 }
@@ -9919,20 +10009,19 @@ TraceRecorder::unbox_object(LIns* v_ins,
     if (type == JSVAL_TYPE_FUNOBJ)
         guardClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
     else
         guardNotClass(v_ins, &js_FunctionClass, exit, LOAD_NORMAL);
     return v_ins;
 }
 
 LIns*
-TraceRecorder::unbox_value(const Value &v, LIns *vaddr_ins, ptrdiff_t offset, VMSideExit *exit,
-                           bool force_double)
-{
-    AccSet accSet = vaddr_ins == lirbuf->sp ? ACCSET_STACK : ACCSET_OTHER;
+TraceRecorder::unbox_value(const Value &v, LIns *vaddr_ins, ptrdiff_t offset, AccSet accSet,
+                           VMSideExit *exit, bool force_double)
+{
     LIns *v_ins = lir->insLoad(LIR_ldq, vaddr_ins, offset, accSet);
 
     if (v.isNumber() && force_double)
         return unbox_number_as_double(v_ins, exit);
 
     if (v.isInt32()) {
         guard(true, non_double_object_value_has_type(v_ins, JSVAL_TYPE_INT32), exit);
         return i2d(lir->ins1(LIR_q2i, v_ins));
@@ -10007,52 +10096,54 @@ TraceRecorder::box_value_for_native_call
 void
 TraceRecorder::box_value_into(const Value &v, LIns *v_ins, LIns *dstaddr_ins, ptrdiff_t offset,
                               AccSet accSet)
 {
     LIns *boxed_ins = box_value_for_native_call(v, v_ins);
     lir->insStore(boxed_ins, dstaddr_ins, offset, accSet);
 }
 
+#endif  /* JS_BITS_PER_WORD */
+
 LIns*
 TraceRecorder::box_value_into_alloc(const Value &v, LIns *v_ins)
 {
     LIns *alloc_ins = lir->insAlloc(sizeof(Value));
-    box_value_into(v, v_ins, alloc_ins, 0, ACCSET_OTHER);
+    box_value_into(v, v_ins, alloc_ins, 0, ACCSET_ALLOC);
     return alloc_ins;
 }
 
-#endif  /* JS_BITS_PER_WORD */
-
 LIns*
 TraceRecorder::stobj_get_parent(nanojit::LIns* obj_ins)
 {
-    return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, parent), ACCSET_OTHER);
+    return addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, parent), ACCSET_OBJ_PARENT),
+                   "parent");
 }
 
 LIns*
 TraceRecorder::stobj_get_private(nanojit::LIns* obj_ins)
 {
-    return lir->insLoad(LIR_ldp, obj_ins,
-                        offsetof(JSObject, privateData),
-                        ACCSET_OTHER);
+    return addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, privateData),
+                                ACCSET_OBJ_PRIVATE),
+                   "private");
 }
 
 LIns*
 TraceRecorder::stobj_get_private_uint32(nanojit::LIns* obj_ins)
 {
-    return lir->insLoad(LIR_ldi, obj_ins,
-                        offsetof(JSObject, privateData),
-                        ACCSET_OTHER);
+    return addName(lir->insLoad(LIR_ldi, obj_ins, offsetof(JSObject, privateData),
+                                ACCSET_OBJ_PRIVATE),
+                   "private_uint32");
 }
 
 LIns*
 TraceRecorder::stobj_get_proto(nanojit::LIns* obj_ins)
 {
-    return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, proto), ACCSET_OTHER);
+    return addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, proto), ACCSET_OBJ_PROTO),
+                   "proto");
 }
 
 LIns*
 TraceRecorder::is_string_id(LIns *id_ins)
 {
     return lir->insEqP_0(lir->ins2(LIR_andp, id_ins, INS_CONSTWORD(JSID_TYPE_MASK)));
 }
 
@@ -10134,17 +10225,17 @@ TraceRecorder::getThis(LIns*& this_ins)
     return RECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::guardClassHelper(bool cond, LIns* obj_ins, Class* clasp, VMSideExit* exit,
                                 LoadQual loadQual)
 {
     LIns* class_ins =
-        lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, clasp), ACCSET_OTHER, loadQual);
+        lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, clasp), ACCSET_OBJ_CLASP, loadQual);
 
 #ifdef JS_JIT_SPEW
     char namebuf[32];
     JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
 #else
     static const char namebuf[] = "";
 #endif
     guard(cond, addName(lir->ins2(LIR_eqp, class_ins, INS_CONSTPTR(clasp)), namebuf), exit);
@@ -10270,24 +10361,25 @@ TraceRecorder::clearCurrentFrameSlotsFro
     Value *vpend = fp->slots() + fp->functionScript()->nslots;
     for (; vp < vpend; ++vp)
         which.set(vp, (LIns*)0);
 }
 
 class BoxArg
 {
   public:
-    BoxArg(TraceRecorder *tr, ptrdiff_t offset, LIns *base_ins)
-        : tr(tr), offset(offset), base_ins(base_ins) {}
+    BoxArg(TraceRecorder *tr, LIns *base_ins, ptrdiff_t offset, AccSet accSet)
+        : tr(tr), base_ins(base_ins), offset(offset), accSet(accSet) {}
     TraceRecorder *tr;
+    LIns *base_ins;
     ptrdiff_t offset;
-    LIns *base_ins;
+    AccSet accSet;
     void operator()(uintN argi, Value *src) {
         tr->box_value_into(*src, tr->get(src), base_ins,
-                           offset + argi * sizeof(Value), ACCSET_OTHER);
+                           offset + argi * sizeof(Value), accSet);
     }
 };
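
BoxArg now carries its own access set, so one functor serves both destinations: an alloca (ACCSET_ALLOC, in putActivationObjects below) and the arguments object's heap data (ACCSET_ARGS_DATA, in newArguments). The traversal it plugs into is presumably one call per argument (hypothetical driver shape):

template <class Op>
void forEachArg(Value *argv, uintN nargs, Op op) {
    /* Sketch of what forEachCanonicalActualArg/forEachFormalArg likely do:
       hand BoxArg::operator() the index and the canonical Value address. */
    for (uintN i = 0; i < nargs; i++)
        op(i, &argv[i]);
}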
 
 /*
  * If we have created an |arguments| object for the frame, we must copy the
  * argument values into the object as properties in case it is used after
  * this frame returns.
  */
@@ -10306,19 +10398,19 @@ TraceRecorder::putActivationObjects()
     uintN nargs = have_args && have_call ? Max(nformal, nactual)
                                          : have_args ? nactual : nformal;
 
     LIns *args_ins;
     if (nargs > 0) {
         args_ins = lir->insAlloc(nargs * sizeof(Value));
         /* Don't copy all the actuals if we are only boxing for the callobj. */
         if (nargs == nactual)
-            fp->forEachCanonicalActualArg(BoxArg(this, 0, args_ins));
+            fp->forEachCanonicalActualArg(BoxArg(this, args_ins, 0, ACCSET_ALLOC));
         else
-            fp->forEachFormalArg(BoxArg(this, 0, args_ins));
+            fp->forEachFormalArg(BoxArg(this, args_ins, 0, ACCSET_ALLOC));
     } else {
         args_ins = INS_CONSTPTR(0);
     }
 
     if (have_args) {
         LIns* argsobj_ins = getFrameObjPtr(fp->addressOfArgs());
         LIns* args[] = { args_ins, argsobj_ins, cx_ins };
         lir->insCall(&js_PutArgumentsOnTrace_ci, args);
@@ -10326,17 +10418,17 @@ TraceRecorder::putActivationObjects()
 
     if (have_call) {
         int nslots = fp->fun()->countVars();
         LIns* slots_ins;
         if (nslots) {
             slots_ins = lir->insAlloc(sizeof(Value) * nslots);
             for (int i = 0; i < nslots; ++i) {
                 box_value_into(fp->slots()[i], get(&fp->slots()[i]), slots_ins,
-                               i * sizeof(Value), ACCSET_OTHER);
+                               i * sizeof(Value), ACCSET_ALLOC);
             }
         } else {
             slots_ins = INS_CONSTPTR(0);
         }
 
         LIns* scopeChain_ins = getFrameObjPtr(fp->addressOfScopeChain());
         LIns* args[] = { slots_ins, INS_CONST(nslots), args_ins,
                          INS_CONST(fp->numFormalArgs()), scopeChain_ins, cx_ins };
@@ -10448,16 +10540,23 @@ TraceRecorder::record_EnterFrame()
     }
 
     /* Try inlining one level in case this recursion doesn't go too deep. */
     if (fp->script() == fp->prev()->script() &&
         fp->prev()->prev() && fp->prev()->prev()->script() == fp->script()) {
         RETURN_STOP_A("recursion started inlining");
     }
 
+    if (fp->isConstructing()) {
+        LIns* args[] = { callee_ins, INS_CONSTPTR(&js_ObjectClass), cx_ins };
+        LIns* tv_ins = lir->insCall(&js_CreateThisFromTrace_ci, args);
+        guard(false, lir->insEqP_0(tv_ins), OOM_EXIT);
+        set(&fp->thisValue(), tv_ins);
+    }
+
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_LeaveFrame()
 {
     debug_only_stmt(JSStackFrame *fp = cx->fp();)
 
@@ -10492,17 +10591,17 @@ JS_REQUIRES_STACK AbortableRecordingStat
 TraceRecorder::record_JSOP_POPV()
 {
     Value& rval = stackval(-1);
 
     // Store it in cx->fp()->rval. NB: Tricky dependencies. cx->fp() is the right
     // frame because POPV appears only in global and eval code and we don't
     // trace JSOP_EVAL or leaving the frame where tracing started.
     LIns *fp_ins = entryFrameIns();
-    box_value_into(rval, get(&rval), fp_ins, JSStackFrame::offsetOfReturnValue(), ACCSET_OTHER);
+    box_value_into(rval, get(&rval), fp_ins, JSStackFrame::offsetOfReturnValue(), ACCSET_STACKFRAME);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_ENTERWITH()
 {
     return ARECORD_STOP;
 }
@@ -10603,17 +10702,18 @@ TraceRecorder::newArguments(LIns* callee
 
     LIns* args[] = { callee_ins, argc_ins, global_ins, cx_ins };
     LIns* argsobj_ins = lir->insCall(&js_NewArgumentsOnTrace_ci, args);
     guard(false, lir->insEqP_0(argsobj_ins), OOM_EXIT);
 
     if (strict) {
         LIns* argsData_ins = stobj_get_const_private_ptr(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
         ptrdiff_t slotsOffset = offsetof(ArgumentsData, slots);
-        cx->fp()->forEachCanonicalActualArg(BoxArg(this, slotsOffset, argsData_ins));
+        cx->fp()->forEachCanonicalActualArg(BoxArg(this, argsData_ins, slotsOffset,
+                                                   ACCSET_ARGS_DATA));
     }
 
     return argsobj_ins;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_ARGUMENTS()
 {
@@ -10634,31 +10734,31 @@ TraceRecorder::record_JSOP_ARGUMENTS()
         args_ins = newArguments(callee_ins, strict);
     } else {
         // Generate LIR to create the arguments object only if it has not already been created.
 
         LIns* mem_ins = lir->insAlloc(sizeof(JSObject *));
 
         LIns* isZero_ins = lir->insEqP_0(a_ins);
         if (isZero_ins->isImmI(0)) {
-            lir->insStore(a_ins, mem_ins, 0, ACCSET_OTHER);
+            lir->insStore(a_ins, mem_ins, 0, ACCSET_ALLOC);
         } else if (isZero_ins->isImmI(1)) {
             LIns* call_ins = newArguments(callee_ins, strict);
-            lir->insStore(call_ins, mem_ins, 0, ACCSET_OTHER);
+            lir->insStore(call_ins, mem_ins, 0, ACCSET_ALLOC);
         } else {
             LIns* br1 = unoptimizableCondBranch(LIR_jt, isZero_ins);
-            lir->insStore(a_ins, mem_ins, 0, ACCSET_OTHER);
+            lir->insStore(a_ins, mem_ins, 0, ACCSET_ALLOC);
             LIns* br2 = lir->insBranch(LIR_j, NULL, NULL);
             labelForBranch(br1);
 
             LIns* call_ins = newArguments(callee_ins, strict);
-            lir->insStore(call_ins, mem_ins, 0, ACCSET_OTHER);
+            lir->insStore(call_ins, mem_ins, 0, ACCSET_ALLOC);
             labelForBranch(br2);
         }
-        args_ins = lir->insLoad(LIR_ldp, mem_ins, 0, ACCSET_OTHER);
+        args_ins = lir->insLoad(LIR_ldp, mem_ins, 0, ACCSET_ALLOC);
     }
 
     stack(0, args_ins);
     setFrameObjPtr(fp->addressOfArgs(), args_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
@@ -11074,20 +11174,20 @@ TraceRecorder::newArray(JSObject* ctor, 
         LIns *args[] = { argc == 0 ? lir->insImmI(0) : d2i(get(argv)), proto_ins, cx_ins };
         arr_ins = lir->insCall(&js_NewEmptyArray_ci, args);
         guard(false, lir->insEqP_0(arr_ins), OOM_EXIT);
     } else {
         LIns *args[] = { INS_CONST(argc), proto_ins, cx_ins };
         arr_ins = lir->insCall(&js_NewPreallocatedArray_ci, args);
         guard(false, lir->insEqP_0(arr_ins), OOM_EXIT);
 
-        // arr->dslots[i] = box_jsval(vp[i]);  for i in 0..argc
-        LIns *dslots_ins = NULL;
+        // arr->slots[i] = box_jsval(vp[i]);  for i in 0..argc
+        LIns *slots_ins = NULL;
         for (uint32 i = 0; i < argc && !outOfMemory(); i++) {
-            stobj_set_dslot(arr_ins, i, dslots_ins, argv[i], get(&argv[i]));
+            stobj_set_dslot(arr_ins, i, slots_ins, argv[i], get(&argv[i]));
         }
     }
 
     set(rval, arr_ins);
     pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
     return RECORD_CONTINUE;
 }
 
@@ -11108,90 +11208,74 @@ TraceRecorder::propagateFailureToBuiltin
     JS_STATIC_ASSERT(((JS_FALSE & 1) ^ 1) << 1 == BUILTIN_ERROR);
     status_ins = lir->ins2(LIR_ori,
                            status_ins,
                            lir->ins2ImmI(LIR_lshi,
                                       lir->ins2ImmI(LIR_xori,
                                                  lir->ins2ImmI(LIR_andi, ok_ins, 1),
                                                  1),
                                       1));
-    lir->insStore(status_ins, lirbuf->state, (int) offsetof(TracerState, builtinStatus),
-                  ACCSET_OTHER);
+    storeToState(status_ins, builtinStatus);
 }
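
The OR'd term maps the JSBool return into the status word; the static assert above pins the encoding. As plain C:

#include <cstdint>

uint32_t propagateFailure(uint32_t status, int32_t ok) {
    /* ok == JS_FALSE (0): ((0 & 1) ^ 1) << 1 == 2 == BUILTIN_ERROR.
       ok == JS_TRUE  (1): ((1 & 1) ^ 1) << 1 == 0; status unchanged. */
    return status | (((ok & 1) ^ 1) << 1);
}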
 
 JS_REQUIRES_STACK void
 TraceRecorder::emitNativePropertyOp(const Shape* shape, LIns* obj_ins,
                                     bool setflag, LIns* addr_boxed_val_ins)
 {
     JS_ASSERT(addr_boxed_val_ins->isop(LIR_allocp));
     JS_ASSERT(setflag ? !shape->hasSetterValue() : !shape->hasGetterValue());
     JS_ASSERT(setflag ? !shape->hasDefaultSetter() : !shape->hasDefaultGetterOrIsMethod());
 
     enterDeepBailCall();
 
-    lir->insStore(addr_boxed_val_ins, lirbuf->state, offsetof(TracerState, nativeVp), ACCSET_OTHER);
-    lir->insStore(INS_CONST(1), lirbuf->state, offsetof(TracerState, nativeVpLen), ACCSET_OTHER);
+    storeToState(addr_boxed_val_ins, nativeVp);
+    storeToState(INS_CONST(1), nativeVpLen);
 
     CallInfo* ci = new (traceAlloc()) CallInfo();
     ci->_address = uintptr_t(setflag ? shape->setterOp() : shape->getterOp());
     ci->_typesig = CallInfo::typeSig4(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P);
     ci->_isPure = 0;
     ci->_storeAccSet = ACCSET_STORE_ANY;
     ci->_abi = ABI_CDECL;
 #ifdef DEBUG
     ci->_name = "JSPropertyOp";
 #endif
     LIns* args[] = { addr_boxed_val_ins, INS_CONSTID(SHAPE_USERID(shape)), obj_ins, cx_ins };
     LIns* ok_ins = lir->insCall(ci, args);
 
     // Cleanup. Immediately clear nativeVp before we might deep bail.
-    lir->insStore(INS_NULL(), lirbuf->state, offsetof(TracerState, nativeVp), ACCSET_OTHER);
+    storeToState(INS_NULL(), nativeVp);
     leaveDeepBailCall();
 
     // Guard that the call succeeded and builtinStatus is still 0.
     // If the native op succeeds but we deep-bail here, the result value is
     // lost!  Therefore this can only be used for setters of shared properties.
     // In that case we ignore the result value anyway.
-    LIns* status_ins = lir->insLoad(LIR_ldi, lirbuf->state,
-                                    (int) offsetof(TracerState, builtinStatus), ACCSET_OTHER);
+    LIns* status_ins = loadFromState(LIR_ldi, builtinStatus);
     propagateFailureToBuiltinStatus(ok_ins, status_ins);
     guard(true, lir->insEqI_0(status_ins), STATUS_EXIT);
 }
 
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::record_JSOP_BEGIN()
-{
-    JSStackFrame* fp = cx->fp();
-    if (fp->isConstructing()) {
-        LIns* callee_ins = get(&cx->fp()->calleeValue());
-        LIns* args[] = { callee_ins, INS_CONSTPTR(&js_ObjectClass), cx_ins };
-        LIns* tv_ins = lir->insCall(&js_CreateThisFromTrace_ci, args);
-        guard(false, lir->insEqP_0(tv_ins), OOM_EXIT);
-        set(&fp->thisValue(), tv_ins);
-    }
-    return ARECORD_CONTINUE;
-}
-
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::emitNativeCall(JSSpecializedNative* sn, uintN argc, LIns* args[], bool rooted)
 {
     if (JSTN_ERRTYPE(sn) == FAIL_STATUS) {
         // This needs to capture the pre-call state of the stack. So do not set
         // pendingSpecializedNative before taking this snapshot.
         JS_ASSERT(!pendingSpecializedNative);
 
         // Take snapshot for DeepBail and store it in cx->bailExit.
         enterDeepBailCall();
     }
 
     LIns* res_ins = lir->insCall(sn->builtin, args);
 
     // Immediately unroot the vp as soon we return since we might deep bail next.
     if (rooted)
-        lir->insStore(INS_NULL(), lirbuf->state, offsetof(TracerState, nativeVp), ACCSET_OTHER);
+        storeToState(INS_NULL(), nativeVp);
 
     rval_ins = res_ins;
     switch (JSTN_ERRTYPE(sn)) {
       case FAIL_NULL:
         guard(false, lir->insEqP_0(res_ins), OOM_EXIT);
         break;
       case FAIL_NEG:
         res_ins = lir->ins1(LIR_i2d, res_ins);
@@ -11328,16 +11412,65 @@ TraceRecorder::callSpecializedNative(JSN
         return emitNativeCall(sn, argc, args, false);
 
 next_specialization:;
     } while ((sn++)->flags & JSTN_MORE);
 
     return RECORD_STOP;
 }
 
+static JSBool FASTCALL
+ceilReturningInt(jsdouble x, int32 *out)
+{
+    jsdouble r = js_math_ceil_impl(x);
+    return JSDOUBLE_IS_INT32(r, out);
+}
+
+static JSBool FASTCALL
+floorReturningInt(jsdouble x, int32 *out)
+{
+    jsdouble r = js_math_floor_impl(x);
+    return JSDOUBLE_IS_INT32(r, out);
+}
+
+static JSBool FASTCALL
+roundReturningInt(jsdouble x, int32 *out)
+{
+    jsdouble r = js_math_round_impl(x);
+    return JSDOUBLE_IS_INT32(r, out);
+}
+
+/*
+ * These functions store into their second argument, so they need to
+ * be annotated accordingly. To be future-proof, we use ACCSET_STORE_ANY
+ * so that new callers don't have to remember to update the annotation.
+ */
+JS_DEFINE_CALLINFO_2(static, BOOL, ceilReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
+JS_DEFINE_CALLINFO_2(static, BOOL, floorReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
+JS_DEFINE_CALLINFO_2(static, BOOL, roundReturningInt, DOUBLE, INT32PTR, 0, ACCSET_STORE_ANY)
+
+JS_REQUIRES_STACK RecordingStatus
+TraceRecorder::callFloatReturningInt(uintN argc, const nanojit::CallInfo *ci)
+{
+    Value& arg = stackval(-1);
+    LIns* resptr_ins = lir->insAlloc(sizeof(int32));
+    LIns* args[] = { resptr_ins, get(&arg) };
+    LIns* fits_ins = lir->insCall(ci, args);
+
+    guard(false, lir->insEqI_0(fits_ins), OVERFLOW_EXIT);
+
+    LIns* res_ins = lir->insLoad(LIR_ldi, resptr_ins, 0, ACCSET_ALLOC);
+
+    set(&stackval(0 - (2 + argc)), lir->ins1(LIR_i2d, res_ins));
+
+    pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
+
+    return RECORD_CONTINUE;
+}
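
callNative (below) only takes this path after first running the same helper at record time on the concrete operand, so the int32 specialization is emitted only when the current value actually fits; on trace, the guard above falls back through OVERFLOW_EXIT otherwise. For instance:

int32 out;
bool fits  = floorReturningInt(3.7, &out);   /* out == 3, fits == true  */
bool fits2 = floorReturningInt(1e80, &out);  /* JSDOUBLE_IS_INT32 fails */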
+
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::callNative(uintN argc, JSOp mode)
 {
     LIns* args[5];
 
     JS_ASSERT(mode == JSOP_CALL || mode == JSOP_NEW || mode == JSOP_APPLY);
 
     Value* vp = &stackval(0 - (2 + argc));
@@ -11345,21 +11478,32 @@ TraceRecorder::callNative(uintN argc, JS
     JSFunction* fun = GET_FUNCTION_PRIVATE(cx, funobj);
     Native native = fun->u.n.native;
 
     switch (argc) {
       case 1:
         if (vp[2].isNumber() && mode == JSOP_CALL) {
             if (native == js_math_ceil || native == js_math_floor || native == js_math_round) {
                 LIns* a = get(&vp[2]);
+                int32 result;
                 if (isPromote(a)) {
                     set(&vp[0], a);
                     pendingSpecializedNative = IGNORE_NATIVE_CALL_COMPLETE_CALLBACK;
                     return RECORD_CONTINUE;
                 }
+                if (native == js_math_floor) {
+                    if (floorReturningInt(vp[2].toNumber(), &result))
+                        return callFloatReturningInt(argc, &floorReturningInt_ci);
+                } else if (native == js_math_ceil) {
+                    if (ceilReturningInt(vp[2].toNumber(), &result))
+                        return callFloatReturningInt(argc, &ceilReturningInt_ci);
+                } else if (native == js_math_round) {
+                    if (roundReturningInt(vp[2].toNumber(), &result))
+                        return callFloatReturningInt(argc, &roundReturningInt_ci);
+                }
             }
             if (vp[1].isString()) {
                 JSString *str = vp[1].toString();
                 if (native == js_str_charAt) {
                     jsdouble i = vp[2].toNumber();
                     if (i < 0 || i >= str->length())
                         RETURN_STOP("charAt out of bounds");
                     LIns* str_ins = get(&vp[1]);
@@ -11475,17 +11619,17 @@ TraceRecorder::callNative(uintN argc, JS
     if (native == js_fun_apply || native == js_fun_call)
         RETURN_STOP("trying to call native apply or call");
 
     // Allocate the vp vector and emit code to root it.
     uintN vplen = 2 + argc;
     LIns* invokevp_ins = lir->insAlloc(vplen * sizeof(Value));
 
     // vp[0] is the callee.
-    box_value_into(vp[0], INS_CONSTOBJ(funobj), invokevp_ins, 0, ACCSET_OTHER);
+    box_value_into(vp[0], INS_CONSTOBJ(funobj), invokevp_ins, 0, ACCSET_ALLOC);
 
     // Calculate |this|.
     LIns* this_ins;
     if (mode == JSOP_NEW) {
         Class* clasp = fun->u.n.clasp;
         JS_ASSERT(clasp != &js_SlowArrayClass);
         if (!clasp)
             clasp = &js_ObjectClass;
@@ -11524,31 +11668,31 @@ TraceRecorder::callNative(uintN argc, JS
              */
             vp[1].setObject(*globalObj);
         }
         this_ins = newobj_ins;
     } else {
         this_ins = get(&vp[1]);
     }
     set(&vp[1], this_ins);
-    box_value_into(vp[1], this_ins, invokevp_ins, 1 * sizeof(Value), ACCSET_OTHER);
+    box_value_into(vp[1], this_ins, invokevp_ins, 1 * sizeof(Value), ACCSET_ALLOC);
 
     // Populate argv.
     for (uintN n = 2; n < 2 + argc; n++) {
-        box_value_into(vp[n], get(&vp[n]), invokevp_ins, n * sizeof(Value), ACCSET_OTHER);
+        box_value_into(vp[n], get(&vp[n]), invokevp_ins, n * sizeof(Value), ACCSET_ALLOC);
         // For a very long argument list we might run out of LIR space, so
         // check inside the loop.
         if (outOfMemory())
             RETURN_STOP("out of memory in argument list");
     }
 
     // Populate extra slots, including the return value slot for a slow native.
     if (2 + argc < vplen) {
         for (uintN n = 2 + argc; n < vplen; n++) {
-            box_undefined_into(invokevp_ins, n * sizeof(Value), ACCSET_OTHER);
+            box_undefined_into(invokevp_ins, n * sizeof(Value), ACCSET_ALLOC);
             if (outOfMemory())
                 RETURN_STOP("out of memory in extra slots");
         }
     }
 
     // Set up arguments for the JSNative or JSFastNative.
     if (mode == JSOP_NEW)
         RETURN_STOP("untraceable fast native constructor");
@@ -11580,18 +11724,18 @@ TraceRecorder::callNative(uintN argc, JS
     generatedSpecializedNative.prefix = NULL;
     generatedSpecializedNative.argtypes = NULL;
 
     // We only have to ensure that the values we wrote into the stack buffer
     // are rooted if we actually make it to the call, so only set nativeVp and
     // nativeVpLen immediately before emitting the call code. This way we avoid
     // leaving trace with a bogus nativeVp because we fall off trace while unboxing
     // values into the stack buffer.
-    lir->insStore(INS_CONST(vplen), lirbuf->state, offsetof(TracerState, nativeVpLen), ACCSET_OTHER);
-    lir->insStore(invokevp_ins, lirbuf->state, offsetof(TracerState, nativeVp), ACCSET_OTHER);
+    storeToState(INS_CONST(vplen), nativeVpLen);
+    storeToState(invokevp_ins, nativeVp);
 
     // argc is the original argc here. It is used to calculate where to place
     // the return value.
     return emitNativeCall(&generatedSpecializedNative, argc, args, true);
 }
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::functionCall(uintN argc, JSOp mode)
@@ -11721,19 +11865,17 @@ TraceRecorder::record_JSOP_DELPROP()
         RETURN_STOP_A("JSOP_DELPROP on global property");
 
     JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)];
 
     enterDeepBailCall();
     LIns* args[] = { strictModeCode_ins, INS_ATOM(atom), get(&lval), cx_ins };
     LIns* rval_ins = lir->insCall(&DeleteStrKey_ci, args);
 
-    LIns* status_ins = lir->insLoad(LIR_ldi,
-                                    lirbuf->state,
-                                    offsetof(TracerState, builtinStatus), ACCSET_OTHER);
+    LIns* status_ins = loadFromState(LIR_ldi, builtinStatus);
     pendingGuardCondition = lir->insEqI_0(status_ins);
     leaveDeepBailCall();
 
     set(&lval, rval_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
@@ -11756,19 +11898,17 @@ TraceRecorder::record_JSOP_DELELEM()
         rval_ins = lir->insCall(&DeleteIntKey_ci, args);
     } else if (idx.isString()) {
         LIns* args[] = { strictModeCode_ins, get(&idx), get(&lval), cx_ins };
         rval_ins = lir->insCall(&DeleteStrKey_ci, args);
     } else {
         RETURN_STOP_A("JSOP_DELELEM on non-int, non-string index");
     }
 
-    LIns* status_ins = lir->insLoad(LIR_ldi,
-                                    lirbuf->state,
-                                    offsetof(TracerState, builtinStatus), ACCSET_OTHER);
+    LIns* status_ins = loadFromState(LIR_ldi, builtinStatus);
     pendingGuardCondition = lir->insEqI_0(status_ins);
     leaveDeepBailCall();
 
     set(&lval, rval_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
@@ -11958,18 +12098,18 @@ TraceRecorder::nativeSet(JSObject* obj, 
     if (slot != SHAPE_INVALID_SLOT) {
         JS_ASSERT(obj->containsSlot(shape->slot));
         JS_ASSERT(shape->hasSlot());
         if (obj == globalObj) {
             if (!lazilyImportGlobalSlot(slot))
                 RETURN_STOP("lazy import of global slot failed");
             set(&obj->getSlotRef(slot), v_ins);
         } else {
-            LIns* dslots_ins = NULL;
-            stobj_set_slot(obj, obj_ins, slot, dslots_ins, v, v_ins);
+            LIns* slots_ins = NULL;
+            stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
         }
     }
 
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 MethodWriteBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj)
@@ -12127,18 +12267,18 @@ TraceRecorder::setCallProp(JSObject *cal
             RETURN_STOP("can't trace special CallClass setter");
         }
 
         // Now assert that the shortid get we did above was ok. Have to do it
         // after the RETURN_STOP above, since in that case we may in fact not
         // have a valid shortid; but we don't use it in that case anyway.
         JS_ASSERT(shape->hasShortID());
 
-        LIns* dslots_ins = NULL;
-        stobj_set_dslot(callobj_ins, slot, dslots_ins, v, v_ins);
+        LIns* slots_ins = NULL;
+        stobj_set_dslot(callobj_ins, slot, slots_ins, v, v_ins);
         return RECORD_CONTINUE;
     }
 
     // This is the hard case: we have a JSStackFrame private, but it's not in
     // range.  During trace execution we may or may not have a JSStackFrame
     // anymore.  Call the standard builtins, which handle that situation.
 
     // Set variables in off-trace-stack call objects by calling standard builtins.
@@ -12196,42 +12336,43 @@ TraceRecorder::record_SetPropHit(Propert
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK VMSideExit*
 TraceRecorder::enterDeepBailCall()
 {
     // Take snapshot for DeepBail and store it in cx->bailExit.
     VMSideExit* exit = snapshot(DEEP_BAIL_EXIT);
-    lir->insStore(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit), ACCSET_OTHER);
+    lir->insStore(INS_CONSTPTR(exit), cx_ins, offsetof(JSContext, bailExit), ACCSET_CX);
 
     // Tell nanojit not to discard or defer stack writes before this call.
     GuardRecord* guardRec = createGuardRecord(exit);
     lir->insGuard(LIR_xbarrier, NULL, guardRec);
 
     // Forget about guarded shapes, since deep bailers can reshape the world.
     forgetGuardedShapes();
     return exit;
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::leaveDeepBailCall()
 {
     // Keep cx->bailExit null when it's invalid.
-    lir->insStore(INS_NULL(), cx_ins, offsetof(JSContext, bailExit), ACCSET_OTHER);
+    lir->insStore(INS_NULL(), cx_ins, offsetof(JSContext, bailExit), ACCSET_CX);
 }
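
Callers bracket a FAIL_STATUS builtin with this pair and then guard on builtinStatus, as record_JSOP_DELPROP and record_JSOP_DELELEM do above. In outline (a sketch; SomeBuiltin_ci stands in for a real deep-bailing builtin):

    enterDeepBailCall();
    LIns* args[] = { get(&arg), cx_ins };                  // builtin-specific operands
    LIns* rval_ins = lir->insCall(&SomeBuiltin_ci, args);
    LIns* status_ins = loadFromState(LIR_ldi, builtinStatus);
    pendingGuardCondition = lir->insEqI_0(status_ins);     // leave trace if it bailed
    leaveDeepBailCall();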
 
 JS_REQUIRES_STACK void
 TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, Value* outp)
 {
     // Store the boxed result (and this-object, if JOF_CALLOP) before the
     // guard. The deep-bail case requires this. If the property get fails,
     // these slots will be ignored anyway.
     // N.B. monitorRecording expects get(outp)->isLoad()
-    LIns* result_ins = lir->insLoad(LIR_ldd, vp_ins, 0, ACCSET_OTHER);
+    JS_ASSERT(vp_ins->isop(LIR_allocp));
+    LIns* result_ins = lir->insLoad(LIR_ldd, vp_ins, 0, ACCSET_ALLOC);
     set(outp, result_ins);
     if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP)
         set(outp + 1, obj_ins);
 
     // We need to guard on ok_ins, but this requires a snapshot of the state
     // after this op. monitorRecording will do it for us.
     pendingGuardCondition = ok_ins;
 
@@ -12385,17 +12526,17 @@ TraceRecorder::getPropertyByName(LIns* o
     LIns* args[] = {pic_ins, vp_ins, idvalp_ins, obj_ins, cx_ins};
     LIns* ok_ins = lir->insCall(&GetPropertyByName_ci, args);
 
     // GetPropertyByName can assign to *idvalp, so the tracker has an incorrect
     // entry for that address. Correct it. (If the value in the address is
     // never used again, the usual case, Nanojit will kill this load.)
     // The AccSet could be made more precise with some effort (idvalp_ins may
     // equal 'sp+k'), but it's not worth it because this case is rare.
-    tracker.set(idvalp, lir->insLoad(LIR_ldp, idvalp_ins, 0, ACCSET_STACK|ACCSET_OTHER));
+    tracker.set(idvalp, lir->insLoad(LIR_ldp, idvalp_ins, 0, ACCSET_LOAD_ANY));
 
     finishGetProp(obj_ins, vp_ins, ok_ins, outp);
     leaveDeepBailCall();
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp)
@@ -12556,88 +12697,94 @@ TraceRecorder::getPropertyWithScriptGett
         return callImacroInfallibly(getthisprop_imacros.scriptgetter);
 
       default:
         RETURN_STOP("cannot trace script getter for this opcode");
     }
 }
 
 JS_REQUIRES_STACK LIns*
+TraceRecorder::getStringLengthAndFlags(LIns* str_ins)
+{
+    return addName(lir->insLoad(LIR_ldp, str_ins, offsetof(JSString, mLengthAndFlags),
+                                ACCSET_STRING),
+                   "mLengthAndFlags");
+}
+
+JS_REQUIRES_STACK LIns*
 TraceRecorder::getStringLength(LIns* str_ins)
 {
-    return addName(lir->ins2ImmI(LIR_rshup,
-                                 addName(lir->insLoad(LIR_ldp, str_ins,
-                                                      offsetof(JSString, mLengthAndFlags),
-                                                      ACCSET_OTHER), "mLengthAndFlags"),
-                                 JSString::FLAGS_LENGTH_SHIFT), "length");
+    return addName(lir->ins2ImmI(LIR_rshup, getStringLengthAndFlags(str_ins),
+                                 JSString::FLAGS_LENGTH_SHIFT),
+                   "length");
 }
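
In C++ terms the emitted LIR computes the following (a sketch, with lengthAndFlags standing for the word loaded by getStringLengthAndFlags; in this era's JSString layout the length occupies the bits above the flags):

    size_t length = size_t(lengthAndFlags) >> JSString::FLAGS_LENGTH_SHIFT;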
 
 JS_REQUIRES_STACK LIns*
 TraceRecorder::getStringChars(LIns* str_ins)
 {
     return addName(lir->insLoad(LIR_ldp, str_ins,
                                 offsetof(JSString, mChars),
-                                ACCSET_OTHER), "chars");
+                                ACCSET_STRING), "mChars");
 }
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::getCharCodeAt(JSString *str, LIns* str_ins, LIns* idx_ins, LIns** out)
 {
     CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
     idx_ins = lir->insUI2P(idx_ins);
-    LIns *length_ins = lir->insLoad(LIR_ldp, str_ins, offsetof(JSString, mLengthAndFlags),
-                                    ACCSET_OTHER);
+    LIns *lengthAndFlags_ins = getStringLengthAndFlags(str_ins);
     LIns *br;
     if (condBranch(LIR_jt,
                    lir->insEqP_0(lir->ins2(LIR_andp,
-                                           length_ins,
+                                           lengthAndFlags_ins,
                                            INS_CONSTWORD(JSString::ROPE_BIT))),
                    &br))
     {
         lir->insCall(&js_Flatten_ci, &str_ins);
         labelForBranch(br);
     }
 
     guard(true,
-          lir->ins2(LIR_ltup, idx_ins, lir->ins2ImmI(LIR_rshup, length_ins, JSString::FLAGS_LENGTH_SHIFT)),
+          lir->ins2(LIR_ltup, idx_ins, lir->ins2ImmI(LIR_rshup, lengthAndFlags_ins,
+                                                     JSString::FLAGS_LENGTH_SHIFT)),
           snapshot(MISMATCH_EXIT));
     LIns *chars_ins = getStringChars(str_ins);
     *out = i2d(lir->insLoad(LIR_ldus2ui,
                             lir->ins2(LIR_addp, chars_ins, lir->ins2ImmI(LIR_lshp, idx_ins, 1)), 0,
-                            ACCSET_OTHER, LOAD_CONST));
+                            ACCSET_STRING_MCHARS, LOAD_CONST));
     return RECORD_CONTINUE;
 }
 
 JS_STATIC_ASSERT(sizeof(JSString) == 16 || sizeof(JSString) == 32);
 
 
 JS_REQUIRES_STACK LIns*
 TraceRecorder::getUnitString(LIns* str_ins, LIns* idx_ins)
 {
     LIns *chars_ins = getStringChars(str_ins);
     LIns *ch_ins = lir->insLoad(LIR_ldus2ui,
                                 lir->ins2(LIR_addp,
                                           chars_ins,
                                           lir->ins2ImmI(LIR_lshp, idx_ins, 1)),
-                                0, ACCSET_OTHER, LOAD_CONST);
+                                0, ACCSET_STRING_MCHARS, LOAD_CONST);
     guard(true, lir->ins2ImmI(LIR_ltui, ch_ins, UNIT_STRING_LIMIT), snapshot(MISMATCH_EXIT));
     return lir->ins2(LIR_addp,
                      INS_CONSTPTR(JSString::unitStringTable),
                      lir->ins2ImmI(LIR_lshp,
                                    lir->insUI2P(ch_ins),
                                    (sizeof(JSString) == 16) ? 4 : 5));
 }
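
The final shift scales the character code by sizeof(JSString) -- 4 for 16-byte strings, 5 for 32-byte strings, per the static assert above -- so the emitted code is the LIR equivalent of:

    JSString *unit = &JSString::unitStringTable[ch];   // valid because ch < UNIT_STRING_LIMIT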
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::getCharAt(JSString *str, LIns* str_ins, LIns* idx_ins, JSOp mode, LIns** out)
 {
     CHECK_STATUS(makeNumberInt32(idx_ins, &idx_ins));
     idx_ins = lir->insUI2P(idx_ins);
     LIns *length_ins = lir->insLoad(LIR_ldp, str_ins, offsetof(JSString, mLengthAndFlags),
-                                    ACCSET_OTHER);
+                                    ACCSET_STRING);
 
     LIns *br1;
     if (condBranch(LIR_jt,
                    lir->insEqP_0(lir->ins2(LIR_andp,
                                            length_ins,
                                            INS_CONSTWORD(JSString::ROPE_BIT))),
                    &br1))
     {
@@ -12650,25 +12797,25 @@ TraceRecorder::getCharAt(JSString *str, 
                               lir->ins2ImmI(LIR_rshup, length_ins, JSString::FLAGS_LENGTH_SHIFT));
 
     if (mode == JSOP_GETELEM) {
         guard(true, inRange, MISMATCH_EXIT);
 
         *out = getUnitString(str_ins, idx_ins);
     } else {
         LIns *phi_ins = lir->insAlloc(sizeof(JSString *));
-        lir->insStore(LIR_stp, INS_CONSTSTR(cx->runtime->emptyString), phi_ins, 0, ACCSET_OTHER);
+        lir->insStore(LIR_stp, INS_CONSTSTR(cx->runtime->emptyString), phi_ins, 0, ACCSET_ALLOC);
 
         LIns* br2;
         if (condBranch(LIR_jf, inRange, &br2)) {
             LIns *unitstr_ins = getUnitString(str_ins, idx_ins);
-            lir->insStore(LIR_stp, unitstr_ins, phi_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_stp, unitstr_ins, phi_ins, 0, ACCSET_ALLOC);
             labelForBranch(br2);
         }
-        *out = lir->insLoad(LIR_ldp, phi_ins, 0, ACCSET_OTHER);
+        *out = lir->insLoad(LIR_ldp, phi_ins, 0, ACCSET_ALLOC);
     }
     return RECORD_CONTINUE;
 }
 
 // Typed array tracing depends on EXPANDED_LOADSTORE and F2I
 #if NJ_EXPANDED_LOADSTORE_SUPPORTED && NJ_F2I_SUPPORTED
 static bool OkToTraceTypedArrays = true;
 #else
@@ -12681,17 +12828,17 @@ TraceRecorder::guardNotHole(LIns *argsob
     // vp = &argsobj->slots[JSSLOT_ARGS_DATA].slots[idx]
     LIns* argsData_ins = stobj_get_const_private_ptr(argsobj_ins, JSObject::JSSLOT_ARGS_DATA);
     LIns* slotOffset_ins = lir->ins2(LIR_addp,
                                      INS_CONSTWORD(offsetof(ArgumentsData, slots)),
                                      lir->insUI2P(lir->ins2ImmI(LIR_muli, idx_ins, sizeof(Value))));
     LIns* vp_ins = lir->ins2(LIR_addp, argsData_ins, slotOffset_ins);
 
     guard(false,
-          addName(is_boxed_magic(vp_ins, JS_ARGS_HOLE, ACCSET_OTHER), "guard(not deleted arg)"),
+          addName(is_boxed_magic(vp_ins, JS_ARGS_HOLE, ACCSET_ARGS_DATA), "guard(not deleted arg)"),
           MISMATCH_EXIT);
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_GETELEM()
 {
     bool call = *cx->regs->pc == JSOP_CALLELEM;
 
@@ -12789,17 +12936,17 @@ TraceRecorder::record_JSOP_GETELEM()
                                                  ACCSET_RSTACK);
                     typemap_ins = lir->ins2(LIR_addp, fip_ins, INS_CONSTWORD(sizeof(FrameInfo) + 2/*callee,this*/ * sizeof(JSValueType)));
                 }
 
                 LIns* typep_ins = lir->ins2(LIR_addp, typemap_ins,
                                             lir->insUI2P(lir->ins2(LIR_muli,
                                                                    idx_ins,
                                                                    INS_CONST(sizeof(JSValueType)))));
-                LIns* type_ins = lir->insLoad(LIR_lduc2ui, typep_ins, 0, ACCSET_OTHER, LOAD_CONST);
+                LIns* type_ins = lir->insLoad(LIR_lduc2ui, typep_ins, 0, ACCSET_TYPEMAP, LOAD_CONST);
                 guard(true,
                       addName(lir->ins2(LIR_eqi, type_ins, lir->insImmI(type)),
                               "guard(type-stable upvar)"),
                       BRANCH_EXIT);
 
                 // Read the value out of the native stack area.
                 size_t stackOffset = nativespOffset(&afp->canonicalActualArg(0));
                 LIns* args_addr_ins = lir->ins2(LIR_addp, lirbuf->sp, INS_CONSTWORD(stackOffset));
@@ -13007,22 +13154,22 @@ TraceRecorder::setElem(int lval_spindex,
         // be an integer.
         CHECK_STATUS_A(makeNumberInt32(idx_ins, &idx_ins));
 
         // Ensure idx >= 0 && idx < length (by using uint32)
         lir->insGuard(LIR_xf,
                       lir->ins2(LIR_ltui,
                                 idx_ins,
                                 lir->insLoad(LIR_ldi, priv_ins, js::TypedArray::lengthOffset(),
-                                             ACCSET_OTHER, LOAD_CONST)),
+                                             ACCSET_TARRAY, LOAD_CONST)),
                       createGuardRecord(snapshot(OVERFLOW_EXIT)));
 
         // We're now ready to store
         LIns* data_ins = lir->insLoad(LIR_ldp, priv_ins, js::TypedArray::dataOffset(),
-                                      ACCSET_OTHER, LOAD_CONST);
+                                      ACCSET_TARRAY, LOAD_CONST);
         LIns* pidx_ins = lir->insUI2P(idx_ins);
         LIns* addr_ins = 0;
 
         LIns* typed_v_ins = v_ins;
 
         // If it's not a number, convert objects to NaN,
         // null to 0, and call StringToNumber or BooleanOrUndefinedToNumber
         // for those.
@@ -13077,35 +13224,35 @@ TraceRecorder::setElem(int lval_spindex,
             JS_NOT_REACHED("Unknown typed array type in tracer");
         }
 
         switch (tarray->type) {
           case js::TypedArray::TYPE_INT8:
           case js::TypedArray::TYPE_UINT8_CLAMPED:
           case js::TypedArray::TYPE_UINT8:
             addr_ins = lir->ins2(LIR_addp, data_ins, pidx_ins);
-            lir->insStore(LIR_sti2c, typed_v_ins, addr_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_sti2c, typed_v_ins, addr_ins, 0, ACCSET_TARRAY_DATA);
             break;
           case js::TypedArray::TYPE_INT16:
           case js::TypedArray::TYPE_UINT16:
             addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 1));
-            lir->insStore(LIR_sti2s, typed_v_ins, addr_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_sti2s, typed_v_ins, addr_ins, 0, ACCSET_TARRAY_DATA);
             break;
           case js::TypedArray::TYPE_INT32:
           case js::TypedArray::TYPE_UINT32:
             addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 2));
-            lir->insStore(LIR_sti, typed_v_ins, addr_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_sti, typed_v_ins, addr_ins, 0, ACCSET_TARRAY_DATA);
             break;
           case js::TypedArray::TYPE_FLOAT32:
             addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 2));
-            lir->insStore(LIR_std2f, typed_v_ins, addr_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_std2f, typed_v_ins, addr_ins, 0, ACCSET_TARRAY_DATA);
             break;
           case js::TypedArray::TYPE_FLOAT64:
             addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 3));
-            lir->insStore(LIR_std, typed_v_ins, addr_ins, 0, ACCSET_OTHER);
+            lir->insStore(LIR_std, typed_v_ins, addr_ins, 0, ACCSET_TARRAY_DATA);
             break;
           default:
             JS_NOT_REACHED("Unknown typed array type in tracer");
         }
     } else if (idx.toInt32() < 0 || !obj->isDenseArray()) {
         CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v,
                                                 *cx->regs->pc == JSOP_INITELEM));
     } else {
@@ -13124,58 +13271,66 @@ TraceRecorder::setElem(int lval_spindex,
 
         if (!js_EnsureDenseArrayCapacity(cx, obj, idx.toInt32()))
             RETURN_STOP_A("couldn't ensure dense array capacity for setelem");
 
         // Grow the array if the index exceeds the capacity.  This happens
         // rarely, eg. less than 1% of the time in SunSpider.
         LIns* capacity_ins =
             addName(lir->insLoad(LIR_ldi, obj_ins,
-                                 offsetof(JSObject, capacity), ACCSET_OTHER),
+                                 offsetof(JSObject, capacity), ACCSET_OBJ_CAPACITY),
                     "capacity");
+        /*
+         * It's important that CSE works around this control-flow diamond
+         * because it really helps series of GETELEM and SETELEM operations.
+         * Likewise with the diamond below.
+         */
+        suspendCSE();
         LIns* br;
         if (condBranch(LIR_jt, lir->ins2(LIR_ltui, idx_ins, capacity_ins), &br)) {
             LIns* args[] = { idx_ins, obj_ins, cx_ins };
             LIns* res_ins = lir->insCall(&js_EnsureDenseArrayCapacity_ci, args);
             guard(false, lir->insEqI_0(res_ins), mismatchExit);
             labelForBranch(br);
         }
+        resumeCSE();
 
         // Get the address of the element.
-        LIns *dslots_ins =
-            addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER), "dslots");
+        LIns *slots_ins = slots(obj_ins);
         JS_ASSERT(sizeof(Value) == 8); // The |3| in the following statement requires this.
-        LIns *addr_ins = lir->ins2(LIR_addp, dslots_ins,
+        LIns *addr_ins = lir->ins2(LIR_addp, slots_ins,
                                    lir->ins2ImmI(LIR_lshp, lir->insUI2P(idx_ins), 3));
 
         // If we are overwriting a hole:
         // - Guard that we don't have any indexed properties along the prototype chain.
         // - Check if the length has changed;  if so, update it to index+1.
         // This happens moderately often, eg. close to 10% of the time in
         // SunSpider, and for some benchmarks it's close to 100%.
         LIns* cond = lir->ins2(LIR_eqi,
 #if JS_BITS_PER_WORD == 32
-                               lir->insLoad(LIR_ldi, addr_ins, sTagOffset, ACCSET_OTHER),
+                               lir->insLoad(LIR_ldi, addr_ins, sTagOffset, ACCSET_SLOTS),
 #else
                                lir->ins1(LIR_q2i, lir->ins2ImmI(LIR_rshuq,
-                                                                lir->insLoad(LIR_ldq, addr_ins, 0, ACCSET_OTHER),
+                                                                lir->insLoad(LIR_ldq, addr_ins, 0, ACCSET_SLOTS),
                                                                 JSVAL_TAG_SHIFT)),
 #endif
                                INS_CONSTU(JSVAL_TAG_MAGIC));
+        suspendCSE();
         LIns* br2;
         if (condBranch(LIR_jf, cond, &br2)) {
             LIns* args[] = { idx_ins, obj_ins, cx_ins };
             LIns* res_ins = addName(lir->insCall(&js_Array_dense_setelem_hole_ci, args),
                                      "hasNoIndexedProperties");
             guard(false, lir->insEqI_0(res_ins), mismatchExit);
             labelForBranch(br2);
         }
+        resumeCSE();
 
         // Right, actually set the element.
-        box_value_into(v, v_ins, addr_ins, 0, ACCSET_OTHER);
+        box_value_into(v, v_ins, addr_ins, 0, ACCSET_SLOTS);
     }
 
     jsbytecode* pc = cx->regs->pc;
     if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
         set(&lval, v_ins);
 
     return ARECORD_CONTINUE;
 }
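
The suspendCSE()/resumeCSE() pairs added above turn the CSE filter off only inside the two control-flow diamonds, so loads emitted before a diamond (capacity, slots) survive as CSE candidates after it; a run of GETELEM/SETELEM on the same array then reloads nothing. The bracketing pattern, in outline (a sketch; SlowPath_ci is hypothetical):

    suspendCSE();                        // nothing inside the diamond enters the CSE tables
    LIns* br;
    if (condBranch(LIR_jt, cond_ins, &br)) {
        LIns* res_ins = lir->insCall(&SlowPath_ci, args);
        guard(false, lir->insEqI_0(res_ins), mismatchExit);
        labelForBranch(br);
    }
    resumeCSE();                         // loads from before the diamond are still available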
@@ -13275,17 +13430,17 @@ TraceRecorder::upvar(JSScript* script, J
         cx_ins
     };
     LIns* call_ins = lir->insCall(ci, args);
     JSValueType type = getCoercedType(v);
     guard(true,
           addName(lir->ins2(LIR_eqi, call_ins, lir->insImmI(type)),
                   "guard(type-stable upvar)"),
           BRANCH_EXIT);
-    return stackLoad(outp, ACCSET_OTHER, type);
+    return stackLoad(outp, ACCSET_ALLOC, type);
 }
 
 /*
  * Generate LIR to load a value from the native stack. This method ensures that
  * the correct LIR load operator is used.
  */
 LIns*
 TraceRecorder::stackLoad(LIns* base, AccSet accSet, uint8 type)
@@ -13349,17 +13504,17 @@ TraceRecorder::record_JSOP_GETFCSLOT()
     JSObject& callee = cx->fp()->callee();
     LIns* callee_ins = get(&cx->fp()->calleeValue());
 
     LIns* upvars_ins = stobj_get_const_private_ptr(callee_ins,
                                                    JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
 
     unsigned index = GET_UINT16(cx->regs->pc);
     LIns *v_ins = unbox_value(callee.getFlatClosureUpvar(index), upvars_ins, index * sizeof(Value),
-                              snapshot(BRANCH_EXIT));
+                              ACCSET_FCSLOTS, snapshot(BRANCH_EXIT));
     stack(0, v_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_CALLFCSLOT()
 {
     CHECK_STATUS_A(record_JSOP_GETFCSLOT());
@@ -13668,59 +13823,58 @@ TraceRecorder::record_NativeCallComplete
      * to restore by the length of the current opcode.  If the native's return
      * type is jsval, snapshot() will also indicate in the type map that the
      * element on top of the stack is a boxed value which doesn't need to be
      * boxed if the type guard generated by unbox_value() fails.
      */
 
     if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS) {
         /* Keep cx->bailExit null when it's invalid. */
-        lir->insStore(INS_NULL(), cx_ins, (int) offsetof(JSContext, bailExit), ACCSET_OTHER);
-
-        LIns* status = lir->insLoad(LIR_ldi, lirbuf->state,
-                                    (int) offsetof(TracerState, builtinStatus), ACCSET_OTHER);
+        lir->insStore(INS_NULL(), cx_ins, offsetof(JSContext, bailExit), ACCSET_CX);
+
+        LIns* status = loadFromState(LIR_ldi, builtinStatus);
         if (pendingSpecializedNative == &generatedSpecializedNative) {
             LIns* ok_ins = v_ins;
 
             /*
              * If we run a generic traceable native, the return value is in the argument
              * vector for native function calls. The actual return value of the native is a JSBool
              * indicating the error status.
              */
 
             if (pendingSpecializedNative->flags & JSTN_CONSTRUCTOR) {
                 LIns *cond_ins;
                 LIns *x;
 
                 // v_ins    := the object payload from native_rval_ins
                 // cond_ins := true if native_rval_ins contains a JSObject*
-                unbox_any_object(native_rval_ins, &v_ins, &cond_ins, ACCSET_OTHER);
+                unbox_any_object(native_rval_ins, &v_ins, &cond_ins, ACCSET_ALLOC);
                 // x        := v_ins if native_rval_ins contains a JSObject*, NULL otherwise
                 x = lir->insChoose(cond_ins, v_ins, INS_CONSTWORD(0), avmplus::AvmCore::use_cmov());
                 // v_ins    := newobj_ins if native_rval_ins doesn't contain a JSObject*,
                 //             the object payload from native_rval_ins otherwise
                 v_ins = lir->insChoose(lir->insEqP_0(x), newobj_ins, x, avmplus::AvmCore::use_cmov());
             } else {
-                v_ins = lir->insLoad(LIR_ldd, native_rval_ins, 0, ACCSET_OTHER);
+                v_ins = lir->insLoad(LIR_ldd, native_rval_ins, 0, ACCSET_ALLOC);
             }
             set(&v, v_ins);
 
             propagateFailureToBuiltinStatus(ok_ins, status);
         }
         guard(true, lir->insEqI_0(status), STATUS_EXIT);
     }
 
     if (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER) {
         /*
          * If we side exit on the unboxing code due to a type change, make sure that the boxed
          * value is actually currently associated with that location, and that we are talking
          * about the top of the stack here, which is where we expected boxed values.
          */
         JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins);
-        set(&v, unbox_value(v, native_rval_ins, 0, snapshot(BRANCH_EXIT)));
+        set(&v, unbox_value(v, native_rval_ins, 0, ACCSET_ALLOC, snapshot(BRANCH_EXIT)));
     } else if (pendingSpecializedNative->flags &
                (JSTN_RETURN_NULLABLE_STR | JSTN_RETURN_NULLABLE_OBJ)) {
         guard(v.isNull(),
               addName(lir->insEqP_0(v_ins), "guard(nullness)"),
               BRANCH_EXIT);
     } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) {
         /* Already added i2d in functionCall. */
         JS_ASSERT(v.isNumber());
@@ -13991,17 +14145,17 @@ TraceRecorder::denseArrayElement(Value& 
      * Arrays have both a length and a capacity, but we only need to check
      * |index < capacity|;  in the case where |length < index < capacity|
      * the entries [length..capacity-1] will have already been marked as
      * holes by resizeDenseArrayElements() so we can read them and get
      * the correct value.
      */
     LIns* capacity_ins =
         addName(lir->insLoad(LIR_ldi, obj_ins,
-                             offsetof(JSObject, capacity), ACCSET_OTHER),
+                             offsetof(JSObject, capacity), ACCSET_OBJ_CAPACITY),
                 "capacity");
 
     jsuint capacity = obj->getDenseArrayCapacity();
     bool within = (jsuint(idx) < capacity);
     if (!within) {
         /* If not idx < capacity, stay on trace (and read value as undefined). */
         guard(true, lir->ins2(LIR_geui, idx_ins, capacity_ins), branchExit);
 
@@ -14012,23 +14166,21 @@ TraceRecorder::denseArrayElement(Value& 
         addr_ins = NULL;
         return RECORD_CONTINUE;
     }
 
     /* Guard that index is within capacity. */
     guard(true, lir->ins2(LIR_ltui, idx_ins, capacity_ins), branchExit);
 
     /* Load the value and guard on its type to unbox it. */
-    LIns* dslots_ins =
-        addName(lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, slots), ACCSET_OTHER), "dslots");
     vp = &obj->slots[jsuint(idx)];
     JS_ASSERT(sizeof(Value) == 8); // The |3| in the following statement requires this.
-    addr_ins = lir->ins2(LIR_addp, dslots_ins,
+    addr_ins = lir->ins2(LIR_addp, slots(obj_ins),
                          lir->ins2ImmI(LIR_lshp, lir->insUI2P(idx_ins), 3));
-    v_ins = unbox_value(*vp, addr_ins, 0, branchExit);
+    v_ins = unbox_value(*vp, addr_ins, 0, ACCSET_SLOTS, branchExit);
 
     /* Don't let the hole value escape. Turn it into an undefined. */
     if (vp->isMagic()) {
         CHECK_STATUS(guardPrototypeHasNoIndexedProperties(obj, obj_ins, snapshot(MISMATCH_EXIT)));
         v_ins = INS_UNDEFINED();
     }
     return RECORD_CONTINUE;
 }
@@ -14062,17 +14214,17 @@ TraceRecorder::typedArrayElement(Value& 
     LIns* priv_ins = stobj_get_private(obj_ins);
 
     /* For out-of-range, do what the interpreter does: return undefined. */
     if ((jsuint) idx >= tarray->length) {
         guard(false,
               lir->ins2(LIR_ltui,
                         idx_ins,
                         lir->insLoad(LIR_ldi, priv_ins, js::TypedArray::lengthOffset(),
-                        ACCSET_OTHER, LOAD_CONST)),
+                        ACCSET_TARRAY, LOAD_CONST)),
               BRANCH_EXIT);
         v_ins = INS_UNDEFINED();
         return ARECORD_CONTINUE;
     }
 
     /*
      * Ensure idx < length
      *
@@ -14081,57 +14233,57 @@ TraceRecorder::typedArrayElement(Value& 
      * If the index happens to be negative, when it's treated as
      * unsigned it'll be a very large int, and thus won't be less than
      * length.
      */
     guard(true,
           lir->ins2(LIR_ltui,
                     idx_ins,
                     lir->insLoad(LIR_ldi, priv_ins, js::TypedArray::lengthOffset(),
-                                 ACCSET_OTHER, LOAD_CONST)),
+                                 ACCSET_TARRAY, LOAD_CONST)),
           BRANCH_EXIT);
 
     /* We are now ready to load.  Do a different type of load
      * depending on what type of thing we're loading. */
     LIns* data_ins = lir->insLoad(LIR_ldp, priv_ins, js::TypedArray::dataOffset(),
-                                  ACCSET_OTHER, LOAD_CONST);
+                                  ACCSET_TARRAY, LOAD_CONST);
 
     switch (tarray->type) {
       case js::TypedArray::TYPE_INT8:
         addr_ins = lir->ins2(LIR_addp, data_ins, pidx_ins);
-        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_ldc2i, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_ldc2i, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_UINT8:
       case js::TypedArray::TYPE_UINT8_CLAMPED:
         addr_ins = lir->ins2(LIR_addp, data_ins, pidx_ins);
-        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_lduc2ui, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_lduc2ui, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_INT16:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 1));
-        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_lds2i, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_lds2i, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_UINT16:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 1));
-        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_ldus2ui, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_ldus2ui, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_INT32:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 2));
-        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_ldi, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_ldi, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_UINT32:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 2));
-        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_ldi, addr_ins, 0, ACCSET_OTHER));
+        v_ins = lir->ins1(LIR_ui2d, lir->insLoad(LIR_ldi, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_FLOAT32:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 2));
-        v_ins = canonicalizeNaNs(lir->insLoad(LIR_ldf2d, addr_ins, 0, ACCSET_OTHER));
+        v_ins = canonicalizeNaNs(lir->insLoad(LIR_ldf2d, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       case js::TypedArray::TYPE_FLOAT64:
         addr_ins = lir->ins2(LIR_addp, data_ins, lir->ins2ImmI(LIR_lshp, pidx_ins, 3));
-        v_ins = canonicalizeNaNs(lir->insLoad(LIR_ldd, addr_ins, 0, ACCSET_OTHER));
+        v_ins = canonicalizeNaNs(lir->insLoad(LIR_ldd, addr_ins, 0, ACCSET_TARRAY_DATA));
         break;
       default:
         JS_NOT_REACHED("Unknown typed array type in tracer");
     }
 
     return ARECORD_CONTINUE;
 }
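
Every case widens the element to a double, since numbers on trace are doubles. For example, the TYPE_INT16 case is the LIR equivalent of this C++ (a sketch):

    int16 *elems = reinterpret_cast<int16 *>(tarray->data);
    jsdouble v = jsdouble(elems[idx]);   // LIR_lds2i load, then LIR_i2d widen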
 
@@ -14474,17 +14626,17 @@ TraceRecorder::record_JSOP_ITER()
     LIns* ok_ins = lir->insCall(&ObjectToIterator_ci, args);
 
     // We need to guard on ok_ins, but this requires a snapshot of the state
     // after this op. monitorRecording will do it for us.
     pendingGuardCondition = ok_ins;
 
     leaveDeepBailCall();
 
-    stack(-1, addName(lir->insLoad(LIR_ldp, objp_ins, 0, ACCSET_OTHER), "iterobj"));
+    stack(-1, addName(lir->insLoad(LIR_ldp, objp_ins, 0, ACCSET_ALLOC), "iterobj"));
 
     return ARECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 IteratorMore(JSContext *cx, JSObject *iterobj, Value *vp)
 {
     if (!js_IteratorMore(cx, iterobj, vp)) {
@@ -14518,19 +14670,19 @@ TraceRecorder::record_JSOP_MOREITER()
         guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
         NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
         void *cursor = ni->props_cursor;
         void *end = ni->props_end;
 
         LIns *ni_ins = stobj_get_private(iterobj_ins);
         LIns *cursor_ins =
             addName(lir->insLoad(LIR_ldp, ni_ins,
-                                 offsetof(NativeIterator, props_cursor), ACCSET_OTHER), "cursor");
+                                 offsetof(NativeIterator, props_cursor), ACCSET_ITER), "cursor");
         LIns *end_ins = addName(lir->insLoad(LIR_ldp, ni_ins, offsetof(NativeIterator, props_end),
-                                ACCSET_OTHER), "end");
+                                ACCSET_ITER), "end");
 
         /* Figure out whether the native iterator contains more values. */
         cond = cursor < end;
         cond_ins = lir->ins2(LIR_ltp, cursor_ins, end_ins);
     } else {
         guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
 
         enterDeepBailCall();
@@ -14558,17 +14710,17 @@ TraceRecorder::record_JSOP_MOREITER()
         JSContext *localCx = cx;
         AutoValueRooter rooter(cx);
         if (!js_IteratorMore(cx, iterobj, rooter.addr()))
             RETURN_ERROR_A("error in js_IteratorMore");
         if (!TRACE_RECORDER(localCx))
             return ARECORD_ABORTED;
 
         cond = (rooter.value().isTrue());
-        cond_ins = is_boxed_true(vp_ins, ACCSET_OTHER);
+        cond_ins = is_boxed_true(vp_ins, ACCSET_ALLOC);
     }
 
     jsbytecode* pc = cx->regs->pc;
 
     if (pc[1] == JSOP_IFNE) {
         fuseIf(pc + 1, cond, cond_ins);
         return checkTraceEnd(pc + 1);
     }
@@ -14631,23 +14783,23 @@ TraceRecorder::unboxNextValue(LIns* &v_i
     JSObject *iterobj = &iterobj_val.toObject();
     LIns* iterobj_ins = get(&iterobj_val);
 
     if (iterobj->hasClass(&js_IteratorClass)) {
         guardClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
         NativeIterator *ni = (NativeIterator *) iterobj->getPrivate();
 
         LIns *ni_ins = stobj_get_private(iterobj_ins);
-        LIns *cursor_ins = addName(lir->insLoad(LIR_ldp, ni_ins, offsetof(NativeIterator, props_cursor), ACCSET_OTHER), "cursor");
+        LIns *cursor_ins = addName(lir->insLoad(LIR_ldp, ni_ins, offsetof(NativeIterator, props_cursor), ACCSET_ITER), "cursor");
 
         /* Emit code to stringify the id if necessary. */
         if (!(((NativeIterator *) iterobj->getPrivate())->flags & JSITER_FOREACH)) {
             /* Read the next id from the iterator. */
             jsid id = *ni->currentKey();
-            LIns *id_ins = addName(lir->insLoad(LIR_ldp, cursor_ins, 0, ACCSET_OTHER), "id");
+            LIns *id_ins = addName(lir->insLoad(LIR_ldp, cursor_ins, 0, ACCSET_ITER_PROPS), "id");
 
             /*
              * Most iterations over object properties never have to actually deal with
              * any numeric properties, so we guard here instead of branching.
              */
             guard(JSID_IS_STRING(id), is_string_id(id_ins), snapshot(BRANCH_EXIT));
 
             if (JSID_IS_STRING(id)) {
@@ -14661,30 +14813,30 @@ TraceRecorder::unboxNextValue(LIns* &v_i
                 guard(false, lir->insEqP_0(v_ins), OOM_EXIT);
             }
 
             /* Increment the cursor by one jsid and store it back. */
             cursor_ins = lir->ins2(LIR_addp, cursor_ins, INS_CONSTWORD(sizeof(jsid)));
         } else {
             /* Read the next value from the iterator. */
             Value v = *ni->currentValue();
-            v_ins = unbox_value(v, cursor_ins, 0, snapshot(BRANCH_EXIT));
+            v_ins = unbox_value(v, cursor_ins, 0, ACCSET_ITER_PROPS, snapshot(BRANCH_EXIT));
 
             /* Increment the cursor by one Value and store it back. */
             cursor_ins = lir->ins2(LIR_addp, cursor_ins, INS_CONSTWORD(sizeof(Value)));
         }
 
         lir->insStore(LIR_stp, cursor_ins, ni_ins, offsetof(NativeIterator, props_cursor),
-                      ACCSET_OTHER);
+                      ACCSET_ITER);
     } else {
         guardNotClass(iterobj_ins, &js_IteratorClass, snapshot(BRANCH_EXIT), LOAD_NORMAL);
 
         v_ins = unbox_value(cx->iterValue, cx_ins, offsetof(JSContext, iterValue),
-                            snapshot(BRANCH_EXIT));
-        storeMagic(JS_NO_ITER_VALUE, cx_ins, offsetof(JSContext, iterValue), ACCSET_OTHER);
+                            ACCSET_CX, snapshot(BRANCH_EXIT));
+        storeMagic(JS_NO_ITER_VALUE, cx_ins, offsetof(JSContext, iterValue), ACCSET_CX);
     }
 
     return ARECORD_CONTINUE;
 }
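
On the fast path this records the C++ equivalent of an in-place cursor bump (a sketch; the cursor advances by one jsid for key iteration, one Value under JSITER_FOREACH, and the exact cursor type is elided here):

    jsid id = *ni->currentKey();         // read the next property id
    ni->props_cursor = (jsid *) ((char *) ni->props_cursor + sizeof(jsid));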
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_FORNAME()
 {
@@ -15043,19 +15195,17 @@ TraceRecorder::record_JSOP_INSTANCEOF()
         RETURN_STOP_A("non-object on rhs of instanceof");
 
     Value& val = stackval(-2);
     LIns* val_ins = box_value_for_native_call(val, get(&val));
 
     enterDeepBailCall();
     LIns* args[] = {val_ins, get(&ctor), cx_ins};
     stack(-2, lir->insCall(&HasInstanceOnTrace_ci, args));
-    LIns* status_ins = lir->insLoad(LIR_ldi,
-                                    lirbuf->state,
-                                    offsetof(TracerState, builtinStatus), ACCSET_OTHER);
+    LIns* status_ins = loadFromState(LIR_ldi, builtinStatus);
     pendingGuardCondition = lir->insEqI_0(status_ins);
     leaveDeepBailCall();
 
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_DEBUGGER()
@@ -15309,17 +15459,17 @@ TraceRecorder::record_JSOP_LAMBDA_FC()
                                                        JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
 
         for (uint32 i = 0, n = uva->length; i < n; i++) {
             Value v;
             LIns* v_ins = upvar(fun->u.i.script, uva, i, v);
             if (!v_ins)
                 return ARECORD_STOP;
 
-            box_value_into(v, v_ins, upvars_ins, i * sizeof(Value), ACCSET_OTHER);
+            box_value_into(v, v_ins, upvars_ins, i * sizeof(Value), ACCSET_FCSLOTS);
         }
     }
 
     stack(0, closure_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
@@ -15562,23 +15712,16 @@ TraceRecorder::record_JSOP_REGEXP()
     };
     LIns* regex_ins = lir->insCall(&js_CloneRegExpObject_ci, args);
     guard(false, lir->insEqP_0(regex_ins), OOM_EXIT);
 
     stack(0, regex_ins);
     return ARECORD_CONTINUE;
 }
 
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::record_JSOP_UNUSED180()
-{
-    JS_NOT_REACHED("recording JSOP_UNUSED180?!?");
-    return ARECORD_ERROR;
-}
-
 // begin JS_HAS_XML_SUPPORT
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_DEFXMLNS()
 {
     return ARECORD_STOP;
 }
 
@@ -16097,17 +16240,17 @@ TraceRecorder::record_JSOP_LENGTH()
         }
         v_ins = lir->ins1(LIR_i2d, stobj_get_private_uint32(obj_ins));
     } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
         // Ensure array is a typed array and is the same type as what was written
         guardClass(obj_ins, obj->getClass(), snapshot(BRANCH_EXIT), LOAD_NORMAL);
         v_ins = lir->ins1(LIR_i2d, lir->insLoad(LIR_ldi,
                                                 stobj_get_private(obj_ins),
                                                 js::TypedArray::lengthOffset(),
-                                                ACCSET_OTHER, LOAD_CONST));
+                                                ACCSET_TARRAY, LOAD_CONST));
     } else {
         if (!obj->isNative())
             RETURN_STOP_A("can't trace length property access on non-array, non-native object");
         return getProp(obj, obj_ins);
     }
     set(&l, v_ins);
     return ARECORD_CONTINUE;
 }
@@ -16120,23 +16263,23 @@ TraceRecorder::record_JSOP_NEWARRAY()
 
     uint32 len = GET_UINT16(cx->regs->pc);
     cx->assertValidStackDepth(len);
 
     LIns* args[] = { lir->insImmI(len), proto_ins, cx_ins };
     LIns* v_ins = lir->insCall(&js_NewPreallocatedArray_ci, args);
     guard(false, lir->insEqP_0(v_ins), OOM_EXIT);
 
-    LIns* dslots_ins = NULL;
+    LIns* slots_ins = NULL;
     uint32 count = 0;
     for (uint32 i = 0; i < len; i++) {
         Value& v = stackval(int(i) - int(len));
         if (!v.isMagic())
             count++;
-        stobj_set_dslot(v_ins, i, dslots_ins, v, get(&v));
+        stobj_set_dslot(v_ins, i, slots_ins, v, get(&v));
     }
 
     stack(-int(len), v_ins);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_HOLE()
@@ -16146,16 +16289,22 @@ TraceRecorder::record_JSOP_HOLE()
 }
 
 AbortableRecordingStatus
 TraceRecorder::record_JSOP_TRACE()
 {
     return ARECORD_CONTINUE;
 }
 
+AbortableRecordingStatus
+TraceRecorder::record_JSOP_NOTRACE()
+{
+    return ARECORD_CONTINUE;
+}
+
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_SETMETHOD()
 {
     return record_JSOP_SETPROP();
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_INITMETHOD()
@@ -16171,31 +16320,31 @@ js_Unbrand(JSContext *cx, JSObject *obj)
 
 JS_DEFINE_CALLINFO_2(extern, BOOL, js_Unbrand, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_UNBRAND()
 {
     LIns* args_ins[] = { stack(-1), cx_ins };
     LIns* call_ins = lir->insCall(&js_Unbrand_ci, args_ins);
-    guard(true, call_ins, OOM_EXIT);
+    guard(false, lir->insEqI_0(call_ins), OOM_EXIT);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_UNBRANDTHIS()
 {
     LIns* this_ins;
     RecordingStatus status = getThis(this_ins);
     if (status != RECORD_CONTINUE)
         return InjectStatus(status);
 
     LIns* args_ins[] = { this_ins, cx_ins };
     LIns* call_ins = lir->insCall(&js_Unbrand_ci, args_ins);
-    guard(true, call_ins, OOM_EXIT);
+    guard(false, lir->insEqI_0(call_ins), OOM_EXIT);
     return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_SHARPINIT()
 {
     return ARECORD_STOP;
 }
@@ -16523,33 +16672,37 @@ MonitorTracePoint(JSContext* cx, uintN& 
             if (!ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard, &lr))
                 return TPA_Error;
 
             if (!lr)
                 return TPA_Nothing;
 
             switch (lr->exitType) {
               case UNSTABLE_LOOP_EXIT:
-                if (!AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0))
+                if (!AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0, NULL))
                     return TPA_RanStuff;
                 break;
 
+              case MUL_ZERO_EXIT:
               case OVERFLOW_EXIT:
-                tm->oracle->markInstructionUndemotable(cx->regs->pc);
+                if (lr->exitType == MUL_ZERO_EXIT)
+                    tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
+                else
+                    tm->oracle->markInstructionUndemotable(cx->regs->pc);
                 /* FALL THROUGH */
               case BRANCH_EXIT:
               case CASE_EXIT:
-                if (!AttemptToExtendTree(cx, lr, NULL, NULL))
+                if (!AttemptToExtendTree(cx, lr, NULL, NULL, NULL))
                     return TPA_RanStuff;
                 break;
 
               case LOOP_EXIT:
                 if (!innermostNestedGuard)
                     return TPA_RanStuff;
-                if (!AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL))
+                if (!AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL))
                     return TPA_RanStuff;
                 break;
 
               default:
                 return TPA_RanStuff;
             }
 
             JS_ASSERT(TRACE_RECORDER(cx));
@@ -16563,24 +16716,408 @@ MonitorTracePoint(JSContext* cx, uintN& 
             return TPA_Nothing;
         }
     }
 
     if (++tree->hits() < HOTLOOP)
         return TPA_Nothing;
     if (!ScopeChainCheck(cx, tree))
         return TPA_Nothing;
-    if (!RecordTree(cx, tree->first, NULL, 0, globalSlots))
+    if (!RecordTree(cx, tree->first, NULL, 0, NULL, globalSlots))
         return TPA_Nothing;
 
   interpret:
     JS_ASSERT(TRACE_RECORDER(cx));
 
     /* Locked and loaded with a recorder. Ask the interpreter to go run some code. */
     if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
         return TPA_Error;
 
     return TPA_RanStuff;
 }
 
 #endif
 
 } /* namespace js */
+
+
+#ifdef DEBUG
+namespace nanojit {
+
+static bool
+match(LIns* base, LOpcode opcode, AccSet accSet, int32_t disp)
+{
+    return base->isop(opcode) &&
+           base->accSet() == accSet &&
+           base->disp() == disp;
+}
+
+static bool
+match(LIns* base, LOpcode opcode, AccSet accSet, LoadQual loadQual, int32_t disp)
+{
+    return base->isop(opcode) &&
+           base->accSet() == accSet &&
+           base->loadQual() == loadQual &&
+           base->disp() == disp;
+}
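
For example, the ACCSET_STACK case below recognizes a stack-pointer load as:

    bool isSpLoad = match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, sp));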
+
+static bool
+couldBeObjectOrString(LIns* ins)
+{
+    bool ret = false;
+
+    if (ins->isop(LIR_callp)) {
+        // ins = callp ...      # could be a call to an object-creating function
+        ret = true;
+
+    } else if (ins->isop(LIR_ldp)) {
+        // ins = ldp ...        # could be an object, eg. loaded from the stack
+        ret = true;
+
+    } else if (ins->isImmP()) {
+        // ins = immp ...       # could be a pointer to an object
+        uintptr_t val = uintptr_t(ins->immP());
+        if (val == 0 || val > 4096)
+            ret = true;         // Looks like a pointer
+
+    } else if (ins->isop(LIR_cmovp)) {
+        // ins = cmovp <JSObject>, <JSObject>
+        ret = couldBeObjectOrString(ins->oprnd2()) &&
+              couldBeObjectOrString(ins->oprnd3());
+
+    } else if (!avmplus::AvmCore::use_cmov() &&
+               ins->isop(LIR_ori) &&
+               ins->oprnd1()->isop(LIR_andi) &&
+               ins->oprnd2()->isop(LIR_andi))
+    {
+        // This is a partial check for the insChoose() code that only occurs
+        // is use_cmov() is false.
+        //
+        // ins_oprnd1 = andi ...
+        // ins_oprnd2 = andi ...
+        // ins = ori ins_oprnd1, ins_oprnd2
+        ret = true;
+
+#if JS_BITS_PER_WORD == 64
+    } else if (ins->isop(LIR_andq) &&
+               ins->oprnd1()->isop(LIR_ldq) &&
+               ins->oprnd2()->isImmQ() &&
+               uintptr_t(ins->oprnd2()->immQ()) == JSVAL_PAYLOAD_MASK)
+    {
+        // ins_oprnd1 = ldq ...
+        // ins_oprnd2 = immq JSVAL_PAYLOAD_MASK
+        // ins = andq ins_oprnd1, ins_oprnd2
+        ret = true;
+#endif
+    } else if (ins->isop(LIR_addp) &&
+               ((ins->oprnd1()->isImmP() &&
+                 (void*)ins->oprnd1()->immP() == JSString::unitStringTable) ||
+                (ins->oprnd2()->isImmP() &&
+                 (void*)ins->oprnd2()->immP() == JSString::unitStringTable)))
+    {
+        // (String only)
+        // ins = addp ..., JSString::unitStringTable
+        //   OR
+        // ins = addp JSString::unitStringTable, ...
+        ret = true;
+    }
+
+    return ret;
+}
+
+static bool
+isConstPrivatePtr(LIns* ins, unsigned slot)
+{
+#if JS_BITS_PER_WORD == 32
+    // ins = ldp.slots/c ...[<offset of slot>]
+    return match(ins, LIR_ldp, ACCSET_SLOTS, LOAD_CONST, slot * sizeof(Value) + sPayloadOffset);
+#elif JS_BITS_PER_WORD == 64
+    // ins_oprnd1 = ldp.slots/c ...[<offset of slot>]
+    // ins_oprnd2 = immi 1
+    // ins = lshq ins_oprnd1, ins_oprnd2
+    return ins->isop(LIR_lshq) &&
+           match(ins->oprnd1(), LIR_ldp, ACCSET_SLOTS, LOAD_CONST, slot * sizeof(Value)) &&
+           ins->oprnd2()->isImmI(1);
+#endif
+}
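
The 64-bit form mirrors how private pointers are boxed in this era: the pointer is stored shifted right by one bit, and the lshq by 1 above reconstructs it. In C++ terms (a sketch, assuming that encoding):

    void *priv = (void *) (uintptr_t(slotWord) << 1);   // undo the >>1 private-pointer encoding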
+
+/*
+ * Any time you use an AccSet annotation other than ACCSET_ALL, you are making
+ * a promise to Nanojit about the properties of the annotated load/store/call.
+ * If that annotation is wrong, it could cause rare and subtle bugs.  So this
+ * function does its damnedest to prevent such bugs occurring by carefully
+ * checking every load and store.
+ *
+ * For some access regions, we can check perfectly -- eg. for an ACCSET_STATE
+ * load/store, the base pointer must be 'state'.  For others, we can only
+ * check imperfectly -- eg. for an ACCSET_OBJ_CLASP load/store, we can check that
+ * the base pointer has one of several forms, but it's possible that a
+ * non-object has that form as well.  This imperfect checking is unfortunate
+ * but unavoidable.  Also, multi-region load/store AccSets are not checked,
+ * and so are best avoided (they're rarely needed).  Finally, the AccSet
+ * annotations on calls cannot be checked here;  in some cases they can be
+ * partially checked via assertions (eg. by checking that certain values
+ * are not changed by the function).
+ */
+void ValidateWriter::checkAccSet(LOpcode op, LIns* base, int32_t disp, AccSet accSet)
+{
+    bool ok;
+
+    NanoAssert(accSet != ACCSET_NONE);
+
+    #define dispWithin(Struct) \
+        (0 <= disp && disp < int32_t(sizeof(Struct)))
+
+    switch (accSet) {
+      case ACCSET_STATE:
+        // base = paramp 0 0
+        // ins  = {ld,st}X.state base[<disp within TracerState>]
+        ok = dispWithin(TracerState) &&
+             base->isop(LIR_paramp) &&
+             base->paramKind() == 0 &&
+             base->paramArg() == 0;
+        break;
+
+      case ACCSET_STACK:
+        // base = ldp.state ...[offsetof(TracerState, sp)]
+        // ins  = {ld,st}X.sp base[...]
+        //   OR
+        // base_oprnd1 = ldp.state ...[offsetof(TracerState, sp)]
+        // base        = addp base_oprnd1, ...
+        // ins         = {ld,st}X.sp base[...]
+        ok = match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, sp)) ||
+             (base->isop(LIR_addp) &&
+              match(base->oprnd1(), LIR_ldp, ACCSET_STATE, offsetof(TracerState, sp)));
+        break;
+
+      case ACCSET_RSTACK:
+        // base = ldp.state ...[offsetof(TracerState, rp)]
+        // ins  = {ld,st}p.rp base[...]
+        //   OR
+        // base = ldp.state ...[offsetof(TracerState, callstackBase)]
+        // ins  = {ld,st}p.rp base[...]
+        ok = (op == LIR_ldp || op == LIR_stp) &&
+             (match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, rp)) ||
+              match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, callstackBase)));
+        break;
+
+      case ACCSET_CX:
+        // base = ldp.state ...[offsetof(TracerState, cx)]
+        // ins  = {ld,st}X.cx base[<disp within JSContext>]
+        ok = dispWithin(JSContext) &&
+             match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, cx));
+        break;
+
+      case ACCSET_EOS:
+        // base = ldp.state ...[offsetof(TracerState, eos)]
+        // ins  = {ld,st}X.eos base[...]
+        ok = match(base, LIR_ldp, ACCSET_STATE, offsetof(TracerState, eos));
+        break;
+
+      case ACCSET_ALLOC:
+        // base = allocp ...
+        // ins  = {ld,st}X.alloc base[...]
+        //   OR
+        // base_oprnd1 = allocp ...
+        // base        = addp base_oprnd1, ...
+        // ins         = {ld,st}X.alloc base[...]
+        ok = base->isop(LIR_allocp) ||
+             (base->isop(LIR_addp) &&
+              base->oprnd1()->isop(LIR_allocp));
+        break;
+
+      case ACCSET_FRAMEREGS:
+        // base = ldp.cx ...[offsetof(JSContext, regs)]
+        // ins  = ldp.regs base[<disp within JSFrameRegs>]
+        ok = op == LIR_ldp &&
+             dispWithin(JSFrameRegs) && 
+             match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, regs));
+        break;
+
+      case ACCSET_STACKFRAME:
+        // base = ldp.regs ...[offsetof(JSFrameRegs, fp)]
+        // ins  = {ld,st}X.sf base[<disp within JSStackFrame>]
+        ok = dispWithin(JSStackFrame) && 
+             match(base, LIR_ldp, ACCSET_FRAMEREGS, offsetof(JSFrameRegs, fp));
+        break;
+
+      case ACCSET_RUNTIME:
+        // base = ldp.cx ...[offsetof(JSContext, runtime)]
+        // ins  = ldp.rt base[<disp within JSRuntime>]
+        ok = dispWithin(JSRuntime) &&
+             match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, runtime));
+        break;
+
+      // This check is imperfect.
+      //
+      // base = <JSObject>
+      // ins  = ldp.obj<field> base[offsetof(JSObject, <field>)]
+      #define OK_OBJ_FIELD(ldop, field) \
+            op == ldop && \
+            disp == offsetof(JSObject, field) && \
+            couldBeObjectOrString(base)
+
+      case ACCSET_OBJ_CLASP:
+        ok = OK_OBJ_FIELD(LIR_ldp, clasp);
+        break;
+
+      case ACCSET_OBJ_FLAGS:
+        ok = OK_OBJ_FIELD(LIR_ldi, flags);
+        break;
+
+      case ACCSET_OBJ_SHAPE:
+        ok = OK_OBJ_FIELD(LIR_ldi, objShape);
+        break;
+
+      case ACCSET_OBJ_PROTO:
+        ok = OK_OBJ_FIELD(LIR_ldp, proto);
+        break;
+
+      case ACCSET_OBJ_PARENT:
+        ok = OK_OBJ_FIELD(LIR_ldp, parent);
+        break;
+
+      case ACCSET_OBJ_PRIVATE:
+        // base = <JSObject>
+        // ins  = ldp.objprivate base[offsetof(JSObject, privateData)]
+        ok = (op == LIR_ldi || op == LIR_ldp) &&
+             disp == offsetof(JSObject, privateData) &&
+             couldBeObjectOrString(base);
+        break;
+
+      case ACCSET_OBJ_CAPACITY:
+        ok = OK_OBJ_FIELD(LIR_ldi, capacity);
+        break;
+
+      case ACCSET_OBJ_SLOTS:
+        ok = OK_OBJ_FIELD(LIR_ldp, slots);
+        break;
+
+      case ACCSET_SLOTS:
+        // This check is imperfect.
+        //
+        // base = <JSObject>                                          # direct slot access
+        // ins  = {ld,st}X.slots base[...]
+        //   OR
+        // base = ldp.objslots ...[offsetof(JSObject, slots)]         # indirect slot access
+        // ins  = {ld,st}X.slots base[...]
+        //   OR
+        // base_oprnd1 = ldp.objslots ...[offsetof(JSObject, slots)]  # indirect scaled slot access
+        // base        = addp base_oprnd1, ...
+        // ins         = {ld,st}X.slots base[...]
+        ok = couldBeObjectOrString(base) ||
+             match(base, LIR_ldp, ACCSET_OBJ_SLOTS, offsetof(JSObject, slots)) ||
+             (base->isop(LIR_addp) &&
+              match(base->oprnd1(), LIR_ldp, ACCSET_OBJ_SLOTS, offsetof(JSObject, slots)));
+        break;
+
+      case ACCSET_TARRAY:
+        // This check is imperfect.
+        //
+        // base = ldp.objprivate ...[offsetof(JSObject, privateData)]
+        // ins = ld{i,p}.tarray base[<disp within TypedArray>]
+        ok = (op == LIR_ldi || op == LIR_ldp) &&
+             dispWithin(TypedArray) &&
+             match(base, LIR_ldp, ACCSET_OBJ_PRIVATE, offsetof(JSObject, privateData));
+        break;
+
+      case ACCSET_TARRAY_DATA:
+        // base_oprnd1 = ldp.tarray ...[TypedArray::dataOffset()]
+        // base        = addp base_oprnd1, ...
+        // ins         = {ld,st}X.tdata base[...]
+        ok = base->isop(LIR_addp) &&
+             match(base->oprnd1(), LIR_ldp, ACCSET_TARRAY, TypedArray::dataOffset());
+        break;
+
+      case ACCSET_ITER:
+        // base = ldp.objprivate ...[offsetof(JSObject, privateData)]
+        // ins = {ld,st}p.iter base[<disp within NativeIterator>]
+        ok = (op == LIR_ldp || op == LIR_stp) &&
+             dispWithin(NativeIterator) &&
+             match(base, LIR_ldp, ACCSET_OBJ_PRIVATE, offsetof(JSObject, privateData));
+        break;
+
+      case ACCSET_ITER_PROPS:
+        // base = ldp.iter ...[offsetof(NativeIterator, props_cursor)]
+        // ins  = ld{i,p,d}.iterprops base[0|4]
+        ok = (op == LIR_ldi || op == LIR_ldp || op == LIR_ldd) &&
+             (disp == 0 || disp == 4) &&
+             match(base, LIR_ldp, ACCSET_ITER, offsetof(NativeIterator, props_cursor));
+        break;
+
+      case ACCSET_STRING:
+        // This check is imperfect.
+        //
+        // base = <JSString>
+        // ins  = {ld,st}X.str base[<disp within JSString>]
+        ok = dispWithin(JSString) &&
+             couldBeObjectOrString(base);
+        break;
+
+      case ACCSET_STRING_MCHARS:
+        // base = ldp.string ...[offsetof(JSString, mChars)]
+        // ins  = ldus2ui.strchars/c base[0]
+        //   OR
+        // base_oprnd1 = ldp.string ...[offsetof(JSString, mChars)]
+        // base        = addp base_oprnd1, ...
+        // ins         = ldus2ui.strchars/c base[0]
+        ok = op == LIR_ldus2ui &&
+             disp == 0 &&
+             (match(base, LIR_ldp, ACCSET_STRING, offsetof(JSString, mChars)) ||
+              (base->isop(LIR_addp) &&
+               match(base->oprnd1(), LIR_ldp, ACCSET_STRING, offsetof(JSString, mChars))));
+        break;
+
+      case ACCSET_TYPEMAP:
+        // This check is imperfect; things get complicated once you get back
+        // farther than 'base'.  But the parts we check are pretty distinctive
+        // and should be good enough.
+        //
+        // base = addp base_oprnd1, ...
+        // ins  = lduc2ui.typemap/c base[0]
+        ok = op == LIR_lduc2ui &&
+             disp == 0 &&
+             base->isop(LIR_addp);
+        break;
+
+      case ACCSET_FCSLOTS:
+        // This check is imperfect.
+        //
+        // base = <const private ptr slots[JSSLOT_FLAT_CLOSURE_UPVARS]>
+        // ins = {ld,st}X.fcslots base[...]
+        ok = isConstPrivatePtr(base, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS);
+        break;
+
+      case ACCSET_ARGS_DATA:
+        // This check is imperfect.
+        //
+        // base = <const private ptr slots[JSSLOT_ARGS_DATA]>
+        // ins = st{i,p,d}.argsdata base[...]
+        //   OR
+        // base_oprnd1 = <const private ptr slots[JSSLOT_ARGS_DATA]>
+        // base        = addp base_oprnd1, ...
+        // ins         = {ld,st}X.argsdata base[...]
+        ok = (isConstPrivatePtr(base, JSObject::JSSLOT_ARGS_DATA) ||
+              (base->isop(LIR_addp) &&
+               isConstPrivatePtr(base->oprnd1(), JSObject::JSSLOT_ARGS_DATA)));
+        break;
+
+      default:
+        // This assertion will fail if any single-region AccSets aren't covered
+        // by the switch -- only multi-region AccSets should be handled here.
+        JS_ASSERT(!isSingletonAccSet(accSet));
+        ok = true;
+        break;
+    }
+
+    if (!ok) {
+        InsBuf b1, b2;
+        printer->formatIns(&b1, base);
+        JS_snprintf(b2.buf, b2.len, "base = (%s); disp = %d", b1.buf, disp);
+        errorAccSet(lirNames[op], accSet, b2.buf);
+    }
+}
+
+}
+
+#endif
+
+
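To make the promise described in the checkAccSet comment concrete, here is a hedged sketch of an annotated load the ACCSET_STATE case accepts (LirWriter usage is condensed; insParam/insLoad are the nanojit entry points assumed here):

    // base must literally be the TracerState* parameter:
    LIns *state = lir->insParam(0, 0);                 // paramp 0 0
    LIns *sp    = lir->insLoad(LIR_ldp, state,
                               offsetof(TracerState, sp),
                               ACCSET_STATE);          // promise kept
    // Annotating the same load as, say, ACCSET_STACK would trip
    // checkAccSet in debug builds instead of miscompiling silently.
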
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -50,32 +50,16 @@
 #include "jsdhash.h"
 #include "jsinterp.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsvector.h"
 
 namespace js {
 
-/*
- * TM-specific access regions:
- *
- * - STACK: the stack.  STACK loads/stores always use 'sp' or 'sp+k' as the
- *   base pointer.
- *
- * - RSTACK: the return stack.  RSTACK loads/stores always use 'rp' as the
- *   base pointer.
- *
- * - OTHER: all other regions of memory.
- */
-static const nanojit::AccSet ACCSET_STACK   = (1 << 0);
-static const nanojit::AccSet ACCSET_RSTACK  = (1 << 1);
-static const nanojit::AccSet ACCSET_OTHER   = (1 << 2);
-static const uint8_t TM_NUM_USED_ACCS = 3;  // number of access regions used by TraceMonkey
-
 #if defined(DEBUG) && !defined(JS_JIT_SPEW)
 #define JS_JIT_SPEW
 #endif
 
 template <typename T>
 class Queue {
     T* _data;
     unsigned _len;
@@ -314,27 +298,30 @@ extern void FragProfiling_FragFinalizer(
  * are not demoted, etc.) but have no correctness implications.
  */
 #define ORACLE_SIZE 4096
 
 class Oracle {
     avmplus::BitSet _stackDontDemote;
     avmplus::BitSet _globalDontDemote;
     avmplus::BitSet _pcDontDemote;
+    avmplus::BitSet _pcSlowZeroTest;
 public:
     Oracle();
 
     JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
     JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
     JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
     JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc);
     JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
     JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const;
     void markInstructionUndemotable(jsbytecode* pc);
     bool isInstructionUndemotable(jsbytecode* pc) const;
+    void markInstructionSlowZeroTest(jsbytecode* pc);
+    bool isInstructionSlowZeroTest(jsbytecode* pc) const;
 
     void clearDemotability();
     void clear() {
         clearDemotability();
     }
 };
 
 typedef Queue<uint16> SlotList;
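
The slow-zero-test pair added to Oracle mirrors the existing per-instruction bookkeeping. A plausible implementation, patterned on markInstructionUndemotable (the pc-hashing helper is an assumption; the definitions are not part of this hunk):

    void
    Oracle::markInstructionSlowZeroTest(jsbytecode* pc)
    {
        _pcSlowZeroTest.set(PCHash(pc));    // PCHash: assumed shared pc hash
    }

    bool
    Oracle::isInstructionSlowZeroTest(jsbytecode* pc) const
    {
        return _pcSlowZeroTest.get(PCHash(pc));
    }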
@@ -380,16 +367,17 @@ public:
      * memory access.                                                           \
      */                                                                         \
     _(MISMATCH)                                                                 \
     /*                                                                          \
      * A specialization of MISMATCH_EXIT to handle allocation failures.         \
      */                                                                         \
     _(OOM)                                                                      \
     _(OVERFLOW)                                                                 \
+    _(MUL_ZERO)                                                                 \
     _(UNSTABLE_LOOP)                                                            \
     _(TIMEOUT)                                                                  \
     _(DEEP_BAIL)                                                                \
     _(STATUS)
 
 enum ExitType {
     #define MAKE_EXIT_CODE(x) x##_EXIT,
     JS_TM_EXITCODES(MAKE_EXIT_CODE)
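
The new MUL_ZERO exit covers speculated integer multiplies whose product is zero: an int32 zero cannot represent the negative zero that the equivalent double multiply may produce, so such results must fall off trace. A standalone illustration of the distinction:

    // Why an int fast path must guard a zero product (illustrative):
    #include <cmath>
    #include <cstdio>

    int main() {
        double d = -1.0 * 0.0;   // IEEE 754: negative zero
        int    i = -1   * 0;     // int32: plain zero
        std::printf("%d %d\n", (int) std::signbit(d), i);   // prints "1 0"
        return 0;
    }

Presumably the markInstructionSlowZeroTest bit above lets re-recorded traces emit the slower sign-aware zero test up front at pcs that have already taken this exit.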
@@ -865,27 +853,31 @@ class TraceRecorder
     VMFragment* const               fragment;
 
     /* The root fragment representing the tree. */
     TreeFragment* const             tree;
 
     /* The global object from the start of recording until now. */
     JSObject* const                 globalObj;
 
-    /* If non-null, the (pc of the) outer loop aborted to start recording this loop. */
-    jsbytecode* const               outer;
+    /* If non-null, the script of the outer loop that aborted to start recording this loop. */
+    JSScript* const                 outerScript;
+    
+    /* If non-null, the pc of the outer loop that aborted to start recording this loop. */
+    jsbytecode* const               outerPC;
 
-    /* If |outer|, the argc to use when looking up |outer| in the fragments table. */
+    /* If |outerPC|, the argc to use when looking up |outerPC| in the fragments table. */
     uint32 const                    outerArgc;
 
     /* If non-null, the side exit from which we are growing. */
     VMSideExit* const               anchor;
 
     /* The LIR-generation pipeline used to build |fragment|. */
     nanojit::LirWriter* const       lir;
+    nanojit::CseFilter* const       cse_filter;
 
     /* Instructions yielding the corresponding trace-const members of TracerState. */
     nanojit::LIns* const            cx_ins;
     nanojit::LIns* const            eos_ins;
     nanojit::LIns* const            eor_ins;
     nanojit::LIns* const            loopLabel;
 
     /* Lazy slot import state. */
@@ -911,18 +903,18 @@ class TraceRecorder
     /*********************************************************** Recording session mutable state */
 
     /* Maps interpreter stack values to the instruction generating that value. */
     Tracker                         tracker;
 
     /* Maps interpreter stack values to the instruction writing back to the native stack. */
     Tracker                         nativeFrameTracker;
 
-    /* The start of the global object's dslots we assume for the trackers. */
-    Value*                          global_dslots;
+    /* The start of the global object's slots we assume for the trackers. */
+    Value*                          global_slots;
 
     /* The number of interpreted calls entered (and not yet left) since recording began. */
     unsigned                        callDepth;
 
     /* The current atom table, mirroring the interpreter loop's variable of the same name. */
     JSAtom**                        atoms;
     Value*                          consts;
 
@@ -967,16 +959,26 @@ class TraceRecorder
     /* Temporary JSSpecializedNative used to describe non-specialized fast natives. */
     JSSpecializedNative             generatedSpecializedNative;
 
     /* Temporary JSValueType array used to construct temporary typemaps. */
     js::Vector<JSValueType, 256>    tempTypeMap;
 
     /************************************************************* 10 bajillion member functions */
 
+    /* 
+     * These can be put around a control-flow diamond if it's important that
+     * CSE work across the diamond.  Duplicated expressions within the diamond
+     * will be CSE'd, but expressions defined within the diamond won't be
+     * added to the tables of CSEable expressions.  Loads are still
+     * invalidated if they alias any stores that occur within diamonds.
+     */
+    void suspendCSE()   { if (cse_filter) cse_filter->suspend(); }
+    void resumeCSE()    { if (cse_filter) cse_filter->resume();  }
+
     nanojit::LIns* insImmVal(const Value& val);
     nanojit::LIns* insImmObj(JSObject* obj);
     nanojit::LIns* insImmFun(JSFunction* fun);
     nanojit::LIns* insImmStr(JSString* str);
     nanojit::LIns* insImmShape(const js::Shape* shape);
     nanojit::LIns* insImmId(jsid id);
     nanojit::LIns* p2i(nanojit::LIns* ins);
 
@@ -1015,28 +1017,32 @@ class TraceRecorder
     ptrdiff_t nativeGlobalSlot(const Value *p) const;
     ptrdiff_t nativeGlobalOffset(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackOffsetImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackSlotImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativeStackSlot(const Value* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativespOffsetImpl(const void* p) const;
     JS_REQUIRES_STACK ptrdiff_t nativespOffset(const Value* p) const;
-    JS_REQUIRES_STACK void importImpl(nanojit::LIns* base, ptrdiff_t offset, const void* p, JSValueType t,
+    JS_REQUIRES_STACK void importImpl(nanojit::LIns* base, ptrdiff_t offset, nanojit::AccSet accSet,
+                                      const void* p, JSValueType t,
                                       const char *prefix, uintN index, JSStackFrame *fp);
-    JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, const Value* p, JSValueType t,
+    JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, nanojit::AccSet accSet,
+                                  const Value* p, JSValueType t,
                                   const char *prefix, uintN index, JSStackFrame *fp);
     JS_REQUIRES_STACK void import(TreeFragment* tree, nanojit::LIns* sp, unsigned stackSlots,
                                   unsigned callDepth, unsigned ngslots, JSValueType* typeMap);
     void trackNativeStackUse(unsigned slots);
 
     JS_REQUIRES_STACK bool isValidSlot(JSObject *obj, const js::Shape* shape);
     JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
     JS_REQUIRES_STACK void importGlobalSlot(unsigned slot);
 
+    void ensureCond(nanojit::LIns** ins, bool* cond);
+
     JS_REQUIRES_STACK RecordingStatus guard(bool expected, nanojit::LIns* cond, ExitType exitType,
                                             bool abortIfAlwaysExits = false);
     JS_REQUIRES_STACK RecordingStatus guard(bool expected, nanojit::LIns* cond, VMSideExit* exit,
                                             bool abortIfAlwaysExits = false);
     JS_REQUIRES_STACK nanojit::LIns* guard_xov(nanojit::LOpcode op, nanojit::LIns* d0,
                                                nanojit::LIns* d1, VMSideExit* exit);
 
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
@@ -1058,22 +1064,22 @@ class TraceRecorder
     JS_REQUIRES_STACK nanojit::LIns* getFrameObjPtr(void* p);
     JS_REQUIRES_STACK nanojit::LIns* attemptImport(const Value* p);
     JS_REQUIRES_STACK nanojit::LIns* addr(Value* p);
 
     JS_REQUIRES_STACK bool knownImpl(const void* p);
     JS_REQUIRES_STACK bool known(const Value* p);
     JS_REQUIRES_STACK bool known(JSObject** p);
     /*
-     * The dslots of the global object are sometimes reallocated by the
+     * The slots of the global object are sometimes reallocated by the
      * interpreter.  This function checks for that condition and re-maps the
      * entries of the tracker accordingly.
      */
     JS_REQUIRES_STACK void checkForGlobalObjectReallocation() {
-        if (global_dslots != globalObj->getSlots())
+        if (global_slots != globalObj->getSlots())
             checkForGlobalObjectReallocationHelper();
     }
     JS_REQUIRES_STACK void checkForGlobalObjectReallocationHelper();
 
     JS_REQUIRES_STACK TypeConsensus selfTypeStability(SlotMap& smap);
     JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, const void* ip,
                                                       TreeFragment** peer);
 
@@ -1107,16 +1113,17 @@ class TraceRecorder
     JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
     JS_REQUIRES_STACK nanojit::LIns* var(unsigned n);
     JS_REQUIRES_STACK void var(unsigned n, nanojit::LIns* i);
     JS_REQUIRES_STACK nanojit::LIns* upvar(JSScript* script, JSUpvarArray* uva, uintN index, Value& v);
     nanojit::LIns* stackLoad(nanojit::LIns* addr, nanojit::AccSet accSet, uint8 type);
     JS_REQUIRES_STACK nanojit::LIns* stack(int n);
     JS_REQUIRES_STACK void stack(int n, nanojit::LIns* i);
 
+    JS_REQUIRES_STACK void guardNonNeg(nanojit::LIns* d0, nanojit::LIns* d1, VMSideExit* exit);
     JS_REQUIRES_STACK nanojit::LIns* alu(nanojit::LOpcode op, jsdouble v0, jsdouble v1,
                                          nanojit::LIns* s0, nanojit::LIns* s1);
 
     bool condBranch(nanojit::LOpcode op, nanojit::LIns* cond, nanojit::LIns** brOut);
     nanojit::LIns* unoptimizableCondBranch(nanojit::LOpcode op, nanojit::LIns* cond);
     void labelForBranch(nanojit::LIns* br);
     void labelForBranches(nanojit::LIns* br1, nanojit::LIns* br2);
 
@@ -1162,30 +1169,31 @@ class TraceRecorder
 
 #if defined DEBUG_notme && defined XP_UNIX
     void dumpGuardedShapes(const char* prefix);
 #endif
 
     void forgetGuardedShapes();
 
     inline nanojit::LIns* shape_ins(nanojit::LIns *obj_ins);
+    inline nanojit::LIns* slots(nanojit::LIns *obj_ins);
     JS_REQUIRES_STACK AbortableRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
                                                                      JSObject*& obj2, PCVal& pcval);
     JS_REQUIRES_STACK RecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins,
                                                             JSObject* aobj,
                                                             JSObject* obj2,
                                                             PropertyCacheEntry* entry,
                                                             PCVal& pcval);
 
     void stobj_set_fslot(nanojit::LIns *obj_ins, unsigned slot, const Value &v,
                          nanojit::LIns* v_ins);
     void stobj_set_dslot(nanojit::LIns *obj_ins, unsigned slot,
-                         nanojit::LIns*& dslots_ins, const Value &v, nanojit::LIns* v_ins);
+                         nanojit::LIns*& slots_ins, const Value &v, nanojit::LIns* v_ins);
     void stobj_set_slot(JSObject *obj, nanojit::LIns* obj_ins, unsigned slot,
-                        nanojit::LIns*& dslots_ins, const Value &v, nanojit::LIns* v_ins);
+                        nanojit::LIns*& slots_ins, const Value &v, nanojit::LIns* v_ins);
 
     nanojit::LIns* stobj_get_slot_uint32(nanojit::LIns* obj_ins, unsigned slot);
     nanojit::LIns* unbox_slot(JSObject *obj, nanojit::LIns *obj_ins, uint32 slot,
                               VMSideExit *exit);
     nanojit::LIns* stobj_get_parent(nanojit::LIns* obj_ins);
     nanojit::LIns* stobj_get_private(nanojit::LIns* obj_ins);
     nanojit::LIns* stobj_get_private_uint32(nanojit::LIns* obj_ins);
     nanojit::LIns* stobj_get_proto(nanojit::LIns* obj_ins);
@@ -1230,16 +1238,17 @@ class TraceRecorder
     JS_REQUIRES_STACK RecordingStatus getPropertyById(nanojit::LIns* obj_ins, Value* outp);
     JS_REQUIRES_STACK RecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins,
                                                                   const js::Shape* shape,
                                                                   Value* outp);
     JS_REQUIRES_STACK RecordingStatus getPropertyWithScriptGetter(JSObject *obj,
                                                                   nanojit::LIns* obj_ins,
                                                                   const js::Shape* shape);
 
+    JS_REQUIRES_STACK nanojit::LIns* getStringLengthAndFlags(nanojit::LIns* str_ins);
     JS_REQUIRES_STACK nanojit::LIns* getStringLength(nanojit::LIns* str_ins);
     JS_REQUIRES_STACK nanojit::LIns* getStringChars(nanojit::LIns* str_ins);
     JS_REQUIRES_STACK RecordingStatus getCharCodeAt(JSString *str,
                                                     nanojit::LIns* str_ins, nanojit::LIns* idx_ins,
                                                     nanojit::LIns** out_ins);
     JS_REQUIRES_STACK nanojit::LIns* getUnitString(nanojit::LIns* str_ins, nanojit::LIns* idx_ins);
     JS_REQUIRES_STACK RecordingStatus getCharAt(JSString *str,
                                                 nanojit::LIns* str_ins, nanojit::LIns* idx_ins,
@@ -1280,17 +1289,17 @@ class TraceRecorder
     nanojit::LIns* non_double_object_value_has_type(nanojit::LIns* v_ins, JSValueType type);
     nanojit::LIns* unpack_ptr(nanojit::LIns* v_ins);
     nanojit::LIns* unbox_number_as_double(nanojit::LIns* v_ins, VMSideExit* exit);
     nanojit::LIns* unbox_object(nanojit::LIns* v_ins, JSValueType type, VMSideExit* exit);
     nanojit::LIns* unbox_non_double_object(nanojit::LIns* v_ins, JSValueType type, VMSideExit* exit);
 #endif
 
     nanojit::LIns* unbox_value(const Value& v, nanojit::LIns* vaddr_ins,
-                               ptrdiff_t offset, VMSideExit* exit,
+                               ptrdiff_t offset, nanojit::AccSet accSet, VMSideExit* exit,
                                bool force_double=false);
     void unbox_any_object(nanojit::LIns* vaddr_ins, nanojit::LIns** obj_ins,
                           nanojit::LIns** is_obj_ins, nanojit::AccSet accSet);
     nanojit::LIns* is_boxed_true(nanojit::LIns* vaddr_ins, nanojit::AccSet accSet);
     nanojit::LIns* is_boxed_magic(nanojit::LIns* vaddr_ins, JSWhyMagic why, nanojit::AccSet accSet);
 
     nanojit::LIns* is_string_id(nanojit::LIns* id_ins);
     nanojit::LIns* unbox_string_id(nanojit::LIns* id_ins);
@@ -1350,16 +1359,18 @@ class TraceRecorder
                                                        nanojit::LIns* args[], bool rooted);
     JS_REQUIRES_STACK void emitNativePropertyOp(const js::Shape* shape,
                                                 nanojit::LIns* obj_ins,
                                                 bool setflag,
                                                 nanojit::LIns* addr_boxed_val_ins);
     JS_REQUIRES_STACK RecordingStatus callSpecializedNative(JSNativeTraceInfo* trcinfo, uintN argc,
                                                               bool constructing);
     JS_REQUIRES_STACK RecordingStatus callNative(uintN argc, JSOp mode);
+    JS_REQUIRES_STACK RecordingStatus callFloatReturningInt(uintN argc,
+                                                            const nanojit::CallInfo *ci);
     JS_REQUIRES_STACK RecordingStatus functionCall(uintN argc, JSOp mode);
 
     JS_REQUIRES_STACK void trackCfgMerges(jsbytecode* pc);
     JS_REQUIRES_STACK void emitIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
     JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
     JS_REQUIRES_STACK AbortableRecordingStatus checkTraceEnd(jsbytecode* pc);
 
     AbortableRecordingStatus hasMethod(JSObject* obj, jsid id, bool& found);
@@ -1403,17 +1414,17 @@ class TraceRecorder
 #undef OPDEF
 
     inline void* operator new(size_t size) { return js_calloc(size); }
     inline void operator delete(void *p) { js_free(p); }
 
     JS_REQUIRES_STACK
     TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
-                  VMSideExit* expectedInnerExit, jsbytecode* outerTree,
+                  VMSideExit* expectedInnerExit, JSScript* outerScript, jsbytecode* outerPC,
                   uint32 outerArgc, bool speculate);
 
     /* The destructor should only be called through finish*, not directly. */
     ~TraceRecorder();
     JS_REQUIRES_STACK AbortableRecordingStatus finishSuccessfully();
 
     enum AbortResult { NORMAL_ABORT, JIT_RESET };
     JS_REQUIRES_STACK AbortResult finishAbort(const char* reason);
@@ -1436,17 +1447,17 @@ class TraceRecorder
     friend AbortResult AbortRecording(JSContext*, const char*);
     friend class BoxArg;
     friend void TraceMonitor::sweep();
 
   public:
     static bool JS_REQUIRES_STACK
     startRecorder(JSContext*, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
-                  VMSideExit* expectedInnerExit, jsbytecode* outerTree,
+                  VMSideExit* expectedInnerExit, JSScript* outerScript, jsbytecode* outerPC,
                   uint32 outerArgc, bool speculate);
 
     /* Accessors. */
     VMFragment*         getFragment() const { return fragment; }
     TreeFragment*       getTree() const { return tree; }
     bool                outOfMemory() const { return traceMonitor->outOfMemory(); }
     Oracle*             getOracle() const { return oracle; }
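
A usage sketch for the suspendCSE/resumeCSE pair documented above, written against the recorder's own condBranch/labelForBranch helpers (an assumption about intended use; error handling elided):

    // Keep CSE usable across a diamond without letting expressions
    // born inside it become CSE candidates.
    suspendCSE();
    nanojit::LIns *br;
    condBranch(nanojit::LIR_jf, cond_ins, &br);   // diamond entry
    /* ... emit one arm's code ... */
    labelForBranch(br);                           // join point
    resumeCSE();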
 
--- a/js/src/jsxdrapi.h
+++ b/js/src/jsxdrapi.h
@@ -200,17 +200,17 @@ JS_XDRFindClassById(JSXDRState *xdr, uin
  * Bytecode version number. Increment the subtrahend whenever JS bytecode
  * changes incompatibly.
  *
  * This version number should be XDR'ed once near the front of any file or
  * larger storage unit containing XDR'ed bytecode and other data, and checked
  * before deserialization of bytecode.  If the saved version does not match
  * the current version, abort deserialization and invalidate the file.
  */
-#define JSXDR_BYTECODE_VERSION      (0xb973c0de - 73)
+#define JSXDR_BYTECODE_VERSION      (0xb973c0de - 75)
 
 /*
  * Library-private functions.
  */
 extern JSBool
 js_XDRAtom(JSXDRState *xdr, JSAtom **atomp);
 
 JS_END_EXTERN_C
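
A minimal sketch of the handshake the version comment prescribes, using the public JS_XDRUint32 primitive (exact placement and error reporting are assumptions):

    uint32 version = JSXDR_BYTECODE_VERSION;
    if (!JS_XDRUint32(xdr, &version))       // encodes or decodes the tag
        return JS_FALSE;
    if (xdr->mode == JSXDR_DECODE && version != JSXDR_BYTECODE_VERSION)
        return JS_FALSE;                    // stale bytecode: invalidate the file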
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -208,16 +208,30 @@ xml_isXMLName(JSContext *cx, uintN argc,
 static inline bool
 AppendString(JSCharBuffer &cb, JSString *str)
 {
     const jschar *chars, *end;
     str->getCharsAndEnd(chars, end);
     return cb.append(chars, end);
 }
 
+/*
+ * This wrapper is needed because NewBuiltinClassInstance doesn't
+ * call the constructor, and we need a place to set the
+ * HAS_EQUALITY bit.
+ */
+static inline JSObject *
+NewBuiltinClassInstanceXML(JSContext *cx, Class *clasp)
+{
+    JSObject *obj = NewBuiltinClassInstance(cx, clasp);
+    if (obj && clasp->ext.equality)
+        obj->flags |= JSObject::HAS_EQUALITY;
+    return obj;
+}
+
 #define DEFINE_GETTER(name,code)                                               \
     static JSBool                                                              \
     name(JSContext *cx, JSObject *obj, jsid id, jsval *vp)                     \
     {                                                                          \
         code;                                                                  \
         return true;                                                           \
     }
 
@@ -304,17 +318,17 @@ static JSFunctionSpec namespace_methods[
     JS_FS_END
 };
 
 static JSObject *
 NewXMLNamespace(JSContext *cx, JSString *prefix, JSString *uri, JSBool declared)
 {
     JSObject *obj;
 
-    obj = NewBuiltinClassInstance(cx, &js_NamespaceClass);
+    obj = NewBuiltinClassInstanceXML(cx, &js_NamespaceClass);
     if (!obj)
         return JS_FALSE;
     JS_ASSERT(JSVAL_IS_VOID(obj->getNamePrefix()));
     JS_ASSERT(JSVAL_IS_VOID(obj->getNameURI()));
     JS_ASSERT(JSVAL_IS_VOID(obj->getNamespaceDeclared()));
     if (prefix)
         obj->setNamePrefix(STRING_TO_JSVAL(prefix));
     if (uri)
@@ -515,17 +529,17 @@ InitXMLQName(JSObject *obj, JSString *ur
     if (localName)
         obj->setQNameLocalName(STRING_TO_JSVAL(localName));
 }
 
 static JSObject *
 NewXMLQName(JSContext *cx, JSString *uri, JSString *prefix, JSString *localName,
             Class *clasp = &js_QNameClass)
 {
-    JSObject *obj = NewBuiltinClassInstance(cx, clasp);
+    JSObject *obj = NewBuiltinClassInstanceXML(cx, clasp);
     if (!obj)
         return NULL;
     JS_ASSERT(obj->isQName());
     InitXMLQName(obj, uri, prefix, localName);
     METER(xml_stats.qname);
     return obj;
 }
 
@@ -628,17 +642,17 @@ NamespaceHelper(JSContext *cx, JSObject 
     if (!obj) {
         /* Namespace called as function. */
         if (argc == 1 && isNamespace) {
             /* Namespace called with one Namespace argument is identity. */
             *rval = urival;
             return JS_TRUE;
         }
 
-        obj = NewBuiltinClassInstance(cx, &js_NamespaceClass);
+        obj = NewBuiltinClassInstanceXML(cx, &js_NamespaceClass);
         if (!obj)
             return JS_FALSE;
     }
     *rval = OBJECT_TO_JSVAL(obj);
     METER(xml_stats.xmlnamespace);
 
     empty = cx->runtime->emptyString;
     obj->setNamePrefix(STRING_TO_JSVAL(empty));
@@ -693,16 +707,18 @@ NamespaceHelper(JSContext *cx, JSObject 
     return JS_TRUE;
 }
 
 static JSBool
 Namespace(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *thisobj = NULL;
     (void)IsConstructing_PossiblyWithGivenThisObject(vp, &thisobj);
+    if (thisobj)
+        thisobj->flags |= JSObject::HAS_EQUALITY;
     return NamespaceHelper(cx, thisobj, argc, Jsvalify(vp + 2), Jsvalify(vp));
 }
 
 /*
  * When argc is -1, it indicates argv is empty but the code should behave as
  * if argc is 1 and argv[0] is JSVAL_VOID.
  */
 static JSBool
@@ -734,17 +750,17 @@ QNameHelper(JSContext *cx, JSObject *obj
             *rval = nameval;
             return JS_TRUE;
         }
 
         /*
          * Create and return a new QName or AttributeName object exactly as if
          * constructed.
          */
-        obj = NewBuiltinClassInstance(cx, clasp);
+        obj = NewBuiltinClassInstanceXML(cx, clasp);
         if (!obj)
             return JS_FALSE;
     }
     *rval = OBJECT_TO_JSVAL(obj);
     METER(xml_stats.qname);
 
     if (isQName) {
         /* If namespace is not specified and name is a QName, clone it. */
@@ -828,16 +844,18 @@ out:
     return JS_TRUE;
 }
 
 static JSBool
 QName(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *thisobj = NULL;
     (void)IsConstructing_PossiblyWithGivenThisObject(vp, &thisobj);
+    if (thisobj)
+        thisobj->flags |= JSObject::HAS_EQUALITY;
     return QNameHelper(cx, thisobj, &js_QNameClass, argc, Jsvalify(vp + 2), Jsvalify(vp));
 }
 
 static JSBool
 AttributeName(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *thisobj = NULL;
     (void)IsConstructing_PossiblyWithGivenThisObject(vp, &thisobj);
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -93,16 +93,18 @@ mjit::Compiler::Compiler(JSContext *cx, 
         ? fp->fun()
         : NULL),
     isConstructing(fp->isConstructing()),
     analysis(cx, script), jumpMap(NULL), frame(cx, script, masm),
     branchPatches(ContextAllocPolicy(cx)),
 #if defined JS_MONOIC
     mics(ContextAllocPolicy(cx)),
     callICs(ContextAllocPolicy(cx)),
+    equalityICs(ContextAllocPolicy(cx)),
+    traceICs(ContextAllocPolicy(cx)),
 #endif
 #if defined JS_POLYIC
     pics(ContextAllocPolicy(cx)), 
 #endif
     callPatches(ContextAllocPolicy(cx)),
     callSites(ContextAllocPolicy(cx)), 
     doubleList(ContextAllocPolicy(cx)),
     stubcc(cx, *this, frame, script),
@@ -212,16 +214,20 @@ mjit::Compiler::~Compiler()
     cx->free(jumpMap);
 }
 
 CompileStatus JS_NEVER_INLINE
 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
 {
     JS_ASSERT(cx->fp() == fp);
 
+    // Ensure that constructors have at least one slot.
+    if (fp->isConstructing() && !fp->script()->nslots)
+        fp->script()->nslots++;
+
     Compiler cc(cx, fp);
 
     return cc.compile();
 }
 
 CompileStatus
 mjit::Compiler::generatePrologue()
 {
@@ -310,16 +316,22 @@ mjit::Compiler::generatePrologue()
                                               FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
             masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
             masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
             masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
             hasScope.linkTo(masm.label(), &masm);
         }
     }
 
+    if (isConstructing)
+        constructThis();
+
+    if (debugMode)
+        stubCall(stubs::EnterScript);
+
     return Compile_Okay;
 }
 
 CompileStatus
 mjit::Compiler::generateEpilogue()
 {
     return Compile_Okay;
 }
@@ -356,16 +368,18 @@ mjit::Compiler::finishThisUp(JITScript *
     JSC::LinkBuffer fullCode(result, totalSize);
     JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
 
     size_t totalBytes = sizeof(JITScript) +
                         sizeof(void *) * script->length +
 #if defined JS_MONOIC
                         sizeof(ic::MICInfo) * mics.length() +
                         sizeof(ic::CallICInfo) * callICs.length() +
+                        sizeof(ic::EqualityICInfo) * equalityICs.length() +
+                        sizeof(ic::TraceICInfo) * traceICs.length() +
 #endif
 #if defined JS_POLYIC
                         sizeof(ic::PICInfo) * pics.length() +
 #endif
                         sizeof(CallSite) * callSites.length();
 
     uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
     if (!cursor) {
@@ -418,29 +432,16 @@ mjit::Compiler::finishThisUp(JITScript *
                 scriptMICs[i].stubCall = stubCode.locationOf(mics[i].call);
                 scriptMICs[i].stubEntry = stubCode.locationOf(mics[i].stubEntry);
                 scriptMICs[i].u.name.typeConst = mics[i].u.name.typeConst;
                 scriptMICs[i].u.name.dataConst = mics[i].u.name.dataConst;
 #if defined JS_PUNBOX64
                 scriptMICs[i].patchValueOffset = mics[i].patchValueOffset;
 #endif
                 break;
-              case ic::MICInfo::TRACER: {
-                uint32 offs = uint32(mics[i].jumpTarget - script->code);
-                JS_ASSERT(jumpMap[offs].isValid());
-                scriptMICs[i].traceHint = fullCode.locationOf(mics[i].traceHint);
-                scriptMICs[i].load = fullCode.locationOf(jumpMap[offs]);
-                scriptMICs[i].u.hints.hasSlowTraceHintOne = mics[i].slowTraceHintOne.isSet();
-                if (mics[i].slowTraceHintOne.isSet())
-                    scriptMICs[i].slowTraceHintOne = stubCode.locationOf(mics[i].slowTraceHintOne.get());
-                scriptMICs[i].u.hints.hasSlowTraceHintTwo = mics[i].slowTraceHintTwo.isSet();
-                if (mics[i].slowTraceHintTwo.isSet())
-                    scriptMICs[i].slowTraceHintTwo = stubCode.locationOf(mics[i].slowTraceHintTwo.get());
-                break;
-              }
               default:
                 JS_NOT_REACHED("Bad MIC kind");
             }
             stubCode.patch(mics[i].addrLabel, &scriptMICs[i]);
         }
     }
 
     jit->nCallICs = callICs.length();
@@ -498,16 +499,73 @@ mjit::Compiler::finishThisUp(JITScript *
             cics[i].argc = callICs[i].argc;
             cics[i].funObjReg = callICs[i].funObjReg;
             cics[i].funPtrReg = callICs[i].funPtrReg;
             cics[i].frameDepth = callICs[i].frameDepth;
             stubCode.patch(callICs[i].addrLabel1, &cics[i]);
             stubCode.patch(callICs[i].addrLabel2, &cics[i]);
         } 
     }
+
+    jit->nEqualityICs = equalityICs.length();
+    if (equalityICs.length()) {
+        jit->equalityICs = (ic::EqualityICInfo *)cursor;
+        cursor += sizeof(ic::EqualityICInfo) * equalityICs.length();
+    } else {
+        jit->equalityICs = NULL;
+    }
+
+    if (ic::EqualityICInfo *scriptEICs = jit->equalityICs) {
+        for (size_t i = 0; i < equalityICs.length(); i++) {
+            uint32 offs = uint32(equalityICs[i].jumpTarget - script->code);
+            JS_ASSERT(jumpMap[offs].isValid());
+            scriptEICs[i].target = fullCode.locationOf(jumpMap[offs]);
+            scriptEICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
+            scriptEICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
+            scriptEICs[i].stub = equalityICs[i].stub;
+            scriptEICs[i].lvr = equalityICs[i].lvr;
+            scriptEICs[i].rvr = equalityICs[i].rvr;
+            scriptEICs[i].tempReg = equalityICs[i].tempReg;
+            scriptEICs[i].cond = equalityICs[i].cond;
+            if (equalityICs[i].jumpToStub.isSet())
+                scriptEICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
+            scriptEICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
+            
+            stubCode.patch(equalityICs[i].addrLabel, &scriptEICs[i]);
+        }
+    }
+
+    jit->nTraceICs = traceICs.length();
+    if (traceICs.length()) {
+        jit->traceICs = (ic::TraceICInfo *)cursor;
+        cursor += sizeof(ic::TraceICInfo) * traceICs.length();
+    } else {
+        jit->traceICs = NULL;
+    }
+
+    if (ic::TraceICInfo *scriptTICs = jit->traceICs) {
+        for (size_t i = 0; i < traceICs.length(); i++) {
+            if (!traceICs[i].initialized)
+                continue;
+
+            uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
+            JS_ASSERT(jumpMap[offs].isValid());
+            scriptTICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
+            scriptTICs[i].jumpTarget = fullCode.locationOf(jumpMap[offs]);
+            scriptTICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
+#ifdef DEBUG
+            scriptTICs[i].jumpTargetPC = traceICs[i].jumpTarget;
+#endif
+            scriptTICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
+            if (traceICs[i].slowTraceHint.isSet())
+                scriptTICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
+            
+            stubCode.patch(traceICs[i].addrLabel, &scriptTICs[i]);
+        }
+    }
 #endif /* JS_MONOIC */
 
     for (size_t i = 0; i < callPatches.length(); i++) {
         CallPatchInfo &patch = callPatches[i];
 
         fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
         if (patch.hasSlowNcode)
             stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
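
The equality- and trace-IC tables slot into finishThisUp's single-allocation layout: every array is sized into totalBytes, allocated with one calloc, then carved off a moving cursor in the same order. A condensed sketch of the invariant (NULL-on-empty handling omitted):

    uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
    jit->mics        = (ic::MICInfo *)cursor;
    cursor += sizeof(ic::MICInfo) * mics.length();
    jit->callICs     = (ic::CallICInfo *)cursor;
    cursor += sizeof(ic::CallICInfo) * callICs.length();
    jit->equalityICs = (ic::EqualityICInfo *)cursor;
    cursor += sizeof(ic::EqualityICInfo) * equalityICs.length();
    jit->traceICs    = (ic::TraceICInfo *)cursor;
    cursor += sizeof(ic::TraceICInfo) * traceICs.length();
    // After the final carve, cursor must sit exactly totalBytes past the base.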
@@ -1362,27 +1420,27 @@ mjit::Compiler::generateMethod()
             PC += JSOP_LOCALINC_LENGTH;
             if (popped)
                 PC += JSOP_POP_LENGTH;
             break;
           }
           END_CASE(JSOP_LOCALDEC)
 
           BEGIN_CASE(JSOP_BINDNAME)
-            jsop_bindname(fullAtomIndex(PC));
+            jsop_bindname(fullAtomIndex(PC), true);
           END_CASE(JSOP_BINDNAME)
 
           BEGIN_CASE(JSOP_SETPROP)
-            if (!jsop_setprop(script->getAtom(fullAtomIndex(PC))))
+            if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
                 return Compile_Error;
           END_CASE(JSOP_SETPROP)
 
           BEGIN_CASE(JSOP_SETNAME)
           BEGIN_CASE(JSOP_SETMETHOD)
-            if (!jsop_setprop(script->getAtom(fullAtomIndex(PC))))
+            if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
                 return Compile_Error;
           END_CASE(JSOP_SETNAME)
 
           BEGIN_CASE(JSOP_THROW)
             prepareStubCall(Uses(1));
             stubCall(stubs::Throw);
             frame.pop();
           END_CASE(JSOP_THROW)
@@ -1673,16 +1731,17 @@ mjit::Compiler::generateMethod()
             masm.move(ImmPtr(fun), Registers::ArgReg1);
             stubCall(stubs::FlatLambda);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_LAMBDA_FC)
 
           BEGIN_CASE(JSOP_TRACE)
+          BEGIN_CASE(JSOP_NOTRACE)
           {
             if (analysis[PC].nincoming > 0)
                 interruptCheckHelper();
           }
           END_CASE(JSOP_TRACE)
 
           BEGIN_CASE(JSOP_DEBUGGER)
             prepareStubCall(Uses(0));
@@ -1725,23 +1784,16 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_DECGLOBAL)
           BEGIN_CASE(JSOP_GLOBALINC)
           BEGIN_CASE(JSOP_GLOBALDEC)
             /* Advances PC automatically. */
             jsop_globalinc(op, GET_SLOTNO(PC));
             break;
           END_CASE(JSOP_GLOBALINC)
 
-          BEGIN_CASE(JSOP_BEGIN)
-            if (isConstructing) {
-                if (!constructThis())
-                    return Compile_Error;
-            }
-          END_CASE(JSOP_BEGIN)
-
           default:
            /* Sorry, this opcode isn't implemented yet. */
 #ifdef JS_METHODJIT_SPEW
             JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
                        script->filename, js_PCToLineNumber(cx, script, PC));
 #endif
             return Compile_Abort;
         }
@@ -1955,16 +2007,21 @@ mjit::Compiler::emitReturnValue(Assemble
 void
 mjit::Compiler::emitReturn(FrameEntry *fe)
 {
     JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);
 
     /* Only the top of the stack can be returned. */
     JS_ASSERT_IF(fe, fe == frame.peek(-1));
 
+    if (debugMode) {
+        prepareStubCall(Uses(0));
+        stubCall(stubs::LeaveScript);
+    }
+
     /*
      * If there's a function object, deal with the fact that it can escape.
      * Note that after we've placed the call object, all tracked state can
      * be thrown away. This will happen anyway because the next live opcode
      * (if any) must have an incoming edge.
      *
      * However, it's an optimization to throw it away early - the tracker
      * won't be spilled on further exits or join points.
@@ -2016,64 +2073,33 @@ mjit::Compiler::stubCall(void *ptr)
     Call cl = masm.stubCall(ptr, PC, frame.stackDepth() + script->nfixed);
     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }
 
 void
 mjit::Compiler::interruptCheckHelper()
 {
-    RegisterID reg = frame.allocReg();
-
-    /*
-     * Bake in and test the address of the interrupt counter for the runtime.
-     * This is faster than doing two additional loads for the context's
-     * thread data, but will cause this thread to run slower if there are
-     * pending interrupts on some other thread.  For non-JS_THREADSAFE builds
-     * we can skip this, as there is only one flag to poll.
-     */
+    RegisterID cxreg = frame.allocReg();
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxreg);
 #ifdef JS_THREADSAFE
-    void *interrupt = (void*) &cx->runtime->interruptCounter;
-#else
-    void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
-#endif
-
-#if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
-    Jump jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
+    masm.loadPtr(Address(cxreg, offsetof(JSContext, thread)), cxreg);
+    Address flag(cxreg, offsetof(JSThread, data.interruptFlags));
 #else
-    /* Handle processors that can't load from absolute addresses. */
-    masm.move(ImmPtr(interrupt), reg);
-    Jump jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
+    masm.loadPtr(Address(cxreg, offsetof(JSContext, runtime)), cxreg);
+    Address flag(cxreg, offsetof(JSRuntime, threadData.interruptFlags));
 #endif
-
-    stubcc.linkExitDirect(jump, stubcc.masm.label());
-
-#ifdef JS_THREADSAFE
-    /*
-     * Do a slightly slower check for an interrupt on this thread.
-     * We don't want this thread to slow down excessively if the pending
-     * interrupt is on another thread.
-     */
-    stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
-    stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
-    Address flag(reg, offsetof(JSThread, data.interruptFlags));
-    Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
-#endif
-
-    frame.freeReg(reg);
-
-    frame.sync(stubcc.masm, Uses(0));
+    Jump jump = masm.branchTest32(Assembler::NonZero, flag);
+    frame.freeReg(cxreg);
+    stubcc.linkExit(jump, Uses(0));
+    stubcc.leave();
     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
     stubcc.call(stubs::Interrupt);
     ADD_CALLSITE(true);
     stubcc.rejoin(Changes(0));
-
-#ifdef JS_THREADSAFE
-    stubcc.linkRejoin(noInterrupt);
-#endif
 }
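
The rewritten interruptCheckHelper drops the baked-in absolute counter address (and the slower two-tier THREADSAFE recheck) in favor of polling the per-thread flag directly off the frame's context. In C terms the emitted fast path is roughly (a sketch; the stub's signature is assumed, JS_THREADSAFE case shown):

    JSContext *cx = f.cx;                 // loadPtr FrameAddress(offsetof(VMFrame, cx))
    if (cx->thread->data.interruptFlags)  // branchTest32 NonZero -> OOL path
        stubs::Interrupt(f, pc);          // stubcc.call with ArgReg1 = PC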
 
 void
 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
 {
     CallPatchInfo callPatch;
     callPatch.hasSlowNcode = false;
 
@@ -2139,24 +2165,21 @@ mjit::Compiler::inlineCallHelper(uint32 
     /*
      * Save constant |this| to optimize thisv stores for common call cases
      * like CALL[LOCAL, GLOBAL, ARG] which push NULL.
      */
     callIC.pc = PC;
     callIC.frameDepth = frame.frameDepth();
 
     /* Grab type and data registers up-front. */
-    MaybeRegisterID typeReg;
-    frame.ensureFullRegs(fe);
-
-    if (!fe->isTypeKnown()) {
-        typeReg = frame.tempRegForType(fe);
+    MaybeRegisterID typeReg, maybeDataReg;
+    frame.ensureFullRegs(fe, &typeReg, &maybeDataReg);
+    RegisterID dataReg = maybeDataReg.reg();
+    if (!fe->isTypeKnown())
         frame.pinReg(typeReg.reg());
-    }
-    RegisterID dataReg = frame.tempRegForData(fe);
     frame.pinReg(dataReg);
 
     /*
      * We rely on the fact that syncAndKill() is not allowed to touch the
      * registers we've preserved.
      */
     frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
     frame.unpinKilledReg(dataReg);
@@ -2387,30 +2410,38 @@ mjit::Compiler::emitStubCmpOp(BoolStub s
                                     : Assembler::NonZero;
         Jump j = masm.branchTest32(cond, Registers::ReturnReg,
                                    Registers::ReturnReg);
         jumpAndTrace(j, target);
     }
 }
 
 void
-mjit::Compiler::jsop_setprop_slow(JSAtom *atom)
+mjit::Compiler::jsop_setprop_slow(JSAtom *atom, bool usePropCache)
 {
     prepareStubCall(Uses(2));
     masm.move(ImmPtr(atom), Registers::ArgReg1);
-    stubCall(STRICT_VARIANT(stubs::SetName));
+    if (usePropCache)
+        stubCall(STRICT_VARIANT(stubs::SetName));
+    else
+        stubCall(STRICT_VARIANT(stubs::SetPropNoCache));
     JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
     frame.shimmy(1);
 }
 
 void
-mjit::Compiler::jsop_getprop_slow()
+mjit::Compiler::jsop_getprop_slow(JSAtom *atom, bool usePropCache)
 {
     prepareStubCall(Uses(1));
-    stubCall(stubs::GetProp);
+    if (usePropCache) {
+        stubCall(stubs::GetProp);
+    } else {
+        masm.move(ImmPtr(atom), Registers::ArgReg1);
+        stubCall(stubs::GetPropNoCache);
+    }
     frame.pop();
     frame.pushSynced();
 }
 
 bool
 mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
 {
     prepareStubCall(Uses(1));
@@ -2466,41 +2497,41 @@ mjit::Compiler::passMICAddress(MICGenInf
 #if defined JS_POLYIC
 void
 mjit::Compiler::passPICAddress(PICGenInfo &pic)
 {
     pic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
 }
 
 bool
-mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
+mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck, bool usePropCache)
 {
     FrameEntry *top = frame.peek(-1);
 
     /* If the incoming type will never PIC, take slow path. */
     if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
         JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
                      top->getKnownType() != JSVAL_TYPE_STRING);
-        jsop_getprop_slow();
+        jsop_getprop_slow(atom, usePropCache);
         return true;
     }
 
     /*
      * These two must be loaded first. The objReg because the string path
      * wants to read it, and the shapeReg because it could cause a spill that
      * the string path wouldn't sink back.
      */
     RegisterID objReg = Registers::ReturnReg;
     RegisterID shapeReg = Registers::ReturnReg;
     if (atom == cx->runtime->atomState.lengthAtom) {
         objReg = frame.copyDataIntoReg(top);
         shapeReg = frame.allocReg();
     }
 
-    PICGenInfo pic(ic::PICInfo::GET);
+    PICGenInfo pic(ic::PICInfo::GET, usePropCache);
 
     /* Guard that the type is an object. */
     Jump typeCheck;
     if (doTypeCheck && !top->isTypeKnown()) {
         RegisterID reg = frame.tempRegForType(top);
         pic.typeReg = reg;
 
         /* Start the hot path where it's easy to patch it. */
@@ -2599,17 +2630,17 @@ mjit::Compiler::jsop_getprop(JSAtom *ato
     return true;
 }
 
 #ifdef JS_POLYIC
 bool
 mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID objReg,
                                  RegisterID idReg, RegisterID shapeReg)
 {
-    PICGenInfo pic(ic::PICInfo::GETELEM);
+    PICGenInfo pic(ic::PICInfo::GETELEM, true);
 
     pic.objRemat = frame.dataRematInfo(obj);
     pic.idRemat = frame.dataRematInfo(id);
     pic.shapeReg = shapeReg;
     pic.hasTypeCheck = false;
 
     pic.fastPathStart = masm.label();
 
@@ -2714,17 +2745,17 @@ mjit::Compiler::jsop_callprop_generic(JS
     /*
      * These two must be loaded first. The objReg because the string path
      * wants to read it, and the shapeReg because it could cause a spill that
      * the string path wouldn't sink back.
      */
     RegisterID objReg = frame.copyDataIntoReg(top);
     RegisterID shapeReg = frame.allocReg();
 
-    PICGenInfo pic(ic::PICInfo::CALL);
+    PICGenInfo pic(ic::PICInfo::CALL, true);
 
     /* Guard that the type is an object. */
     pic.typeReg = frame.copyTypeIntoReg(top);
 
     /* Start the hot path where it's easy to patch it. */
     pic.fastPathStart = masm.label();
 
     /*
@@ -2884,17 +2915,17 @@ mjit::Compiler::jsop_callprop_str(JSAtom
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
 {
     FrameEntry *top = frame.peek(-1);
 
-    PICGenInfo pic(ic::PICInfo::CALL);
+    PICGenInfo pic(ic::PICInfo::CALL, true);
 
     JS_ASSERT(top->isTypeKnown());
     JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);
 
     pic.fastPathStart = masm.label();
     pic.hasTypeCheck = false;
     pic.typeReg = Registers::ReturnReg;
 
@@ -3002,58 +3033,50 @@ mjit::Compiler::jsop_callprop(JSAtom *at
     }
 
     if (top->isTypeKnown())
         return jsop_callprop_obj(atom);
     return jsop_callprop_generic(atom);
 }
 
 bool
-mjit::Compiler::jsop_setprop(JSAtom *atom)
+mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
 {
     FrameEntry *lhs = frame.peek(-2);
     FrameEntry *rhs = frame.peek(-1);
 
     /* If the incoming type will never PIC, take slow path. */
     if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
-        jsop_setprop_slow(atom);
+        jsop_setprop_slow(atom, usePropCache);
         return true;
     }
 
     JSOp op = JSOp(*PC);
 
-    PICGenInfo pic(op == JSOP_SETMETHOD ? ic::PICInfo::SETMETHOD : ic::PICInfo::SET);
+    PICGenInfo pic(op == JSOP_SETMETHOD ? ic::PICInfo::SETMETHOD : ic::PICInfo::SET, usePropCache);
     pic.atom = atom;
 
     /* Guard that the type is an object. */
     Jump typeCheck;
     if (!lhs->isTypeKnown()) {
         RegisterID reg = frame.tempRegForType(lhs);
         pic.typeReg = reg;
 
         /* Start the hot path where it's easy to patch it. */
         pic.fastPathStart = masm.label();
         Jump j = masm.testObject(Assembler::NotEqual, reg);
 
         pic.typeCheck = stubcc.linkExit(j, Uses(2));
         stubcc.leave();
 
-        /*
-         * This gets called from PROPINC/PROPDEC which aren't compatible with
-         * the normal SETNAME property cache logic.
-         */
-        JSOp op = JSOp(*PC);
         stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
-        if (op == JSOP_SETNAME || op == JSOP_SETPROP || op == JSOP_SETGNAME || op ==
-            JSOP_SETMETHOD) {
+        if (usePropCache)
             stubcc.call(STRICT_VARIANT(stubs::SetName));
-        } else {
+        else
             stubcc.call(STRICT_VARIANT(stubs::SetPropNoCache));
-        }
-
         typeCheck = stubcc.masm.jump();
         pic.hasTypeCheck = true;
     } else {
         pic.fastPathStart = masm.label();
         pic.hasTypeCheck = false;
         pic.typeReg = Registers::ReturnReg;
     }
 
@@ -3148,17 +3171,17 @@ mjit::Compiler::jsop_setprop(JSAtom *ato
 
     pics.append(pic);
     return true;
 }
 
 void
 mjit::Compiler::jsop_name(JSAtom *atom)
 {
-    PICGenInfo pic(ic::PICInfo::NAME);
+    PICGenInfo pic(ic::PICInfo::NAME, true);
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.allocReg();
     pic.typeReg = Registers::ReturnReg;
     pic.atom = atom;
     pic.hasTypeCheck = false;
     pic.fastPathStart = masm.label();
 
@@ -3180,17 +3203,17 @@ mjit::Compiler::jsop_name(JSAtom *atom)
     stubcc.rejoin(Changes(1));
 
     pics.append(pic);
 }
 
 bool
 mjit::Compiler::jsop_xname(JSAtom *atom)
 {
-    PICGenInfo pic(ic::PICInfo::XNAME);
+    PICGenInfo pic(ic::PICInfo::XNAME, true);
 
     FrameEntry *fe = frame.peek(-1);
     if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
         return jsop_getprop(atom);
     }
 
     if (!fe->isTypeKnown()) {
         Jump notObject = frame.testObject(Assembler::NotEqual, fe);
@@ -3222,19 +3245,19 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
 
     stubcc.rejoin(Changes(1));
 
     pics.append(pic);
     return true;
 }
 
 void
-mjit::Compiler::jsop_bindname(uint32 index)
+mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
 {
-    PICGenInfo pic(ic::PICInfo::BIND);
+    PICGenInfo pic(ic::PICInfo::BIND, usePropCache);
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.allocReg();
     pic.typeReg = Registers::ReturnReg;
     pic.atom = script->getAtom(index);
     pic.hasTypeCheck = false;
     pic.fastPathStart = masm.label();
 
@@ -3285,49 +3308,54 @@ mjit::Compiler::jsop_name(JSAtom *atom)
 
 bool
 mjit::Compiler::jsop_xname(JSAtom *atom)
 {
     return jsop_getprop(atom);
 }
 
 bool
-mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck)
+mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck, bool usePropCache)
 {
-    jsop_getprop_slow();
+    jsop_getprop_slow(atom, usePropCache);
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop(JSAtom *atom)
 {
     return jsop_callprop_slow(atom);
 }
 
 bool
-mjit::Compiler::jsop_setprop(JSAtom *atom)
+mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
 {
-    jsop_setprop_slow(atom);
+    jsop_setprop_slow(atom, usePropCache);
     return true;
 }
 
 void
-mjit::Compiler::jsop_bindname(uint32 index)
+mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
 {
     RegisterID reg = frame.allocReg();
     Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
     masm.loadPtr(scopeChain, reg);
 
     Address address(reg, offsetof(JSObject, parent));
 
     Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));
 
     stubcc.linkExit(j, Uses(0));
     stubcc.leave();
-    stubcc.call(stubs::BindName);
+    if (usePropCache) {
+        stubcc.call(stubs::BindName);
+    } else {
+        masm.move(ImmPtr(script->getAtom(index)), Registers::ArgReg1);
+        stubcc.call(stubs::BindNameNoCache);
+    }
 
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
 
     stubcc.rejoin(Changes(1));
 }
 #endif
 
 void
@@ -3460,29 +3488,29 @@ mjit::Compiler::jsop_nameinc(JSOp op, Vo
 
         frame.push(Int32Value(amt));
         // V 1
 
         /* Use sub since it calls ValueToNumber instead of string concat. */
         jsop_binary(JSOP_SUB, stubs::Sub);
         // N+1
 
-        jsop_bindname(index);
+        jsop_bindname(index, false);
         // V+1 OBJ
 
         frame.dup2();
         // V+1 OBJ V+1 OBJ
 
         frame.shift(-3);
         // OBJ OBJ V+1
 
         frame.shift(-1);
         // OBJ V+1
 
-        if (!jsop_setprop(atom))
+        if (!jsop_setprop(atom, false))
             return false;
         // V+1
 
         if (pop)
             frame.pop();
     } else {
         /* The pre-value is observed, making this more tricky. */
 
@@ -3496,29 +3524,29 @@ mjit::Compiler::jsop_nameinc(JSOp op, Vo
         // N N
 
         frame.push(Int32Value(-amt));
         // N N 1
 
         jsop_binary(JSOP_ADD, stubs::Add);
         // N N+1
 
-        jsop_bindname(index);
+        jsop_bindname(index, false);
         // N N+1 OBJ
 
         frame.dup2();
         // N N+1 OBJ N+1 OBJ
 
         frame.shift(-3);
         // N OBJ OBJ N+1
 
         frame.shift(-1);
         // N OBJ N+1
 
-        if (!jsop_setprop(atom))
+        if (!jsop_setprop(atom, false))
             return false;
         // N N+1
 
         frame.pop();
         // N
     }
 
     if (pop)
@@ -3557,17 +3585,17 @@ mjit::Compiler::jsop_propinc(JSOp op, Vo
 
             frame.push(Int32Value(amt));
             // OBJ V 1
 
             /* Use sub since it calls ValueToNumber instead of string concat. */
             jsop_binary(JSOP_SUB, stubs::Sub);
             // OBJ V+1
 
-            if (!jsop_setprop(atom))
+            if (!jsop_setprop(atom, false))
                 return false;
             // V+1
 
             if (pop)
                 frame.pop();
         } else {
             /* The pre-value is observed, making this more tricky. */
 
@@ -3591,17 +3619,17 @@ mjit::Compiler::jsop_propinc(JSOp op, Vo
             // OBJ N N+1
 
             frame.dupAt(-3);
             // OBJ N N+1 OBJ
 
             frame.dupAt(-2);
             // OBJ N N+1 OBJ N+1
 
-            if (!jsop_setprop(atom))
+            if (!jsop_setprop(atom, false))
                 return false;
             // OBJ N N+1 N+1
 
             frame.popn(2);
             // OBJ N
 
             frame.shimmy(1);
             // N
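The nameinc/propinc rewrites above pass usePropCache = false on every binding and store, replacing the old trick of re-reading *PC inside jsop_setprop (the comment deleted from jsop_setprop earlier in this patch explained that these read-modify-write ops are incompatible with the normal SETNAME property cache logic). A minimal sketch of the new call pattern, with names from this patch:

    // Read-modify-write name ops opt out of the property cache explicitly:
    jsop_bindname(index, /* usePropCache = */ false);
    // ...dup/shift the stack, compute the incremented value...
    jsop_setprop(atom, /* usePropCache = */ false);
    // Plain JSOP_SETNAME/JSOP_SETPROP sites keep the default of true.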
@@ -4272,55 +4300,58 @@ mjit::Compiler::jsop_instanceof()
 
 /*
  * Note: This function emits tracer hooks into the OOL path. This means if
  * it is used in the middle of an in-progress slow path, the stream will be
  * hopelessly corrupted. Take care to only call this before linkExits() and
  * after rejoin()s.
  */
 void
-mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slowOne, Jump *slowTwo)
+mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slow)
 {
 #ifndef JS_TRACER
     jumpInScript(j, target);
-    if (slowOne)
-        stubcc.jumpInScript(*slowOne, target);
-    if (slowTwo)
-        stubcc.jumpInScript(*slowTwo, target);
+    if (slow)
+        stubcc.jumpInScript(*slow, target);
 #else
-    if (!addTraceHints || target >= PC || JSOp(*target) != JSOP_TRACE) {
+    if (!addTraceHints || target >= PC || JSOp(*target) != JSOP_TRACE
+#ifdef JS_MONOIC
+        || GET_UINT16(target) == BAD_TRACEIC_INDEX
+#endif
+        )
+    {
         jumpInScript(j, target);
-        if (slowOne)
-            stubcc.jumpInScript(*slowOne, target);
-        if (slowTwo)
-            stubcc.jumpInScript(*slowTwo, target);
+        if (slow)
+            stubcc.jumpInScript(*slow, target);
         return;
     }
 
 # if JS_MONOIC
-    MICGenInfo mic(ic::MICInfo::TRACER);
-
-    mic.entry = masm.label();
-    mic.jumpTarget = target;
-    mic.traceHint = j;
-    if (slowOne)
-        mic.slowTraceHintOne = *slowOne;
-    if (slowTwo)
-        mic.slowTraceHintTwo = *slowTwo;
+    TraceGenInfo ic;
+
+    ic.initialized = true;
+    ic.stubEntry = stubcc.masm.label();
+    ic.jumpTarget = target;
+    ic.traceHint = j;
+    if (slow)
+        ic.slowTraceHint = *slow;
+
+    uint16 index = GET_UINT16(target);
+    if (traceICs.length() <= index)
+        traceICs.resize(index+1);
 # endif
 
     Label traceStart = stubcc.masm.label();
 
     stubcc.linkExitDirect(j, traceStart);
-    if (slowOne)
-        slowOne->linkTo(traceStart, &stubcc.masm);
-    if (slowTwo)
-        slowTwo->linkTo(traceStart, &stubcc.masm);
+    if (slow)
+        slow->linkTo(traceStart, &stubcc.masm);
 # if JS_MONOIC
-    passMICAddress(mic);
+    ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
+    traceICs[index] = ic;
 # endif
 
     /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
     {
         jsbytecode* pc = PC;
         PC = target;
 
         stubcc.call(stubs::InvokeTracer);
@@ -4329,20 +4360,16 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
     }
 
     Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                         Registers::ReturnReg);
     restoreFrameRegs(stubcc.masm);
     stubcc.masm.jump(Registers::ReturnReg);
     no.linkTo(stubcc.masm.label(), &stubcc.masm);
     stubcc.jumpInScript(stubcc.masm.jump(), target);
-
-# if JS_MONOIC
-    mics.append(mic);
-# endif
 #endif
 }
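For reference, a fused-branch caller now hands jumpAndTrace at most one out-of-line jump; a reduced, hedged sketch of the calling convention, with register and label names as used by the equality code below:

    Jump fast = masm.branch32(cond, lvr.dataReg(), rvr.dataReg());    // inline hit
    Jump stubBranch = stubcc.masm.branchTest32(ncond, Registers::ReturnReg,
                                               Registers::ReturnReg); // OOL result
    jumpAndTrace(fast, target, &stubBranch);  // emits one TRACE IC covering both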
 
 void
 mjit::Compiler::enterBlock(JSObject *obj)
 {
     // If this is an exception entry point, then jsl_InternalThrow has set
     // VMFrame::fp to the correct fp for the entry point. We need to copy
@@ -4400,17 +4427,17 @@ mjit::Compiler::constructThis()
 
     // Load the callee.
     Address callee(JSFrameReg, JSStackFrame::offsetOfCallee(fun));
     RegisterID calleeReg = frame.allocReg();
     masm.loadPayload(callee, calleeReg);
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, calleeReg);
 
     // Get callee.prototype.
-    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom))
+    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false))
         return false;
 
     // Reach into the proto Value and grab a register for its data.
     FrameEntry *protoFe = frame.peek(-1);
     RegisterID protoReg = frame.ownRegForData(protoFe);
 
     // Now, get the type. If it's not an object, set protoReg to NULL.
     Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -76,29 +76,52 @@ class Compiler : public BaseCompiler
 #if defined JS_PUNBOX64
         uint32 patchValueOffset;
 #endif
         Label load;
         Call call;
         ic::MICInfo::Kind kind;
         jsbytecode *jumpTarget;
         Jump traceHint;
-        MaybeJump slowTraceHintOne;
-        MaybeJump slowTraceHintTwo;
+        MaybeJump slowTraceHint;
         union {
             struct {
                 bool typeConst;
                 bool dataConst;
             } name;
             struct {
                 uint32 pcOffs;
             } tracer;
         } u;
     };
 
+    struct EqualityGenInfo {
+        DataLabelPtr addrLabel;
+        Label stubEntry;
+        Call stubCall;
+        BoolStub stub;
+        MaybeJump jumpToStub;
+        Label fallThrough;
+        jsbytecode *jumpTarget;
+        ValueRemat lvr, rvr;
+        Assembler::Condition cond;
+        JSC::MacroAssembler::RegisterID tempReg;
+    };
+    
+    struct TraceGenInfo {
+        bool initialized;
+        Label stubEntry;
+        DataLabelPtr addrLabel;
+        jsbytecode *jumpTarget;
+        Jump traceHint;
+        MaybeJump slowTraceHint;
+
+        TraceGenInfo() : initialized(false) {}
+    };
+
     /* InlineFrameAssembler wants to see this. */
   public:
     struct CallGenInfo {
         CallGenInfo(uint32 argc)
           : argc(argc)
         { }
 
         /*
@@ -134,44 +157,47 @@ class Compiler : public BaseCompiler
         Label joinPoint;
         DataLabelPtr fastNcodePatch;
         DataLabelPtr slowNcodePatch;
         bool hasSlowNcode;
     };
 
 #if defined JS_POLYIC
     struct PICGenInfo {
-        PICGenInfo(ic::PICInfo::Kind kind) : kind(kind)
+        PICGenInfo(ic::PICInfo::Kind kind, bool usePropCache)
+          : kind(kind), usePropCache(usePropCache)
         { }
         ic::PICInfo::Kind kind;
         Label fastPathStart;
         Label storeBack;
         Label typeCheck;
         Label slowPathStart;
         DataLabelPtr addrLabel;
         RegisterID shapeReg;
         RegisterID objReg;
         RegisterID idReg;
         RegisterID typeReg;
+        bool usePropCache;
         Label shapeGuard;
         JSAtom *atom;
         StateRemat objRemat;
         StateRemat idRemat;
         Call callReturn;
         bool hasTypeCheck;
         ValueRemat vr;
 # if defined JS_CPU_X64
         ic::PICLabels labels;
 # endif
 
         void copySimpleMembersTo(ic::PICInfo &pi) const {
             pi.kind = kind;
             pi.shapeReg = shapeReg;
             pi.objReg = objReg;
             pi.atom = atom;
+            pi.usePropCache = usePropCache;
             if (kind == ic::PICInfo::SET) {
                 pi.u.vr = vr;
             } else if (kind != ic::PICInfo::NAME) {
                 pi.u.get.idReg = idReg;
                 pi.u.get.typeReg = typeReg;
                 pi.u.get.hasTypeCheck = hasTypeCheck;
                 pi.u.get.objRemat = objRemat.offset;
             }
@@ -210,16 +236,18 @@ class Compiler : public BaseCompiler
     Label *jumpMap;
     jsbytecode *PC;
     Assembler masm;
     FrameState frame;
     js::Vector<BranchPatch, 64> branchPatches;
 #if defined JS_MONOIC
     js::Vector<MICGenInfo, 64> mics;
     js::Vector<CallGenInfo, 64> callICs;
+    js::Vector<EqualityGenInfo, 64> equalityICs;
+    js::Vector<TraceGenInfo, 64> traceICs;
 #endif
 #if defined JS_POLYIC
     js::Vector<PICGenInfo, 16> pics;
 #endif
     js::Vector<CallPatchInfo, 64> callPatches;
     js::Vector<InternalCallSite, 64> callSites;
     js::Vector<DoublePatch, 16> doubleList;
     StubCompiler stubcc;
@@ -269,21 +297,21 @@ class Compiler : public BaseCompiler
     void passPICAddress(PICGenInfo &pic);
 #endif
 #ifdef JS_MONOIC
     void passMICAddress(MICGenInfo &mic);
 #endif
     bool constructThis();
 
     /* Opcode handlers. */
-    void jumpAndTrace(Jump j, jsbytecode *target, Jump *slowOne = NULL, Jump *slowTwo = NULL);
-    void jsop_bindname(uint32 index);
+    void jumpAndTrace(Jump j, jsbytecode *target, Jump *slow = NULL);
+    void jsop_bindname(uint32 index, bool usePropCache);
     void jsop_setglobal(uint32 index);
     void jsop_getglobal(uint32 index);
-    void jsop_getprop_slow();
+    void jsop_getprop_slow(JSAtom *atom, bool usePropCache = true);
     void jsop_getarg(uint32 index);
     void jsop_this();
     void emitReturn(FrameEntry *fe);
     void emitFinalReturn(Assembler &masm);
     void loadReturnValue(Assembler *masm, FrameEntry *fe);
     void emitReturnValue(Assembler *masm, FrameEntry *fe);
     void dispatchCall(VoidPtrStubUInt32 stub, uint32 argc);
     void interruptCheckHelper();
@@ -297,20 +325,20 @@ class Compiler : public BaseCompiler
     void jsop_getgname(uint32 index);
     void jsop_getgname_slow(uint32 index);
     void jsop_setgname(uint32 index);
     void jsop_setgname_slow(uint32 index);
     void jsop_bindgname();
     void jsop_setelem_slow();
     void jsop_getelem_slow();
     void jsop_unbrand();
-    bool jsop_getprop(JSAtom *atom, bool typeCheck = true);
+    bool jsop_getprop(JSAtom *atom, bool typeCheck = true, bool usePropCache = true);
     bool jsop_length();
-    bool jsop_setprop(JSAtom *atom);
-    void jsop_setprop_slow(JSAtom *atom);
+    bool jsop_setprop(JSAtom *atom, bool usePropCache = true);
+    void jsop_setprop_slow(JSAtom *atom, bool usePropCache = true);
     bool jsop_callprop_slow(JSAtom *atom);
     bool jsop_callprop(JSAtom *atom);
     bool jsop_callprop_obj(JSAtom *atom);
     bool jsop_callprop_str(JSAtom *atom);
     bool jsop_callprop_generic(JSAtom *atom);
     bool jsop_instanceof();
     void jsop_name(JSAtom *atom);
     bool jsop_xname(JSAtom *atom);
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -952,18 +952,16 @@ mjit::Compiler::jsop_equality_int_string
         (frame.shouldAvoidDataRemat(lhs) && !rhs->isConstant())) {
         FrameEntry *temp = rhs;
         rhs = lhs;
         lhs = temp;
     }
 
     bool lhsInt = lhs->isType(JSVAL_TYPE_INT32);
     bool rhsInt = rhs->isType(JSVAL_TYPE_INT32);
-    bool lhsString = lhs->isType(JSVAL_TYPE_STRING);
-    bool rhsString = rhs->isType(JSVAL_TYPE_STRING);
 
     /* Invert the condition if fusing with an IFEQ branch. */
     bool flipCondition = (target && fused == JSOP_IFEQ);
 
     /* Get the condition being tested. */
     Assembler::Condition cond;
     switch (op) {
       case JSOP_EQ:
@@ -990,136 +988,123 @@ mjit::Compiler::jsop_equality_int_string
         frame.pinEntry(rhs, rvr);
 
         /*
          * Sync everything except the top two entries.
          * We will handle the lhs/rhs in the stub call path.
          */
         frame.syncAndKill(Registers(Registers::AvailRegs), Uses(frame.frameDepth()), Uses(2));
 
-        /* Temporary for OOL string path. */
-        RegisterID T1 = frame.allocReg();
+        RegisterID tempReg = frame.allocReg();
 
         frame.pop();
         frame.pop();
         frame.discardFrame();
 
         /* Start of the slow path for equality stub call. */
-        Label stubCall = stubcc.masm.label();
+        Label stubEntry = stubcc.masm.label();
 
         JaegerSpew(JSpew_Insns, " ---- BEGIN STUB CALL CODE ---- \n");
 
         /* The lhs/rhs need to be synced in the stub call path. */
         frame.ensureValueSynced(stubcc.masm, lhs, lvr);
         frame.ensureValueSynced(stubcc.masm, rhs, rvr);
 
-        /* Call the stub, adjusting for the two values just pushed. */
-        stubcc.call(stub, frame.stackDepth() + script->nfixed + 2);
+        bool needStub = true;
+        
+#ifdef JS_MONOIC
+        EqualityGenInfo ic;
+
+        ic.cond = cond;
+        ic.tempReg = tempReg;
+        ic.lvr = lvr;
+        ic.rvr = rvr;
+        ic.stubEntry = stubEntry;
+        ic.stub = stub;
+
+        bool useIC = !addTraceHints || target >= PC;
+
+        /* Call the IC stub, which may generate a fast path. */
+        if (useIC) {
+            /* Adjust for the two values just pushed. */
+            ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
+            ic.stubCall = stubcc.call(ic::Equality, frame.stackDepth() + script->nfixed + 2);
+            needStub = false;
+        }
+#endif
+
+        if (needStub)
+            stubcc.call(stub, frame.stackDepth() + script->nfixed + 2);
 
         /*
          * The stub call has no need to rejoin, since state is synced.
          * Instead, we can just test the return value.
          */
         Assembler::Condition ncond = (fused == JSOP_IFEQ)
                                    ? Assembler::Zero
                                    : Assembler::NonZero;
         Jump stubBranch =
             stubcc.masm.branchTest32(ncond, Registers::ReturnReg, Registers::ReturnReg);
         Jump stubFallthrough = stubcc.masm.jump();
 
         JaegerSpew(JSpew_Insns, " ---- END STUB CALL CODE ---- \n");
 
-        /* Emit an OOL string path if both sides might be strings. */
-        bool stringPath = !(lhsInt || rhsInt);
-        Label missedInt = stubCall;
-        Jump stringFallthrough;
-        Jump stringMatched;
-
-        if (stringPath) {
-            missedInt = stubcc.masm.label();
-
-            if (!lhsString) {
-                Jump lhsFail = stubcc.masm.testString(Assembler::NotEqual, lvr.typeReg());
-                lhsFail.linkTo(stubCall, &stubcc.masm);
-            }
-            if (!rhsString) {
-                JS_ASSERT(!rhsConst);
-                Jump rhsFail = stubcc.masm.testString(Assembler::NotEqual, rvr.typeReg());
-                rhsFail.linkTo(stubCall, &stubcc.masm);
-            }
-
-            /* Test if lhs/rhs are atomized. */
-            Imm32 atomizedFlags(JSString::FLAT | JSString::ATOMIZED);
-
-            stubcc.masm.load32(Address(lvr.dataReg(), offsetof(JSString, mLengthAndFlags)), T1);
-            stubcc.masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), T1);
-            Jump lhsNotAtomized = stubcc.masm.branch32(Assembler::NotEqual, T1, atomizedFlags);
-            lhsNotAtomized.linkTo(stubCall, &stubcc.masm);
+        Jump fast;
+        MaybeJump firstStubJump;
 
-            if (!rhsConst) {
-                stubcc.masm.load32(Address(rvr.dataReg(), offsetof(JSString, mLengthAndFlags)), T1);
-                stubcc.masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), T1);
-                Jump rhsNotAtomized = stubcc.masm.branch32(Assembler::NotEqual, T1, atomizedFlags);
-                rhsNotAtomized.linkTo(stubCall, &stubcc.masm);
-            }
-
-            if (rhsConst) {
-                JSString *str = rval.toString();
-                JS_ASSERT(str->isAtomized());
-                stringMatched = stubcc.masm.branchPtr(cond, lvr.dataReg(), ImmPtr(str));
-            } else {
-                stringMatched = stubcc.masm.branchPtr(cond, lvr.dataReg(), rvr.dataReg());
-            }
-
-            stringFallthrough = stubcc.masm.jump();
-        }
-
-        Jump fast;
-        if (lhsString || rhsString) {
-            /* Jump straight to the OOL string path. */
-            Jump jump = masm.jump();
-            stubcc.linkExitDirect(jump, missedInt);
-            fast = masm.jump();
-        } else {
-            /* Emit inline integer path. */
+        if (lhsInt || rhsInt || (!lhs->isTypeKnown() && !rhs->isTypeKnown())) {
             if (!lhsInt) {
                 Jump lhsFail = masm.testInt32(Assembler::NotEqual, lvr.typeReg());
-                stubcc.linkExitDirect(lhsFail, missedInt);
+                stubcc.linkExitDirect(lhsFail, stubEntry);
+                firstStubJump = lhsFail;
             }
             if (!rhsInt) {
-                if (rhsConst) {
-                    Jump rhsFail = masm.jump();
-                    stubcc.linkExitDirect(rhsFail, missedInt);
-                } else {
-                    Jump rhsFail = masm.testInt32(Assembler::NotEqual, rvr.typeReg());
-                    stubcc.linkExitDirect(rhsFail, missedInt);
-                }
+                Jump rhsFail = masm.testInt32(Assembler::NotEqual, rvr.typeReg());
+                stubcc.linkExitDirect(rhsFail, stubEntry);
+                if (!firstStubJump.isSet())
+                    firstStubJump = rhsFail;
             }
 
             if (rhsConst)
                 fast = masm.branch32(cond, lvr.dataReg(), Imm32(rval.toInt32()));
             else
                 fast = masm.branch32(cond, lvr.dataReg(), rvr.dataReg());
+
+            jumpInScript(fast, target);
+        } else {
+            Jump j = masm.jump();
+            stubcc.linkExitDirect(j, stubEntry);
+            firstStubJump = j;
+
+            /* This is just a dummy jump. */
+            fast = masm.jump();
         }
 
-        /* Jump from the stub call and string path fallthroughs to here. */
+#ifdef JS_MONOIC
+        ic.jumpToStub = firstStubJump;
+        if (useIC) {
+            ic.fallThrough = masm.label();
+            ic.jumpTarget = target;
+            equalityICs.append(ic);
+        }
+#endif
+
+        /* Jump from the stub call fallthrough to here. */
         stubcc.crossJump(stubFallthrough, masm.label());
-        if (stringPath)
-            stubcc.crossJump(stringFallthrough, masm.label());
 
         /*
          * NB: jumpAndTrace emits to the OOL path, so make sure not to use it
          * in the middle of an in-progress slow path.
          */
-        jumpAndTrace(fast, target, &stubBranch, stringPath ? &stringMatched : NULL);
+        jumpAndTrace(fast, target, &stubBranch);
     } else {
         /* No fusing. Compare, set, and push a boolean. */
 
         /* Should have filtered these out in the caller. */
-        JS_ASSERT(!lhsString && !rhsString);
+        JS_ASSERT(!lhs->isType(JSVAL_TYPE_STRING) && !rhs->isType(JSVAL_TYPE_STRING));
 
         /* Test the types. */
         if (!lhsInt) {
             Jump lhsFail = frame.testInt32(Assembler::NotEqual, lhs);
             stubcc.linkExit(lhsFail, Uses(2));
         }
         if (!rhsInt) {
             Jump rhsFail = frame.testInt32(Assembler::NotEqual, rhs);
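A condensed sketch of the code shape this now produces for a fused equality (hedged; the exact layout depends on which operand types are known):

    // inline:  testInt32(lhs)   --miss--> stubEntry
    //          testInt32(rhs)   --miss--> stubEntry
    //          branch32(cond)   --hit---> jumpAndTrace(target)
    // OOL:     stubEntry: call ic::Equality   (or the plain stub if no IC)
    //          branchTest32(ReturnReg) --taken--> target
    //
    // On a miss, ic::Equality (MonoIC.cpp below) can generate a specialized
    // string or object path and repatch jumpToStub to reach it.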
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -665,20 +665,31 @@ mjit::Compiler::jsop_equality(JSOp op, B
          * Maybe put them next to each other, subtract, do a single compare?
          */
 
         if (target) {
             frame.syncAndForgetEverything();
 
             if ((op == JSOP_EQ && fused == JSOP_IFNE) ||
                 (op == JSOP_NE && fused == JSOP_IFEQ)) {
-                Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
-                jumpAndTrace(j, target);
-                j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_NULL));
-                jumpAndTrace(j, target);
+                /*
+                 * It would be easier to just have two jumpAndTrace calls here, but since
+                 * each jumpAndTrace creates a TRACE IC, and since we want the bytecode
+                 * to have a reference to the TRACE IC at the top of the loop, it's much
+                 * better to have only one TRACE IC per loop, and hence at most one
+                 * jumpAndTrace.
+                 */
+                Jump b1 = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
+                Jump b2 = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_NULL));
+                Jump j1 = masm.jump();
+                b1.linkTo(masm.label(), &masm);
+                b2.linkTo(masm.label(), &masm);
+                Jump j2 = masm.jump();
+                jumpAndTrace(j2, target);
+                j1.linkTo(masm.label(), &masm);
             } else {
                 Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
                 Jump j2 = masm.branchPtr(Assembler::NotEqual, reg, ImmType(JSVAL_TYPE_NULL));
                 jumpAndTrace(j2, target);
                 j.linkTo(masm.label(), &masm);
             }
         } else {
             Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
@@ -1596,17 +1607,17 @@ mjit::Compiler::jsop_stricteq(JSOp op)
         frame.push(BooleanValue((op == JSOP_STRICTEQ) ? b : !b));
         return;
     }
 
     if (frame.haveSameBacking(lhs, rhs)) {
         /* False iff NaN. */
         if (lhs->isTypeKnown() && lhs->isNotType(JSVAL_TYPE_DOUBLE)) {
             frame.popn(2);
-            frame.push(BooleanValue(true));
+            frame.push(BooleanValue(op == JSOP_STRICTEQ));
             return;
         }
         
         /* Assume NaN is in canonical form. */
         RegisterID result = frame.allocReg(Registers::SingleByteRegs);
         RegisterID treg = frame.tempRegForType(lhs);
 
         Assembler::Condition oppositeCond = (op == JSOP_STRICTEQ) ? Assembler::NotEqual : Assembler::Equal;
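The BooleanValue fix above matters because, when both operands share a backing entry of known non-double type, the comparison is decided at compile time:

    // a === a  folds to true   (JSOP_STRICTEQ)
    // a !== a  folds to false  (JSOP_STRICTNE)
    // NaN is the only same-backing exception, hence the JSVAL_TYPE_DOUBLE
    // guard and the canonical-NaN compare that follows.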
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -236,37 +236,44 @@ FrameState::pushSynced(JSValueType type,
     fe->setType(type);
     fe->data.setRegister(reg);
     regstate[reg].associate(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::push(Address address)
 {
-    FrameEntry *fe = rawPush();
-
-    /* :XXX: X64 */
-    fe->resetUnsynced();
-
-    /* Prevent us from clobbering this reg. */
+#ifdef JS_PUNBOX64
+    // It's okay if either of these allocations evicts address.base: eviction
+    // only syncs the old value to memory and never physically writes the
+    // register (see the matching note in the NUNBOX32 path below). It's also
+    // safe, on x64, for loadValueAsComponents() to take either the type or
+    // data reg as address.base.
+    RegisterID typeReg = allocReg();
+    RegisterID dataReg = allocReg();
+    masm.loadValueAsComponents(address, typeReg, dataReg);
+#elif JS_NUNBOX32
+    // Prevent us from clobbering this reg.
     bool free = freeRegs.hasReg(address.base);
     if (free)
         freeRegs.takeReg(address.base);
 
-    RegisterID dreg = allocReg(fe, RematInfo::DATA);
-    masm.loadPayload(address, dreg);
-    fe->data.setRegister(dreg);
+    RegisterID typeReg = allocReg();
+
+    masm.loadTypeTag(address, typeReg);
 
-    /* Now it's safe to grab this register again. */
+    // Allow re-use of the base register. This could avoid a spill, and
+    // is safe because the following allocReg() won't actually emit any
+    // writes to the register.
     if (free)
         freeRegs.putReg(address.base);
 
-    RegisterID treg = allocReg(fe, RematInfo::TYPE);
-    masm.loadTypeTag(address, treg);
-    fe->type.setRegister(treg);
+    RegisterID dataReg = allocReg();
+    masm.loadPayload(address, dataReg);
+#endif
+
+    pushRegs(typeReg, dataReg);
 }
 
 inline void
 FrameState::pushRegs(RegisterID type, RegisterID data)
 {
     JS_ASSERT(!freeRegs.hasReg(type) && !freeRegs.hasReg(data));
 
     FrameEntry *fe = rawPush();
@@ -592,23 +599,23 @@ FrameState::syncFe(FrameEntry *fe)
         masm.loadValue(addressOf(backing), Registers::ValueReg);
         masm.storeValue(Registers::ValueReg, addressOf(fe));
     } else {
         /* Store in case unpinning is necessary. */
         MaybeRegisterID pairReg;
 
         /* Get a register if necessary, without clobbering its pair. */
         if (needTypeReg) {
-            if (backing->data.inRegister()) {
+            if (backing->data.inRegister() && !regstate[backing->data.reg()].isPinned()) {
                 pairReg = backing->data.reg();
                 pinReg(backing->data.reg());
             }
             tempRegForType(backing);
         } else if (needDataReg) {
-            if (backing->type.inRegister()) {
+            if (backing->type.inRegister() && !regstate[backing->type.reg()].isPinned()) {
                 pairReg = backing->type.reg();
                 pinReg(backing->type.reg());
             }
             tempRegForData(backing);
         }
 
         ensureFeSynced(fe, masm);
 
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -1387,40 +1387,54 @@ FrameState::allocForSameBinary(FrameEntr
         alloc.lhsNeedsRemat = true;
     }
 
     if (alloc.lhsType.isSet())
         unpinReg(alloc.lhsType.reg());
 }
 
 void
-FrameState::ensureFullRegs(FrameEntry *fe)
+FrameState::ensureFullRegs(FrameEntry *fe, MaybeRegisterID *type, MaybeRegisterID *data)
 {
-    FrameEntry *backing = fe;
-    if (fe->isCopy())
-        backing = fe->copyOf();
+    fe = fe->isCopy() ? fe->copyOf() : fe;
 
+    JS_ASSERT(!data->isSet() && !type->isSet());
     if (!fe->type.inMemory()) {
-        if (fe->data.inRegister())
+        if (fe->type.inRegister())
+            *type = fe->type.reg();
+        if (fe->data.isConstant())
             return;
+        if (fe->data.inRegister()) {
+            *data = fe->data.reg();
+            return;
+        }
         if (fe->type.inRegister())
             pinReg(fe->type.reg());
-        if (fe->data.inMemory())
-            tempRegForData(fe);
+        *data = tempRegForData(fe);
         if (fe->type.inRegister())
             unpinReg(fe->type.reg());
     } else if (!fe->data.inMemory()) {
-        if (fe->type.inRegister())
+        if (fe->data.inRegister())
+            *data = fe->data.reg();
+        if (fe->type.isConstant())
             return;
+        if (fe->type.inRegister()) {
+            *type = fe->type.reg();
+            return;
+        }
         if (fe->data.inRegister())
             pinReg(fe->data.reg());
-        if (fe->type.inMemory())
-            tempRegForType(fe);
+        *type = tempRegForType(fe);
         if (fe->data.inRegister())
             unpinReg(fe->data.reg());
+    } else {
+        *data = tempRegForData(fe);
+        pinReg(data->reg());
+        *type = tempRegForType(fe);
+        unpinReg(data->reg());
     }
 }
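A hedged caller sketch for the new signature: both out-parameters must arrive unset (asserted above), and a constant type or data component may legitimately be left unset for the caller to materialize:

    MaybeRegisterID typeReg, dataReg;
    frame.ensureFullRegs(fe, &typeReg, &dataReg);
    if (typeReg.isSet())
        masm.storePtr(typeReg.reg(), someAddress);  // 'someAddress' is hypothetical
    // constant halves of fe, if any, stay unset here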
 
 void
 FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
                            bool needsResult)
 {
     FrameEntry *backingLeft = lhs;
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -243,22 +243,24 @@ class FrameState
         /* Unassociate this register from the FE. */
         void forget() {
             JS_ASSERT(fe_);
             fe_ = NULL;
             JS_ASSERT(!save_);
         }
 
         void pin() {
+            JS_ASSERT(fe_ != NULL);
             assertConsistency();
             save_ = fe_;
             fe_ = NULL;
         }
 
         void unpin() {
+            JS_ASSERT(save_ != NULL);
             assertConsistency();
             fe_ = save_;
             save_ = NULL;
         }
 
         void unpinUnsafe() {
             assertConsistency();
             save_ = NULL;
@@ -504,17 +506,17 @@ class FrameState
      * One mutable register is allocated as well, holding the LHS payload. If
      * this would cause a spill that could be avoided by using a mutable RHS,
      * and the operation is commutative, then the resultHasRhs is set to true.
      */
     void allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
                         bool resultNeeded = true);
 
     /* Ensures that an FE has both type and data remat'd in registers. */
-    void ensureFullRegs(FrameEntry *fe);
+    void ensureFullRegs(FrameEntry *fe, MaybeRegisterID *typeReg, MaybeRegisterID *dataReg);
 
     /*
      * Similar to allocForBinary, except works when the LHS and RHS have the
      * same backing FE. Only a reduced subset of BinaryAlloc is used:
      *   lhsType
      *   lhsData
      *   result
      *   lhsNeedsRemat
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -68,19 +68,16 @@
 #include "StubCalls-inl.h"
 
 #include "jsautooplen.h"
 
 using namespace js;
 using namespace js::mjit;
 using namespace JSC;
 
-static bool
-InlineReturn(VMFrame &f, JSBool ok, JSBool popFrame = JS_TRUE);
-
 static jsbytecode *
 FindExceptionHandler(JSContext *cx)
 {
     JSStackFrame *fp = cx->fp();
     JSScript *script = fp->script();
 
 top:
     if (cx->throwing && script->trynotesOffset) {
@@ -172,56 +169,29 @@ top:
 /*
  * Clean up a frame and return.  popFrame indicates whether to additionally pop
  * the frame and store the return value on the caller's stack.  The frame will
  * normally be popped by the caller on return from a call into JIT code,
  * so must be popped here when that caller code will not execute.  This can be
  * either because of a call into an un-JITable script, or because the call is
  * throwing an exception.
  */
-static bool
-InlineReturn(VMFrame &f, JSBool ok, JSBool popFrame)
+static void
+InlineReturn(VMFrame &f)
 {
     JSContext *cx = f.cx;
     JSStackFrame *fp = f.regs.fp;
 
     JS_ASSERT(f.fp() != f.entryFp);
 
     JS_ASSERT(!js_IsActiveWithOrBlock(cx, &fp->scopeChain(), 0));
 
-    // Marker for debug support.
-    if (JS_UNLIKELY(fp->hasHookData())) {
-        JSInterpreterHook hook;
-        JSBool status;
-
-        hook = cx->debugHooks->callHook;
-        if (hook) {
-            /*
-             * Do not pass &ok directly as exposing the address inhibits
-             * optimizations and uninitialised warnings.
-             */
-            status = ok;
-            hook(cx, fp, JS_FALSE, &status, fp->hookData());
-            ok = (status == JS_TRUE);
-            // CHECK_INTERRUPT_HANDLER();
-        }
-    }
-
-    PutActivationObjects(cx, fp);
-
-    if (fp->isConstructing() && fp->returnValue().isPrimitive())
-        fp->setReturnValue(fp->thisValue());
-
-    if (popFrame) {
-        Value *newsp = fp->actualArgs() - 1;
-        newsp[-1] = fp->returnValue();
-        cx->stack().popInlineFrame(cx, fp->prev(), newsp);
-    }
-
-    return ok;
+    Value *newsp = fp->actualArgs() - 1;
+    newsp[-1] = fp->returnValue();
+    cx->stack().popInlineFrame(cx, fp->prev(), newsp);
 }
 
 void JS_FASTCALL
 stubs::SlowCall(VMFrame &f, uint32 argc)
 {
     Value *vp = f.regs.sp - (argc + 2);
 
     if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0))
@@ -367,29 +337,28 @@ stubs::CompileFunction(VMFrame &f, uint3
         THROWV(NULL);
 
     CompileStatus status = CanMethodJIT(cx, script, fp);
     if (status == Compile_Okay)
         return script->getJIT(callingNew)->invokeEntry;
 
     /* Function did not compile... interpret it. */
     JSBool ok = Interpret(cx, fp);
-    InlineReturn(f, ok);
+    InlineReturn(f);
 
     if (!ok)
         THROWV(NULL);
 
     return NULL;
 }
 
 static inline bool
 UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
 {
     JSContext *cx = f.cx;
-    JSStackFrame *fp = f.fp();
     Value *vp = f.regs.sp - (argc + 2);
     JSObject &callee = vp->toObject();
     JSFunction *newfun = callee.getFunctionPrivate();
     JSScript *newscript = newfun->script();
 
     /* Get pointer to new frame/slots, prepare arguments. */
     StackSpace &stack = cx->stack();
     JSStackFrame *newfp = stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc,
@@ -407,40 +376,34 @@ UncachedInlineCall(VMFrame &f, uint32 fl
     /* Officially push the frame. */
     stack.pushInlineFrame(cx, newscript, newfp, &f.regs);
     JS_ASSERT(newfp == f.regs.fp);
 
     /* Scope with a call object parented by callee's parent. */
     if (newfun->isHeavyweight() && !js_GetCallObject(cx, newfp))
         return false;
 
-    /* Marker for debug support. */
-    if (JSInterpreterHook hook = cx->debugHooks->callHook) {
-        newfp->setHookData(hook(cx, fp, JS_TRUE, 0,
-                                cx->debugHooks->callHookData));
-    }
-
     /* Try to compile if not already compiled. */
     if (newscript->getJITStatus(newfp->isConstructing()) == JITScript_None) {
         if (mjit::TryCompile(cx, newfp) == Compile_Error) {
             /* A runtime exception was thrown, get out. */
-            InlineReturn(f, JS_FALSE);
+            InlineReturn(f);
             return false;
         }
     }
 
     /* If newscript was successfully compiled, run it. */
     if (JITScript *jit = newscript->getJIT(newfp->isConstructing())) {
         *pret = jit->invokeEntry;
         return true;
     }
 
     /* Otherwise, run newscript in the interpreter. */
     bool ok = !!Interpret(cx, cx->fp());
-    InlineReturn(f, JS_TRUE);
+    InlineReturn(f);
 
     *pret = NULL;
     return ok;
 }
 
 void * JS_FASTCALL
 stubs::UncachedNew(VMFrame &f, uint32 argc)
 {
@@ -569,21 +532,28 @@ js_InternalThrow(VMFrame &f)
             break;
 
         // If we are on the 'topmost' frame (where topmost means the first
         // frame called into through js_Interpret), we still unwind, but we
         // shouldn't return from a JS function, because we're not in a JS
         // function.
         bool lastFrame = (f.entryFp == f.fp());
         js_UnwindScope(cx, 0, cx->throwing);
+
+        // For consistency with Interpret(), always run the script epilogue.
+        // This simplifies interactions with RunTracer(), since it can assume
+        // no matter how a function exited (error or not), that the epilogue
+        // does not need to be run.
+        ScriptEpilogue(f.cx, f.fp(), false);
+
         if (lastFrame)
             break;
 
         JS_ASSERT(f.regs.sp == cx->regs->sp);
-        InlineReturn(f, JS_FALSE);
+        InlineReturn(f);
     }
 
     JS_ASSERT(f.regs.sp == cx->regs->sp);
 
     if (!pc)
         return NULL;
 
     JSStackFrame *fp = cx->fp();
@@ -606,46 +576,74 @@ stubs::CreateThis(VMFrame &f, JSObject *
     JSStackFrame *fp = f.fp();
     JSObject *callee = &fp->callee();
     JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
     if (!obj)
         THROW();
     fp->formalArgs()[-1].setObject(*obj);
 }
 
-static inline void
-AdvanceReturnPC(JSContext *cx)
+void JS_FASTCALL
+stubs::EnterScript(VMFrame &f)
 {
-    /* Simulate an inline_return by advancing the pc. */
-    JS_ASSERT(*cx->regs->pc == JSOP_CALL ||
-              *cx->regs->pc == JSOP_NEW ||
-              *cx->regs->pc == JSOP_EVAL ||
-              *cx->regs->pc == JSOP_APPLY);
-    cx->regs->pc += JSOP_CALL_LENGTH;
+    JSStackFrame *fp = f.fp();
+    JSContext *cx = f.cx;
+    JSInterpreterHook hook = cx->debugHooks->callHook;
+    if (JS_UNLIKELY(hook != NULL) && !fp->isExecuteFrame()) {
+        fp->setHookData(hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData));
+    }
+
+    Probes::enterJSFun(cx, fp->maybeFun());
+}
+
+void JS_FASTCALL
+stubs::LeaveScript(VMFrame &f)
+{
+    JSStackFrame *fp = f.fp();
+    JSContext *cx = f.cx;
+    Probes::exitJSFun(cx, fp->maybeFun());
+    JSInterpreterHook hook = cx->debugHooks->callHook;
+
+    if (hook && fp->hasHookData() && !fp->isExecuteFrame()) {
+        JSBool ok = JS_TRUE;
+        hook(cx, fp, JS_FALSE, &ok, fp->hookData());
+        if (!ok)
+            THROW();
+    }
 }
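Taken together, the debugger-hook and fixup work that InlineReturn used to do is now split across dedicated points; a rough ordering sketch (hedged: the exact emission sites are in the compiler, not this hunk):

    // EnterScript    - debugger call hook + Probes::enterJSFun
    //   ... the frame executes ...
    // ScriptEpilogue - return-value/activation-object fixup (shared with Interpret)
    // LeaveScript    - Probes::exitJSFun + debugger exit hook
    // InlineReturn   - pop the inline frame, store rval on the caller's stack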
 
 #ifdef JS_TRACER
 
+/*
+ * Called when an error is in progress and the topmost frame could not handle
+ * it. This will unwind to a given frame, or find and align to an exception
+ * handler in the process.
+ */
 static inline bool
-HandleErrorInExcessFrames(VMFrame &f, JSStackFrame *stopFp)
+HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostFrame = true)
 {
     JSContext *cx = f.cx;
 
     /*
      * Our callers have already gone through Interpret() or JaegerShot(),
      * which will have searched for exception handlers. If we see stopFp,
      * just return false. Otherwise, pop the frame, since it's guaranteed
      * useless.
+     *
+     * Note that this also guarantees ScriptEpilogue() has been called.
      */
     JSStackFrame *fp = cx->fp();
-    if (fp == stopFp)
-        return false;
+    if (searchedTopmostFrame) {
+        if (fp == stopFp)
+            return false;
 
-    bool returnOK = InlineReturn(f, false);
+        InlineReturn(f);
+    }
 
     /* Remove the bottom frame. */
+    bool returnOK = false;
     for (;;) {
         fp = cx->fp();
 
         /* Clear imacros. */
         if (fp->hasImacropc()) {
             cx->regs->pc = fp->imacropc();
             fp->clearImacropc();
         }
@@ -662,141 +660,267 @@ HandleErrorInExcessFrames(VMFrame &f, JS
         }
 
         /* Don't unwind if this was the entry frame. */
         if (fp == stopFp)
             break;
 
         /* Unwind and return. */
         returnOK &= bool(js_UnwindScope(cx, 0, returnOK || cx->throwing));
-        returnOK = InlineReturn(f, returnOK);
+        returnOK = ScriptEpilogue(cx, fp, returnOK);
+        InlineReturn(f);
     }
 
     JS_ASSERT(&f.regs == cx->regs);
     JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
 
     return returnOK;
 }
 
+/* Returns the method JIT'd native code for the current PC, or NULL if none. */
 static inline void *
 AtSafePoint(JSContext *cx)
 {
     JSStackFrame *fp = cx->fp();
     if (fp->hasImacropc())
         return NULL;
 
     JSScript *script = fp->script();
     return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc);
 }
 
+/*
+ * Interprets until either a safe point is reached that has method JIT'd
+ * code, or the current frame tries to return.
+ */
 static inline JSBool
 PartialInterpret(VMFrame &f)
 {
     JSContext *cx = f.cx;
     JSStackFrame *fp = cx->fp();
 
 #ifdef DEBUG
     JSScript *script = fp->script();
+    JS_ASSERT(!fp->finishedInInterpreter());
     JS_ASSERT(fp->hasImacropc() ||
               !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc));
 #endif
 
     JSBool ok = JS_TRUE;
     ok = Interpret(cx, fp, 0, JSINTERP_SAFEPOINT);
 
     return ok;
 }
 
 JS_STATIC_ASSERT(JSOP_NOP == 0);
 
+/* Returns the opcode if the current PC would return, popping the frame; JSOP_NOP otherwise. */
 static inline JSOp
 FrameIsFinished(JSContext *cx)
 {
     JSOp op = JSOp(*cx->regs->pc);
     return (op == JSOP_RETURN ||
             op == JSOP_RETRVAL ||
             op == JSOP_STOP)
         ? op
         : JSOP_NOP;
 }
 
+
+/* Simulate an inline_return by advancing the pc. */
+static inline void
+AdvanceReturnPC(JSContext *cx)
+{
+    JS_ASSERT(*cx->regs->pc == JSOP_CALL ||
+              *cx->regs->pc == JSOP_NEW ||
+              *cx->regs->pc == JSOP_EVAL ||
+              *cx->regs->pc == JSOP_APPLY);
+    cx->regs->pc += JSOP_CALL_LENGTH;
+}
+
+
+/*
+ * Given a frame that is about to return, make sure its return value and
+ * activation objects are fixed up. Then, pop the frame and advance the
+ * current PC. Note that while we could enter the JIT at this point, the
+ * logic would still be necessary for the interpreter, so it's easier
+ * (and faster) to finish frames in C++ even if at a safe point here.
+ */
+static bool
+HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame)
+{
+    JSContext *cx = f.cx;
+
+    JS_ASSERT(FrameIsFinished(cx));
+
+    /*
+     * This is the most difficult and complicated piece of the tracer
+     * integration, and historically has been very buggy. The problem is that
+     * although this frame has to be popped (see RemoveExcessFrames), it may
+     * be at a JSOP_RETURN opcode, and it might not have ever been executed.
+     * That is, fp->rval may not be set to the top of the stack, and if it
+     * has, the stack has already been decremented. Note that fp->rval is not
+     * the only problem: the epilogue may never have been executed.
+     *
+     * Here are the edge cases and whether the frame has been exited cleanly:
+     *  1. No: A trace exited directly before a RETURN op, and the
+     *         interpreter never ran.
+     *  2. Yes: The interpreter exited cleanly.
+     *  3. No: The interpreter exited on a safe point. LEAVE_ON_SAFE_POINT
+     *         is not used in between JSOP_RETURN and advancing the PC,
+     *         therefore, it cannot have been run if at a safe point.
+     *  4. No: Somewhere in the RunTracer call tree, we removed a frame,
+     *         and we returned to a JSOP_RETURN opcode. Note carefully
+     *         that in this situation, FrameIsFinished() returns true!
+     *  5. Yes: The function exited in the method JIT. However, in this
+     *         case, we'll never enter HandleFinishedFrame(): we always
+     *         immediately pop JIT'd frames.
+     *
+     * Since the only scenario where this fixup is NOT needed is a normal exit
+     * from the interpreter, we can cleanly check for this scenario by checking
+     * a bit it sets in the frame.
+     */
+    bool returnOK = true;
+    if (!cx->fp()->finishedInInterpreter()) {
+        if (JSOp(*cx->regs->pc) == JSOP_RETURN)
+            cx->fp()->setReturnValue(f.regs.sp[-1]);
+
+        returnOK = ScriptEpilogue(cx, cx->fp(), true);
+    }
+
+    JS_ASSERT_IF(cx->fp()->isFunctionFrame() &&
+                 !cx->fp()->isEvalFrame(),
+                 !cx->fp()->hasCallObj());
+
+    if (cx->fp() != entryFrame) {
+        InlineReturn(f);
+        AdvanceReturnPC(cx);
+    }
+
+    return returnOK;
+}
+
+/*
+ * Given a frame newer than the entry frame, try to finish it. If it's at a
+ * return position, pop the frame. If it's at a safe point, execute it in
+ * Jaeger code. Otherwise, try to interpret until a safe point.
+ *
+ * While this function is guaranteed to make progress, it may not actually
+ * finish or pop the current frame. It can either:
+ *   1) Finalize a finished frame, or
+ *   2) Finish and finalize the frame in the Method JIT, or
+ *   3) Interpret, which can:
+ *     a) Propagate an error, or
+ *     b) Finish the frame, but not finalize it, or
+ *     c) Abruptly leave at any point in the frame, or in a newer frame
+ *        pushed by a call, that has method JIT'd code.
+ */
+static bool
+EvaluateExcessFrame(VMFrame &f, JSStackFrame *entryFrame)
+{
+    JSContext *cx = f.cx;
+    JSStackFrame *fp = cx->fp();
+
+    /*
+     * A "finished" frame is when the interpreter rested on a STOP,
+     * RETURN, RETRVAL, etc. We check for finished frames BEFORE looking
+     * for a safe point. If the frame was finished, we could have already
+     * called ScriptEpilogue(), and entering the JIT could call it twice.
+     */
+    if (!fp->hasImacropc() && FrameIsFinished(cx))
+        return HandleFinishedFrame(f, entryFrame);
+
+    if (void *ncode = AtSafePoint(cx)) {
+        if (!JaegerShotAtSafePoint(cx, ncode))
+            return false;
+        InlineReturn(f);
+        AdvanceReturnPC(cx);
+        return true;
+    }
+
+    return PartialInterpret(f);
+}
+
+/*
+ * Evaluate frames newer than the entry frame until all are gone. This will
+ * always leave f.regs.fp == entryFrame.
+ */
 static bool
 FinishExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
 {
     JSContext *cx = f.cx;
+
     while (cx->fp() != entryFrame || entryFrame->hasImacropc()) {
-        if (void *ncode = AtSafePoint(cx)) {
-            if (!JaegerShotAtSafePoint(cx, ncode)) {
-                if (!HandleErrorInExcessFrames(f, entryFrame))
-                    return false;
-
-                /* Could be anywhere - restart outer loop. */
-                continue;
-            }
-            InlineReturn(f, JS_TRUE);
-            AdvanceReturnPC(cx);
-        } else {
-            if (!PartialInterpret(f)) {
-                if (!HandleErrorInExcessFrames(f, entryFrame))
-                    return false;
-            } else if (cx->fp() != entryFrame) {
-                /*
-                 * Partial interpret could have dropped us anywhere. Deduce the
-                 * edge case: at a RETURN, needing to pop a frame.
-                 */
-                JS_ASSERT(!cx->fp()->hasImacropc());
-                if (FrameIsFinished(cx)) {
-                    JSOp op = JSOp(*cx->regs->pc);
-                    if (op == JSOP_RETURN && !cx->fp()->isBailedAtReturn())
-                        cx->fp()->setReturnValue(f.regs.sp[-1]);
-                    InlineReturn(f, JS_TRUE);
-                    AdvanceReturnPC(cx);
-                }
-            }
+        if (!EvaluateExcessFrame(f, entryFrame)) {
+            if (!HandleErrorInExcessFrame(f, entryFrame))
+                return false;
         }
     }
 
     return true;
 }
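The refactored unwinding loop above reduces to this control flow:

    // while (fp != entryFrame || entryFrame->hasImacropc()):
    //     EvaluateExcessFrame:
    //         frame finished?  -> HandleFinishedFrame (epilogue, pop, advance PC)
    //         at a safe point? -> JaegerShotAtSafePoint, then InlineReturn
    //         otherwise        -> PartialInterpret until one of the above
    //     on error             -> HandleErrorInExcessFrame, then retry or throw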
 
 #if JS_MONOIC
 static void
-DisableTraceHintSingle(JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
+UpdateTraceHintSingle(JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
 {
     /*
      * Hack: The value that will be patched is before the executable address,
      * so to get protection right, just unprotect the general region around
      * the jump.
      */
     uint8 *addr = (uint8 *)(jump.executableAddress());
     JSC::RepatchBuffer repatch(addr - 64, 128);
     repatch.relink(jump, target);
 
     JaegerSpew(JSpew_PICs, "relinking trace hint %p to %p\n",
                jump.executableAddress(), target.executableAddress());
 }
 
 static void
-DisableTraceHint(VMFrame &f, ic::MICInfo &mic)
+DisableTraceHint(VMFrame &f, ic::TraceICInfo &tic)
 {
-    JS_ASSERT(mic.kind == ic::MICInfo::TRACER);
+    UpdateTraceHintSingle(tic.traceHint, tic.jumpTarget);
 
-    DisableTraceHintSingle(mic.traceHint, mic.load);
+    if (tic.hasSlowTraceHint)
+        UpdateTraceHintSingle(tic.slowTraceHint, tic.jumpTarget);
+}
 
-    if (mic.u.hints.hasSlowTraceHintOne)
-        DisableTraceHintSingle(mic.slowTraceHintOne, mic.load);
+static void
+EnableTraceHintAt(JSScript *script, js::mjit::JITScript *jit, jsbytecode *pc, uint16_t index)
+{
+    JS_ASSERT(index < jit->nTraceICs);
+    ic::TraceICInfo &tic = jit->traceICs[index];
 
-    if (mic.u.hints.hasSlowTraceHintTwo)
-        DisableTraceHintSingle(mic.slowTraceHintTwo, mic.load);
+    JS_ASSERT(tic.jumpTargetPC == pc);
+
+    JaegerSpew(JSpew_PICs, "Enabling trace IC %u in script %p\n", index, script);
+
+    UpdateTraceHintSingle(tic.traceHint, tic.stubEntry);
+
+    if (tic.hasSlowTraceHint)
+        UpdateTraceHintSingle(tic.slowTraceHint, tic.stubEntry);
 }
 #endif
 
+void
+js::mjit::EnableTraceHint(JSScript *script, jsbytecode *pc, uint16_t index)
+{
+#if JS_MONOIC
+    if (script->jitNormal)
+        EnableTraceHintAt(script, script->jitNormal, pc, index);
+
+    if (script->jitCtor)
+        EnableTraceHintAt(script, script->jitCtor, pc, index);
+#endif
+}
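The tracer-side caller is outside this hunk; a hedged sketch of how a loop's trace IC would be re-armed once recording is worthwhile again ('script' and 'pc' are assumed to point at a JSOP_TRACE loop header):

    uint16_t index = GET_UINT16(pc);      // IC index baked into the operand
    if (index != BAD_TRACEIC_INDEX)
        js::mjit::EnableTraceHint(script, pc, index);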
+
 #if JS_MONOIC
 void *
-RunTracer(VMFrame &f, ic::MICInfo &mic)
+RunTracer(VMFrame &f, ic::TraceICInfo &tic)
 #else
 void *
 RunTracer(VMFrame &f)
 #endif
 {
     JSContext *cx = f.cx;
     JSStackFrame *entryFrame = f.fp();
     TracePointAction tpa;
@@ -817,31 +941,31 @@ RunTracer(VMFrame &f)
 
     bool blacklist;
     uintN inlineCallCount = 0;
     tpa = MonitorTracePoint(f.cx, inlineCallCount, blacklist);
     JS_ASSERT(!TRACE_RECORDER(cx));
 
 #if JS_MONOIC
     if (blacklist)
-        DisableTraceHint(f, mic);
+        DisableTraceHint(f, tic);
 #endif
 
-    if ((tpa == TPA_RanStuff || tpa == TPA_Recorded) && cx->throwing)
-        tpa = TPA_Error;
+    // Even though ExecuteTree() bypasses the interpreter, it should still
+    // propagate errors correctly.
+    JS_ASSERT_IF(cx->throwing, tpa == TPA_Error);
 
-	/* Sync up the VMFrame's view of cx->fp(). */
 	f.fp() = cx->fp();
-
+    JS_ASSERT(f.fp() == cx->fp());
     switch (tpa) {
       case TPA_Nothing:
         return NULL;
 
       case TPA_Error:
-        if (!HandleErrorInExcessFrames(f, entryFrame))
+        if (!HandleErrorInExcessFrame(f, entryFrame, f.fp()->finishedInInterpreter()))
             THROWV(NULL);
         JS_ASSERT(!cx->fp()->hasImacropc());
         break;
 
       case TPA_RanStuff:
       case TPA_Recorded:
         break;
     }
@@ -869,57 +993,50 @@ RunTracer(VMFrame &f)
      */
 
   restart:
     /* Step 1. Finish frames created after the entry frame. */
     if (!FinishExcessFrames(f, entryFrame))
         THROWV(NULL);
 
     /* IMacros are guaranteed to have been removed by now. */
+    JS_ASSERT(f.fp() == entryFrame);
     JS_ASSERT(!entryFrame->hasImacropc());
 
-    /* Step 2. If entryFrame is at a safe point, just leave. */
-    if (void *ncode = AtSafePoint(cx))
-        return ncode;
-
-    /* Step 3. If entryFrame is at a RETURN, then leave slightly differently. */
-    if (JSOp op = FrameIsFinished(cx)) {
-        /* We're not guaranteed that the RETURN was run. */
-        if (op == JSOP_RETURN && !entryFrame->isBailedAtReturn())
-            entryFrame->setReturnValue(f.regs.sp[-1]);
-
-        /* Cleanup activation objects on the frame unless it's owned by an Invoke. */
-        if (f.fp() != f.entryFp) {
-            if (!InlineReturn(f, JS_TRUE, JS_FALSE))
-                THROWV(NULL);
-        }
+    /* Step 2. If entryFrame is done, use a special path to return to EnterMethodJIT(). */
+    if (FrameIsFinished(cx)) {
+        if (!HandleFinishedFrame(f, entryFrame))
+            THROWV(NULL);
 
         void *retPtr = JS_FUNC_TO_DATA_PTR(void *, InjectJaegerReturn);
         *f.returnAddressLocation() = retPtr;
         return NULL;
     }
 
+    /* Step 3. If entryFrame is at a safe point, just leave. */
+    if (void *ncode = AtSafePoint(cx))
+        return ncode;
+
     /* Step 4. Do a partial interp, then restart the whole process. */
     if (!PartialInterpret(f)) {
-        if (!HandleErrorInExcessFrames(f, entryFrame))
+        if (!HandleErrorInExcessFrame(f, entryFrame))
             THROWV(NULL);
     }
 
     goto restart;
 }
 
 #endif /* JS_TRACER */
 
 #if defined JS_TRACER
 # if defined JS_MONOIC
 void *JS_FASTCALL
-stubs::InvokeTracer(VMFrame &f, ic::MICInfo *mic)
+stubs::InvokeTracer(VMFrame &f, ic::TraceICInfo *tic)
 {
-    JS_ASSERT(mic->kind == ic::MICInfo::TRACER);
-    return RunTracer(f, *mic);
+    return RunTracer(f, *tic);
 }
 
 # else
 
 void *JS_FASTCALL
 stubs::InvokeTracer(VMFrame &f)
 {
     return RunTracer(f);
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -813,16 +813,23 @@ mjit::JITScript::release()
 #if defined JS_POLYIC
     for (uint32 i = 0; i < nPICs; i++) {
         pics[i].releasePools();
         Destroy(pics[i].execPools);
     }
 #endif
 
 #if defined JS_MONOIC
+    for (JSC::ExecutablePool **pExecPool = execPools.begin();
+         pExecPool != execPools.end();
+         ++pExecPool)
+    {
+        (*pExecPool)->release();
+    }
+    
     for (uint32 i = 0; i < nCallICs; i++)
         callICs[i].releasePools();
 #endif
 }
 
 void
 mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
 {
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -141,16 +141,18 @@ extern "C" void JaegerStubVeneer(void);
 
 namespace mjit {
 namespace ic {
 # if defined JS_POLYIC
     struct PICInfo;
 # endif
 # if defined JS_MONOIC
     struct MICInfo;
+    struct EqualityICInfo;
+    struct TraceICInfo;
     struct CallICInfo;
 # endif
 }
 }
 
 typedef void (JS_FASTCALL *VoidStub)(VMFrame &);
 typedef void (JS_FASTCALL *VoidVpStub)(VMFrame &, Value *);
 typedef void (JS_FASTCALL *VoidStubUInt32)(VMFrame &, uint32);
@@ -170,16 +172,18 @@ typedef JSString * (JS_FASTCALL *JSStrSt
 typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
 typedef void (JS_FASTCALL *VoidStubPC)(VMFrame &, jsbytecode *);
 typedef JSBool (JS_FASTCALL *BoolStubUInt32)(VMFrame &f, uint32);
 #ifdef JS_MONOIC
 typedef void (JS_FASTCALL *VoidStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
 typedef void * (JS_FASTCALL *VoidPtrStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
 typedef void (JS_FASTCALL *VoidStubMIC)(VMFrame &, js::mjit::ic::MICInfo *);
 typedef void * (JS_FASTCALL *VoidPtrStubMIC)(VMFrame &, js::mjit::ic::MICInfo *);
+typedef JSBool (JS_FASTCALL *BoolStubEqualityIC)(VMFrame &, js::mjit::ic::EqualityICInfo *);
+typedef void * (JS_FASTCALL *VoidPtrStubTraceIC)(VMFrame &, js::mjit::ic::TraceICInfo *);
 #endif
 #ifdef JS_POLYIC
 typedef void (JS_FASTCALL *VoidStubPIC)(VMFrame &, js::mjit::ic::PICInfo *);
 #endif
 
 namespace mjit {
 
 struct CallSite;
@@ -191,16 +195,24 @@ struct JITScript {
 
     js::mjit::CallSite *callSites;
     uint32          nCallSites;
 #ifdef JS_MONOIC
     ic::MICInfo     *mics;      /* MICs in this script. */
     uint32          nMICs;      /* number of MonoICs */
     ic::CallICInfo  *callICs;   /* CallICs in this script. */
     uint32          nCallICs;   /* number of call ICs */
+    ic::EqualityICInfo *equalityICs;
+    uint32          nEqualityICs;
+    ic::TraceICInfo *traceICs;
+    uint32          nTraceICs;
+
+    // Additional ExecutablePools that IC stubs were generated into.
+    typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
+    ExecPoolVector execPools;
 #endif
 #ifdef JS_POLYIC
     ic::PICInfo     *pics;      /* PICs in this script */
     uint32          nPICs;      /* number of PolyICs */
 #endif
     void            *invokeEntry;       /* invoke address */
     void            *fastEntry;         /* cached entry, fastest */
     void            *arityCheckEntry;   /* arity check address */
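Ownership note for the new execPools member: pools are appended by the IC linkers (see EqualityICLinker::init in MonoIC.cpp below) and released in JITScript::release(), per the MethodJIT.cpp hunk above:

    for (JSC::ExecutablePool **p = execPools.begin(); p != execPools.end(); ++p)
        (*p)->release();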
@@ -260,16 +272,20 @@ CanMethodJIT(JSContext *cx, JSScript *sc
 
 struct CallSite
 {
     uint32 codeOffset;
     uint32 pcOffset;
     uint32 id;
 };
 
+/* Re-enables a tracepoint in the method JIT. */
+void
+EnableTraceHint(JSScript *script, jsbytecode *pc, uint16_t index);
+
 } /* namespace mjit */
 
 } /* namespace js */
 
 inline void *
 JSScript::maybeNativeCodeForPC(bool constructing, jsbytecode *pc)
 {
     js::mjit::JITScript *jit = getJIT(constructing);
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -206,16 +206,216 @@ ic::SetGlobalName(VMFrame &f, ic::MICInf
 #elif defined JS_PUNBOX64
     stores.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
 #endif
 
     // Actually implement the op the slow way.
     GetStubForSetGlobalName(f)(f, atom);
 }
 
+class EqualityICLinker : public LinkerHelper
+{
+    VMFrame &f;
+
+  public:
+    EqualityICLinker(JSContext *cx, VMFrame &f)
+        : LinkerHelper(cx), f(f)
+    { }
+
+    bool init(Assembler &masm) {
+        JSC::ExecutablePool *pool = LinkerHelper::init(masm);
+        if (!pool)
+            return false;
+        JSScript *script = f.fp()->script();
+        JITScript *jit = script->getJIT(f.fp()->isConstructing());
+        if (!jit->execPools.append(pool)) {
+            pool->release();
+            js_ReportOutOfMemory(cx);
+            return false;
+        }
+        return true;
+    }
+};
+
+/* Rough over-estimate of how much memory we need to unprotect. */
+static const uint32 INLINE_PATH_LENGTH = 64;
+
+class EqualityCompiler : public BaseCompiler
+{
+    VMFrame &f;
+    EqualityICInfo &ic;
+
+    Vector<Jump, 4, SystemAllocPolicy> jumpList;
+    Jump trueJump;
+    Jump falseJump;
+    
+  public:
+    EqualityCompiler(VMFrame &f, EqualityICInfo &ic)
+        : BaseCompiler(f.cx), f(f), ic(ic), jumpList(SystemAllocPolicy())
+    {
+    }
+
+    void linkToStub(Jump j)
+    {
+        jumpList.append(j);
+    }
+
+    void linkTrue(Jump j)
+    {
+        trueJump = j;
+    }
+
+    void linkFalse(Jump j)
+    {
+        falseJump = j;
+    }
+    
+    void generateStringPath(Assembler &masm)
+    {
+        ValueRemat &lvr = ic.lvr;
+        ValueRemat &rvr = ic.rvr;
+
+        if (!lvr.isConstant && !lvr.isType(JSVAL_TYPE_STRING)) {
+            Jump lhsFail = masm.testString(Assembler::NotEqual, lvr.typeReg());
+            linkToStub(lhsFail);
+        }
+        
+        if (!rvr.isConstant && !rvr.isType(JSVAL_TYPE_STRING)) {
+            Jump rhsFail = masm.testString(Assembler::NotEqual, rvr.typeReg());
+            linkToStub(rhsFail);
+        }
+
+        RegisterID tmp = ic.tempReg;
+        
+        /* Test if lhs/rhs are atomized. */
+        Imm32 atomizedFlags(JSString::FLAT | JSString::ATOMIZED);
+        
+        masm.load32(Address(lvr.dataReg(), offsetof(JSString, mLengthAndFlags)), tmp);
+        masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), tmp);
+        Jump lhsNotAtomized = masm.branch32(Assembler::NotEqual, tmp, atomizedFlags);
+        linkToStub(lhsNotAtomized);
+
+        if (!rvr.isConstant) {
+            masm.load32(Address(rvr.dataReg(), offsetof(JSString, mLengthAndFlags)), tmp);
+            masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), tmp);
+            Jump rhsNotAtomized = masm.branch32(Assembler::NotEqual, tmp, atomizedFlags);
+            linkToStub(rhsNotAtomized);
+        }
+
+        if (rvr.isConstant) {
+            JSString *str = Valueify(rvr.u.v).toString();
+            JS_ASSERT(str->isAtomized());
+            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(str));
+            linkTrue(test);
+        } else {
+            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
+            linkTrue(test);
+        }
+
+        Jump fallthrough = masm.jump();
+        linkFalse(fallthrough);
+    }
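This is essentially the out-of-line string path deleted from FastArithmetic.cpp above, now generated lazily by the equality IC: only atomized (interned) strings can be compared by pointer, so non-atomized operands bail back to the stub.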
+
+    void generateObjectPath(Assembler &masm)
+    {
+        ValueRemat &lvr = ic.lvr;
+        ValueRemat &rvr = ic.rvr;
+        
+        if (!lvr.isConstant && !lvr.isType(JSVAL_TYPE_OBJECT)) {
+            Jump lhsFail = masm.testObject(Assembler::NotEqual, lvr.typeReg());
+            linkToStub(lhsFail);
+        }
+        
+        if (!rvr.isConstant && !rvr.isType(JSVAL_TYPE_OBJECT)) {
+            Jump rhsFail = masm.testObject(Assembler::NotEqual, rvr.typeReg());
+            linkToStub(rhsFail);
+        }
+
+        Jump lhsHasEq = masm.branchTest32(Assembler::NonZer