Merge tracemonkey to mozilla-central. a=blockers
author: Robert Sayre <sayrer@gmail.com>
Sun, 12 Sep 2010 00:17:49 -0400
changeset 53654 421427ca9809d5d12b039fe0ed4457ef79111c8f
parent 53649 8afc7bd2f49551918fb2bd0ec00b2314ee627e57 (current diff)
parent 53653 0b33419e048dc0a39c9f40538017f239e8673a56 (diff)
child 53655 056fbd8a379426b74f3a0a61a58ca8941d82b49d
push id: 15666
push user: rsayre@mozilla.com
push date: Sun, 12 Sep 2010 04:18:05 +0000
treeherder: mozilla-central@421427ca9809 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: blockers
milestone: 2.0b6pre
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge tracemonkey to mozilla-central. a=blockers
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -2960,17 +2960,17 @@ JS_SealObject(JSContext *cx, JSObject *o
     JS_DestroyIdArray(cx, ida);
 
     /* If not sealing an entire object graph, we're done after sealing obj. */
     obj->seal(cx);
     if (!deep)
         return true;
 
     /* Walk slots in obj and if any value is a non-null object, seal it. */
-    for (uint32 i = 0, n = obj->freeslot; i != n; ++i) {
+    for (uint32 i = 0, n = obj->slotSpan(); i != n; ++i) {
         const Value &v = obj->getSlot(i);
         if (i == JSSLOT_PRIVATE && (obj->getClass()->flags & JSCLASS_HAS_PRIVATE))
             continue;
         if (v.isPrimitive())
             continue;
         if (!JS_SealObject(cx, &v.toObject(), deep))
             return false;
     }
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -111,21 +111,24 @@ using namespace js;
 
 /* 2^32 - 1 as a number and a string */
 #define MAXINDEX 4294967295u
 #define MAXSTR   "4294967295"
 
 /* Small arrays are dense, no matter what. */
 #define MIN_SPARSE_INDEX 256
 
-/* Iteration depends on all indexes of a dense array to fit into a JSVAL-sized int. */
+/*
+ * Use the limit on number of object slots for sanity and consistency (see the
+ * assertion in JSObject::makeDenseArraySlow).
+ */
 static inline bool
 INDEX_TOO_BIG(jsuint index)
 {
-    return index > JS_BIT(29) - 1;
+    return index >= JSObject::NSLOTS_LIMIT;
 }
 
 static inline  bool
 INDEX_TOO_SPARSE(JSObject *array, jsuint index)
 {
     /* Small arrays with less than 256 elements are dense, no matter what. */
     if (index < 256)
         return false;
@@ -1053,25 +1056,16 @@ JSObject::makeDenseArraySlow(JSContext *
     } else {
         /*
          * Array.prototype is constructed as a dense array, but is immediately slowified before
          * we have time to set capacity.
          */
         capacity = 0;
     }
 
-    uint32 nslots = numSlots();
-    if (nslots >= JS_NSLOTS_LIMIT) {
-        setMap(oldMap);
-        JS_ReportOutOfMemory(cx);
-        return false;
-    }
-
-    freeslot = nslots;
-
     /* Begin with the length property to share more of the property tree. */
     if (!addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
                      array_length_getter, array_length_setter,
                      JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) {
         setMap(oldMap);
         return false;
     }
 
@@ -1083,16 +1077,19 @@ JSObject::makeDenseArraySlow(JSContext *
             return false;
         }
 
         if (getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
             setDenseArrayElement(i, UndefinedValue());
             continue;
         }
 
+        /* Assert that the length covering i fits in the alloted bits. */
+        JS_ASSERT(JS_INITIAL_NSLOTS + i + 1 < NSLOTS_LIMIT);
+
         if (!addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) {
             setMap(oldMap);
             return false;
         }
     }
 
     /*
      * Render our formerly-reserved non-private properties GC-safe.  We do not
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -190,22 +190,20 @@ AddPropertyHelper(JSContext* cx, JSObjec
 
     if (obj->nativeEmpty()) {
         if (!obj->ensureClassReservedSlotsForEmptyObject(cx))
             goto exit_trace;
     }
 
     uint32 slot;
     slot = shape->slot;
-    JS_ASSERT(slot == obj->freeslot);
+    JS_ASSERT(slot == obj->slotSpan());
 
     if (slot < obj->numSlots()) {
         JS_ASSERT(obj->getSlot(slot).isUndefined());
-        ++obj->freeslot;
-        JS_ASSERT(obj->freeslot != 0);
     } else {
         if (!obj->allocSlot(cx, &slot))
             goto exit_trace;
         JS_ASSERT(slot == shape->slot);
     }
 
     obj->extend(cx, shape, isDefinitelyAtom);
     if (js_IsPropertyCacheDisabled(cx))
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -229,17 +229,17 @@ UpdateDepth(JSContext *cx, JSCodeGenerat
     ndefs = cs->ndefs;
     if (ndefs < 0) {
         JSObject *blockObj;
 
         /* We just executed IndexParsedObject */
         JS_ASSERT(op == JSOP_ENTERBLOCK);
         JS_ASSERT(nuses == 0);
         blockObj = cg->objectList.lastbox->object;
-        JS_ASSERT(blockObj->getClass() == &js_BlockClass);
+        JS_ASSERT(blockObj->isStaticBlock());
         JS_ASSERT(blockObj->fslots[JSSLOT_BLOCK_DEPTH].isUndefined());
 
         OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
         ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
     }
     cg->stackDepth += ndefs;
     if ((uintN)cg->stackDepth > cg->maxStackDepth)
         cg->maxStackDepth = cg->stackDepth;
@@ -1587,17 +1587,17 @@ js_LexicalLookup(JSTreeContext *tc, JSAt
         if (stmt->type == STMT_WITH)
             break;
 
         /* Skip "maybe scope" statements that don't contain let bindings. */
         if (!(stmt->flags & SIF_SCOPE))
             continue;
 
         JSObject *obj = stmt->blockObj;
-        JS_ASSERT(obj->getClass() == &js_BlockClass);
+        JS_ASSERT(obj->isStaticBlock());
 
         const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
         if (shape) {
             JS_ASSERT(shape->hasShortID());
 
             if (slotp) {
                 JS_ASSERT(obj->fslots[JSSLOT_BLOCK_DEPTH].isInt32());
                 *slotp = obj->fslots[JSSLOT_BLOCK_DEPTH].toInt32() +
@@ -1862,19 +1862,22 @@ EmitEnterBlock(JSContext *cx, JSParseNod
         for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
             JS_ASSERT(pnu->pn_lexdef == dn);
             JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
             JS_ASSERT(pnu->pn_cookie.isFree());
         }
 #endif
     }
 
-    if (!blockObj->growSlots(cx, base))
-        return false;
-    blockObj->freeslot = base;
+    /*
+     * Shrink slots to free blockObj->dslots and ensure a prompt safe crash if
+     * by accident some code tries to get a slot from a compiler-created Block
+     * prototype instead of from a clone.
+     */
+    blockObj->shrinkSlots(cx, base);
     return true;
 }
 
 /*
  * When eval is called from a function, the eval code or function code it
  * compiles may reference upvars that live in the eval-calling function. The
  * eval-invoked compiler does not have explicit definitions for these upvars
  * and we do not attempt to create them a-priori (by inspecting the function's
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -429,19 +429,19 @@ WrapEscapingClosure(JSContext *cx, JSSta
         /* FIXME should copy JSOP_TRAP? */
         JSOp op = js_GetOpcode(cx, wscript, pc);
         const JSCodeSpec *cs = &js_CodeSpec[op];
         ptrdiff_t oplen = cs->length;
         if (oplen < 0)
             oplen = js_GetVariableBytecodeLength(pc);
 
         /*
-         * Rewrite JSOP_{GET,CALL}DSLOT as JSOP_{GET,CALL}UPVAR_DBG for the
+         * Rewrite JSOP_{GET,CALL}FCSLOT as JSOP_{GET,CALL}UPVAR_DBG for the
          * case where fun is an escaping flat closure. This works because the
-         * UPVAR and DSLOT ops by design have the same format: an upvar index
+         * UPVAR and FCSLOT ops by design have the same format: an upvar index
          * immediate operand.
          */
         switch (op) {
           case JSOP_GETUPVAR:       *pc = JSOP_GETUPVAR_DBG; break;
           case JSOP_CALLUPVAR:      *pc = JSOP_CALLUPVAR_DBG; break;
           case JSOP_GETFCSLOT:      *pc = JSOP_GETUPVAR_DBG; break;
           case JSOP_CALLFCSLOT:     *pc = JSOP_CALLUPVAR_DBG; break;
           case JSOP_DEFFUN_FC:      *pc = JSOP_DEFFUN_DBGFC; break;
@@ -963,17 +963,17 @@ NewCallObject(JSContext *cx, JSFunction 
     /* This must come after callobj->lastProp has been set. */
     if (!callobj->ensureInstanceReservedSlots(cx, fun->countArgsAndVars()))
         return NULL;
 
 #ifdef DEBUG
     for (Shape::Range r = callobj->lastProp; !r.empty(); r.popFront()) {
         const Shape &s = r.front();
         if (s.slot != SHAPE_INVALID_SLOT) {
-            JS_ASSERT(s.slot + 1 == callobj->freeslot);
+            JS_ASSERT(s.slot + 1 == callobj->slotSpan());
             break;
         }
     }
 #endif
     return callobj;
 }
 
 static inline JSObject *
@@ -2410,17 +2410,17 @@ JSObject::initBoundFunction(JSContext *c
     fslots[JSSLOT_BOUND_FUNCTION_THIS] = thisArg;
     fslots[JSSLOT_BOUND_FUNCTION_ARGS_COUNT].setPrivateUint32(argslen);
     if (argslen != 0) {
         /* FIXME? Burn memory on an empty scope whose shape covers the args slots. */
         EmptyShape *empty = EmptyShape::create(cx, clasp);
         if (!empty)
             return false;
 
-        empty->slot += argslen;
+        empty->slotSpan += argslen;
         map = empty;
 
         if (!ensureInstanceReservedSlots(cx, argslen))
             return false;
 
         JS_ASSERT(dslots);
         JS_ASSERT(dslots[-1].toPrivateUint32() >= argslen);
         memcpy(&dslots[0], args, argslen * sizeof(Value));
@@ -3207,16 +3207,18 @@ JSFunction::addLocal(JSContext *cx, JSAt
         if (kind == JSLOCAL_ARG && parent->inDictionary())
             findArgInsertionPoint = true;
         id = ATOM_TO_JSID(atom);
     }
 
     if (findArgInsertionPoint) {
         while (parent->parent && parent->getter() != js_GetCallArg) {
             ++parent->slot;
+            JS_ASSERT(parent->slot == parent->slotSpan);
+            ++parent->slotSpan;
             listp = &parent->parent;
             parent = *listp;
         }
     }
 
     Shape child(id, getter, setter, slot, attrs, Shape::HAS_SHORTID, *indexp);
 
     Shape *shape = parent->getChild(cx, child, listp);
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -126,17 +126,17 @@ js_GetScopeChain(JSContext *cx, JSStackF
      * We have one or more lexical scopes to reflect into fp->scopeChain, so
      * make sure there's a call object at the current head of the scope chain,
      * if this frame is a call frame.
      *
      * Also, identify the innermost compiler-allocated block we needn't clone.
      */
     JSObject *limitBlock, *limitClone;
     if (fp->hasFunction() && !fp->hasCallObj()) {
-        JS_ASSERT_IF(fp->getScopeChain()->getClass() == &js_BlockClass,
+        JS_ASSERT_IF(fp->getScopeChain()->isClonedBlock(),
                      fp->getScopeChain()->getPrivate() != js_FloatingFrameIfGenerator(cx, fp));
         if (!js_GetCallObject(cx, fp))
             return NULL;
 
         /* We know we must clone everything on blockChain. */
         limitBlock = limitClone = NULL;
     } else {
         /*
@@ -211,17 +211,17 @@ js_GetScopeChain(JSContext *cx, JSStackF
     newChild->setParent(fp->getScopeChain());
 
 
     /*
      * If we found a limit block belonging to this frame, then we should have
      * found it in blockChain.
      */
     JS_ASSERT_IF(limitBlock &&
-                 limitBlock->getClass() == &js_BlockClass &&
+                 limitBlock->isBlock() &&
                  limitClone->getPrivate() == js_FloatingFrameIfGenerator(cx, fp),
                  sharedBlock);
 
     /* Place our newly cloned blocks at the head of the scope chain.  */
     fp->setScopeChain(innermostNewChild);
     return innermostNewChild;
 }
 
@@ -1210,17 +1210,17 @@ js_UnwindScope(JSContext *cx, jsint stac
     JSObject *obj;
     Class *clasp;
 
     JS_ASSERT(stackDepth >= 0);
     JS_ASSERT(cx->fp()->base() + stackDepth <= cx->regs->sp);
 
     JSStackFrame *fp = cx->fp();
     for (obj = fp->maybeBlockChain(); obj; obj = obj->getParent()) {
-        JS_ASSERT(obj->getClass() == &js_BlockClass);
+        JS_ASSERT(obj->isStaticBlock());
         if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
             break;
     }
     fp->setBlockChain(obj);
 
     for (;;) {
         obj = fp->getScopeChain();
         clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth);
@@ -3688,17 +3688,17 @@ BEGIN_CASE(JSOP_GNAMEDEC)
 
     JSObject *obj2;
     PropertyCacheEntry *entry;
     JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom);
     if (!atom) {
         ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
         if (obj == obj2 && entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj->freeslot);
+            JS_ASSERT(obj->containsSlot(slot));
             Value &rref = obj->getSlotRef(slot);
             int32_t tmp;
             if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
                 int32_t inc = tmp + ((js_CodeSpec[op].format & JOF_INC) ? 1 : -1);
                 if (!(js_CodeSpec[op].format & JOF_POST))
                     tmp = inc;
                 rref.getInt32Ref() = inc;
                 PUSH_INT32(tmp);
@@ -3928,17 +3928,17 @@ BEGIN_CASE(JSOP_GETXPROP)
             JSAtom *atom;
             JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
             if (!atom) {
                 ASSERT_VALID_PROPERTY_CACHE_HIT(i, aobj, obj2, entry);
                 if (entry->vword.isFunObj()) {
                     rval.setObject(entry->vword.toFunObj());
                 } else if (entry->vword.isSlot()) {
                     uint32 slot = entry->vword.toSlot();
-                    JS_ASSERT(slot < obj2->freeslot);
+                    JS_ASSERT(obj2->containsSlot(slot));
                     rval = obj2->lockedGetSlot(slot);
                 } else {
                     JS_ASSERT(entry->vword.isShape());
                     const Shape *shape = entry->vword.toShape();
                     NATIVE_GET(cx, obj, obj2, shape,
                                fp->hasIMacroPC() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER,
                                &rval);
                 }
@@ -4023,17 +4023,17 @@ BEGIN_CASE(JSOP_CALLPROP)
     JSAtom *atom;
     JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
     if (!atom) {
         ASSERT_VALID_PROPERTY_CACHE_HIT(0, aobj, obj2, entry);
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj2->freeslot);
+            JS_ASSERT(obj2->containsSlot(slot));
             rval = obj2->lockedGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
             const Shape *shape = entry->vword.toShape();
             NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval);
         }
         regs.sp[-1] = rval;
         PUSH_COPY(lval);
@@ -4191,31 +4191,29 @@ BEGIN_CASE(JSOP_SETMETHOD)
                         goto error;
                 }
 
                 uint32 slot;
                 if (shape->previous() == obj->lastProperty() &&
                     entry->vshape() == rt->protoHazardShape &&
                     shape->hasDefaultSetter()) {
                     slot = shape->slot;
-                    JS_ASSERT(slot == obj->freeslot);
+                    JS_ASSERT(slot == obj->slotSpan());
 
                     /*
                      * Fast path: adding a plain old property that was once at
                      * the frontier of the property tree, whose slot is next to
                      * claim among the already-allocated slots in obj, where
                      * shape->table has not been created yet.
                      */
                     PCMETER(cache->pchits++);
                     PCMETER(cache->addpchits++);
 
                     if (slot < obj->numSlots()) {
                         JS_ASSERT(obj->getSlot(slot).isUndefined());
-                        ++obj->freeslot;
-                        JS_ASSERT(obj->freeslot != 0);
                     } else {
                         if (!obj->allocSlot(cx, &slot))
                             goto error;
                         JS_ASSERT(slot == shape->slot);
                     }
 
                     /* Simply extend obj's property tree path with shape! */
                     obj->extend(cx, shape);
@@ -4678,17 +4676,17 @@ BEGIN_CASE(JSOP_CALLNAME)
     JSAtom *atom;
     JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom);
     if (!atom) {
         ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
         if (entry->vword.isFunObj()) {
             PUSH_OBJECT(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uintN slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj2->freeslot);
+            JS_ASSERT(obj2->containsSlot(slot));
             PUSH_COPY(obj2->lockedGetSlot(slot));
         } else {
             JS_ASSERT(entry->vword.isShape());
             shape = entry->vword.toShape();
             NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval);
             PUSH_COPY(rval);
         }
 
@@ -5177,33 +5175,33 @@ BEGIN_CASE(JSOP_CALLFCSLOT)
 END_CASE(JSOP_GETFCSLOT)
 
 BEGIN_CASE(JSOP_GETGLOBAL)
 BEGIN_CASE(JSOP_CALLGLOBAL)
 {
     uint32 slot = GET_SLOTNO(regs.pc);
     slot = script->getGlobalSlot(slot);
     JSObject *obj = fp->getScopeChain()->getGlobal();
-    JS_ASSERT(slot < obj->freeslot);
+    JS_ASSERT(obj->containsSlot(slot));
     PUSH_COPY(obj->getSlot(slot));
     if (op == JSOP_CALLGLOBAL)
         PUSH_NULL();
 }
 END_CASE(JSOP_GETGLOBAL)
 
 BEGIN_CASE(JSOP_FORGLOBAL)
 {
     Value rval;
     if (!IteratorNext(cx, &regs.sp[-1].toObject(), &rval))
         goto error;
     PUSH_COPY(rval);
     uint32 slot = GET_SLOTNO(regs.pc);
     slot = script->getGlobalSlot(slot);
     JSObject *obj = fp->getScopeChain()->getGlobal();
-    JS_ASSERT(slot < obj->freeslot);
+    JS_ASSERT(obj->containsSlot(slot));
     JS_LOCK_OBJ(cx, obj);
     {
         if (!obj->methodWriteBarrier(cx, slot, rval)) {
             JS_UNLOCK_OBJ(cx, obj);
             goto error;
         }
         obj->lockedSetSlot(slot, rval);
         JS_UNLOCK_OBJ(cx, obj);
@@ -5212,17 +5210,17 @@ BEGIN_CASE(JSOP_FORGLOBAL)
 }
 END_CASE(JSOP_FORGLOBAL)
 
 BEGIN_CASE(JSOP_SETGLOBAL)
 {
     uint32 slot = GET_SLOTNO(regs.pc);
     slot = script->getGlobalSlot(slot);
     JSObject *obj = fp->getScopeChain()->getGlobal();
-    JS_ASSERT(slot < obj->freeslot);
+    JS_ASSERT(obj->containsSlot(slot));
     {
         JS_LOCK_OBJ(cx, obj);
         if (!obj->methodWriteBarrier(cx, slot, regs.sp[-1])) {
             JS_UNLOCK_OBJ(cx, obj);
             goto error;
         }
         obj->lockedSetSlot(slot, regs.sp[-1]);
         JS_UNLOCK_OBJ(cx, obj);
@@ -5544,17 +5542,16 @@ BEGIN_CASE(JSOP_LAMBDA)
                  * break from the outer do-while(0).
                  */
                 if (op2 == JSOP_INITMETHOD) {
 #ifdef DEBUG
                     const Value &lref = regs.sp[-1];
                     JS_ASSERT(lref.isObject());
                     JSObject *obj2 = &lref.toObject();
                     JS_ASSERT(obj2->getClass() == &js_ObjectClass);
-                    JS_ASSERT(obj2->freeslot >= JSSLOT_FREE(&js_ObjectClass));
 #endif
 
                     fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(JSOP_LAMBDA_LENGTH)));
                     JS_FUNCTION_METER(cx, joinedinitmethod);
                     break;
                 }
 
                 if (op2 == JSOP_SETMETHOD) {
@@ -5859,22 +5856,20 @@ BEGIN_CASE(JSOP_INITMETHOD)
     if (CX_OWNS_OBJECT_TITLE(cx, obj) &&
         JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) &&
         shape->hasDefaultSetter() &&
         shape->previous() == obj->lastProperty())
     {
         /* Fast path. Property cache hit. */
         uint32 slot = shape->slot;
 
-        JS_ASSERT(slot == obj->freeslot);
+        JS_ASSERT(slot == obj->slotSpan());
         JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
         if (slot < obj->numSlots()) {
             JS_ASSERT(obj->getSlot(slot).isUndefined());
-            ++obj->freeslot;
-            JS_ASSERT(obj->freeslot != 0);
         } else {
             if (!obj->allocSlot(cx, &slot))
                 goto error;
             JS_ASSERT(slot == shape->slot);
         }
 
         /* A new object, or one we just extended in a recent initprop op. */
         JS_ASSERT(!obj->lastProperty() ||
@@ -6478,17 +6473,17 @@ BEGIN_CASE(JSOP_GETFUNNS)
 }
 END_CASE(JSOP_GETFUNNS)
 #endif /* JS_HAS_XML_SUPPORT */
 
 BEGIN_CASE(JSOP_ENTERBLOCK)
 {
     JSObject *obj;
     LOAD_OBJECT(0, obj);
-    JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
+    JS_ASSERT(obj->isStaticBlock());
     JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
     Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
     JS_ASSERT(regs.sp < vp);
     JS_ASSERT(vp <= fp->slots() + script->nslots);
     SetValueRangeToUndefined(regs.sp, vp);
     regs.sp = vp;
 
 #ifdef DEBUG
@@ -6503,44 +6498,44 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
      */
     JSObject *obj2 = fp->getScopeChain();
     Class *clasp;
     while ((clasp = obj2->getClass()) == &js_WithClass)
         obj2 = obj2->getParent();
     if (clasp == &js_BlockClass &&
         obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
         JSObject *youngestProto = obj2->getProto();
-        JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
+        JS_ASSERT(youngestProto->isStaticBlock());
         JSObject *parent = obj;
         while ((parent = parent->getParent()) != youngestProto)
             JS_ASSERT(parent);
     }
 #endif
 
     fp->setBlockChain(obj);
 }
 END_CASE(JSOP_ENTERBLOCK)
 
 BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
 BEGIN_CASE(JSOP_LEAVEBLOCK)
 {
 #ifdef DEBUG
-    JS_ASSERT(fp->getBlockChain()->getClass() == &js_BlockClass);
+    JS_ASSERT(fp->getBlockChain()->isStaticBlock());
     uintN blockDepth = OBJ_BLOCK_DEPTH(cx, fp->getBlockChain());
 
     JS_ASSERT(blockDepth <= StackDepth(script));
 #endif
     /*
      * If we're about to leave the dynamic scope of a block that has been
      * cloned onto fp->scopeChain, clear its private data, move its locals from
      * the stack into the clone, and pop it off the chain.
      */
     JSObject *obj = fp->getScopeChain();
     if (obj->getProto() == fp->getBlockChain()) {
-        JS_ASSERT(obj->getClass() == &js_BlockClass);
+        JS_ASSERT(obj->isClonedBlock());
         if (!js_PutBlockObject(cx, JS_TRUE))
             goto error;
     }
 
     /* Pop the block chain, too.  */
     fp->setBlockChain(fp->getBlockChain()->getParent());
 
     /* Move the result of the expression to the new topmost stack slot. */
--- a/js/src/jslock.cpp
+++ b/js/src/jslock.cpp
@@ -502,17 +502,17 @@ ShareTitle(JSContext *cx, JSTitle *title
 static void
 FinishSharingTitle(JSContext *cx, JSTitle *title)
 {
     js_InitLock(&title->lock);
     title->u.count = 0;     /* NULL may not pun as 0 */
 
     JSObject *obj = TITLE_TO_OBJECT(title);
     if (obj) {
-        uint32 nslots = obj->freeslot;
+        uint32 nslots = obj->slotSpan();
         JS_ASSERT(nslots >= JSSLOT_START(obj->getClass()));
         for (uint32 i = JSSLOT_START(obj->getClass()); i != nslots; ++i) {
             Value v = obj->getSlot(i);
             if (v.isString() &&
                 !js_MakeStringImmutable(cx, v.toString())) {
                 /*
                  * FIXME bug 363059: The following error recovery changes
                  * runtime execution semantics, arbitrarily and silently
@@ -669,17 +669,17 @@ js_GetSlotThreadSafe(JSContext *cx, JSOb
 
     OBJ_CHECK_SLOT(obj, slot);
 
     /*
      * Native object locking is inlined here to optimize the single-threaded
      * and contention-free multi-threaded cases.
      */
     JS_ASSERT(obj->title.ownercx != cx);
-    JS_ASSERT(slot < obj->freeslot);
+    JS_ASSERT(obj->containsSlot(slot));
 
     /*
      * Avoid locking if called from the GC.  Also avoid locking a sealed
      * object.  If neither of those special cases applies, try to claim obj's
      * flyweight lock from whatever context may have had it in an earlier
      * request.
      */
     if (CX_THREAD_IS_RUNNING_GC(cx) ||
@@ -747,17 +747,17 @@ js_SetSlotThreadSafe(JSContext *cx, JSOb
         v = JSVAL_NULL;
     }
 
     /*
      * Native object locking is inlined here to optimize the single-threaded
      * and contention-free multi-threaded cases.
      */
     JS_ASSERT(obj->title.ownercx != cx);
-    JS_ASSERT(slot < obj->freeslot);
+    JS_ASSERT(obj->containsSlot(slot));
 
     /*
      * Avoid locking if called from the GC.  Also avoid locking a sealed
      * object.  If neither of those special cases applies, try to claim obj's
      * flyweight lock from whatever context may have had it in an earlier
      * request.
      */
     if (CX_THREAD_IS_RUNNING_GC(cx) ||
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2955,56 +2955,46 @@ js_NewBlockObject(JSContext *cx)
     blockObj->init(&js_BlockClass, NULL, NULL, NullValue(), cx);
     blockObj->setMap(cx->runtime->emptyBlockShape);
     return blockObj;
 }
 
 JSObject *
 js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
 {
-    JS_ASSERT(!OBJ_IS_CLONED_BLOCK(proto));
-    JS_ASSERT(proto->getClass() == &js_BlockClass);
+    JS_ASSERT(proto->isStaticBlock());
 
     JSObject *clone = js_NewGCObject(cx);
     if (!clone)
         return NULL;
 
     JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, fp);
 
     /* The caller sets parent on its own. */
     clone->init(&js_BlockClass, proto, NULL, priv, cx);
     clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH];
 
-    clone->setMap(cx->runtime->emptyBlockShape);
-    JS_ASSERT(OBJ_IS_CLONED_BLOCK(clone));
-
+    clone->setMap(proto->map);
     if (!clone->ensureInstanceReservedSlots(cx, OBJ_BLOCK_COUNT(cx, proto)))
         return NULL;
+
+    JS_ASSERT(clone->isClonedBlock());
     return clone;
 }
 
 JS_REQUIRES_STACK JSBool
 js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
 {
     /* Blocks have one fixed slot available for the first local.*/
     JS_STATIC_ASSERT(JS_INITIAL_NSLOTS == JSSLOT_BLOCK_DEPTH + 2);
 
     JSStackFrame *const fp = cx->fp();
     JSObject *obj = fp->getScopeChain();
-    JS_ASSERT(obj->getClass() == &js_BlockClass);
+    JS_ASSERT(obj->isClonedBlock());
     JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
-    JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
-
-    /*
-     * Block objects should never be exposed to scripts. Therefore the clone
-     * must not have "own" properties, rather it always delegates property
-     * accesses to its compiler-created prototype Block object, which is the
-     * object that has shapes mapping all the let bindings.
-     */
-    JS_ASSERT(obj->nativeEmpty());
 
     /* Block objects should have all reserved slots allocated early. */
     uintN count = OBJ_BLOCK_COUNT(cx, obj);
     JS_ASSERT(obj->numSlots() == JSSLOT_BLOCK_DEPTH + 1 + count);
 
     /* The block and its locals must be on the current stack for GC safety. */
     uintN depth = OBJ_BLOCK_DEPTH(cx, obj);
     JS_ASSERT(depth <= size_t(cx->regs->sp - fp->base()));
@@ -3035,77 +3025,73 @@ js_PutBlockObject(JSContext *cx, JSBool 
 static JSBool
 block_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
 {
     /*
      * Block objects are never exposed to script, and the engine handles them
      * with care. So unlike other getters, this one can assert (rather than
      * check) certain invariants about obj.
      */
-    JS_ASSERT(obj->getClass() == &js_BlockClass);
-    JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
+    JS_ASSERT(obj->isClonedBlock());
     uintN index = (uintN) JSID_TO_INT(id);
     JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
 
     JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
     if (fp) {
         fp = js_LiveFrameIfGenerator(fp);
         index += fp->getFixedCount() + OBJ_BLOCK_DEPTH(cx, obj);
         JS_ASSERT(index < fp->getSlotCount());
         *vp = fp->slots()[index];
         return true;
     }
 
-    /* Values are in reserved slots immediately following DEPTH. */
-    uint32 slot = JSSLOT_BLOCK_DEPTH + 1 + index;
-    JS_LOCK_OBJ(cx, obj);
-    JS_ASSERT(slot < obj->numSlots());
-    *vp = obj->getSlot(slot);
-    JS_UNLOCK_OBJ(cx, obj);
+    /* Values are in slots immediately following the class-reserved ones. */
+    JS_ASSERT(obj->getSlot(JSSLOT_FREE(&js_BlockClass) + index) == *vp);
     return true;
 }
 
 static JSBool
 block_setProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
 {
-    JS_ASSERT(obj->getClass() == &js_BlockClass);
-    JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
+    JS_ASSERT(obj->isClonedBlock());
     uintN index = (uintN) JSID_TO_INT(id);
     JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
 
     JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
     if (fp) {
         fp = js_LiveFrameIfGenerator(fp);
         index += fp->getFixedCount() + OBJ_BLOCK_DEPTH(cx, obj);
         JS_ASSERT(index < fp->getSlotCount());
         fp->slots()[index] = *vp;
         return true;
     }
 
-    /* Values are in reserved slots immediately following DEPTH. */
-    uint32 slot = JSSLOT_BLOCK_DEPTH + 1 + index;
-    JS_LOCK_OBJ(cx, obj);
-    JS_ASSERT(slot < obj->numSlots());
-    obj->setSlot(slot, *vp);
-    JS_UNLOCK_OBJ(cx, obj);
+    /*
+     * The value in *vp will be written back to the slot in obj that was
+     * allocated when this let binding was defined.
+     */
     return true;
 }
 
-JSBool
-js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index)
-{
-    JS_ASSERT(obj->getClass() == &js_BlockClass);
-    JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
+const Shape *
+JSObject::defineBlockVariable(JSContext *cx, jsid id, intN index)
+{
+    JS_ASSERT(isStaticBlock());
 
     /* Use JSPROP_ENUMERATE to aid the disassembler. */
-    return js_DefineNativeProperty(cx, obj, id, UndefinedValue(),
-                                   block_getProperty,
-                                   block_setProperty,
-                                   JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED,
-                                   Shape::HAS_SHORTID, index, NULL);
+    uint32 slot = JSSLOT_FREE(&js_BlockClass) + index;
+    const Shape *shape = addProperty(cx, id,
+                                     block_getProperty, block_setProperty,
+                                     slot, JSPROP_ENUMERATE | JSPROP_PERMANENT,
+                                     Shape::HAS_SHORTID, index);
+    if (!shape)
+        return NULL;
+    if (slot >= numSlots() && !growSlots(cx, slot + 1))
+        return NULL;
+    return shape;
 }
 
 static size_t
 GetObjectSize(JSObject *obj)
 {
     return (obj->isFunction() && !obj->getPrivate())
            ? sizeof(JSFunction)
            : sizeof(JSObject);
@@ -3152,38 +3138,34 @@ FindObjectIndex(JSObjectArray *array, JS
 }
 
 JSBool
 js_XDRBlockObject(JSXDRState *xdr, JSObject **objp)
 {
     JSContext *cx;
     uint32 parentId;
     JSObject *obj, *parent;
-    uint16 depth, count, i;
-    uint32 tmp;
+    uintN depth, count;
+    uint32 depthAndCount;
     const Shape *shape;
-    jsid propid;
-    JSAtom *atom;
-    int16 shortid;
-    JSBool ok;
 
     cx = xdr->cx;
 #ifdef __GNUC__
     obj = NULL;         /* quell GCC overwarning */
 #endif
 
     if (xdr->mode == JSXDR_ENCODE) {
         obj = *objp;
         parent = obj->getParent();
         parentId = (xdr->script->objectsOffset == 0)
                    ? NO_PARENT_INDEX
                    : FindObjectIndex(xdr->script->objects(), parent);
         depth = (uint16)OBJ_BLOCK_DEPTH(cx, obj);
         count = (uint16)OBJ_BLOCK_COUNT(cx, obj);
-        tmp = (uint32)(depth << 16) | count;
+        depthAndCount = (uint32)(depth << 16) | count;
     }
 #ifdef __GNUC__ /* suppress bogus gcc warnings */
     else count = 0;
 #endif
 
     /* First, XDR the parent atomid. */
     if (!JS_XDRUint32(xdr, &parentId))
         return JS_FALSE;
@@ -3203,56 +3185,61 @@ js_XDRBlockObject(JSXDRState *xdr, JSObj
             parent = NULL;
         else
             parent = xdr->script->getObject(parentId);
         obj->setParent(parent);
     }
 
     AutoObjectRooter tvr(cx, obj);
 
-    if (!JS_XDRUint32(xdr, &tmp))
+    if (!JS_XDRUint32(xdr, &depthAndCount))
         return false;
 
+    Vector<const Shape *, 8> shapes(cx);
+    shapes.growByUninitialized(count);
+
     if (xdr->mode == JSXDR_DECODE) {
-        depth = (uint16)(tmp >> 16);
-        count = (uint16)tmp;
+        depth = (uint16)(depthAndCount >> 16);
+        count = (uint16)depthAndCount;
         obj->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)));
+    } else {
+        for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) {
+            shape = &r.front();
+            shapes[shape->shortid] = shape;
+        }
     }
 
     /*
      * XDR the block object's properties. We know that there are 'count'
-     * properties to XDR, stored as id/shortid pairs. We do not XDR any
-     * non-native properties, only those that the compiler created.
+     * properties to XDR, stored as id/shortid pairs.
      */
-    shape = NULL;
-    ok = JS_TRUE;
-    for (i = 0; i < count; i++) {
+    for (uintN i = 0; i < count; i++) {
+        JSAtom *atom;
+        uint16 shortid;
+
         if (xdr->mode == JSXDR_ENCODE) {
-            /* Find a property to XDR. */
-            do {
-                /* If shape is NULL, this is the first property. */
-                shape = shape ? shape->previous() : obj->lastProperty();
-            } while (!shape->hasShortID());
-
+            shape = shapes[i];
             JS_ASSERT(shape->getter() == block_getProperty);
-            propid = shape->id;
+
+            jsid propid = shape->id;
             JS_ASSERT(JSID_IS_ATOM(propid));
             atom = JSID_TO_ATOM(propid);
-            shortid = shape->shortid;
-            JS_ASSERT(shortid >= 0);
+
+            shortid = uint16(shape->shortid);
+            JS_ASSERT(shortid == i);
         }
 
         /* XDR the real id, then the shortid. */
         if (!js_XDRAtom(xdr, &atom) ||
-            !JS_XDRUint16(xdr, (uint16 *)&shortid)) {
+            !JS_XDRUint16(xdr, &shortid)) {
             return false;
         }
 
         if (xdr->mode == JSXDR_DECODE) {
-            if (!js_DefineBlockVariable(cx, obj, ATOM_TO_JSID(atom), shortid))
+            if (!obj->defineBlockVariable(cx, ATOM_TO_JSID(atom), shortid))
                 return false;
         }
     }
     return true;
 }
 
 #endif
 
@@ -3527,18 +3514,18 @@ JSObject::growSlots(JSContext *cx, size_
      * growth no to waste too much memory.
      */
     const size_t LINEAR_GROWTH_STEP = JS_BIT(16);
 
     /* If we are allocating fslots, there is nothing to do. */
     if (nslots <= JS_INITIAL_NSLOTS)
         return true;
 
-    /* Don't let nslots (or JSObject::freeslot) get close to overflowing. */
-    if (nslots >= JS_NSLOTS_LIMIT) {
+    /* Don't let nslots get close to wrapping around uint32. */
+    if (nslots >= NSLOTS_LIMIT) {
         JS_ReportOutOfMemory(cx);
         return false;
     }
 
     size_t nwords = slotsToDynamicWords(nslots);
 
     /*
      * Round up nslots so the number of bytes in dslots array is power
@@ -3606,27 +3593,19 @@ JSObject::shrinkSlots(JSContext *cx, siz
     }
 }
 
 bool
 JSObject::ensureInstanceReservedSlots(JSContext *cx, size_t nreserved)
 {
     JS_ASSERT_IF(isNative(),
                  isBlock() || isCall() || (isFunction() && getFunctionPrivate()->isBound()));
-    JS_ASSERT_IF(isBlock(), nativeEmpty());
 
     uintN nslots = JSSLOT_FREE(clasp) + nreserved;
-    if (nslots > numSlots() && !allocSlots(cx, nslots))
-        return false;
-
-    JS_ASSERT(freeslot >= JSSLOT_START(clasp));
-    JS_ASSERT(freeslot <= JSSLOT_FREE(clasp));
-    if (freeslot < nslots)
-        freeslot = nslots;
-    return true;
+    return nslots <= numSlots() || allocSlots(cx, nslots);
 }
 
 static JSObject *
 js_InitNullClass(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(0);
     return NULL;
 }
@@ -3882,54 +3861,71 @@ js_ConstructObject(JSContext *cx, Class 
         return NULL;
     }
     return obj;
 }
 
 bool
 JSObject::allocSlot(JSContext *cx, uint32 *slotp)
 {
-    JS_ASSERT(freeslot >= JSSLOT_FREE(clasp));
-
+    uint32 slot = slotSpan();
+    JS_ASSERT(slot >= JSSLOT_FREE(clasp));
+
+    /*
+     * If this object is in dictionary mode and it has a property table, try to
+     * pull a free slot from the property table's slot-number freelist.
+     */
     if (inDictionaryMode() && lastProp->table) {
-        uint32 &last = lastProp->table->freeslot;
+        uint32 &last = lastProp->table->freelist;
         if (last != SHAPE_INVALID_SLOT) {
-            JS_ASSERT(last < freeslot);
+#ifdef DEBUG
+            JS_ASSERT(last < slot);
+            uint32 next = getSlot(last).toPrivateUint32();
+            JS_ASSERT_IF(next != SHAPE_INVALID_SLOT, next < slot);
+#endif
+
             *slotp = last;
 
             Value &vref = getSlotRef(last);
             last = vref.toPrivateUint32();
             vref.setUndefined();
             return true;
         }
     }
 
-    if (freeslot >= numSlots() && !growSlots(cx, freeslot + 1))
+    if (slot >= numSlots() && !growSlots(cx, slot + 1))
         return false;
 
     /* JSObject::growSlots or JSObject::freeSlot should set the free slots to void. */
-    JS_ASSERT(getSlot(freeslot).isUndefined());
-    *slotp = freeslot++;
-    JS_ASSERT(freeslot != 0);
+    JS_ASSERT(getSlot(slot).isUndefined());
+    *slotp = slot;
     return true;
 }
 
 void
 JSObject::freeSlot(JSContext *cx, uint32 slot)
 {
-    JS_ASSERT(freeslot > JSSLOT_FREE(clasp));
+    uint32 limit = slotSpan();
+    JS_ASSERT(slot < limit);
 
     Value &vref = getSlotRef(slot);
-    if (freeslot == slot + 1) {
-        freeslot = slot;
-    } else {
-        if (inDictionaryMode() && lastProp->table) {
-            uint32 &last = lastProp->table->freeslot;
-
-            JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < freeslot);
+    if (inDictionaryMode() && lastProp->table) {
+        uint32 &last = lastProp->table->freelist;
+
+        /* Can't afford to check the whole freelist, but let's check the head. */
+        JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < limit && last != slot);
+
+        /*
+         * Freeing a slot other than the last one mapped by this object's
+         * shape: push the slot onto the dictionary table's freelist. We want
+         * to let the last slot be freed by shrinking the dslots vector; see
+         * js_TraceObject.
+         */
+        if (slot + 1 < limit) {
+            JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < slotSpan());
             vref.setPrivateUint32(last);
             last = slot;
             return;
         }
     }
     vref.setUndefined();
 }
 
@@ -4099,17 +4095,17 @@ js_DefineProperty(JSContext *cx, JSObjec
                   PropertyOp getter, PropertyOp setter, uintN attrs)
 {
     return js_DefineNativeProperty(cx, obj, id, *value, getter, setter, attrs,
                                    0, 0, NULL);
 }
 
 /*
  * Backward compatibility requires allowing addProperty hooks to mutate the
- * nominal initial value of a slot-full property, while GC safety wants that
+ * nominal initial value of a slotful property, while GC safety wants that
  * value to be stored before the call-out through the hook.  Optimize to do
  * both while saving cycles for classes that stub their addProperty hook.
  */
 static inline bool
 CallAddPropertyHook(JSContext *cx, Class *clasp, JSObject *obj, const Shape *shape, Value *vp)
 {
     if (clasp->addProperty != PropertyStub) {
         Value nominal = *vp;
@@ -4503,21 +4499,22 @@ js_FindPropertyHelper(JSContext *cx, jsi
         if (prop) {
 #ifdef DEBUG
             if (parent) {
                 Class *clasp = obj->getClass();
                 JS_ASSERT(pobj->isNative());
                 JS_ASSERT(pobj->getClass() == clasp);
                 if (clasp == &js_BlockClass) {
                     /*
-                     * A block instance on the scope chain is immutable and
-                     * the compile-time prototype provides all its properties.
+                     * A block instance on the scope chain is immutable and it
+                     * shares its shapes with its compile-time prototype.
                      */
-                    JS_ASSERT(pobj == obj->getProto());
-                    JS_ASSERT(protoIndex == 1);
+                    JS_ASSERT(pobj == obj);
+                    JS_ASSERT(pobj->isClonedBlock());
+                    JS_ASSERT(protoIndex == 0);
                 } else {
                     /* Call and DeclEnvClass objects have no prototypes. */
                     JS_ASSERT(!obj->getProto());
                     JS_ASSERT(protoIndex == 0);
                 }
             } else {
                 JS_ASSERT(obj->isNative());
             }
@@ -4952,17 +4949,17 @@ js_SetPropertyHelper(JSContext *cx, JSOb
                                             &pobj, &prop);
     if (protoIndex < 0)
         return JS_FALSE;
     if (prop) {
         if (!pobj->isNative())
             prop = NULL;
     } else {
         /* We should never add properties to lexical blocks.  */
-        JS_ASSERT(obj->getClass() != &js_BlockClass);
+        JS_ASSERT(!obj->isBlock());
 
         if (!obj->getParent() &&
             (defineHow & JSDNP_UNQUALIFIED) &&
             !js_CheckUndeclaredVarAssignment(cx, JSID_TO_STRING(id))) {
             return JS_FALSE;
         }
     }
     shape = (Shape *) prop;
@@ -5919,17 +5916,17 @@ js_TraceObject(JSTracer *trc, JSObject *
     JSContext *cx = trc->context;
     if (!obj->nativeEmpty() && IS_GC_MARKING_TRACER(trc)) {
         /*
          * Trim overlong dslots allocations from the GC, to avoid thrashing in
          * case of delete-happy code that settles down at a given population.
          * The !obj->nativeEmpty() guard above is due to the bug described by
          * the FIXME comment below.
          */
-        size_t slots = obj->freeslot;
+        size_t slots = obj->slotSpan();
         if (obj->numSlots() != slots)
             obj->shrinkSlots(cx, slots);
     }
 
 #ifdef JS_DUMP_SCOPE_METERS
     MeterEntryCount(obj->propertyCount);
 #endif
 
@@ -5951,24 +5948,24 @@ js_TraceObject(JSTracer *trc, JSObject *
         compartment->marked = true;
     }
 
     /*
      * NB: In case clasp->mark mutates something (which would be a bug, but we
      * want to be defensive), leave this code here -- don't move it up and
      * unify it with the |if (!traceScope)| section above.
      *
-     * FIXME: We minimize nslots against obj->freeslot because native objects
-     * such as Date instances may have failed to advance freeslot to cover all
+     * FIXME: We minimize nslots against obj->slotSpan because native objects
+     * such as Date instances may have failed to advance slotSpan to cover all
      * reserved slots (this Date issue may be a bug in JSObject::growSlots, but
      * the general problem occurs in other built-in class implementations).
      */
     uint32 nslots = obj->numSlots();
-    if (!obj->nativeEmpty() && obj->freeslot < nslots)
-        nslots = obj->freeslot;
+    if (!obj->nativeEmpty() && obj->slotSpan() < nslots)
+        nslots = obj->slotSpan();
     JS_ASSERT(nslots >= JSSLOT_START(clasp));
 
     for (uint32 i = JSSLOT_START(clasp); i != nslots; ++i) {
         const Value &v = obj->getSlot(i);
         JS_SET_TRACING_DETAILS(trc, js_PrintObjectSlotName, obj, i);
         MarkValueRaw(trc, v);
     }
 }
@@ -5980,22 +5977,21 @@ js_ClearNative(JSContext *cx, JSObject *
      * Clear obj of all obj's properties. FIXME: we do not clear reserved slots
      * lying below JSSLOT_FREE(clasp). JS_ClearScope does that.
      */
     JS_LOCK_OBJ(cx, obj);
     if (!obj->nativeEmpty()) {
         /* Now that we're done using real properties, clear obj. */
         obj->clear(cx);
 
-        /* Clear slot values and reset freeslot so we're consistent. */
+        /* Clear slot values since obj->clear reset our shape to empty. */
         uint32 freeslot = JSSLOT_FREE(obj->getClass());
         uint32 n = obj->numSlots();
         for (uint32 i = freeslot; i < n; ++i)
             obj->setSlot(i, UndefinedValue());
-        obj->freeslot = freeslot;
     }
     JS_UNLOCK_OBJ(cx, obj);
 }
 
 bool
 js_GetReservedSlot(JSContext *cx, JSObject *obj, uint32 index, Value *vp)
 {
     if (!obj->isNative()) {
@@ -6027,19 +6023,16 @@ js_SetReservedSlot(JSContext *cx, JSObje
         uint32 nslots = JSSLOT_FREE(clasp);
         JS_ASSERT(slot < nslots);
         if (!obj->allocSlots(cx, nslots)) {
             JS_UNLOCK_OBJ(cx, obj);
             return false;
         }
     }
 
-    if (slot >= obj->freeslot)
-        obj->freeslot = slot + 1;
-
     obj->setSlot(slot, v);
     GC_POKE(cx, JS_NULL);
     JS_UNLOCK_OBJ(cx, obj);
     return true;
 }
 
 JSObject *
 JSObject::wrappedObject(JSContext *cx) const
@@ -6334,17 +6327,17 @@ js_DumpObject(JSObject *obj)
     i = JSSLOT_PRIVATE;
     if (clasp->flags & JSCLASS_HAS_PRIVATE) {
         i = JSSLOT_PRIVATE + 1;
         fprintf(stderr, "private %p\n", obj->getPrivate());
     }
 
     fprintf(stderr, "slots:\n");
     reservedEnd = i + JSCLASS_RESERVED_SLOTS(clasp);
-    slots = obj->freeslot;
+    slots = obj->slotSpan();
     for (; i < slots; i++) {
         fprintf(stderr, " %3d ", i);
         if (i < reservedEnd)
             fprintf(stderr, "(reserved) ");
         fprintf(stderr, "= ");
         dumpValue(obj->getSlot(i));
         fputc('\n', stderr);
     }
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -176,18 +176,20 @@ namespace js {
 typedef Vector<PropDesc, 1> PropDescArray;
 
 } /* namespace js */
 
 struct JSObjectMap {
     static JS_FRIEND_DATA(const JSObjectMap) sharedNonNative;
 
     uint32 shape;       /* shape identifier */
+    uint32 slotSpan;    /* one more than maximum live slot number */
 
-    explicit JSObjectMap(uint32 shape) : shape(shape) {}
+    explicit JSObjectMap(uint32 shape) : shape(shape), slotSpan(0) {}
+    JSObjectMap(uint32 shape, uint32 slotSpan) : shape(shape), slotSpan(slotSpan) {}
 
     enum { INVALID_SHAPE = 0x8fffffff, SHAPELESS = 0xffffffff };
 
     bool isNative() const { return this != &sharedNonNative; }
 
   private:
     /* No copy or assignment semantics. */
     JSObjectMap(JSObjectMap &);
@@ -300,16 +302,20 @@ struct JSObject {
     };
 
     js::Class           *clasp;
 
   private:
     inline void setLastProperty(const js::Shape *shape);
     inline void removeLastProperty();
 
+#ifdef DEBUG
+    void checkShapeConsistency();
+#endif
+
   public:
     inline const js::Shape *lastProperty() const;
 
     inline js::Shape **nativeSearch(jsid id, bool adding = false);
     inline const js::Shape *nativeLookup(jsid id);
 
     inline bool nativeContains(jsid id);
     inline bool nativeContains(const js::Shape &shape);
@@ -320,40 +326,43 @@ struct JSObject {
         SEALED          = 0x04,
         BRANDED         = 0x08,
         GENERIC         = 0x10,
         METHOD_BARRIER  = 0x20,
         INDEXED         = 0x40,
         OWN_SHAPE       = 0x80
     };
 
+    /*
+     * Impose a sane upper bound, originally checked only for dense arrays, on
+     * number of slots in an object.
+     */
     enum {
-        JS_NSLOTS_BITS  = 24,
-        JS_NSLOTS_LIMIT = JS_BIT(JS_NSLOTS_BITS)
+        NSLOTS_BITS     = 29,
+        NSLOTS_LIMIT    = JS_BIT(NSLOTS_BITS)
     };
 
-    uint32      flags: 32-JS_NSLOTS_BITS,   /* flags */
-                freeslot: JS_NSLOTS_BITS;   /* next free slot in abstract slot space */
+    uint32      flags;                      /* flags */
     uint32      objShape;                   /* copy of lastProp->shape, or override if different */
 
     JSObject    *proto;                     /* object's prototype */
     JSObject    *parent;                    /* object's parent */
     js::Value   *dslots;                    /* dynamically allocated slots */
 
     /* Empty shape of kids if prototype, located here to align fslots on 32 bit targets. */
     js::EmptyShape *emptyShape;
 
     js::Value   fslots[JS_INITIAL_NSLOTS];  /* small number of fixed slots */
 #ifdef JS_THREADSAFE
     JSTitle     title;
 #endif
 
     /*
-     * Return an immutable, shareable, empty scope with the same ops as this
-     * and the same freeslot as this had when empty.
+     * Return an immutable, shareable, empty shape with the same clasp as this
+     * and the same slotSpan as this had when empty.
      *
      * If |this| is the scope of an object |proto|, the resulting scope can be
      * used as the scope of a new object whose prototype is |proto|.
      */
     inline bool canProvideEmptyShape(js::Class *clasp);
     inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp);
 
     bool isNative() const       { return map->isNative(); }
@@ -366,19 +375,16 @@ struct JSObject {
     }
 
     const js::ObjectOps *getOps() const {
         return &getClass()->ops;
     }
 
     inline void trace(JSTracer *trc);
 
-    static size_t flagsOffset();
-    uint32 flagsAndFreeslot();
-
     uint32 shape() const {
         JS_ASSERT(objShape != JSObjectMap::INVALID_SHAPE);
         return objShape;
     }
 
     bool isDelegate() const     { return !!(flags & DELEGATE); }
     void setDelegate()          { flags |= DELEGATE; }
 
@@ -566,17 +572,19 @@ struct JSObject {
      * NB: ensureClassReservedSlotsForEmptyObject asserts that nativeEmpty()
      * Use ensureClassReservedSlots for any object, either empty or already
      * extended with properties.
      */
     bool ensureClassReservedSlotsForEmptyObject(JSContext *cx);
 
     inline bool ensureClassReservedSlots(JSContext *cx);
 
-    bool containsSlot(uint32 slot) const { return slot < freeslot; }
+    uint32 slotSpan() const { return map->slotSpan; }
+
+    bool containsSlot(uint32 slot) const { return slot < slotSpan(); }
 
     js::Value& getSlotRef(uintN slot) {
         return (slot < JS_INITIAL_NSLOTS)
                ? fslots[slot]
                : (JS_ASSERT(slot < dslots[-1].toPrivateUint32()),
                   dslots[slot - JS_INITIAL_NSLOTS]);
     }
 
@@ -1056,16 +1064,18 @@ struct JSObject {
     static bool thisObject(JSContext *cx, const js::Value &v, js::Value *vp);
 
     inline void dropProperty(JSContext *cx, JSProperty *prop);
 
     JS_FRIEND_API(JSCompartment *) getCompartment(JSContext *cx);
 
     inline JSObject *getThrowTypeError() const;
 
+    const js::Shape *defineBlockVariable(JSContext *cx, jsid id, intN index);
+
     void swap(JSObject *obj);
 
     inline bool canHaveMethodBarrier() const;
 
     inline bool isArguments() const;
     inline bool isNormalArguments() const;
     inline bool isStrictArguments() const;
     inline bool isArray() const;
@@ -1075,16 +1085,18 @@ struct JSObject {
     inline bool isBoolean() const;
     inline bool isString() const;
     inline bool isPrimitive() const;
     inline bool isDate() const;
     inline bool isFunction() const;
     inline bool isObject() const;
     inline bool isWith() const;
     inline bool isBlock() const;
+    inline bool isStaticBlock() const;
+    inline bool isClonedBlock() const;
     inline bool isCall() const;
     inline bool isRegExp() const;
     inline bool isXML() const;
     inline bool isXMLId() const;
     inline bool isNamespace() const;
     inline bool isQName() const;
 
     inline bool isProxy() const;
@@ -1110,17 +1122,17 @@ JS_STATIC_ASSERT(sizeof(JSObject) % JS_G
 /*
  * Maximum capacity of the obj->dslots vector, net of the hidden slot at
  * obj->dslots[-1] that is used to store the length of the vector biased by
  * JS_INITIAL_NSLOTS (and again net of the slot at index -1).
  */
 #define MAX_DSLOTS_LENGTH   (~size_t(0) / sizeof(js::Value) - 1)
 #define MAX_DSLOTS_LENGTH32 (~uint32(0) / sizeof(js::Value) - 1)
 
-#define OBJ_CHECK_SLOT(obj,slot) (JS_ASSERT(slot < (obj)->freeslot))
+#define OBJ_CHECK_SLOT(obj,slot) JS_ASSERT((obj)->containsSlot(slot))
 
 #ifdef JS_THREADSAFE
 
 /*
  * The GC runs only when all threads except the one on which the GC is active
  * are suspended at GC-safe points, so calling obj->getSlot() from the GC's
  * thread is safe when rt->gcRunning is set. See jsgc.cpp for details.
  */
@@ -1182,33 +1194,36 @@ inline bool JSObject::isBlock() const  {
  * telling the stack depth of the relevant slots (the slot whose value is the
  * object named in the with statement, the slots containing the block's local
  * variables); and both have a private slot referring to the JSStackFrame in
  * whose activation they were created (or null if the with or block object
  * outlives the frame).
  */
 static const uint32 JSSLOT_BLOCK_DEPTH = JSSLOT_PRIVATE + 1;
 
-static inline bool
-OBJ_IS_CLONED_BLOCK(JSObject *obj)
+inline bool
+JSObject::isStaticBlock() const
 {
-    return obj->getProto() != NULL;
+    return isBlock() && !getProto();
+}
+
+inline bool
+JSObject::isClonedBlock() const
+{
+    return isBlock() && !!getProto();
 }
 
 static const uint32 JSSLOT_WITH_THIS = JSSLOT_PRIVATE + 2;
 
-extern JSBool
-js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index);
-
 #define OBJ_BLOCK_COUNT(cx,obj)                                               \
-    ((OBJ_IS_CLONED_BLOCK(obj) ? obj->getProto() : obj)->propertyCount())
+    (obj)->propertyCount()
 #define OBJ_BLOCK_DEPTH(cx,obj)                                               \
-    obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32()
+    (obj)->getSlot(JSSLOT_BLOCK_DEPTH).toInt32()
 #define OBJ_SET_BLOCK_DEPTH(cx,obj,depth)                                     \
-    obj->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)))
+    (obj)->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)))
 
 /*
  * To make sure this slot is well-defined, always call js_NewWithObject to
  * create a With object, don't call js_NewObject directly.  When creating a
  * With object that does not correspond to a stack slot, pass -1 for depth.
  *
  * When popping the stack across this object's "with" statement, client code
  * must call withobj->setPrivate(NULL).
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -571,17 +571,16 @@ JSObject::setWithThis(JSObject *thisp)
 inline void
 JSObject::initCommon(js::Class *aclasp, JSObject *proto, JSObject *parent,
                      JSContext *cx)
 {
     JS_STATIC_ASSERT(JSSLOT_PRIVATE + 3 == JS_INITIAL_NSLOTS);
 
     clasp = aclasp;
     flags = 0;
-    freeslot = JSSLOT_START(aclasp);
 
 #ifdef DEBUG
     /*
      * NB: objShape must not be set here; rather, the caller must call setMap
      * or setSharedNonNativeMap after calling init. To defend this requirement
      * we set map to null in DEBUG builds, and set objShape to a value we then
      * assert obj->shape() never returns.
      */
@@ -701,50 +700,16 @@ JSObject::isCallable()
 }
 
 static inline bool
 js_IsCallable(const js::Value &v)
 {
     return v.isObject() && v.toObject().isCallable();
 }
 
-inline size_t
-JSObject::flagsOffset()
-{
-    static size_t offset = 0;
-    if (offset)
-        return offset;
-
-    /* 
-     * We can't address a bitfield, so instead we create a struct, set only
-     * the field we care about, then search for it.
-     */
-    JSObject fakeObj;
-    memset(&fakeObj, 0, sizeof(fakeObj));
-    fakeObj.flags = 1;
-    for (unsigned testOffset = 0; testOffset < sizeof(fakeObj); testOffset += sizeof(uint32)) {
-        uint32 *ptr = reinterpret_cast<uint32 *>(reinterpret_cast<char *>(&fakeObj) + testOffset);
-        if (*ptr) {
-            JS_ASSERT(*ptr == 1);
-            offset = testOffset;
-            return offset;
-        }
-    }
-    JS_NOT_REACHED("memory weirdness");
-    return 0;
-}
-
-inline uint32
-JSObject::flagsAndFreeslot()
-{
-    size_t offset = flagsOffset();
-    char *ptr = offset + (char*) this;
-    return *(uint32*)ptr;
-}
-
 namespace js {
 
 class AutoPropDescArrayRooter : private AutoGCRooter
 {
   public:
     AutoPropDescArrayRooter(JSContext *cx)
       : AutoGCRooter(cx, DESCRIPTORS), descriptors(cx)
     { }
@@ -814,17 +779,16 @@ InitScopeForObject(JSContext* cx, JSObje
         uint32 freeslot = JSSLOT_FREE(clasp);
         JS_ASSERT(freeslot >= JSSLOT_PRIVATE);
 
         empty = js::EmptyShape::create(cx, clasp);
         if (!empty)
             goto bad;
         if (freeslot > JS_INITIAL_NSLOTS && !obj->allocSlots(cx, freeslot))
             goto bad;
-        obj->freeslot = freeslot;
     }
 
     obj->setMap(empty);
     return true;
 
   bad:
     /* The GC nulls map initially. It should still be null on error. */
     JS_ASSERT(!obj->map);
--- a/js/src/jsparse.cpp
+++ b/js/src/jsparse.cpp
@@ -772,17 +772,17 @@ Compiler::compileScript(JSContext *cx, J
         JS_ASSERT(globalObj->isNative());
         JS_ASSERT((globalObj->getClass()->flags & JSCLASS_GLOBAL_FLAGS) == JSCLASS_GLOBAL_FLAGS);
 
         /* Make sure function and object classes are initialized. */
         JSObject *tobj;
         if (!js_GetClassPrototype(cx, scopeChain, JSProto_Function, &tobj))
             return NULL;
 
-        globalScope.globalFreeSlot = globalObj->freeslot;
+        globalScope.globalFreeSlot = globalObj->slotSpan();
     }
 
     /* Null script early in case of error, to reduce our code footprint. */
     script = NULL;
 
     globalScope.cg = &cg;
     cg.flags |= tcflags;
     cg.scopeChain = scopeChain;
@@ -942,17 +942,17 @@ Compiler::compileScript(JSContext *cx, J
                 if (slot >= SLOTNO_LIMIT)
                     goto too_many_slots;
                 SET_SLOTNO(code, slot);
             }
         }
     }
 
     if (globalScope.defs.length()) {
-        JS_ASSERT(globalObj->freeslot == globalScope.globalFreeSlot);
+        JS_ASSERT(globalObj->slotSpan() == globalScope.globalFreeSlot);
         JS_ASSERT(!cg.compilingForEval());
         for (size_t i = 0; i < globalScope.defs.length(); i++) {
             GlobalScope::GlobalDef &def = globalScope.defs[i];
             jsid id = ATOM_TO_JSID(def.atom);
             Value rval;
 
             if (def.funbox) {
                 JSFunction *fun = (JSFunction *)def.funbox->object;
@@ -3321,44 +3321,42 @@ BindLet(JSContext *cx, BindData *data, J
      * and eval code, Compiler::compileScript will adjust the slot again to
      * include script->nfixed.
      */
     pn->pn_op = JSOP_GETLOCAL;
     pn->pn_cookie.set(tc->staticLevel, uint16(n));
     pn->pn_dflags |= PND_LET | PND_BOUND;
 
     /*
-     * Define the let binding's property before storing pn in reserved slot at
-     * reserved slot index (NB: not slot number) n.
+     * Define the let binding's property before storing pn in the the binding's
+     * slot indexed by n off the class-reserved slot base.
      */
-    if (!js_DefineBlockVariable(cx, blockObj, ATOM_TO_JSID(atom), n))
+    const Shape *shape = blockObj->defineBlockVariable(cx, ATOM_TO_JSID(atom), n);
+    if (!shape)
         return false;
 
     /*
-     * Store pn temporarily in what would be reserved slots in a cloned block
-     * object (once the prototype's final population is known, after all 'let'
-     * bindings for this block have been parsed). We will free these reserved
-     * slots in jsemit.cpp:EmitEnterBlock.
+     * Store pn temporarily in what would be shape-mapped slots in a cloned
+     * block object (once the prototype's final population is known, after all
+     * 'let' bindings for this block have been parsed). We free these slots in
+     * jsemit.cpp:EmitEnterBlock so they don't tie up unused space in the so-
+     * called "static" prototype Block.
      */
-    uintN slot = JSSLOT_FREE(&js_BlockClass) + n;
-    if (slot >= blockObj->numSlots() && !blockObj->growSlots(cx, slot + 1))
-        return false;
-    blockObj->freeslot = slot + 1;
-    blockObj->setSlot(slot, PrivateValue(pn));
+    blockObj->setSlot(shape->slot, PrivateValue(pn));
     return true;
 }
 
 static void
 PopStatement(JSTreeContext *tc)
 {
     JSStmtInfo *stmt = tc->topStmt;
 
     if (stmt->flags & SIF_SCOPE) {
         JSObject *obj = stmt->blockObj;
-        JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
+        JS_ASSERT(!obj->isClonedBlock());
 
         for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
             JSAtom *atom = JSID_TO_ATOM(r.front().id);
 
             /* Beware the empty destructuring dummy. */
             if (atom == tc->parser->context->runtime->atomState.emptyAtom)
                 continue;
             tc->decls.remove(tc->parser, atom);
@@ -4131,22 +4129,19 @@ CheckDestructuring(JSContext *cx, BindDa
      * Note that we add such a property even if the block has locals due to
      * later let declarations in it. We optimize for code simplicity here,
      * not the fastest runtime performance with empty [] or {}.
      */
     if (data &&
         data->binder == BindLet &&
         OBJ_BLOCK_COUNT(cx, tc->blockChain) == 0) {
         ok = !!js_DefineNativeProperty(cx, tc->blockChain,
-                                       ATOM_TO_JSID(cx->runtime->
-                                                    atomState.emptyAtom),
+                                       ATOM_TO_JSID(cx->runtime->atomState.emptyAtom),
                                        UndefinedValue(), NULL, NULL,
-                                       JSPROP_ENUMERATE |
-                                       JSPROP_PERMANENT |
-                                       JSPROP_SHARED,
+                                       JSPROP_ENUMERATE | JSPROP_PERMANENT,
                                        Shape::HAS_SHORTID, 0, NULL);
         if (!ok)
             goto out;
     }
 
     ok = JS_TRUE;
 
   out:
--- a/js/src/jspropertycacheinlines.h
+++ b/js/src/jspropertycacheinlines.h
@@ -128,17 +128,17 @@ PropertyCache::testForSet(JSContext *cx,
     *atomp = atom;
     return false;
 }
 
 JS_ALWAYS_INLINE bool
 PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj,
                            const js::Shape **shapep, PropertyCacheEntry **entryp)
 {
-    JS_ASSERT(obj->freeslot >= JSSLOT_FREE(obj->getClass()));
+    JS_ASSERT(obj->slotSpan() >= JSSLOT_FREE(obj->getClass()));
     JS_ASSERT(!obj->sealed());
     uint32 kshape = obj->shape();
     PropertyCacheEntry *entry = &table[hash(pc, kshape)];
     *entryp = entry;
     PCMETER(pctestentry = entry);
     PCMETER(tests++);
     PCMETER(initests++);
     JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT);
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -146,17 +146,17 @@ bool
 PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child)
 {
     JS_ASSERT(!parent->inDictionary());
     JS_ASSERT(!child->parent);
     JS_ASSERT(!child->inDictionary());
     JS_ASSERT(!JSID_IS_VOID(parent->id));
     JS_ASSERT(!JSID_IS_VOID(child->id));
 
-    child->parent = parent;
+    child->setParent(parent);
 
     KidsPointer *kidp = &parent->kids;
     if (kidp->isNull()) {
         kidp->setShape(child);
         return true;
     }
 
     Shape *shape;
@@ -422,18 +422,17 @@ PropertyTree::getChild(JSContext *cx, Sh
     JS_LOCK_GC(cx->runtime);
 
   locked_not_found:
     shape = newShape(cx, true);
     if (!shape)
         return NULL;
 
     new (shape) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
-                      child.flags, child.shortid);
-    shape->shape = js_GenerateShape(cx, true);
+                      child.flags, child.shortid, js_GenerateShape(cx, true));
 
     if (!insertChild(cx, parent, shape))
         return NULL;
 
   out:
     JS_UNLOCK_GC(cx->runtime);
     return shape;
 }
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -97,38 +97,36 @@ js_GenerateShape(JSContext *cx, bool gcL
 
 bool
 JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx)
 {
     JS_ASSERT(nativeEmpty());
 
     /*
      * Subtle rule: objects that call JSObject::ensureInstanceReservedSlots
-     * either must:
-     *
-     * (a) never escape anywhere an ad-hoc property could be set on them;
+     * must either:
      *
-     * (b) have at least JSSLOT_FREE(this->clasp) >= JS_INITIAL_NSLOTS.
+     * (a) never escape anywhere an ad-hoc property could be set on them; or
      *
-     * Note that (b) depends on fine-tuning of JS_INITIAL_NSLOTS (3).
+     * (b) protect their instance-reserved slots with shapes, at least a custom
+     * empty shape with the right slotSpan member.
      *
      * Block objects are the only objects that fall into category (a). While
      * Call objects cannot escape, they can grow ad-hoc properties via eval
-     * of a var declaration, but they have slots mapped by compiler-created
-     * shapes, and thus no problem predicting first ad-hoc property slot.
+     * of a var declaration, or due to a function statement being evaluated,
+     * but they have slots mapped by compiler-created shapes, and thus (b) no
+     * problem predicting first ad-hoc property slot. Bound Function objects
+     * have a custom empty shape.
      *
-     * (Note that Block and Call objects are the only native classes that are
-     * allowed to call ensureInstanceReservedSlots.)
+     * (Note that Block, Call, and bound Function objects are the only native
+     * class objects that are allowed to call ensureInstanceReservedSlots.)
      */
     uint32 nfixed = JSSLOT_FREE(getClass());
-    if (nfixed > freeslot) {
-        if (nfixed > numSlots() && !allocSlots(cx, nfixed))
-            return false;
-        freeslot = nfixed;
-    }
+    if (nfixed > numSlots() && !allocSlots(cx, nfixed))
+        return false;
 
     return true;
 }
 
 #define PROPERTY_TABLE_NBYTES(n) ((n) * sizeof(Shape *))
 
 #ifdef DEBUG
 JS_FRIEND_DATA(JSScopeStats) js_scope_stats = {0};
@@ -464,50 +462,50 @@ JSObject::getChildProperty(JSContext *cx
      * another property's slot allocate a slot here, but may lose it due to a
      * JS_ClearScope call.
      */
     if (!child.isAlias()) {
         if (child.attrs & JSPROP_SHARED) {
             child.slot = SHAPE_INVALID_SLOT;
         } else {
             /*
-             * We may have set slot from a nearly-matching shape, above.
-             * If so, we're overwriting that nearly-matching shape, so we
-             * can reuse its slot -- we don't need to allocate a new one.
-             * Similarly, we use a specific slot if provided by the caller.
+             * We may have set slot from a nearly-matching shape, above. If so,
+             * we're overwriting that nearly-matching shape, so we can reuse
+             * its slot -- we don't need to allocate a new one. Similarly, we
+             * use a specific slot if provided by the caller.
              */
             if (child.slot == SHAPE_INVALID_SLOT && !allocSlot(cx, &child.slot))
                 return NULL;
         }
     }
 
+    Shape *shape;
+
     if (inDictionaryMode()) {
         JS_ASSERT(parent == lastProp);
         if (parent->frozen()) {
             parent = Shape::newDictionaryList(cx, &lastProp);
             if (!parent)
                 return NULL;
             JS_ASSERT(!parent->frozen());
         }
-        if (Shape::newDictionaryShape(cx, child, &lastProp)) {
-            updateFlags(lastProp);
-            updateShape(cx);
-            return lastProp;
+        shape = Shape::newDictionaryShape(cx, child, &lastProp);
+        if (!shape)
+            return NULL;
+    } else {
+        shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child);
+        if (shape) {
+            JS_ASSERT(shape->parent == parent);
+            JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent);
+            setLastProperty(shape);
         }
-        return NULL;
     }
 
-    Shape *shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child);
-    if (shape) {
-        JS_ASSERT(shape->parent == parent);
-        JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent);
-        setLastProperty(shape);
-        updateFlags(shape);
-        updateShape(cx);
-    }
+    updateFlags(shape);
+    updateShape(cx);
     return shape;
 }
 
 void
 JSObject::reportReadOnlyScope(JSContext *cx)
 {
     JSString *str;
     const char *bytes;
@@ -524,19 +522,18 @@ JSObject::reportReadOnlyScope(JSContext 
 Shape *
 Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
 {
     Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx);
     if (!dprop)
         return NULL;
 
     new (dprop) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
-                      (child.flags & ~FROZEN) | IN_DICTIONARY,
-                      child.shortid);
-    dprop->shape = js_GenerateShape(cx, false);
+                      (child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
+                      js_GenerateShape(cx, false), child.slotSpan);
 
     dprop->listp = NULL;
     dprop->insertIntoDictionary(listp);
 
     JS_RUNTIME_METER(cx->runtime, liveDictModeNodes);
     return dprop;
 }
 
@@ -618,16 +615,122 @@ NormalizeGetterAndSetter(JSContext *cx, 
         if (!setter) {
             METER(wrapWatchFails);
             return false;
         }
     }
     return true;
 }
 
+#ifdef DEBUG
+# define CHECK_SHAPE_CONSISTENCY(obj) obj->checkShapeConsistency()
+
+void
+JSObject::checkShapeConsistency()
+{
+    static int throttle = -1;
+    if (throttle < 0) {
+        if (const char *var = getenv("JS_CHECK_SHAPE_THROTTLE"))
+            throttle = atoi(var);
+        if (throttle < 0)
+            throttle = 0;
+    }
+    if (throttle == 0)
+        return;
+
+    JS_ASSERT(isNative());
+    if (hasOwnShape())
+        JS_ASSERT(objShape != lastProp->shape);
+    else
+        JS_ASSERT(objShape == lastProp->shape);
+
+    Shape *shape = lastProp;
+    Shape *prev = NULL;
+
+    if (inDictionaryMode()) {
+        if (PropertyTable *table = shape->table) {
+            for (uint32 fslot = table->freelist; fslot != SHAPE_INVALID_SLOT;
+                 fslot = getSlotRef(fslot).toPrivateUint32()) {
+                JS_ASSERT(fslot < shape->slotSpan);
+            }
+
+            for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) {
+                JS_ASSERT_IF(shape != lastProp, !shape->table);
+
+                Shape **spp = table->search(shape->id, false);
+                JS_ASSERT(SHAPE_FETCH(spp) == shape);
+            }
+        } else {
+            shape = shape->parent;
+            for (int n = throttle; --n >= 0 && shape; shape = shape->parent)
+                JS_ASSERT(!shape->table);
+        }
+
+        shape = lastProp;
+        for (int n = throttle; --n >= 0 && shape; shape = shape->parent) {
+            JS_ASSERT_IF(shape->slot != SHAPE_INVALID_SLOT, shape->slot < shape->slotSpan);
+            if (!prev) {
+                JS_ASSERT(shape == lastProp);
+                JS_ASSERT(shape->listp == &lastProp);
+            } else {
+                JS_ASSERT(shape->listp == &prev->parent);
+                JS_ASSERT(prev->slotSpan >= shape->slotSpan);
+            }
+            prev = shape;
+        }
+    } else {
+        for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) {
+            if (PropertyTable *table = shape->table) {
+                JS_ASSERT(shape->parent);
+                for (Shape::Range r(shape); !r.empty(); r.popFront()) {
+                    Shape **spp = table->search(r.front().id, false);
+                    JS_ASSERT(SHAPE_FETCH(spp) == &r.front());
+                }
+            }
+            if (prev) {
+                JS_ASSERT(prev->slotSpan >= shape->slotSpan);
+                if (shape->kids.isShape()) {
+                    JS_ASSERT(shape->kids.toShape() == prev);
+                } else if (shape->kids.isChunk()) {
+                    bool found = false;
+                    for (KidsChunk *chunk = shape->kids.toChunk(); chunk; chunk = chunk->next) {
+                        for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
+                            if (!chunk->kids[i]) {
+                                JS_ASSERT(!chunk->next);
+                                for (uintN j = i + 1; j < MAX_KIDS_PER_CHUNK; j++)
+                                    JS_ASSERT(!chunk->kids[j]);
+                                JS_ASSERT(found);
+                            }
+                            if (chunk->kids[i] == prev) {
+                                JS_ASSERT(!found);
+                                found = true;
+                            }
+                        }
+                    }
+                } else {
+                    JS_ASSERT(shape->kids.isHash());
+                    KidsHash *hash = shape->kids.toHash();
+                    KidsHash::Ptr ptr = hash->lookup(prev);
+                    JS_ASSERT(*ptr == prev);
+                }
+            }
+            prev = shape;
+        }
+
+        if (throttle == 0) {
+            JS_ASSERT(!shape->table);
+            JS_ASSERT(JSID_IS_EMPTY(shape->id));
+            JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
+        }
+    }
+}
+#else
+# define CHECK_SHAPE_CONSISTENCY(obj) ((void)0)
+#endif
+
 const Shape *
 JSObject::addProperty(JSContext *cx, jsid id,
                       PropertyOp getter, PropertyOp setter,
                       uint32 slot, uintN attrs,
                       uintN flags, intN shortid)
 {
     JS_ASSERT(!JSID_IS_VOID(id));
 
@@ -716,20 +819,22 @@ JSObject::addPropertyCommon(JSContext *c
          * modern OSes), stick with linear search rather than whining about
          * this little set-back.  Therefore we must test !lastProp->table and
          * entry count >= PropertyTable::HASH_THRESHOLD, not merely whether the
          * entry count just reached the threshold.
          */
         if (!lastProp->table)
             lastProp->maybeHash(cx);
 
+        CHECK_SHAPE_CONSISTENCY(this);
         METER(adds);
         return shape;
     }
 
+    CHECK_SHAPE_CONSISTENCY(this);
     METER(addFails);
     return NULL;
 }
 
 const Shape *
 JSObject::putProperty(JSContext *cx, jsid id,
                       PropertyOp getter, PropertyOp setter,
                       uint32 slot, uintN attrs,
@@ -783,22 +888,35 @@ JSObject::putProperty(JSContext *cx, jsi
      */
     Shape *oldLastProp = lastProp;
     if (shape == lastProp && !inDictionaryMode()) {
         removeLastProperty();
     } else {
         if (!inDictionaryMode()) {
             if (!toDictionaryMode(cx))
                 return NULL;
+
             spp = nativeSearch(id);
             shape = SHAPE_FETCH(spp);
+            table = lastProp->table;
+            oldLastProp = lastProp;
         }
         shape->removeFromDictionary(this);
     }
 
+#ifdef DEBUG
+    if (shape == oldLastProp) {
+        JS_ASSERT(lastProp->slotSpan <= shape->slotSpan);
+        if (shape->hasSlot())
+            JS_ASSERT(shape->slot < shape->slotSpan);
+        if (lastProp->slotSpan < numSlots())
+            getSlotRef(lastProp->slotSpan).setUndefined();
+    }
+#endif
+
     /*
      * If we fail later on trying to find or create a new shape, we will
      * restore *spp from |overwriting|. Note that we don't bother to keep
      * table->removedCount in sync, because we will fix up both *spp and
      * table->entryCount shortly.
      */
     if (table)
         SHAPE_STORE_PRESERVING_COLLISION(spp, NULL);
@@ -822,65 +940,74 @@ JSObject::putProperty(JSContext *cx, jsi
             shape->setTable(table);
         }
 
         if (!lastProp->table) {
             /* See comment in JSObject::addPropertyCommon about ignoring OOM here. */
             lastProp->maybeHash(cx);
         }
 
+        CHECK_SHAPE_CONSISTENCY(this);
         METER(puts);
         return shape;
     }
 
     if (table)
         SHAPE_STORE_PRESERVING_COLLISION(spp, overwriting);
+    CHECK_SHAPE_CONSISTENCY(this);
     METER(putFails);
     return NULL;
 }
 
 const Shape *
 JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN mask,
                          PropertyOp getter, PropertyOp setter)
 {
     const Shape *newShape;
 
     JS_ASSERT(!JSID_IS_VOID(shape->id));
     JS_ASSERT(nativeContains(*shape));
 
     attrs |= shape->attrs & mask;
 
-    /* Allow only shared (slot-less) => unshared (slot-full) transition. */
+    /* Allow only shared (slotless) => unshared (slotful) transition. */
     JS_ASSERT(!((attrs ^ shape->attrs) & JSPROP_SHARED) ||
               !(attrs & JSPROP_SHARED));
 
     /* Don't allow method properties to be changed to have a getter. */
     JS_ASSERT_IF(getter != shape->rawGetter, !shape->isMethod());
 
     if (getter == PropertyStub)
         getter = NULL;
     if (setter == PropertyStub)
         setter = NULL;
     if (shape->attrs == attrs && shape->getter() == getter && shape->setter() == setter)
         return shape;
 
     Shape child(shape->id, getter, setter, shape->slot, attrs, shape->flags, shape->shortid);
+
     if (inDictionaryMode()) {
         shape->removeFromDictionary(this);
         newShape = Shape::newDictionaryShape(cx, child, &lastProp);
         if (newShape) {
             JS_ASSERT(newShape == lastProp);
 
-            if (PropertyTable *table = shape->table) {
-                /* Overwrite shape with newShape in newShape's table. */
+            /*
+             * Let tableShape be the shape with non-null table, either the one
+             * we removed or the parent of lastProp.
+             */
+            const Shape *tableShape = shape->table ? shape : lastProp->parent;
+
+            if (PropertyTable *table = tableShape->table) {
+                /* Overwrite shape with newShape in the property table. */
                 Shape **spp = table->search(shape->id, true);
                 SHAPE_STORE_PRESERVING_COLLISION(spp, newShape);
 
-                /* Hand the table off from shape to newShape. */
-                shape->setTable(NULL);
+                /* Hand the table off from tableShape to newShape. */
+                tableShape->setTable(NULL);
                 newShape->setTable(table);
             }
 
             updateFlags(newShape);
             updateShape(cx);
         }
     } else if (shape == lastProp) {
         newShape = getChildProperty(cx, shape->parent, child);
@@ -900,16 +1027,17 @@ JSObject::changeProperty(JSContext *cx, 
          * JSObject::removeProperty because it will free a valid shape->slot and
          * JSObject::putProperty won't re-allocate it.
          */
         newShape = putProperty(cx, child.id, child.rawGetter, child.rawSetter, child.slot,
                                child.attrs, child.flags, child.shortid);
     }
 
 #ifdef DEBUG
+    CHECK_SHAPE_CONSISTENCY(this);
     if (newShape)
         METER(changes);
     else
         METER(changeFails);
 #endif
     return newShape;
 }
 
@@ -935,17 +1063,18 @@ JSObject::removeProperty(JSContext *cx, 
                 return false;
             spp = nativeSearch(shape->id);
             shape = SHAPE_FETCH(spp);
         }
         JS_ASSERT(SHAPE_FETCH(spp) == shape);
     }
 
     /* First, if shape is unshared and not cleared, free its slot number. */
-    if (containsSlot(shape->slot)) {
+    bool hadSlot = !shape->isAlias() && containsSlot(shape->slot);
+    if (hadSlot) {
         freeSlot(cx, shape->slot);
         JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals);
     }
 
     /*
      * Next, consider removing id from lastProp->table if in dictionary mode,
      * by setting its entry to a removed or free sentinel.
      */
@@ -965,32 +1094,52 @@ JSObject::removeProperty(JSContext *cx, 
 
 #ifdef DEBUG
                 /*
                  * Check the consistency of the table but limit the number of
                  * checks not to alter significantly the complexity of the
                  * delete in debug builds, see bug 534493.
                  */
                 const Shape *aprop = lastProp;
-                for (unsigned n = 50; aprop->parent && n != 0; aprop = aprop->parent, --n)
+                for (int n = 50; --n >= 0 && aprop->parent; aprop = aprop->parent)
                     JS_ASSERT_IF(aprop != shape, nativeContains(*aprop));
 #endif
             }
         }
 
         /*
          * Remove shape from its non-circular doubly linked list, setting this
          * object's shape first if shape is not lastProp so the updateShape(cx)
          * after this if-else will generate a fresh shape for this scope.
          */
         if (shape != lastProp)
             setOwnShape(lastProp->shape);
-        shape->setTable(NULL);
+
+        Shape *oldLastProp = lastProp;
         shape->removeFromDictionary(this);
-        lastProp->setTable(table);
+        if (table) {
+            if (shape == oldLastProp) {
+                JS_ASSERT(shape->table == table);
+                JS_ASSERT(shape->parent == lastProp);
+                JS_ASSERT(shape->slotSpan >= lastProp->slotSpan);
+                JS_ASSERT_IF(hadSlot, shape->slot + 1 <= shape->slotSpan);
+
+                /*
+                 * If the dictionary table's freelist is non-empty, we must
+                 * preserve lastProp->slotSpan. We can't reduce slotSpan even
+                 * by one or we might lose non-decreasing slotSpan order.
+                 */
+                if (table->freelist != SHAPE_INVALID_SLOT)
+                    lastProp->slotSpan = shape->slotSpan;
+            }
+
+            /* Hand off table from old to new lastProp. */
+            oldLastProp->setTable(NULL);
+            lastProp->setTable(table);
+        }
     } else {
         /*
          * Non-dictionary-mode property tables are shared immutables, so all we
          * need do is retract lastProp and we'll either get or else lazily make
          * via a later maybeHash the exact table for the new property lineage.
          */
         JS_ASSERT(shape == lastProp);
         removeLastProperty();
@@ -1001,16 +1150,17 @@ JSObject::removeProperty(JSContext *cx, 
     if (PropertyTable *table = lastProp->table) {
         uint32 size = table->capacity();
         if (size > PropertyTable::MIN_SIZE && table->entryCount <= size >> 2) {
             METER(shrinks);
             (void) table->change(cx, -1);
         }
     }
 
+    CHECK_SHAPE_CONSISTENCY(this);
     LIVE_SCOPE_METER(cx, --cx->runtime->liveObjectProps);
     METER(removes);
     return true;
 }
 
 void
 JSObject::clear(JSContext *cx)
 {
@@ -1032,16 +1182,17 @@ JSObject::clear(JSContext *cx)
      * We have rewound to a uniquely-shaped empty scope, so we don't need an
      * override for this object's shape.
      */
     clearOwnShape();
     setMap(shape);
 
     LeaveTraceIfGlobalObject(cx, this);
     JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals);
+    CHECK_SHAPE_CONSISTENCY(this);
 }
 
 void
 JSObject::generateOwnShape(JSContext *cx)
 {
 #ifdef JS_TRACER
     JS_ASSERT_IF(!parent && JS_ON_TRACE(cx), cx->bailExit);
      LeaveTraceIfGlobalObject(cx, this);
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -203,16 +203,18 @@
  * scope members as if it were non-null and minimal-length.  Until a property
  * is added that crosses the threshold of 6 or more entries for hashing, we use
  * linear search from obj->lastProp to find a given id, and save on the space
  * overhead of a hash table.
  */
 
 #define SHAPE_INVALID_SLOT              0xffffffff
 
+JS_STATIC_ASSERT(uint32(SHAPE_INVALID_SLOT + 1) == uint32(0));
+
 namespace js {
 
 /*
  * Shapes use multiplicative hashing, _a la_ jsdhash.[ch], but specialized to
  * minimize footprint.  But if a Shape lineage has fewer than HASH_THRESHOLD
  * entries, we use linear search and avoid allocating scope->table.
  */
 struct PropertyTable {
@@ -221,26 +223,26 @@ struct PropertyTable {
         MIN_SIZE_LOG2   = 4,
         MIN_SIZE        = JS_BIT(MIN_SIZE_LOG2)
     };
 
     int             hashShift;          /* multiplicative hash shift */
 
     uint32          entryCount;         /* number of entries in table */
     uint32          removedCount;       /* removed entry sentinels in table */
-    uint32          freeslot;           /* SHAPE_INVALID_SLOT or head of slot
+    uint32          freelist;           /* SHAPE_INVALID_SLOT or head of slot
                                            freelist in owning dictionary-mode
                                            object */
     js::Shape       **entries;          /* table of ptrs to shared tree nodes */
 
     PropertyTable(uint32 nentries)
       : hashShift(JS_DHASH_BITS - MIN_SIZE_LOG2),
         entryCount(nentries),
         removedCount(0),
-        freeslot(SHAPE_INVALID_SLOT)
+        freelist(SHAPE_INVALID_SLOT)
     {
         /* NB: entries is set by init, which must be called. */
     }
 
     ~PropertyTable() {
         js_free(entries);
     }
 
@@ -353,17 +355,68 @@ struct Shape : public JSObjectMap
 
     inline void removeFromDictionary(JSObject *obj) const;
     inline void insertIntoDictionary(js::Shape **dictp);
 
     js::Shape *getChild(JSContext *cx, const js::Shape &child, js::Shape **listp);
 
     bool maybeHash(JSContext *cx);
 
-    void setTable(js::PropertyTable *t) const { table = t; }
+    void setTable(js::PropertyTable *t) const {
+        JS_ASSERT_IF(t && t->freelist != SHAPE_INVALID_SLOT, t->freelist < slotSpan);
+        table = t;
+    }
+
+    /*
+     * Setter for parent. The challenge is to maintain JSObjectMap::slotSpan in
+     * the face of arbitrary slot order.
+     *
+     * By induction, an empty shape has a slotSpan member correctly computed as
+     * JSCLASS_FREE(clasp) -- see EmptyShape's constructor in jsscopeinlines.h.
+     * This is the basis case, where p is null.
+     *
+     * Any child shape, whether in a shape tree or in a dictionary list, must
+     * have a slotSpan either one greater than its slot value (if the child's
+     * slot is SHAPE_INVALID_SLOT, this will yield 0; the static assertion just
+     * after the SHAPE_INVALID_SLOT definition enforces this), or equal to its
+     * parent p's slotSpan, whichever is greater. This is the inductive step.
+     *
+     * If we maintained shape paths such that parent slot was always one less
+     * than child slot, possibly with an exception for SHAPE_INVALID_SLOT slot
+     * values where we would use another way of computing slotSpan based on the
+     * PropertyTable (as JSC does), then we would not need to store slotSpan in
+     * Shape (to be precise, in its base struct, JSobjectMap).
+     *
+     * But we currently scramble slots along shape paths due to resolve-based
+     * creation of shapes mapping reserved slots, and we do not have the needed
+     * PropertyTable machinery to use as an alternative when parent slot is not
+     * one less than child slot. This machinery is neither simple nor free, as
+     * it must involve creating a table for any slot-less transition and then
+     * pinning the table to its shape.
+     *
+     * Use of 'delete' can scramble slots along the shape lineage too, although
+     * it always switches the target object to dictionary mode, so the cost of
+     * a pinned table is less onerous.
+     *
+     * Note that allocating a uint32 slotSpan member in JSObjectMap takes no
+     * net extra space on 64-bit targets (it packs with shape). And on 32-bit
+     * targets, adding slotSpan to JSObjectMap takes no gross extra space,
+     * because Shape rounds up to an even number of 32-bit words (required for
+     * GC-thing and js::Value allocation in any event) on 32-bit targets.
+     *
+     * So in terms of space, we can afford to maintain both slotSpan and slot,
+     * but it might be better if we eliminated slotSpan using slot combined
+     * with an auxiliary mechanism based on table.
+     */
+    void setParent(js::Shape *p) {
+        if (p)
+            slotSpan = JS_MAX(p->slotSpan, slot + 1);
+        JS_ASSERT(slotSpan < JSObject::NSLOTS_LIMIT);
+        parent = p;
+    }
 
     void insertFree(js::Shape **freep) {
         id = JSID_VOID;
         parent = *freep;
         if (parent)
             parent->listp = &parent;
         listp = freep;
         *freep = this;
@@ -431,18 +484,18 @@ struct Shape : public JSObjectMap
 
         /* Property stored in per-object dictionary, not shared property tree. */
         IN_DICTIONARY   = 0x08,
 
         /* Prevent unwanted mutation of shared JSFunction::u.i.names nodes. */
         FROZEN          = 0x10
     };
 
-    Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot,
-          uintN attrs, uintN flags, intN shortid);
+    Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs,
+          uintN flags, intN shortid, uint32 shape = INVALID_SHAPE, uint32 slotSpan = 0);
 
     /* Used by EmptyShape (see jsscopeinlines.h). */
     Shape(JSContext *cx, Class *aclasp);
 
     bool marked() const         { return (flags & MARK) != 0; }
     void mark() const           { flags |= MARK; }
     void clearMark()            { flags &= ~MARK; }
 
@@ -701,17 +754,17 @@ Shape::insertIntoDictionary(js::Shape **
     JS_ASSERT(!listp);
     JS_ASSERT(!JSID_IS_VOID(id));
 
     JS_ASSERT_IF(*dictp, !(*dictp)->frozen());
     JS_ASSERT_IF(*dictp, (*dictp)->inDictionary());
     JS_ASSERT_IF(*dictp, (*dictp)->listp == dictp);
     JS_ASSERT_IF(*dictp, !JSID_IS_VOID((*dictp)->id));
 
-    parent = *dictp;
+    setParent(*dictp);
     if (parent)
         parent->listp = &parent;
     listp = dictp;
     *dictp = this;
 }
 
 } /* namespace js */
 
--- a/js/src/jsscopeinlines.h
+++ b/js/src/jsscopeinlines.h
@@ -139,31 +139,32 @@ JSObject::trace(JSTracer *trc)
     do {
         shape->trace(trc);
     } while ((shape = shape->parent) != NULL);
 }
 
 namespace js {
 
 inline
-Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter,
-             uint32 slot, uintN attrs, uintN flags, intN shortid)
-  : JSObjectMap(0), table(NULL),
-    id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
+Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs,
+             uintN flags, intN shortid, uint32 shape, uint32 slotSpan)
+  : JSObjectMap(shape, slotSpan),
+    table(NULL), id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
     flags(uint8(flags)), shortid(int16(shortid)), parent(NULL)
 {
+    JS_ASSERT_IF(slotSpan != SHAPE_INVALID_SLOT, slotSpan < JSObject::NSLOTS_LIMIT);
     JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable());
     JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable());
     kids.setNull();
 }
 
 inline
 Shape::Shape(JSContext *cx, Class *aclasp)
-  : JSObjectMap(js_GenerateShape(cx, false)), table(NULL),
-    id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(JSSLOT_FREE(aclasp)), attrs(0),
+  : JSObjectMap(js_GenerateShape(cx, false), JSSLOT_FREE(aclasp)), table(NULL),
+    id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(SHAPE_INVALID_SLOT), attrs(0),
     flags(SHARED_EMPTY), shortid(0), parent(NULL)
 {
     kids.setNull();
 }
 
 inline JSDHashNumber
 Shape::hash() const
 {
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -97,17 +97,17 @@ class UpvarCookie
      */
     static const uint16 UPVAR_LEVEL_LIMIT = 16;
     static const uint16 CALLEE_SLOT = 0xffff;
     static bool isLevelReserved(uint16 level) { return level >= FREE_LEVEL; }
 
     bool isFree() const { return value == FREE_VALUE; }
     uint32 asInteger() const { return value; }
     /* isFree check should be performed before using these accessors. */
-    uint16 level() const { JS_ASSERT(!isFree()); return value >> 16; }
+    uint16 level() const { JS_ASSERT(!isFree()); return uint16(value >> 16); }
     uint16 slot() const { JS_ASSERT(!isFree()); return uint16(value); }
 
     void set(const UpvarCookie &other) { set(other.level(), other.slot()); }
     void set(uint16 newLevel, uint16 newSlot) { value = (uint32(newLevel) << 16) | newSlot; }
     void makeFree() { set(0xffff, 0xffff); JS_ASSERT(isFree()); }
     void fromInteger(uint32 u32) { value = u32; }
 };
 
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -14529,17 +14529,17 @@ TraceRecorder::traverseScopeChain(JSObje
                     exit = snapshot(BRANCH_EXIT);
                 guard(true,
                       addName(lir->ins2ImmI(LIR_eqi, shape_ins(obj_ins), obj->shape()),
                               "guard_shape"),
                       exit);
             }
         }
 
-        JS_ASSERT(obj->getClass() != &js_BlockClass);
+        JS_ASSERT(!obj->isBlock());
 
         if (obj == targetObj)
             break;
 
         obj = obj->getParent();
         obj_ins = stobj_get_parent(obj_ins);
     }
 
@@ -14557,17 +14557,17 @@ TraceRecorder::record_JSOP_BINDNAME()
         obj = fp->getScopeChain();
 
 #ifdef DEBUG
         JSStackFrame *fp2 = fp;
 #endif
 
         // In global code, fp->scopeChain can only contain blocks whose values
         // are still on the stack.  We never use BINDNAME to refer to these.
-        while (obj->getClass() == &js_BlockClass) {
+        while (obj->isBlock()) {
             // The block's values are still on the stack.
 #ifdef DEBUG
             // NB: fp2 can't be a generator frame, because !fp->hasFunction.
             while (obj->getPrivate() != fp2) {
                 JS_ASSERT(fp2->flags & JSFRAME_SPECIAL);
                 fp2 = fp2->down;
                 if (!fp2)
                     JS_NOT_REACHED("bad stack frame");
--- a/js/src/jsvector.h
+++ b/js/src/jsvector.h
@@ -698,17 +698,17 @@ Vector<T,N,AP>::appendN(const T &t, size
     heapEnd() += needed;
     return true;
 }
 
 template <class T, size_t N, class AP>
 inline bool
 Vector<T,N,AP>::insert(T *p, const T &val)
 {
-    JS_ASSERT(begin() <= p && p < end());
+    JS_ASSERT(begin() <= p && p <= end());
     size_t pos = p - begin();
     JS_ASSERT(pos <= length());
     size_t oldLength = length();
     if (pos == oldLength)
         return append(val);
     {
         T oldBack = back();
         if (!append(oldBack)) /* Dup the last element. */
--- a/js/src/jsxdrapi.h
+++ b/js/src/jsxdrapi.h
@@ -200,17 +200,17 @@ JS_XDRFindClassById(JSXDRState *xdr, uin
  * Bytecode version number. Increment the subtrahend whenever JS bytecode
  * changes incompatibly.
  *
  * This version number should be XDR'ed once near the front of any file or
  * larger storage unit containing XDR'ed bytecode and other data, and checked
  * before deserialization of bytecode.  If the saved version does not match
  * the current version, abort deserialization and invalidate the file.
  */
-#define JSXDR_BYTECODE_VERSION      (0xb973c0de - 67)
+#define JSXDR_BYTECODE_VERSION      (0xb973c0de - 68)
 
 /*
  * Library-private functions.
  */
 extern JSBool
 js_XDRAtom(JSXDRState *xdr, JSAtom **atomp);
 
 JS_END_EXTERN_C
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -1264,33 +1264,31 @@ mjit::Compiler::jsop_setelem()
         stubcc.linkExitDirect(notHole, lblHole);
 
         /* Need a new handle on the object, as objReg now holds the dslots. */
         RegisterID baseReg = frame.tempRegForData(obj, objReg, stubcc.masm);
 
         /*
          * Check if the object has a prototype with indexed properties,
          * in which case it might have a setter for this element. For dense
-         * arrays we only need to check Array.prototype and Object.prototype.
+         * arrays we need to check only Array.prototype and Object.prototype.
+         * Indexed properties are indicated by the JSObject::INDEXED flag.
          */
 
-        /*
-         * Test for indexed properties in Array.prototype. flags is a one byte
-         * quantity, but will be aligned on 4 bytes.
-         */
+        /* Test for indexed properties in Array.prototype. */
         stubcc.masm.loadPtr(Address(baseReg, offsetof(JSObject, proto)), T1);
-        stubcc.masm.loadPtr(Address(T1, JSObject::flagsOffset()), T1);
+        stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, flags)), T1);
         stubcc.masm.and32(Imm32(JSObject::INDEXED), T1);
         Jump extendedArray = stubcc.masm.branchTest32(Assembler::NonZero, T1, T1);
         extendedArray.linkTo(syncTarget, &stubcc.masm);
 
         /* Test for indexed properties in Object.prototype. */
         stubcc.masm.loadPtr(Address(baseReg, offsetof(JSObject, proto)), T1);
         stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
-        stubcc.masm.loadPtr(Address(T1, JSObject::flagsOffset()), T1);
+        stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, flags)), T1);
         stubcc.masm.and32(Imm32(JSObject::INDEXED), T1);
         Jump extendedObject = stubcc.masm.branchTest32(Assembler::NonZero, T1, T1);
         extendedObject.linkTo(syncTarget, &stubcc.masm);
 
         /* Update the array length if needed. Don't worry about overflow. */
         Address arrayLength(baseReg, offsetof(JSObject, fslots[JSObject::JSSLOT_ARRAY_LENGTH]));
         stubcc.masm.loadPayload(arrayLength, T1);
         Jump underLength = stubcc.masm.branch32(Assembler::LessThan, idReg, T1);
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -321,18 +321,17 @@ class SetPropCompiler : public PICStubCo
             if (pic.u.vr.u.s.isTypeKnown)
                 masm.storeTypeTag(ImmType(pic.u.vr.u.s.type.knownType), address);
             else
                 masm.storeTypeTag(pic.u.vr.u.s.type.reg, address);
             masm.storePayload(pic.u.vr.u.s.data, address);
         }
     }
 
-    bool generateStub(uint32 initialShape, uint32 initialFlagsAndFreeslot,
-                      const Shape *shape, bool adding)
+    bool generateStub(uint32 initialShape, const Shape *shape, bool adding)
     {
         /* Exits to the slow path. */
         Vector<Jump, 8> slowExits(f.cx);
 
         Assembler masm;
 
         // Shape guard.
         if (pic.shapeNeedsRemat()) {
@@ -367,29 +366,16 @@ class SetPropCompiler : public PICStubCo
             masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), pic.shapeReg);
             Jump sharedObject = masm.branchPtr(Assembler::NotEqual,
                                                Address(pic.objReg, offsetof(JSObject, title.ownercx)),
                                                pic.shapeReg);
             if (!slowExits.append(sharedObject))
                 return false;
 #endif
 
-            Address flagsAndFreeslot(pic.objReg, JSObject::flagsOffset());
-
-            /*
-             * We need to always check the flags match as some object flags can
-             * vary between objects of the same shape (DELEGATE, SYSTEM).
-             * It would be nice if these bits did not vary, so that just the
-             * shape check is sufficient.
-             */
-            Jump flagsMismatch = masm.branch32(Assembler::NotEqual, flagsAndFreeslot,
-                                               Imm32(initialFlagsAndFreeslot));
-            if (!slowExits.append(flagsMismatch))
-                return false;
-
             /* Emit shape guards for the object's prototype chain. */
             size_t chainLength = 0;
             JSObject *proto = obj->getProto();
             while (proto) {
                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, proto)), pic.shapeReg);
                 for (size_t i = 0; i < chainLength; i++)
                     masm.loadPtr(Address(pic.shapeReg, offsetof(JSObject, proto)), pic.shapeReg);
                 masm.loadShape(pic.shapeReg, pic.shapeReg);
@@ -427,17 +413,17 @@ class SetPropCompiler : public PICStubCo
             } else {
                 /* Check dslots non-zero. */
                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, dslots)), pic.shapeReg);
                 Jump emptyDslots = masm.branchPtr(Assembler::Equal, pic.shapeReg, ImmPtr(0));
                 if (!slowExits.append(emptyDslots))
                     return false;
 
                 /* Check capacity. */
-                Address capacity(pic.shapeReg, -sizeof(Value));
+                Address capacity(pic.shapeReg, -ptrdiff_t(sizeof(Value)));
                 masm.load32(masm.payloadOf(capacity), pic.shapeReg);
                 Jump overCapacity = masm.branch32(Assembler::LessThanOrEqual, pic.shapeReg,
                                                   Imm32(shape->slot));
                 if (!slowExits.append(overCapacity))
                     return false;
 
                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, dslots)), pic.shapeReg);
                 Address address(pic.shapeReg,
@@ -447,18 +433,25 @@ class SetPropCompiler : public PICStubCo
 
             uint32 newShape = obj->shape();
             JS_ASSERT(newShape != initialShape);
 
             /* Write the object's new shape. */
             masm.storePtr(ImmPtr(shape), Address(pic.objReg, offsetof(JSObject, lastProp)));
             masm.store32(Imm32(newShape), Address(pic.objReg, offsetof(JSObject, objShape)));
 
-            /* Write both the object's flags and new freeslot. */
-            masm.store32(Imm32(obj->flagsAndFreeslot()), flagsAndFreeslot);
+            /* If this is a method shape, update the object's flags. */
+            if (shape->isMethod()) {
+                Address flags(pic.objReg, offsetof(JSObject, flags));
+
+                /* Use shapeReg to load, bitwise-or, and store flags. */
+                masm.load32(flags, pic.shapeReg);
+                masm.or32(Imm32(JSObject::METHOD_BARRIER), pic.shapeReg);
+                masm.store32(pic.shapeReg, flags);
+            }
         } else if (shape->hasDefaultSetter()) {
             Address address(pic.objReg, offsetof(JSObject, fslots) + shape->slot * sizeof(Value));
             if (shape->slot >= JS_INITIAL_NSLOTS) {
                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, dslots)), pic.objReg);
                 address = Address(pic.objReg, (shape->slot - JS_INITIAL_NSLOTS) * sizeof(Value));
             }
 
             // If the scope is branded, or has a method barrier. It's now necessary
@@ -610,17 +603,16 @@ class SetPropCompiler : public PICStubCo
             if (clasp->addProperty != PropertyStub)
                 return disable("add property hook");
 
             uint32 index;
             if (js_IdIsIndex(id, &index))
                 return disable("index");
 
             uint32 initialShape = obj->shape();
-            uint32 initialFlagsAndFreeslot = obj->flagsAndFreeslot();
 
             if (!obj->ensureClassReservedSlots(f.cx))
                 return false;
 
             uint32 slots = obj->numSlots();
             uintN flags = 0;
             PropertyOp getter = clasp->getProperty;
 
@@ -658,17 +650,17 @@ class SetPropCompiler : public PICStubCo
              * Doing this would cause us to walk down this same update path
              * every time a reallocation is needed, however, which will
              * usually be a slowdown even if there *are* other shapes that
              * don't realloc.
              */
             if (obj->numSlots() != slots)
                 return disable("insufficient slot capacity");
 
-            return generateStub(initialShape, initialFlagsAndFreeslot, shape, true);
+            return generateStub(initialShape, shape, true);
         }
 
         AutoPropertyDropper dropper(f.cx, holder, prop);
 
         const Shape *shape = (const Shape *) prop;
         if (pic.kind == ic::PICInfo::SETMETHOD && !shape->isMethod())
             return disable("set method on non-method shape");
         if (!shape->writable())
@@ -689,17 +681,17 @@ class SetPropCompiler : public PICStubCo
         JS_ASSERT(obj == holder);
         if (!pic.inlinePathPatched &&
             !obj->brandedOrHasMethodBarrier() &&
             shape->hasDefaultSetter() &&
             !obj->isDenseArray()) {
             return patchInline(shape);
         } 
 
-        return generateStub(obj->shape(), 0, shape, false);
+        return generateStub(obj->shape(), shape, false);
     }
 };
 
 class GetPropCompiler : public PICStubCompiler
 {
     JSObject *obj;
     JSAtom *atom;
     void   *stub;
@@ -793,19 +785,20 @@ class GetPropCompiler : public PICStubCo
 
     bool generateArgsLengthStub()
     {
         Assembler masm;
 
         Address clasp(pic.objReg, offsetof(JSObject, clasp));
         Jump notArgs = masm.branchPtr(Assembler::NotEqual, clasp, ImmPtr(&js_SlowArrayClass));
 
-        masm.load32(Address(pic.objReg, offsetof(JSObject, fslots) +
-                                        JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
-                        pic.objReg);
+        masm.load32(Address(pic.objReg,
+                            offsetof(JSObject, fslots)
+                            + JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
+                    pic.objReg);
         masm.move(pic.objReg, pic.shapeReg);
         masm.and32(Imm32(1), pic.shapeReg);
         Jump overridden = masm.branchTest32(Assembler::NonZero, pic.shapeReg, pic.shapeReg);
         
         masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
         Jump done = masm.jump();
 
         JSC::ExecutablePool *ep = getExecPool(masm.size());
@@ -838,19 +831,20 @@ class GetPropCompiler : public PICStubCo
         Assembler masm;
 
         masm.loadPtr(Address(pic.objReg, offsetof(JSObject, clasp)), pic.shapeReg);
         Jump isDense = masm.branchPtr(Assembler::Equal, pic.shapeReg, ImmPtr(&js_ArrayClass));
         Jump notArray = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
                                        ImmPtr(&js_SlowArrayClass));
 
         isDense.linkTo(masm.label(), &masm);
-        masm.load32(Address(pic.objReg, offsetof(JSObject, fslots) +
-                                        JSObject::JSSLOT_ARRAY_LENGTH * sizeof(Value)),
-                        pic.objReg);
+        masm.load32(Address(pic.objReg,
+                            offsetof(JSObject, fslots)
+                            + JSObject::JSSLOT_ARRAY_LENGTH * sizeof(Value)),
+                    pic.objReg);
         Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
         masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
         Jump done = masm.jump();
 
         JSC::ExecutablePool *ep = getExecPool(masm.size());
         if (!ep || !pic.execPools.append(ep)) {
             if (ep)
                 ep->release();
@@ -2227,17 +2221,17 @@ ic::CallProp(VMFrame &f, uint32 index)
     JSObject *obj2;
     JSAtom *atom;
     JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj2->freeslot);
+            JS_ASSERT(obj2->containsSlot(slot));
             rval = obj2->lockedGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
             const Shape *shape = entry->vword.toShape();
             NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval,
                        THROW());
         }
         regs.sp++;
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -192,31 +192,29 @@ stubs::SetName(VMFrame &f, JSAtom *origA
                         THROW();
                 }
 
                 uint32 slot;
                 if (shape->previous() == obj->lastProperty() &&
                     entry->vshape() == cx->runtime->protoHazardShape &&
                     shape->hasDefaultSetter()) {
                     slot = shape->slot;
-                    JS_ASSERT(slot == obj->freeslot);
+                    JS_ASSERT(slot == obj->slotSpan());
 
                     /*
                      * Fast path: adding a plain old property that was once at
                      * the frontier of the property tree, whose slot is next to
                      * claim among the already-allocated slots in obj, where
                      * shape->table has not been created yet.
                      */
                     PCMETER(cache->pchits++);
                     PCMETER(cache->addpchits++);
 
                     if (slot < obj->numSlots()) {
                         JS_ASSERT(obj->getSlot(slot).isUndefined());
-                        ++obj->freeslot;
-                        JS_ASSERT(obj->freeslot != 0);
                     } else {
                         if (!obj->allocSlot(cx, &slot))
                             THROW();
                         JS_ASSERT(slot == shape->slot);
                     }
 
                     /* Simply extend obj's property tree path with shape! */
                     obj->extend(cx, shape);
@@ -313,17 +311,17 @@ NameOp(VMFrame &f, JSObject *obj, bool c
     JSAtom *atom;
     JS_PROPERTY_CACHE(cx).test(cx, f.regs.pc, obj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             f.regs.sp++;
             f.regs.sp[-1].setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uintN slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj2->freeslot);
+            JS_ASSERT(obj2->containsSlot(slot));
             f.regs.sp++;
             f.regs.sp[-1] = obj2->lockedGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
             shape = entry->vword.toShape();
             NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval, return NULL);
             f.regs.sp++;
             f.regs.sp[-1] = rval;
@@ -1725,17 +1723,17 @@ NameIncDec(VMFrame &f, JSObject *obj, JS
     JSAtom *atom;
     JSObject *obj2;
     JSProperty *prop;
     PropertyCacheEntry *entry;
     JS_PROPERTY_CACHE(cx).test(cx, f.regs.pc, obj, obj2, entry, atom);
     if (!atom) {
         if (obj == obj2 && entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj->freeslot);
+            JS_ASSERT(obj->containsSlot(slot));
             Value &rref = obj->getSlotRef(slot);
             int32_t tmp;
             if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
                 int32_t inc = tmp + N;
                 if (!POST)
                     tmp = inc;
                 rref.getInt32Ref() = inc;
                 f.regs.sp[0].setInt32(tmp);
@@ -1944,17 +1942,17 @@ InlineGetProp(VMFrame &f)
         JSObject *obj2;
         JSAtom *atom;
         JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
         if (!atom) {
             if (entry->vword.isFunObj()) {
                 rval.setObject(entry->vword.toFunObj());
             } else if (entry->vword.isSlot()) {
                 uint32 slot = entry->vword.toSlot();
-                JS_ASSERT(slot < obj2->freeslot);
+                JS_ASSERT(obj2->containsSlot(slot));
                 rval = obj2->lockedGetSlot(slot);
             } else {
                 JS_ASSERT(entry->vword.isShape());
                 const Shape *shape = entry->vword.toShape();
                 NATIVE_GET(cx, obj, obj2, shape,
                         f.fp()->hasIMacroPC() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER,
                         &rval, return false);
             }
@@ -2022,17 +2020,17 @@ stubs::CallProp(VMFrame &f, JSAtom *orig
     JSObject *obj2;
     JSAtom *atom;
     JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
     if (!atom) {
         if (entry->vword.isFunObj()) {
             rval.setObject(entry->vword.toFunObj());
         } else if (entry->vword.isSlot()) {
             uint32 slot = entry->vword.toSlot();
-            JS_ASSERT(slot < obj2->freeslot);
+            JS_ASSERT(obj2->containsSlot(slot));
             rval = obj2->lockedGetSlot(slot);
         } else {
             JS_ASSERT(entry->vword.isShape());
             const Shape *shape = entry->vword.toShape();
             NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval,
                        THROW());
         }
         regs.sp++;
@@ -2176,22 +2174,20 @@ InitPropOrMethod(VMFrame &f, JSAtom *ato
     if (CX_OWNS_OBJECT_TITLE(cx, obj) &&
         JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) &&
         shape->hasDefaultSetter() &&
         shape->previous() == obj->lastProperty())
     {
         /* Fast path. Property cache hit. */
         uint32 slot = shape->slot;
 
-        JS_ASSERT(slot == obj->freeslot);
+        JS_ASSERT(slot == obj->slotSpan());
         JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
         if (slot < obj->numSlots()) {
             JS_ASSERT(obj->getSlot(slot).isUndefined());
-            ++obj->freeslot;
-            JS_ASSERT(obj->freeslot != 0);
         } else {
             if (!obj->allocSlot(cx, &slot))
                 THROW();
             JS_ASSERT(slot == shape->slot);
         }
 
         /* A new object, or one we just extended in a recent initprop op. */
         JS_ASSERT(!obj->lastProperty() ||
@@ -2381,17 +2377,17 @@ stubs::ArgCnt(VMFrame &f)
 }
 
 void JS_FASTCALL
 stubs::EnterBlock(VMFrame &f, JSObject *obj)
 {
     JSFrameRegs &regs = f.regs;
     JSStackFrame *fp = f.fp();
 
-    JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
+    JS_ASSERT(obj->isStaticBlock());
     JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
     Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
     JS_ASSERT(regs.sp < vp);
     JS_ASSERT(vp <= fp->slots() + fp->getScript()->nslots);
     SetValueRangeToUndefined(regs.sp, vp);
     regs.sp = vp;
 
 #ifdef DEBUG
@@ -2407,17 +2403,17 @@ stubs::EnterBlock(VMFrame &f, JSObject *
      */
     JSObject *obj2 = fp->getScopeChain();
     Class *clasp;
     while ((clasp = obj2->getClass()) == &js_WithClass)
         obj2 = obj2->getParent();
     if (clasp == &js_BlockClass &&
         obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
         JSObject *youngestProto = obj2->getProto();
-        JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
+        JS_ASSERT(youngestProto->isStaticBlock());
         JSObject *parent = obj;
         while ((parent = parent->getParent()) != youngestProto)
             JS_ASSERT(parent);
     }
 #endif
 
     fp->setBlockChain(obj);
 }
--- a/js/src/tests/js1_8_5/regress/jstests.list
+++ b/js/src/tests/js1_8_5/regress/jstests.list
@@ -26,8 +26,9 @@ script regress-569306.js
 script regress-571014.js
 script regress-577648-1.js
 script regress-577648-2.js
 script regress-583429.js
 script regress-584355.js
 script regress-588339.js
 script regress-yarr-regexp.js
 script regress-592556-c35.js
+script regress-593256.js
new file mode 100644
--- /dev/null
+++ b/js/src/tests/js1_8_5/regress/regress-593256.js
@@ -0,0 +1,21 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/*
+ * Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/licenses/publicdomain/
+ */
+
+eval("\
+  (function(){for(d in[0,Number]) {\
+    __defineGetter__(\"\",function(){}),\
+    [(__defineGetter__(\"x\",Math.pow))]\
+  }})\
+")()
+delete gc
+eval("\
+  (function() {\
+    for(e in __defineSetter__(\"x\",function(){})){}\
+  })\
+")()
+delete gc
+
+reportCompare(true, true, "don't crash");