Bug 990106 part 2 - LRecoverInfo encodes MIR when flagged as Recovered. r=h4writer
author Nicolas B. Pierron <nicolas.b.pierron@mozilla.com>
Tue, 29 Apr 2014 10:17:51 -0700
changeset 180711 85b6c3b4b26da80194e63cdc2f1692c13b4b69b2
parent 180710 2fb280a72bb1774ec77cf0ae0115d551706e2370
child 180712 7977e7f8a0948976f4fbf3b4bdce74cff52dca1e
push id 42865
push user npierron@mozilla.com
push date Tue, 29 Apr 2014 17:18:26 +0000
treeherder mozilla-inbound@6b76df1986a6
reviewers h4writer
bugs 990106
milestone 32.0a1
js/src/jit/LIR.cpp
js/src/jit/LIR.h
js/src/jit/Lowering.cpp
js/src/jit/MIR.h
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/shared/CodeGenerator-shared.h
js/src/jit/shared/Lowering-shared.cpp
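
With this patch, LRecoverInfo no longer stores only the chain of MResumePoint instances: MIR definitions flagged as RecoveredOnBailout are interleaved into the instruction vector, encoded next to the resume points, and re-evaluated during bailout instead of being lowered to LIR. The sketch below is an illustration only (the helper name CountOperands is hypothetical, not part of the patch); it uses the OperandIter added in LIR.h to separate operands that still need a snapshot slot from operands that will be recovered from their encoded MIR.

static void
CountOperands(LRecoverInfo *recoverInfo, size_t *needSlot, size_t *recovered)
{
    // Walk every operand of every resume point and recovered definition held
    // by the recover info, in the order in which they were appended.
    LRecoverInfo::OperandIter it(recoverInfo->begin());
    LRecoverInfo::OperandIter end(recoverInfo->end());
    *needSlot = 0;
    *recovered = 0;
    for (; it != end; ++it) {
        if (it->isRecoveredOnBailout())
            (*recovered)++;   // re-computed from the encoded MIR at bailout
        else
            (*needSlot)++;    // consumes BOX_PIECES entries in the LSnapshot
    }
}
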
--- a/js/src/jit/LIR.cpp
+++ b/js/src/jit/LIR.cpp
@@ -106,21 +106,26 @@ LBlock::getExitMoveGroup(TempAllocator &
     if (exitMoveGroup_)
         return exitMoveGroup_;
     exitMoveGroup_ = LMoveGroup::New(alloc);
     insertBefore(*rbegin(), exitMoveGroup_);
     return exitMoveGroup_;
 }
 
 static size_t
-TotalOperandCount(MResumePoint *mir)
+TotalOperandCount(LRecoverInfo *recoverInfo)
 {
-    size_t accum = mir->numOperands();
-    while ((mir = mir->caller()))
-        accum += mir->numOperands();
+    LRecoverInfo::OperandIter it(recoverInfo->begin());
+    LRecoverInfo::OperandIter end(recoverInfo->end());
+    size_t accum = 0;
+
+    for (; it != end; ++it) {
+        if (!it->isRecoveredOnBailout())
+            accum++;
+    }
     return accum;
 }
 
 LRecoverInfo::LRecoverInfo(TempAllocator &alloc)
   : instructions_(alloc),
     recoverOffset_(INVALID_RECOVER_OFFSET)
 { }
 
@@ -133,37 +138,80 @@ LRecoverInfo::New(MIRGenerator *gen, MRe
 
     IonSpew(IonSpew_Snapshots, "Generating LIR recover info %p from MIR (%p)",
             (void *)recoverInfo, (void *)mir);
 
     return recoverInfo;
 }
 
 bool
+LRecoverInfo::appendOperands(MNode *ins)
+{
+    for (size_t i = 0, end = ins->numOperands(); i < end; i++) {
+        MDefinition *def = ins->getOperand(i);
+
+        // As there is no cycle in the data flow (MPhi excluded), a definition
+        // already marked isInWorklist is in the instruction vector, and is
+        // not being processed by a caller of the current function.
+        if (def->isRecoveredOnBailout() && !def->isInWorklist()) {
+            if (!appendDefinition(def))
+                return false;
+        }
+    }
+
+    return true;
+}
+
+bool
+LRecoverInfo::appendDefinition(MDefinition *def)
+{
+    MOZ_ASSERT(def->isRecoveredOnBailout());
+    def->setInWorklist();
+    if (!appendOperands(def))
+        return false;
+    return instructions_.append(def);
+}
+
+bool
+LRecoverInfo::appendResumePoint(MResumePoint *rp)
+{
+    if (rp->caller() && !appendResumePoint(rp->caller()))
+        return false;
+
+    if (!appendOperands(rp))
+        return false;
+
+    return instructions_.append(rp);
+}
+
+bool
 LRecoverInfo::init(MResumePoint *rp)
 {
-    MResumePoint *it = rp;
-
     // Sort operations in the order in which we need to restore the stack. This
     // implies that outer frames, as well as operations needed to recover the
     // current frame, are located before the current frame. The inner-most
     // resume point should be the last element in the list.
-    do {
-        if (!instructions_.append(it))
-            return false;
-        it = it->caller();
-    } while (it);
+    if (!appendResumePoint(rp))
+        return false;
 
-    Reverse(instructions_.begin(), instructions_.end());
+    // Remove temporary flags from all definitions.
+    for (MNode **it = begin(); it != end(); it++) {
+        if (!(*it)->isDefinition())
+            continue;
+
+        (*it)->toDefinition()->setNotInWorklist();
+    }
+
     MOZ_ASSERT(mir() == rp);
     return true;
 }
 
 LSnapshot::LSnapshot(LRecoverInfo *recoverInfo, BailoutKind kind)
-  : numSlots_(TotalOperandCount(recoverInfo->mir()) * BOX_PIECES),
+  : numSlots_(TotalOperandCount(recoverInfo) * BOX_PIECES),
     slots_(nullptr),
     recoverInfo_(recoverInfo),
     snapshotOffset_(INVALID_SNAPSHOT_OFFSET),
     bailoutId_(INVALID_BAILOUT_ID),
     bailoutKind_(kind)
 { }
 
 bool
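
The appendResumePoint/appendDefinition pair above performs a depth-first walk: each recovered definition is appended only after its own recovered operands, and each resume point is appended after its caller. The following check is an illustration of that invariant (a hypothetical helper built on the accessors added by this patch, not part of the change):

static void
CheckRecoverOrder(LRecoverInfo *recoverInfo)
{
    // Only resume points and definitions flagged as RecoveredOnBailout may
    // appear in the instruction vector.
    for (MNode **it = recoverInfo->begin(); it != recoverInfo->end(); it++) {
        if ((*it)->isDefinition())
            MOZ_ASSERT((*it)->toDefinition()->isRecoveredOnBailout());
    }

    // The innermost resume point is appended last; mir() relies on this when
    // it calls toResumePoint() on the back of the vector.
    MOZ_ASSERT(recoverInfo->numInstructions() > 0);
    MOZ_ASSERT((*(recoverInfo->end() - 1))->isResumePoint());
}
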
--- a/js/src/jit/LIR.h
+++ b/js/src/jit/LIR.h
@@ -874,50 +874,90 @@ class LCallInstructionHelper : public LI
     virtual bool isCall() const {
         return true;
     }
 };
 
 class LRecoverInfo : public TempObject
 {
   public:
-    typedef Vector<MResumePoint *, 2, IonAllocPolicy> Instructions;
+    typedef Vector<MNode *, 2, IonAllocPolicy> Instructions;
 
   private:
     // List of instructions needed to recover the stack frames.
     // Outer frames are stored before inner frames.
     Instructions instructions_;
 
     // Cached offset where this resume point is encoded.
     RecoverOffset recoverOffset_;
 
     LRecoverInfo(TempAllocator &alloc);
     bool init(MResumePoint *mir);
 
+    // Fill the instruction vector such that all instructions needed for the
+    // recovery are appended before the current instruction.
+    bool appendOperands(MNode *ins);
+    bool appendDefinition(MDefinition *def);
+    bool appendResumePoint(MResumePoint *rp);
   public:
     static LRecoverInfo *New(MIRGenerator *gen, MResumePoint *mir);
 
     // Resume point of the inner most function.
     MResumePoint *mir() const {
-        return instructions_.back();
+        return instructions_.back()->toResumePoint();
     }
     RecoverOffset recoverOffset() const {
         return recoverOffset_;
     }
     void setRecoverOffset(RecoverOffset offset) {
         JS_ASSERT(recoverOffset_ == INVALID_RECOVER_OFFSET);
         recoverOffset_ = offset;
     }
 
-    MResumePoint **begin() {
+    MNode **begin() {
         return instructions_.begin();
     }
-    MResumePoint **end() {
+    MNode **end() {
         return instructions_.end();
     }
+    size_t numInstructions() const {
+        return instructions_.length();
+    }
+
+    class OperandIter
+    {
+      private:
+        MNode **it_;
+        size_t op_;
+
+      public:
+        OperandIter(MNode **it)
+          : it_(it), op_(0)
+        { }
+
+        MDefinition *operator *() {
+            return (*it_)->getOperand(op_);
+        }
+        MDefinition *operator ->() {
+            return (*it_)->getOperand(op_);
+        }
+
+        OperandIter &operator ++() {
+            ++op_;
+            if (op_ == (*it_)->numOperands()) {
+                op_ = 0;
+                ++it_;
+            }
+            return *this;
+        }
+
+        bool operator !=(const OperandIter &where) const {
+            return it_ != where.it_ || op_ != where.op_;
+        }
+    };
 };
 
 // An LSnapshot is the reflection of an MResumePoint in LIR. Unlike MResumePoints,
 // they cannot be shared, as they are filled in by the register allocator in
 // order to capture the precise low-level stack state in between an
 // instruction's input and output. During code generation, LSnapshots are
 // compressed and saved in the compiled script.
 class LSnapshot : public TempObject
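
OperandIter flattens what used to be a two-level loop (over resume points, then over each resume point's operands) into a single iteration over every operand of every node in the recover info. This keeps callers such as TotalOperandCount, CodeGeneratorShared::encode, and buildSnapshot free of that bookkeeping. A minimal usage sketch, assuming a caller-supplied visitOperand functor (the helper and the functor are hypothetical):

template <typename Visitor>
static void
ForEachOperand(LRecoverInfo *recoverInfo, Visitor visitOperand)
{
    // Roughly equivalent to the nested form:
    //   for (MNode **it = recoverInfo->begin(); it != recoverInfo->end(); ++it)
    //       for (size_t i = 0; i < (*it)->numOperands(); i++)
    //           visitOperand((*it)->getOperand(i));
    LRecoverInfo::OperandIter it(recoverInfo->begin());
    LRecoverInfo::OperandIter end(recoverInfo->end());
    for (; it != end; ++it)
        visitOperand(*it);
}
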
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -3593,16 +3593,19 @@ SpewResumePoint(MBasicBlock *block, MIns
         in->printName(IonSpewFile);
         fprintf(IonSpewFile, "\n");
     }
 }
 
 bool
 LIRGenerator::visitInstruction(MInstruction *ins)
 {
+    if (ins->isRecoveredOnBailout())
+        return true;
+
     if (!gen->ensureBallast())
         return false;
     if (!ins->accept(this))
         return false;
 
     if (ins->possiblyCalls())
         gen->setPerformsCall();
 
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -84,17 +84,24 @@ MIRType MIRTypeFromValue(const js::Value
      * as a result the number of operands doesn't equal the original code
      * need to get marked as UseRemoved. This is important for truncation
      * analysis to know, since if all original uses are still present,
      * it can ignore resumepoints.
      * Currently this is done for every pass after IonBuilder and before
      * Truncate Doubles. So every time removeUse is called, UseRemoved needs
      * to get set.
      */                                                                         \
-    _(UseRemoved)
+    _(UseRemoved)                                                               \
+                                                                                \
+    /* Marks whether the current instruction should be recovered on the bailout
+     * path instead of producing code as part of the control flow.  This flag
+     * can only be set on instructions which are only used by resume points or
+     * by other flagged instructions.
+     */                                                                         \
+    _(RecoveredOnBailout)
 
 class MDefinition;
 class MInstruction;
 class MBasicBlock;
 class MNode;
 class MUse;
 class MIRGraph;
 class MResumePoint;
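
The constraint stated in the RecoveredOnBailout comment above, that a flagged instruction may only be used by resume points or by other flagged instructions, is what makes it safe for LIRGenerator::visitInstruction to skip such instructions. A hypothetical checker for that constraint might look like the following sketch (the helper and its use of the MUse iteration API are assumptions, not part of this patch):

static bool
OnlyUsedByRecoverableConsumers(MDefinition *def)
{
    for (MUseIterator use(def->usesBegin()); use != def->usesEnd(); use++) {
        MNode *consumer = use->consumer();
        // Resume points may always observe the value: it is rebuilt from the
        // encoded MIR when a bailout occurs.
        if (consumer->isResumePoint())
            continue;
        // Any other consumer must itself be recovered on bailout, otherwise
        // codegen would need a register for a value that was never lowered.
        if (!consumer->toDefinition()->isRecoveredOnBailout())
            return false;
    }
    return true;
}
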
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -132,139 +132,128 @@ ToStackIndex(LAllocation *a)
         JS_ASSERT(a->toStackSlot()->slot() >= 1);
         return a->toStackSlot()->slot();
     }
     JS_ASSERT(-int32_t(sizeof(IonJSFrameLayout)) <= a->toArgument()->index());
     return -int32_t(sizeof(IonJSFrameLayout) + a->toArgument()->index());
 }
 
 bool
-CodeGeneratorShared::encodeAllocations(LSnapshot *snapshot, MResumePoint *resumePoint,
-                                       uint32_t *startIndex)
+CodeGeneratorShared::encodeAllocation(LSnapshot *snapshot, MDefinition *mir,
+                                       uint32_t *allocIndex)
 {
-    IonSpew(IonSpew_Codegen, "Encoding %u of resume point %p's operands starting from %u",
-            resumePoint->numOperands(), (void *) resumePoint, *startIndex);
-    for (uint32_t allocno = 0, e = resumePoint->numOperands(); allocno < e; allocno++) {
-        uint32_t i = allocno + *startIndex;
-        MDefinition *mir = resumePoint->getOperand(allocno);
+    if (mir->isBox())
+        mir = mir->toBox()->getOperand(0);
 
-        if (mir->isBox())
-            mir = mir->toBox()->getOperand(0);
+    MIRType type = mir->isUnused()
+        ? MIRType_MagicOptimizedOut
+        : mir->type();
 
-        MIRType type = mir->isUnused()
-                       ? MIRType_MagicOptimizedOut
-                       : mir->type();
-
-        RValueAllocation alloc;
+    RValueAllocation alloc;
 
-        switch (type) {
-          case MIRType_Undefined:
-            alloc = RValueAllocation::Undefined();
-            break;
-          case MIRType_Null:
-            alloc = RValueAllocation::Null();
-            break;
-          case MIRType_Int32:
-          case MIRType_String:
-          case MIRType_Object:
-          case MIRType_Boolean:
-          case MIRType_Double:
-          case MIRType_Float32:
-          {
-            LAllocation *payload = snapshot->payloadOfSlot(i);
-            JSValueType valueType = ValueTypeFromMIRType(type);
-            if (payload->isMemory()) {
-                if (type == MIRType_Float32)
-                    alloc = RValueAllocation::Float32(ToStackIndex(payload));
-                else
-                    alloc = RValueAllocation::Typed(valueType, ToStackIndex(payload));
-            } else if (payload->isGeneralReg()) {
-                alloc = RValueAllocation::Typed(valueType, ToRegister(payload));
-            } else if (payload->isFloatReg()) {
-                FloatRegister reg = ToFloatRegister(payload);
-                if (type == MIRType_Float32)
-                    alloc = RValueAllocation::Float32(reg);
-                else
-                    alloc = RValueAllocation::Double(reg);
-            } else {
-                MConstant *constant = mir->toConstant();
-                uint32_t index;
-                if (!graph.addConstantToPool(constant->value(), &index))
-                    return false;
-                alloc = RValueAllocation::ConstantPool(index);
-            }
-            break;
-          }
-          case MIRType_MagicOptimizedArguments:
-          case MIRType_MagicOptimizedOut:
-          {
+    switch (type) {
+      case MIRType_Undefined:
+        alloc = RValueAllocation::Undefined();
+        break;
+      case MIRType_Null:
+        alloc = RValueAllocation::Null();
+        break;
+      case MIRType_Int32:
+      case MIRType_String:
+      case MIRType_Object:
+      case MIRType_Boolean:
+      case MIRType_Double:
+      case MIRType_Float32:
+      {
+        LAllocation *payload = snapshot->payloadOfSlot(*allocIndex);
+        JSValueType valueType = ValueTypeFromMIRType(type);
+        if (payload->isMemory()) {
+            if (type == MIRType_Float32)
+                alloc = RValueAllocation::Float32(ToStackIndex(payload));
+            else
+                alloc = RValueAllocation::Typed(valueType, ToStackIndex(payload));
+        } else if (payload->isGeneralReg()) {
+            alloc = RValueAllocation::Typed(valueType, ToRegister(payload));
+        } else if (payload->isFloatReg()) {
+            FloatRegister reg = ToFloatRegister(payload);
+            if (type == MIRType_Float32)
+                alloc = RValueAllocation::Float32(reg);
+            else
+                alloc = RValueAllocation::Double(reg);
+        } else {
+            MConstant *constant = mir->toConstant();
             uint32_t index;
-            JSWhyMagic why = (type == MIRType_MagicOptimizedArguments
-                              ? JS_OPTIMIZED_ARGUMENTS
-                              : JS_OPTIMIZED_OUT);
-            Value v = MagicValue(why);
-            if (!graph.addConstantToPool(v, &index))
+            if (!graph.addConstantToPool(constant->value(), &index))
                 return false;
             alloc = RValueAllocation::ConstantPool(index);
-            break;
-          }
-          default:
-          {
-            JS_ASSERT(mir->type() == MIRType_Value);
-            LAllocation *payload = snapshot->payloadOfSlot(i);
+        }
+        break;
+      }
+      case MIRType_MagicOptimizedArguments:
+      case MIRType_MagicOptimizedOut:
+      {
+        uint32_t index;
+        JSWhyMagic why = (type == MIRType_MagicOptimizedArguments
+                          ? JS_OPTIMIZED_ARGUMENTS
+                          : JS_OPTIMIZED_OUT);
+        Value v = MagicValue(why);
+        if (!graph.addConstantToPool(v, &index))
+            return false;
+        alloc = RValueAllocation::ConstantPool(index);
+        break;
+      }
+      default:
+      {
+        JS_ASSERT(mir->type() == MIRType_Value);
+        LAllocation *payload = snapshot->payloadOfSlot(*allocIndex);
 #ifdef JS_NUNBOX32
-            LAllocation *type = snapshot->typeOfSlot(i);
-            if (type->isRegister()) {
-                if (payload->isRegister())
-                    alloc = RValueAllocation::Untyped(ToRegister(type), ToRegister(payload));
-                else
-                    alloc = RValueAllocation::Untyped(ToRegister(type), ToStackIndex(payload));
-            } else {
-                if (payload->isRegister())
-                    alloc = RValueAllocation::Untyped(ToStackIndex(type), ToRegister(payload));
-                else
-                    alloc = RValueAllocation::Untyped(ToStackIndex(type), ToStackIndex(payload));
-            }
+        LAllocation *type = snapshot->typeOfSlot(*allocIndex);
+        if (type->isRegister()) {
+            if (payload->isRegister())
+                alloc = RValueAllocation::Untyped(ToRegister(type), ToRegister(payload));
+            else
+                alloc = RValueAllocation::Untyped(ToRegister(type), ToStackIndex(payload));
+        } else {
+            if (payload->isRegister())
+                alloc = RValueAllocation::Untyped(ToStackIndex(type), ToRegister(payload));
+            else
+                alloc = RValueAllocation::Untyped(ToStackIndex(type), ToStackIndex(payload));
+        }
 #elif JS_PUNBOX64
-            if (payload->isRegister())
-                alloc = RValueAllocation::Untyped(ToRegister(payload));
-            else
-                alloc = RValueAllocation::Untyped(ToStackIndex(payload));
+        if (payload->isRegister())
+            alloc = RValueAllocation::Untyped(ToRegister(payload));
+        else
+            alloc = RValueAllocation::Untyped(ToStackIndex(payload));
 #endif
-            break;
-          }
-        }
-
-        snapshots_.add(alloc);
+        break;
+      }
     }
 
-    *startIndex += resumePoint->numOperands();
+    snapshots_.add(alloc);
+    *allocIndex += mir->isRecoveredOnBailout() ? 0 : 1;
     return true;
 }
 
 bool
 CodeGeneratorShared::encode(LRecoverInfo *recover)
 {
     if (recover->recoverOffset() != INVALID_RECOVER_OFFSET)
         return true;
 
-    uint32_t frameCount = recover->mir()->frameCount();
-    IonSpew(IonSpew_Snapshots, "Encoding LRecoverInfo %p (frameCount %u)",
-            (void *)recover, frameCount);
+    uint32_t numInstructions = recover->numInstructions();
+    IonSpew(IonSpew_Snapshots, "Encoding LRecoverInfo %p (frameCount %u, instructions %u)",
+            (void *)recover, recover->mir()->frameCount(), numInstructions);
 
     MResumePoint::Mode mode = recover->mir()->mode();
     JS_ASSERT(mode != MResumePoint::Outer);
     bool resumeAfter = (mode == MResumePoint::ResumeAfter);
 
-    RecoverOffset offset = recovers_.startRecover(frameCount, resumeAfter);
+    RecoverOffset offset = recovers_.startRecover(numInstructions, resumeAfter);
 
-    for (MResumePoint **it = recover->begin(), **end = recover->end();
-         it != end;
-         ++it)
-    {
+    for (MNode **it = recover->begin(), **end = recover->end(); it != end; ++it) {
         if (!recovers_.writeInstruction(*it))
             return false;
     }
 
     recovers_.endRecover();
     recover->setRecoverOffset(offset);
     return !recovers_.oom();
 }
@@ -302,27 +291,27 @@ CodeGeneratorShared::encode(LSnapshot *s
             mirId = ins->mirRaw()->id();
             if (ins->mirRaw()->trackedPc())
                 pcOpcode = *ins->mirRaw()->trackedPc();
         }
     }
     snapshots_.trackSnapshot(pcOpcode, mirOpcode, mirId, lirOpcode, lirId);
 #endif
 
-    uint32_t startIndex = 0;
-    for (MResumePoint **it = recoverInfo->begin(), **end = recoverInfo->end();
-         it != end;
-         ++it)
-    {
-        MResumePoint *mir = *it;
-        if (!encodeAllocations(snapshot, mir, &startIndex))
+    uint32_t allocIndex = 0;
+    LRecoverInfo::OperandIter it(recoverInfo->begin());
+    LRecoverInfo::OperandIter end(recoverInfo->end());
+    for (; it != end; ++it) {
+        DebugOnly<uint32_t> allocWritten = snapshots_.allocWritten();
+        if (!encodeAllocation(snapshot, *it, &allocIndex))
             return false;
+        MOZ_ASSERT(allocWritten + 1 == snapshots_.allocWritten());
     }
 
-    MOZ_ASSERT(snapshots_.allocWritten() == snapshot->numSlots());
+    MOZ_ASSERT(allocIndex == snapshot->numSlots());
     snapshots_.endSnapshot();
     snapshot->setSnapshotOffset(offset);
     return !snapshots_.oom();
 }
 
 bool
 CodeGeneratorShared::assignBailoutId(LSnapshot *snapshot)
 {
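
Note that the recover header written by encode(LRecoverInfo *) now stores the number of encoded instructions rather than the frame count. Since appendResumePoint appends exactly one resume point per frame, the frame count is still derivable from the encoded stream, as the following hypothetical helper illustrates (not part of the patch):

static uint32_t
FrameCountOf(LRecoverInfo *recover)
{
    // Each frame contributes exactly one MResumePoint to the instruction
    // vector; recovered definitions are the only other kind of node.
    uint32_t frames = 0;
    for (MNode **it = recover->begin(), **end = recover->end(); it != end; ++it) {
        if ((*it)->isResumePoint())
            frames++;
    }
    MOZ_ASSERT(frames == recover->mir()->frameCount());
    return frames;
}
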
--- a/js/src/jit/shared/CodeGenerator-shared.h
+++ b/js/src/jit/shared/CodeGenerator-shared.h
@@ -267,17 +267,17 @@ class CodeGeneratorShared : public LInst
         return index;
     }
 
   protected:
     // Encodes an LSnapshot into the compressed snapshot buffer, returning
     // false on failure.
     bool encode(LRecoverInfo *recover);
     bool encode(LSnapshot *snapshot);
-    bool encodeAllocations(LSnapshot *snapshot, MResumePoint *resumePoint, uint32_t *startIndex);
+    bool encodeAllocation(LSnapshot *snapshot, MDefinition *def, uint32_t *startIndex);
 
     // Attempts to assign a BailoutId to a snapshot, if one isn't already set.
     // If the bailout table is full, this returns false, which is not a fatal
     // error (the code generator may use a slower bailout mechanism).
     bool assignBailoutId(LSnapshot *snapshot);
 
     // Encode all encountered safepoints in CG-order, and resolve |indices| for
     // safepoint offsets.
--- a/js/src/jit/shared/Lowering-shared.cpp
+++ b/js/src/jit/shared/Lowering-shared.cpp
@@ -69,104 +69,108 @@ LIRGeneratorShared::getRecoverInfo(MResu
     cachedRecoverInfo_ = recoverInfo;
     return recoverInfo;
 }
 
 #ifdef JS_NUNBOX32
 LSnapshot *
 LIRGeneratorShared::buildSnapshot(LInstruction *ins, MResumePoint *rp, BailoutKind kind)
 {
-    LRecoverInfo *recover = getRecoverInfo(rp);
-    if (!recover)
+    LRecoverInfo *recoverInfo = getRecoverInfo(rp);
+    if (!recoverInfo)
         return nullptr;
 
-    LSnapshot *snapshot = LSnapshot::New(gen, recover, kind);
+    LSnapshot *snapshot = LSnapshot::New(gen, recoverInfo, kind);
     if (!snapshot)
         return nullptr;
 
-    size_t i = 0;
-    for (MResumePoint **it = recover->begin(), **end = recover->end(); it != end; ++it) {
-        MResumePoint *mir = *it;
-        for (size_t j = 0, e = mir->numOperands(); j < e; ++i, ++j) {
-            MDefinition *ins = mir->getOperand(j);
+    size_t index = 0;
+    LRecoverInfo::OperandIter it(recoverInfo->begin());
+    LRecoverInfo::OperandIter end(recoverInfo->end());
+    for (; it != end; ++it) {
+        MDefinition *ins = *it;
+        if (ins->isRecoveredOnBailout())
+            continue;
 
-            LAllocation *type = snapshot->typeOfSlot(i);
-            LAllocation *payload = snapshot->payloadOfSlot(i);
+        LAllocation *type = snapshot->typeOfSlot(index);
+        LAllocation *payload = snapshot->payloadOfSlot(index);
+        ++index;
 
-            if (ins->isBox())
-                ins = ins->toBox()->getOperand(0);
+        if (ins->isBox())
+            ins = ins->toBox()->getOperand(0);
 
-            // Guards should never be eliminated.
-            JS_ASSERT_IF(ins->isUnused(), !ins->isGuard());
+        // Guards should never be eliminated.
+        JS_ASSERT_IF(ins->isUnused(), !ins->isGuard());
 
-            // Snapshot operands other than constants should never be
-            // emitted-at-uses. Try-catch support depends on there being no
-            // code between an instruction and the LOsiPoint that follows it.
-            JS_ASSERT_IF(!ins->isConstant(), !ins->isEmittedAtUses());
+        // Snapshot operands other than constants should never be
+        // emitted-at-uses. Try-catch support depends on there being no
+        // code between an instruction and the LOsiPoint that follows it.
+        JS_ASSERT_IF(!ins->isConstant(), !ins->isEmittedAtUses());
 
-            // The register allocation will fill these fields in with actual
-            // register/stack assignments. During code generation, we can restore
-            // interpreter state with the given information. Note that for
-            // constants, including known types, we record a dummy placeholder,
-            // since we can recover the same information, much cleaner, from MIR.
-            if (ins->isConstant() || ins->isUnused()) {
-                *type = LConstantIndex::Bogus();
-                *payload = LConstantIndex::Bogus();
-            } else if (ins->type() != MIRType_Value) {
-                *type = LConstantIndex::Bogus();
-                *payload = use(ins, LUse::KEEPALIVE);
-            } else {
-                *type = useType(ins, LUse::KEEPALIVE);
-                *payload = usePayload(ins, LUse::KEEPALIVE);
-            }
+        // The register allocation will fill these fields in with actual
+        // register/stack assignments. During code generation, we can restore
+        // interpreter state with the given information. Note that for
+        // constants, including known types, we record a dummy placeholder,
+        // since we can recover the same information, much cleaner, from MIR.
+        if (ins->isConstant() || ins->isUnused()) {
+            *type = LConstantIndex::Bogus();
+            *payload = LConstantIndex::Bogus();
+        } else if (ins->type() != MIRType_Value) {
+            *type = LConstantIndex::Bogus();
+            *payload = use(ins, LUse::KEEPALIVE);
+        } else {
+            *type = useType(ins, LUse::KEEPALIVE);
+            *payload = usePayload(ins, LUse::KEEPALIVE);
         }
     }
 
     return snapshot;
 }
 
 #elif JS_PUNBOX64
 
 LSnapshot *
 LIRGeneratorShared::buildSnapshot(LInstruction *ins, MResumePoint *rp, BailoutKind kind)
 {
-    LRecoverInfo *recover = getRecoverInfo(rp);
-    if (!recover)
+    LRecoverInfo *recoverInfo = getRecoverInfo(rp);
+    if (!recoverInfo)
         return nullptr;
 
-    LSnapshot *snapshot = LSnapshot::New(gen, recover, kind);
+    LSnapshot *snapshot = LSnapshot::New(gen, recoverInfo, kind);
     if (!snapshot)
         return nullptr;
 
-    size_t i = 0;
-    for (MResumePoint **it = recover->begin(), **end = recover->end(); it != end; ++it) {
-        MResumePoint *mir = *it;
-        for (size_t j = 0, e = mir->numOperands(); j < e; ++i, ++j) {
-            MDefinition *def = mir->getOperand(j);
+    size_t index = 0;
+    LRecoverInfo::OperandIter it(recoverInfo->begin());
+    LRecoverInfo::OperandIter end(recoverInfo->end());
+    for (; it != end; ++it) {
+        MDefinition *def = *it;
 
-            if (def->isBox())
-                def = def->toBox()->getOperand(0);
+        if (def->isRecoveredOnBailout())
+            continue;
 
-            // Guards should never be eliminated.
-            JS_ASSERT_IF(def->isUnused(), !def->isGuard());
+        if (def->isBox())
+            def = def->toBox()->getOperand(0);
 
-            // Snapshot operands other than constants should never be
-            // emitted-at-uses. Try-catch support depends on there being no
-            // code between an instruction and the LOsiPoint that follows it.
-            JS_ASSERT_IF(!def->isConstant(), !def->isEmittedAtUses());
+        // Guards should never be eliminated.
+        JS_ASSERT_IF(def->isUnused(), !def->isGuard());
 
-            LAllocation *a = snapshot->getEntry(i);
+        // Snapshot operands other than constants should never be
+        // emitted-at-uses. Try-catch support depends on there being no
+        // code between an instruction and the LOsiPoint that follows it.
+        JS_ASSERT_IF(!def->isConstant(), !def->isEmittedAtUses());
 
-            if (def->isUnused()) {
-                *a = LConstantIndex::Bogus();
-                continue;
-            }
+        LAllocation *a = snapshot->getEntry(index++);
 
-            *a = useKeepaliveOrConstant(def);
+        if (def->isUnused()) {
+            *a = LConstantIndex::Bogus();
+            continue;
         }
+
+        *a = useKeepaliveOrConstant(def);
     }
 
     return snapshot;
 }
 #endif
 
 bool
 LIRGeneratorShared::assignSnapshot(LInstruction *ins, BailoutKind kind)
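
On both NUNBOX32 and PUNBOX64, buildSnapshot now skips operands that are recovered on bailout, so the number of slots it fills must stay in sync with TotalOperandCount() in LIR.cpp, which sizes the LSnapshot. A hypothetical post-condition capturing that invariant (illustration only, not part of the patch):

static void
AssertSnapshotIsFull(LSnapshot *snapshot, size_t nonRecoveredOperands)
{
    // TotalOperandCount() counts only the operands that are not recovered on
    // bailout, and the LSnapshot constructor multiplies it by BOX_PIECES;
    // buildSnapshot must request exactly one slot per such operand.
    MOZ_ASSERT(nonRecoveredOperands * BOX_PIECES == snapshot->numSlots());
}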