Fixed a hack that created temporarily ill-typed LIR of the form i2f(i2f)
(bug 503427, r=gal).
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -2968,60 +2968,60 @@ TraceRecorder::lazilyImportGlobalSlot(un
import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
vp, type, "global", index, NULL);
SpecializeTreesToMissingGlobals(cx, globalObj, treeInfo);
return true;
}
/* Write back a value onto the stack or global frames. */
LIns*
-TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
+TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset, bool demote)
{
/*
* Sink all type casts targeting the stack into the side exit by simply storing the original
* (uncasted) value. Each guard generates the side exit map based on the types of the
* last stores to every stack location, so it's safe to not perform them on-trace.
*/
- if (isPromoteInt(i))
+ if (demote && isPromoteInt(i))
i = ::demote(lir, i);
return lir->insStorei(i, base, offset);
}
/* Update the tracker, then issue a write back store. */
JS_REQUIRES_STACK void
-TraceRecorder::set(jsval* p, LIns* i, bool initializing)
+TraceRecorder::set(jsval* p, LIns* i, bool initializing, bool demote)
{
JS_ASSERT(i != NULL);
JS_ASSERT(initializing || known(p));
checkForGlobalObjectReallocation();
tracker.set(p, i);
/*
* If we are writing to this location for the first time, calculate the
* offset into the native frame manually. Otherwise just look up the last
* load or store associated with the same source address (p) and use the
* same offset/base.
*/
LIns* x = nativeFrameTracker.get(p);
if (!x) {
if (isGlobal(p))
- x = writeBack(i, lirbuf->state, nativeGlobalOffset(p));
+ x = writeBack(i, lirbuf->state, nativeGlobalOffset(p), demote);
else
- x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
+ x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p), demote);
nativeFrameTracker.set(p, x);
} else {
#define ASSERT_VALID_CACHE_HIT(base, offset) \
JS_ASSERT(base == lirbuf->sp || base == lirbuf->state); \
JS_ASSERT(offset == ((base == lirbuf->sp) \
? -treeInfo->nativeStackBase + nativeStackOffset(p) \
: nativeGlobalOffset(p))); \
JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->disp());
- writeBack(i, x->oprnd2(), x->disp());
+ writeBack(i, x->oprnd2(), x->disp(), demote);
}
#undef ASSERT_VALID_CACHE_HIT
}
JS_REQUIRES_STACK LIns*
TraceRecorder::get(jsval* p)
{
checkForGlobalObjectReallocation();
@@ -3746,17 +3746,20 @@ class SlotMap : public SlotVisitorBase
for (unsigned i = 0; i < length(); i++) {
SlotInfo& info = get(i);
JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
if (info.lastCheck == TypeCheck_Promote) {
JS_ASSERT(isNumber(*info.v));
mRecorder.set(info.v, mRecorder.f2i(mRecorder.get(info.v)));
} else if (info.lastCheck == TypeCheck_Demote) {
JS_ASSERT(isNumber(*info.v));
- mRecorder.set(info.v, mRecorder.lir->ins1(LIR_i2f, mRecorder.get(info.v)));
+ JS_ASSERT(mRecorder.get(info.v)->isQuad());
+
+ /* The value is already a double (quad); never demote it back to int here. */
+ mRecorder.set(info.v, mRecorder.get(info.v), false, false);
}
}
}
private:
TypeCheckResult
checkType(unsigned i, JSTraceType t)
{
debug_only_printf(LC_TMTracer,
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -727,18 +727,20 @@ class TraceRecorder : public avmplus::GC
JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
- nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
- JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false);
+ nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset,
+ bool demote);
+ JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false,
+ bool demote = true);
JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
JS_REQUIRES_STACK nanojit::LIns* addr(jsval* p);
JS_REQUIRES_STACK bool known(jsval* p);
JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
JS_REQUIRES_STACK TypeConsensus selfTypeStability(SlotMap& smap);
JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, VMFragment** peer);