--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -2075,17 +2075,17 @@ ion::SideCannon(JSContext *cx, StackFram
script);
}
#endif
return status;
}
IonExecStatus
-ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args)
+ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgs &args)
{
JS_CHECK_RECURSION(cx, return IonExec_Error);
IonScript *ion = fun->nonLazyScript()->ionScript();
IonCode *code = ion->method();
void *jitcode = code->raw();
JS_ASSERT(ion::IsEnabled(cx));
@@ -2117,20 +2117,18 @@ ion::FastInvoke(JSContext *cx, HandleFun
EnterIonCode enter = cx->compartment->ionCompartment()->enterJIT();
void *calleeToken = CalleeToToken(fun);
RootedValue result(cx, Int32Value(args.length()));
JS_ASSERT(args.length() >= fun->nargs);
JSAutoResolveFlags rf(cx, RESOLVE_INFER);
- args.setActive();
enter(jitcode, args.length() + 1, args.array() - 1, fp, calleeToken,
/* scopeChain = */ NULL, 0, result.address());
- args.setInactive();
if (clearCallingIntoIon)
fp->clearCallingIntoIon();
JS_ASSERT(fp == cx->fp());
JS_ASSERT(!cx->runtime->hasIonReturnOverride());
args.rval().set(result);
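
With the setActive()/setInactive() bracketing gone, a C++ native re-enters
Ion with a plain CallArgs. A minimal sketch of a caller, assuming 'fun'
(a HandleFunction), 'thisv', and 'arg0' are already in scope:

    InvokeArgsGuard args;                       // is-a JS::CallArgs after this patch
    if (!cx->stack.pushInvokeArgs(cx, 1, &args))
        return false;
    args.setCallee(ObjectValue(*fun));
    args.setThis(thisv);
    args[0] = arg0;
    // No active/inactive bracketing around the call anymore.
    IonExecStatus status = ion::FastInvoke(cx, fun, args);
    if (IsErrorStatus(status))
        return false;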
--- a/js/src/ion/Ion.h
+++ b/js/src/ion/Ion.h
@@ -308,17 +308,17 @@ IsErrorStatus(IonExecStatus status)
{
return status == IonExec_Error || status == IonExec_Aborted;
}
IonExecStatus Cannon(JSContext *cx, StackFrame *fp);
IonExecStatus SideCannon(JSContext *cx, StackFrame *fp, jsbytecode *pc);
// Used to enter Ion from C++ natives like Array.map. Called from FastInvokeGuard.
-IonExecStatus FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args);
+IonExecStatus FastInvoke(JSContext *cx, HandleFunction fun, CallArgs &args);
// Walk the stack and invalidate active Ion frames for the invalid scripts.
void Invalidate(types::TypeCompartment &types, FreeOp *fop,
const Vector<types::RecompileInfo> &invalid, bool resetUses = true);
void Invalidate(JSContext *cx, const Vector<types::RecompileInfo> &invalid, bool resetUses = true);
bool Invalidate(JSContext *cx, JSScript *script, ExecutionMode mode, bool resetUses = true);
bool Invalidate(JSContext *cx, JSScript *script, bool resetUses = true);
--- a/js/src/ion/IonFrames.cpp
+++ b/js/src/ion/IonFrames.cpp
@@ -83,31 +83,27 @@ CalleeToken
IonFrameIterator::calleeToken() const
{
return ((IonJSFrameLayout *) current_)->calleeToken();
}
JSFunction *
IonFrameIterator::callee() const
{
- if (isScripted()) {
- JS_ASSERT(isFunctionFrame() || isParallelFunctionFrame());
- if (isFunctionFrame())
- return CalleeTokenToFunction(calleeToken());
- return CalleeTokenToParallelFunction(calleeToken());
- }
-
- JS_ASSERT(isNative());
- return exitFrame()->nativeExit()->vp()[0].toObject().toFunction();
+ JS_ASSERT(isScripted());
+ JS_ASSERT(isFunctionFrame() || isParallelFunctionFrame());
+ if (isFunctionFrame())
+ return CalleeTokenToFunction(calleeToken());
+ return CalleeTokenToParallelFunction(calleeToken());
}
JSFunction *
IonFrameIterator::maybeCallee() const
{
- if ((isScripted() && (isFunctionFrame() || isParallelFunctionFrame())) || isNative())
+ if (isScripted() && (isFunctionFrame() || isParallelFunctionFrame()))
return callee();
return NULL;
}
bool
IonFrameIterator::isNative() const
{
if (type_ != IonFrame_Exit || isFakeExitFrame())
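
Since callee() now asserts a scripted frame instead of reading the native
exit frame's vp, code that may be stopped at an exit frame should go through
the NULL-returning query. A sketch with a hypothetical caller:

    // Only scripted (function or parallel-function) frames report a callee.
    if (JSFunction *fun = iter.maybeCallee())
        TraceCallee(fun);    // 'TraceCallee' is a placeholder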
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -130,17 +130,17 @@ fun_getProperty(JSContext *cx, HandleObj
#endif
vp.setObject(*argsobj);
return true;
}
#ifdef JS_METHODJIT
StackFrame *fp = NULL;
- if (iter.isScript() && !iter.isIon())
+ if (!iter.isIon())
fp = iter.interpFrame();
if (JSID_IS_ATOM(id, cx->names().caller) && fp && fp->prev()) {
/*
* If the frame was called from within an inlined frame, mark the
* innermost function as uninlineable to expand its frame and allow us
* to recover its callee object.
*/
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -306,22 +306,20 @@ js::RunScript(JSContext *cx, StackFrame
// Check to see if useNewType flag should be set for this frame.
if (fp->isFunctionFrame() && fp->isConstructing() && !fp->isGeneratorFrame() &&
cx->typeInferenceEnabled())
{
StackIter iter(cx);
if (!iter.done()) {
++iter;
- if (iter.isScript()) {
- JSScript *script = iter.script();
- jsbytecode *pc = iter.pc();
- if (UseNewType(cx, script, pc))
- fp->setUseNewType();
- }
+ JSScript *script = iter.script();
+ jsbytecode *pc = iter.pc();
+ if (UseNewType(cx, script, pc))
+ fp->setUseNewType();
}
}
#ifdef DEBUG
struct CheckStackBalance {
JSContext *cx;
StackFrame *fp;
CheckStackBalance(JSContext *cx)
@@ -387,17 +385,17 @@ js::RunScript(JSContext *cx, StackFrame
/*
* Find a function reference and the 'this' value that serves as its implicit
* first parameter under argc arguments on cx's stack, and call the
* function. Push missing
* required arguments, allocate declared local variables, and pop everything
* when done. Then push the return value.
*/
bool
-js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
+js::Invoke(JSContext *cx, CallArgs args, MaybeConstruct construct)
{
JS_ASSERT(args.length() <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(!cx->compartment->activeAnalysis);
/* We should never enter a new script while cx->iterValue is live. */
JS_ASSERT(cx->iterValue.isMagic(JS_NO_ITER_VALUE));
/* MaybeConstruct is a subset of InitialFrameFlags */
@@ -475,17 +473,17 @@ js::Invoke(JSContext *cx, const Value &t
if (!Invoke(cx, args))
return false;
*rval = args.rval();
return true;
}
bool
-js::InvokeConstructorKernel(JSContext *cx, CallArgs args)
+js::InvokeConstructor(JSContext *cx, CallArgs args)
{
JS_ASSERT(!FunctionClass.construct);
args.setThis(MagicValue(JS_IS_CONSTRUCTING));
if (!args.calleev().isObject())
return ReportIsNotFunction(cx, args.calleev().get(), args.length() + 1, CONSTRUCT);
@@ -496,17 +494,17 @@ js::InvokeConstructorKernel(JSContext *c
if (fun->isNativeConstructor()) {
bool ok = CallJSNativeConstructor(cx, fun->native(), args);
return ok;
}
if (!fun->isInterpretedConstructor())
return ReportIsNotFunction(cx, args.calleev().get(), args.length() + 1, CONSTRUCT);
- if (!InvokeKernel(cx, args, CONSTRUCT))
+ if (!Invoke(cx, args, CONSTRUCT))
return false;
JS_ASSERT(args.rval().isObject());
return true;
}
Class *clasp = callee.getClass();
if (!clasp->construct)
@@ -2337,17 +2335,17 @@ END_CASE(JSOP_ENUMELEM)
BEGIN_CASE(JSOP_EVAL)
{
CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
if (IsBuiltinEvalForScope(regs.fp()->scopeChain(), args.calleev())) {
if (!DirectEval(cx, args))
goto error;
} else {
- if (!InvokeKernel(cx, args))
+ if (!Invoke(cx, args))
goto error;
}
regs.sp = args.spAfterCall();
TypeScript::Monitor(cx, script, regs.pc, regs.sp[-1]);
}
END_CASE(JSOP_EVAL)
BEGIN_CASE(JSOP_FUNAPPLY)
@@ -2384,20 +2382,20 @@ BEGIN_CASE(JSOP_FUNCALL)
goto error;
args.setCallee(ObjectValue(*fun));
}
}
/* Don't bother trying to fast-path calls to scripted non-constructors. */
if (!isFunction || !fun->isInterpretedConstructor()) {
if (construct) {
- if (!InvokeConstructorKernel(cx, args))
+ if (!InvokeConstructor(cx, args))
goto error;
} else {
- if (!InvokeKernel(cx, args))
+ if (!Invoke(cx, args))
goto error;
}
Value *newsp = args.spAfterCall();
TypeScript::Monitor(cx, script, regs.pc, newsp[-1]);
regs.sp = newsp;
len = JSOP_CALL_LENGTH;
DO_NEXT_OP(len);
}
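
The renamed entry points keep the old *Kernel stack contract: argc + 2
Values (callee, this, actuals) sit directly below sp. A sketch of the layout
a call site hands to them, assuming 'argc' and a Value pointer 'sp':

    //   vp[0]           callee
    //   vp[1]           this ('new' stores the JS_IS_CONSTRUCTING magic here)
    //   vp[2..argc+1]   actual arguments, so sp == vp + 2 + argc
    CallArgs args = CallArgsFromSp(argc, sp);
    if (!Invoke(cx, args))               // or InvokeConstructor(cx, args)
        return false;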
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -111,35 +111,21 @@ ReportIsNotFunction(JSContext *cx, const
MaybeConstruct construct = NO_CONSTRUCT);
/* See ReportIsNotFunction comment for the meaning of numToSkip. */
extern JSObject *
ValueToCallable(JSContext *cx, const Value &vp, int numToSkip = -1,
MaybeConstruct construct = NO_CONSTRUCT);
/*
- * InvokeKernel assumes that the given args have been pushed on the top of the
- * VM stack. Additionally, if 'args' is contained in a CallArgsList, that they
- * have already been marked 'active'.
+ * Invoke assumes that the given args have been pushed on the top of the
+ * VM stack.
*/
extern bool
-InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct = NO_CONSTRUCT);
-
-/*
- * Invoke assumes that 'args' has been pushed (via ContextStack::pushInvokeArgs)
- * and is currently at the top of the VM stack.
- */
-inline bool
-Invoke(JSContext *cx, InvokeArgsGuard &args, MaybeConstruct construct = NO_CONSTRUCT)
-{
- args.setActive();
- bool ok = InvokeKernel(cx, args, construct);
- args.setInactive();
- return ok;
-}
+Invoke(JSContext *cx, CallArgs args, MaybeConstruct construct = NO_CONSTRUCT);
/*
* This Invoke overload places the least requirements on the caller: it may be
* called at any time and it takes care of copying the given callee, this, and
* arguments onto the stack.
*/
extern bool
Invoke(JSContext *cx, const Value &thisv, const Value &fval, unsigned argc, Value *argv,
@@ -149,31 +135,21 @@ Invoke(JSContext *cx, const Value &thisv
* This helper takes care of the infinite-recursion check necessary for
* getter/setter calls.
*/
extern bool
InvokeGetterOrSetter(JSContext *cx, JSObject *obj, const Value &fval, unsigned argc, Value *argv,
Value *rval);
/*
- * InvokeConstructor* implement a function call from a constructor context
+ * InvokeConstructor implements a function call from a constructor context
* (e.g. 'new'), handling the creation of the new 'this' object.
*/
extern bool
-InvokeConstructorKernel(JSContext *cx, CallArgs args);
-
-/* See the InvokeArgsGuard overload of Invoke. */
-inline bool
-InvokeConstructor(JSContext *cx, InvokeArgsGuard &args)
-{
- args.setActive();
- bool ok = InvokeConstructorKernel(cx, ImplicitCast<CallArgs>(args));
- args.setInactive();
- return ok;
-}
+InvokeConstructor(JSContext *cx, CallArgs args);
/* See the fval overload of Invoke. */
extern bool
InvokeConstructor(JSContext *cx, const Value &fval, unsigned argc, Value *argv, Value *rval);
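
With the CallArgsList wrappers deleted, callers hand an InvokeArgsGuard
straight to Invoke/InvokeConstructor through the derived-to-base conversion;
nothing toggles an 'active' flag anymore. A minimal sketch, assuming 'fval',
'thisv', and argc argument Values:

    InvokeArgsGuard args;
    if (!cx->stack.pushInvokeArgs(cx, argc, &args))
        return false;
    args.setCallee(fval);
    args.setThis(thisv);
    // ... fill args[0..argc-1] ...
    if (!Invoke(cx, args))               // InvokeArgsGuard is-a CallArgs
        return false;
    // On success the return value is in args.rval().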
/*
* Executes a script with the given scopeChain/this. The 'type' indicates
* whether this is eval code or global code. To support debugging, the
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -5274,26 +5274,22 @@ js_DumpStackFrame(JSContext *cx, StackFr
JS_FRIEND_API(void)
js_DumpBacktrace(JSContext *cx)
{
Sprinter sprinter(cx);
sprinter.init();
size_t depth = 0;
for (StackIter i(cx); !i.done(); ++i, ++depth) {
- if (i.isScript()) {
- const char *filename = JS_GetScriptFilename(cx, i.script());
- unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc());
- JSScript *script = i.script();
- sprinter.printf("#%d %14p %s:%d (%p @ %d)\n",
- depth, (i.isIon() ? 0 : i.interpFrame()), filename, line,
- script, i.pc() - script->code);
- } else {
- sprinter.printf("#%d ???\n", depth);
- }
+ const char *filename = JS_GetScriptFilename(cx, i.script());
+ unsigned line = JS_PCToLineNumber(cx, i.script(), i.pc());
+ JSScript *script = i.script();
+ sprinter.printf("#%d %14p %s:%d (%p @ %d)\n",
+ depth, (i.isIon() ? 0 : i.interpFrame()), filename, line,
+ script, i.pc() - script->code);
}
fprintf(stdout, "%s", sprinter.string());
}
void
JSObject::sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf, JS::ObjectsExtraSizes *sizes)
{
if (hasDynamicSlots())
sizes->slots = mallocSizeOf(slots);
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -1623,28 +1623,24 @@ DecompileArgumentFromStack(JSContext *cx
return true;
#endif
/*
* Settle on the nearest script frame, which should be the builtin that
* called the intrinsic.
*/
StackIter frameIter(cx);
- while (!frameIter.done() && !frameIter.isScript())
- ++frameIter;
JS_ASSERT(!frameIter.done());
/*
* Get the second-to-top frame, the caller of the builtin that called the
* intrinsic.
*/
++frameIter;
-
- /* If this frame isn't a script, we can't decompile. */
- if (frameIter.done() || !frameIter.isScript())
+ if (frameIter.done())
return true;
RootedScript script(cx, frameIter.script());
AutoCompartment ac(cx, &script->global());
jsbytecode *current = frameIter.pc();
RootedFunction fun(cx, frameIter.isFunctionFrame()
? frameIter.callee()
: NULL);
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -156,31 +156,31 @@ stubs::SlowCall(VMFrame &f, uint32_t arg
if (*f.regs.pc == JSOP_FUNAPPLY && !GuardFunApplyArgumentsOptimization(f.cx))
THROW();
CallArgs args = CallArgsFromSp(argc, f.regs.sp);
RootedScript fscript(f.cx, f.script());
if (!MaybeCloneAndPatchCallee(f.cx, args, fscript, f.pc()))
THROW();
- if (!InvokeKernel(f.cx, args))
+ if (!Invoke(f.cx, args))
THROW();
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
}
void JS_FASTCALL
stubs::SlowNew(VMFrame &f, uint32_t argc)
{
CallArgs args = CallArgsFromSp(argc, f.regs.sp);
RootedScript fscript(f.cx, f.script());
if (!MaybeCloneAndPatchCallee(f.cx, args, fscript, f.pc()))
THROW();
- if (!InvokeConstructorKernel(f.cx, args))
+ if (!InvokeConstructor(f.cx, args))
THROW();
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
}
static inline bool
CheckStackQuota(VMFrame &f)
{
@@ -427,17 +427,17 @@ stubs::UncachedNewHelper(VMFrame &f, uin
if (!ucr.setFunction(cx, args, fscript, f.pc()))
THROW();
/* Try to do a fast inline call before the general Invoke path. */
if (ucr.fun && ucr.fun->isInterpretedConstructor()) {
if (!UncachedInlineCall(f, INITIAL_CONSTRUCT, &ucr.codeAddr, &ucr.unjittable, argc))
THROW();
} else {
- if (!InvokeConstructorKernel(cx, args))
+ if (!InvokeConstructor(cx, args))
THROW();
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
}
}
void * JS_FASTCALL
stubs::UncachedCall(VMFrame &f, uint32_t argc)
{
@@ -455,17 +455,17 @@ stubs::UncachedLoweredCall(VMFrame &f, u
}
void JS_FASTCALL
stubs::Eval(VMFrame &f, uint32_t argc)
{
CallArgs args = CallArgsFromSp(argc, f.regs.sp);
if (!IsBuiltinEvalForScope(f.fp()->scopeChain(), args.calleev())) {
- if (!InvokeKernel(f.cx, args))
+ if (!Invoke(f.cx, args))
THROW();
RootedScript fscript(f.cx, f.script());
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
return;
}
JS_ASSERT(f.fp() == f.cx->fp());
@@ -500,17 +500,17 @@ stubs::UncachedCallHelper(VMFrame &f, ui
if (!CallJSNative(cx, ucr.fun->native(), args))
THROW();
RootedScript fscript(cx, f.script());
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
return;
}
}
- if (!InvokeKernel(f.cx, args))
+ if (!Invoke(f.cx, args))
THROW();
types::TypeScript::Monitor(f.cx, fscript, f.pc(), args.rval());
return;
}
static void
RemoveOrphanedNative(JSContext *cx, StackFrame *fp)
@@ -1021,17 +1021,17 @@ js_InternalInterpret(void *returnData, v
}
break;
case REJOIN_CALL_SPLAT: {
/* Leave analysis early and do the Invoke which SplatApplyArgs prepared. */
nextDepth = analysis->getCode(nextpc).stackDepth;
enter.destroy();
f.regs.sp = nextsp + 2 + f.u.call.dynamicArgc;
- if (!InvokeKernel(cx, CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp)))
+ if (!Invoke(cx, CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp)))
return js_InternalThrow(f);
nextsp[-1] = nextsp[0];
f.regs.pc = nextpc;
break;
}
case REJOIN_GETTER:
/*
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -505,27 +505,16 @@ StackSegment::contains(const StackFrame
}
bool
StackSegment::contains(const FrameRegs *regs) const
{
return regs && contains(regs->fp());
}
-bool
-StackSegment::contains(const CallArgsList *call) const
-{
- if (!call || !calls_)
- return false;
-
- /* NB: this depends on the continuity of segments in memory. */
- Value *vp = call->array();
- return vp > slotsBegin() && vp <= calls_->array();
-}
-
StackFrame *
StackSegment::computeNextFrame(const StackFrame *f, size_t maxDepth) const
{
JS_ASSERT(contains(f) && f != fp());
StackFrame *next = fp();
for (size_t i = 0; i <= maxDepth; ++i) {
if (next->prev() == f)
@@ -535,24 +524,20 @@ StackSegment::computeNextFrame(const Sta
return NULL;
}
Value *
StackSegment::end() const
{
/* NB: this depends on the continuity of segments in memory. */
- JS_ASSERT_IF(calls_ || regs_, contains(calls_) || contains(regs_));
- Value *p = calls_
- ? regs_
- ? Max(regs_->sp, calls_->end())
- : calls_->end()
- : regs_
- ? regs_->sp
- : slotsBegin();
+ JS_ASSERT_IF(regs_, contains(regs_));
+ Value *p = regs_ ? regs_->sp : slotsBegin();
+ if (invokeArgsEnd_ > p)
+ p = invokeArgsEnd_;
JS_ASSERT(p >= slotsBegin());
return p;
}
FrameRegs *
StackSegment::pushRegs(FrameRegs &regs)
{
JS_ASSERT_IF(contains(regs_), regs.fp()->prev() == regs_->fp());
@@ -563,35 +548,16 @@ StackSegment::pushRegs(FrameRegs &regs)
void
StackSegment::popRegs(FrameRegs *regs)
{
JS_ASSERT_IF(regs && contains(regs->fp()), regs->fp() == regs_->fp()->prev());
regs_ = regs;
}
-void
-StackSegment::pushCall(CallArgsList &callList)
-{
- callList.prev_ = calls_;
- calls_ = &callList;
-}
-
-void
-StackSegment::pointAtCall(CallArgsList &callList)
-{
- calls_ = &callList;
-}
-
-void
-StackSegment::popCall()
-{
- calls_ = calls_->prev_;
-}
-
/*****************************************************************************/
StackSpace::StackSpace()
: seg_(NULL),
base_(NULL),
conservativeEnd_(NULL),
#ifdef XP_WIN
commitEnd_(NULL),
@@ -940,26 +906,19 @@ ContextStack::ensureOnTop(JSContext *cx,
if (!space().ensureSpace(cx, report, firstUnused, nvars))
return NULL;
return firstUnused;
}
if (!space().ensureSpace(cx, report, firstUnused, VALUES_PER_STACK_SEGMENT + nvars))
return NULL;
- CallArgsList *calls;
- if (seg_ && extend) {
- regs = seg_->maybeRegs();
- calls = seg_->maybeCalls();
- } else {
- regs = NULL;
- calls = NULL;
- }
+ regs = (seg_ && extend) ? seg_->maybeRegs() : NULL;
- seg_ = new(firstUnused) StackSegment(cx, seg_, space().seg_, regs, calls);
+ seg_ = new(firstUnused) StackSegment(cx, seg_, space().seg_, regs);
space().seg_ = seg_;
*pushedSeg = true;
return seg_->slotsBegin();
}
void
ContextStack::popSegment()
{
@@ -980,32 +939,34 @@ ContextStack::pushInvokeArgs(JSContext *
Value *firstUnused = ensureOnTop(cx, report, nvars, CAN_EXTEND, &iag->pushedSeg_);
if (!firstUnused)
return false;
MakeRangeGCSafe(firstUnused, nvars);
ImplicitCast<CallArgs>(*iag) = CallArgsFromVp(argc, firstUnused);
- seg_->pushCall(*iag);
+ seg_->pushInvokeArgsEnd(iag->end(), &iag->prevInvokeArgsEnd_);
+
JS_ASSERT(space().firstUnused() == iag->end());
iag->setPushed(*this);
return true;
}
void
ContextStack::popInvokeArgs(const InvokeArgsGuard &iag)
{
JS_ASSERT(iag.pushed());
JS_ASSERT(onTop());
- JS_ASSERT(space().firstUnused() == seg_->calls().end());
+ JS_ASSERT(space().firstUnused() == seg_->invokeArgsEnd());
Value *oldend = seg_->end();
- seg_->popCall();
+ seg_->popInvokeArgsEnd(iag.prevInvokeArgsEnd_);
+
if (iag.pushedSeg_)
popSegment();
Debug_SetValueRangeToCrashOnTouch(space().firstUnused(), oldend);
}
StackFrame *
ContextStack::pushInvokeFrame(JSContext *cx, MaybeReportError report,
@@ -1056,39 +1017,37 @@ ContextStack::pushExecuteFrame(JSContext
* prev-links.
*
* Eval-in-frame is the exception since it prev-links to an arbitrary frame
* (possibly in the middle of some previous segment). Thus pass CANT_EXTEND
* (to start a new segment) and link the frame and call chain manually
* below. If |evalInFrame| is a baseline JIT frame, prev-link to its entry
* frame.
*/
- CallArgsList *evalInFrameCalls = NULL; /* quell overwarning */
MaybeExtend extend;
StackFrame *prevLink;
AbstractFramePtr prev = NullFramePtr();
if (evalInFrame) {
/* First, find the right segment. */
AllFramesIter frameIter(cx->runtime);
while (frameIter.isIonOptimizedJS() || frameIter.abstractFramePtr() != evalInFrame)
++frameIter;
JS_ASSERT(frameIter.abstractFramePtr() == evalInFrame);
StackSegment &seg = *frameIter.seg();
StackIter iter(cx->runtime, seg);
/* Debug-mode currently disables Ion compilation. */
JS_ASSERT_IF(evalInFrame.isStackFrame(), !evalInFrame.asStackFrame()->runningInIon());
JS_ASSERT_IF(evalInFrame.compartment() == iter.compartment(), !iter.isIonOptimizedJS());
- while (!iter.isScript() || iter.isIonOptimizedJS() || iter.abstractFramePtr() != evalInFrame) {
+ while (iter.isIonOptimizedJS() || iter.abstractFramePtr() != evalInFrame) {
++iter;
JS_ASSERT_IF(evalInFrame.compartment() == iter.compartment(), !iter.isIonOptimizedJS());
}
JS_ASSERT(iter.abstractFramePtr() == evalInFrame);
- evalInFrameCalls = iter.data_.calls_;
prevLink = iter.data_.fp_;
prev = evalInFrame;
extend = CANT_EXTEND;
} else {
prevLink = maybefp();
extend = CAN_EXTEND;
if (maybefp()) {
ScriptFrameIter iter(cx);
@@ -1101,20 +1060,16 @@ ContextStack::pushExecuteFrame(JSContext
if (!firstUnused)
return false;
StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
fp->initExecuteFrame(script, prevLink, prev, seg_->maybeRegs(), thisv, *scopeChain, type);
fp->initVarsToUndefined();
efg->regs_.prepareToRun(*fp, script);
- /* pushRegs() below links the prev-frame; manually link the prev-call. */
- if (evalInFrame && evalInFrameCalls)
- seg_->pointAtCall(*evalInFrameCalls);
-
efg->prevRegs_ = seg_->pushRegs(efg->regs_);
JS_ASSERT(space().firstUnused() == efg->regs_.sp);
efg->setPushed(*this);
return true;
}
#ifdef JS_ION
bool
@@ -1274,222 +1229,167 @@ StackIter::popFrame()
data_.pc_ = oldfp->prevpc(&inline_);
JS_ASSERT(!inline_);
} else {
poisonRegs();
}
}
void
-StackIter::popCall()
-{
- DebugOnly<CallArgsList*> oldCall = data_.calls_;
- JS_ASSERT(data_.seg_->contains(oldCall));
- data_.calls_ = data_.calls_->prev();
- if (!data_.seg_->contains(data_.fp_))
- poisonRegs();
-}
-
-void
StackIter::settleOnNewSegment()
{
if (FrameRegs *regs = data_.seg_->maybeRegs())
data_.pc_ = regs->pc;
else
poisonRegs();
}
void
StackIter::startOnSegment(StackSegment *seg)
{
data_.seg_ = seg;
data_.fp_ = data_.seg_->maybefp();
- data_.calls_ = data_.seg_->maybeCalls();
settleOnNewSegment();
}
/*
- * Given that the iterator's current value of fp_ and calls_ (initialized on
- * construction or after operator++ popped the previous scripted/native call),
- * "settle" the iterator on a new StackIter::State value. The goal is to
- * present the client a simple linear sequence of native/scripted calls while
- * covering up unpleasant stack implementation details:
+ * Given the iterator's current value of fp_ (initialized on construction or
+ * after operator++ popped the previous call), "settle" the iterator on a new
+ * StackIter::State value. The goal is to present the client a simple linear
+ * sequence of scripted calls while covering up unpleasant stack implementation
+ * details:
* - The frame chain can be "saved" and "restored" (see JS_SaveFrameChain).
* This artificially cuts the call chain and the StackIter client may want
* to continue through this cut to the previous frame by passing
* GO_THROUGH_SAVED.
* - fp->prev can be in a different contiguous segment from fp. In this case,
- * the current values of sp/pc after calling popFrame/popCall are incorrect
- * and should be recovered from fp->prev's segment.
- * - there is no explicit relationship to determine whether fp_ or calls_ is
- * the innermost invocation so implicit memory ordering is used since both
- * push values on the stack.
- * - a native call's 'callee' argument is clobbered on return while the
- * CallArgsList element is still visible.
+ * the current values of sp/pc after calling popFrame are incorrect and
+ * should be recovered from fp->prev's segment.
*/
+/* PGO causes xpcshell startup crashes with VS2010. */
+#if defined(_MSC_VER)
+# pragma optimize("g", off)
+#endif
void
StackIter::settleOnNewState()
{
- /* Reset whether or we popped a call last time we settled. */
- data_.poppedCallDuringSettle_ = false;
-
/*
- * There are elements of the calls_ and fp_ chains that we want to skip
- * over so iterate until we settle on one or until there are no more.
+ * There are elements of the fp_ chain that we want to skip over so iterate
+ * until we settle on one or until there are no more.
*/
while (true) {
- if (!data_.fp_ && !data_.calls_) {
+ if (!data_.fp_) {
if (data_.savedOption_ == GO_THROUGH_SAVED && data_.seg_->prevInContext()) {
startOnSegment(data_.seg_->prevInContext());
continue;
}
data_.state_ = DONE;
return;
}
- /* Check if popFrame/popCall changed segment. */
+ /* Check if popFrame changed segment. */
bool containsFrame = data_.seg_->contains(data_.fp_);
- bool containsCall = data_.seg_->contains(data_.calls_);
- while (!containsFrame && !containsCall) {
+ while (!containsFrame) {
/* Eval-in-frame can cross contexts, so use prevInMemory. */
data_.seg_ = data_.seg_->prevInMemory();
containsFrame = data_.seg_->contains(data_.fp_);
- containsCall = data_.seg_->contains(data_.calls_);
/* Eval-in-frame allows jumping into the middle of a segment. */
- if (containsFrame &&
- (data_.seg_->fp() != data_.fp_ || data_.seg_->maybeCalls() != data_.calls_))
- {
+ if (containsFrame && data_.seg_->fp() != data_.fp_) {
/* Avoid duplicating logic; seg_ contains fp_, so no iloop. */
StackIter tmp = *this;
tmp.startOnSegment(data_.seg_);
tmp.settleOnNewState();
- while (!tmp.isScript() || tmp.data_.fp_ != data_.fp_)
+ while (tmp.data_.fp_ != data_.fp_)
++tmp;
- JS_ASSERT(tmp.isScript() &&
+ JS_ASSERT(!tmp.done() &&
tmp.data_.seg_ == data_.seg_ &&
tmp.data_.fp_ == data_.fp_);
*this = tmp;
return;
}
- /* There is no eval-in-frame equivalent for native calls. */
- JS_ASSERT_IF(containsCall, &data_.seg_->calls() == data_.calls_);
-
settleOnNewSegment();
}
- /*
- * In case of both a scripted frame and call record, use linear memory
- * ordering to decide which was the most recent.
- */
- if (containsFrame && (!containsCall || (Value *)data_.fp_ >= data_.calls_->array())) {
#ifdef JS_ION
- if (data_.fp_->beginsIonActivation()) {
- /*
- * Eval-in-frame can link to an arbitrary frame on the stack.
- * Skip any IonActivation's until we reach the one for the
- * current StackFrame. Treat activations with NULL entryfp
- * (pushed by FastInvoke) as belonging to the previous
- * activation.
- */
- while (true) {
- ion::IonActivation *act = data_.ionActivations_.activation();
- while (!act->entryfp())
- act = act->prev();
- if (act->entryfp() == data_.fp_)
- break;
+ if (data_.fp_->beginsIonActivation()) {
+ /*
+ * Eval-in-frame can link to an arbitrary frame on the stack.
+ * Skip any IonActivation's until we reach the one for the
+ * current StackFrame. Treat activations with NULL entryfp
+ * (pushed by FastInvoke) as belonging to the previous
+ * activation.
+ */
+ while (true) {
+ ion::IonActivation *act = data_.ionActivations_.activation();
+ while (!act->entryfp())
+ act = act->prev();
+ if (act->entryfp() == data_.fp_)
+ break;
- ++data_.ionActivations_;
- }
-
- data_.ionFrames_ = ion::IonFrameIterator(data_.ionActivations_);
+ ++data_.ionActivations_;
+ }
- if (data_.ionFrames_.isNative()) {
- data_.state_ = ION;
- return;
- }
+ data_.ionFrames_ = ion::IonFrameIterator(data_.ionActivations_);
- while (!data_.ionFrames_.isScripted() && !data_.ionFrames_.done())
- ++data_.ionFrames_;
+ while (!data_.ionFrames_.isScripted() && !data_.ionFrames_.done())
+ ++data_.ionFrames_;
- // When invoked from JM, we don't re-use the entryfp, so we
- // may have an empty Ion activation.
- if (data_.ionFrames_.done()) {
- data_.state_ = SCRIPTED;
- return;
- }
-
- data_.state_ = ION;
- nextIonFrame();
+ // When invoked from JM, we don't re-use the entryfp, so we
+ // may have an empty Ion activation.
+ if (data_.ionFrames_.done()) {
+ data_.state_ = SCRIPTED;
return;
}
-#endif /* JS_ION */
- data_.state_ = SCRIPTED;
+ data_.state_ = ION;
+ nextIonFrame();
return;
}
+#endif /* JS_ION */
- /*
- * A CallArgsList element is pushed for any call to Invoke, regardless
- * of whether the callee is a scripted function or even a callable
- * object. Thus, it is necessary to filter calleev for natives.
- *
- * Second, stuff can happen after the args are pushed but before/after
- * the actual call, so only consider "active" calls. (Since Invoke
- * necessarily clobbers the callee, "active" is also necessary to
- * ensure that the callee slot is valid.)
- */
- if (data_.calls_->active() && IsNativeFunction(data_.calls_->calleev())) {
- data_.state_ = NATIVE;
- data_.args_ = *data_.calls_;
- return;
- }
-
- /* Pop the call and keep looking. */
- popCall();
- data_.poppedCallDuringSettle_ = true;
+ data_.state_ = SCRIPTED;
+ return;
}
}
+#if defined(_MSC_VER)
+# pragma optimize("", on)
+#endif
StackIter::Data::Data(JSContext *cx, PerThreadData *perThread, SavedOption savedOption)
: perThread_(perThread),
cx_(cx),
- savedOption_(savedOption),
- poppedCallDuringSettle_(false)
+ savedOption_(savedOption)
#ifdef JS_ION
, ionActivations_(cx),
ionFrames_((uint8_t *)NULL)
#endif
{
}
StackIter::Data::Data(JSContext *cx, JSRuntime *rt, StackSegment *seg)
: perThread_(&rt->mainThread),
cx_(cx),
- savedOption_(STOP_AT_SAVED),
- poppedCallDuringSettle_(false)
+ savedOption_(STOP_AT_SAVED)
#ifdef JS_ION
, ionActivations_(rt),
ionFrames_((uint8_t *)NULL)
#endif
{
}
StackIter::Data::Data(const StackIter::Data &other)
: perThread_(other.perThread_),
cx_(other.cx_),
savedOption_(other.savedOption_),
state_(other.state_),
fp_(other.fp_),
- calls_(other.calls_),
seg_(other.seg_),
- pc_(other.pc_),
- args_(other.args_),
- poppedCallDuringSettle_(other.poppedCallDuringSettle_)
+ pc_(other.pc_)
#ifdef JS_ION
, ionActivations_(other.ionActivations_),
ionFrames_(other.ionFrames_)
#endif
{
}
StackIter::StackIter(JSContext *cx, SavedOption savedOption)
@@ -1601,22 +1501,16 @@ StackIter::popIonFrame()
void
StackIter::popBaselineDebuggerFrame()
{
ion::BaselineFrame *prevBaseline = data_.fp_->prevBaselineFrame();
popFrame();
settleOnNewState();
- /* Pop native and Ion frames until we reach the target frame. */
- while (data_.state_ == NATIVE) {
- popCall();
- settleOnNewState();
- }
-
JS_ASSERT(data_.state_ == ION);
while (!data_.ionFrames_.isBaselineJS() || data_.ionFrames_.baselineFrame() != prevBaseline)
popIonFrame();
}
#endif
StackIter &
StackIter::operator++()
@@ -1630,39 +1524,32 @@ StackIter::operator++()
/* Eval-in-frame with a baseline JIT frame. */
popBaselineDebuggerFrame();
break;
}
#endif
popFrame();
settleOnNewState();
break;
- case NATIVE:
- popCall();
- settleOnNewState();
- break;
case ION:
#ifdef JS_ION
popIonFrame();
break;
#else
JS_NOT_REACHED("Unexpected state");
#endif
}
return *this;
}
bool
StackIter::operator==(const StackIter &rhs) const
{
return done() == rhs.done() &&
- (done() ||
- (isScript() == rhs.isScript() &&
- ((isScript() && data_.fp_ == rhs.data_.fp_) ||
- (!isScript() && nativeArgs().base() == rhs.nativeArgs().base()))));
+ (done() || data_.fp_ == rhs.data_.fp_);
}
StackIter::Data *
StackIter::copyData() const
{
#ifdef JS_ION
/*
* This doesn't work for optimized Ion frames since ionInlineFrames_ is
@@ -1682,18 +1569,16 @@ StackIter::compartment() const
case SCRIPTED:
return data_.fp_->compartment();
case ION:
#ifdef JS_ION
return data_.ionActivations_.activation()->compartment();
#else
break;
#endif
- case NATIVE:
- return data_.calls_->callee().compartment();
}
JS_NOT_REACHED("Unexpected state");
return NULL;
}
bool
StackIter::isFunctionFrame() const
{
@@ -1706,18 +1591,16 @@ StackIter::isFunctionFrame() const
#ifdef JS_ION
JS_ASSERT(data_.ionFrames_.isScripted());
if (data_.ionFrames_.isBaselineJS())
return data_.ionFrames_.isFunctionFrame();
return ionInlineFrames_.isFunctionFrame();
#else
break;
#endif
- case NATIVE:
- return false;
}
JS_NOT_REACHED("Unexpected state");
return false;
}
bool
StackIter::isGlobalFrame() const
{
@@ -1730,18 +1613,16 @@ StackIter::isGlobalFrame() const
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS())
return data_.ionFrames_.baselineFrame()->isGlobalFrame();
JS_ASSERT(!script()->isForEval());
return !script()->function();
#else
break;
#endif
- case NATIVE:
- return false;
}
JS_NOT_REACHED("Unexpected state");
return false;
}
bool
StackIter::isEvalFrame() const
{
@@ -1754,50 +1635,46 @@ StackIter::isEvalFrame() const
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS())
return data_.ionFrames_.baselineFrame()->isEvalFrame();
JS_ASSERT(!script()->isForEval());
return false;
#else
break;
#endif
- case NATIVE:
- return false;
}
JS_NOT_REACHED("Unexpected state");
return false;
}
bool
StackIter::isNonEvalFunctionFrame() const
{
JS_ASSERT(!done());
switch (data_.state_) {
case DONE:
break;
case SCRIPTED:
return interpFrame()->isNonEvalFunctionFrame();
case ION:
- case NATIVE:
return !isEvalFrame() && isFunctionFrame();
}
JS_NOT_REACHED("Unexpected state");
return false;
}
bool
StackIter::isGeneratorFrame() const
{
switch (data_.state_) {
case DONE:
break;
case SCRIPTED:
return interpFrame()->isGeneratorFrame();
case ION:
- case NATIVE:
return false;
}
JS_NOT_REACHED("Unexpected state");
return false;
}
bool
StackIter::isConstructing() const
@@ -1810,17 +1687,16 @@ StackIter::isConstructing() const
if (data_.ionFrames_.isOptimizedJS())
return ionInlineFrames_.isConstructing();
JS_ASSERT(data_.ionFrames_.isBaselineJS());
return data_.ionFrames_.isConstructing();
#else
break;
#endif
case SCRIPTED:
- case NATIVE:
return interpFrame()->isConstructing();
}
JS_NOT_REACHED("Unexpected state");
return false;
}
AbstractFramePtr
StackIter::abstractFramePtr() const
@@ -1832,18 +1708,16 @@ StackIter::abstractFramePtr() const
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS())
return data_.ionFrames_.baselineFrame();
#endif
break;
case SCRIPTED:
JS_ASSERT(interpFrame());
return AbstractFramePtr(interpFrame());
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return NullFramePtr();
}
void
StackIter::updatePcQuadratic()
{
@@ -1872,18 +1746,16 @@ StackIter::updatePcQuadratic()
// Update the pc.
JS_ASSERT(data_.ionFrames_.baselineFrame() == frame);
data_.ionFrames_.baselineScriptAndPc(NULL, &data_.pc_);
return;
}
#endif
break;
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
}
JSFunction *
StackIter::callee() const
{
switch (data_.state_) {
@@ -1891,25 +1763,21 @@ StackIter::callee() const
break;
case SCRIPTED:
JS_ASSERT(isFunctionFrame());
return &interpFrame()->callee();
case ION:
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS())
return data_.ionFrames_.callee();
- if (data_.ionFrames_.isOptimizedJS())
- return ionInlineFrames_.callee();
- JS_ASSERT(data_.ionFrames_.isNative());
- return data_.ionFrames_.callee();
+ JS_ASSERT(data_.ionFrames_.isOptimizedJS());
+ return ionInlineFrames_.callee();
#else
break;
#endif
- case NATIVE:
- return nativeArgs().callee().toFunction();
}
JS_NOT_REACHED("Unexpected state");
return NULL;
}
Value
StackIter::calleev() const
{
@@ -1920,18 +1788,16 @@ StackIter::calleev() const
JS_ASSERT(isFunctionFrame());
return interpFrame()->calleev();
case ION:
#ifdef JS_ION
return ObjectValue(*callee());
#else
break;
#endif
- case NATIVE:
- return nativeArgs().calleev();
}
JS_NOT_REACHED("Unexpected state");
return Value();
}
unsigned
StackIter::numActualArgs() const
{
@@ -1946,18 +1812,16 @@ StackIter::numActualArgs() const
if (data_.ionFrames_.isOptimizedJS())
return ionInlineFrames_.numActualArgs();
JS_ASSERT(data_.ionFrames_.isBaselineJS());
return data_.ionFrames_.numActualArgs();
#else
break;
#endif
- case NATIVE:
- return nativeArgs().length();
}
JS_NOT_REACHED("Unexpected state");
return 0;
}
Value
StackIter::unaliasedActual(unsigned i, MaybeCheckAliasing checkAliasing) const
{
@@ -1968,18 +1832,16 @@ StackIter::unaliasedActual(unsigned i, M
return interpFrame()->unaliasedActual(i, checkAliasing);
case ION:
#ifdef JS_ION
JS_ASSERT(data_.ionFrames_.isBaselineJS());
return data_.ionFrames_.baselineFrame()->unaliasedActual(i, checkAliasing);
#else
break;
#endif
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return NullValue();
}
JSObject *
StackIter::scopeChain() const
{
@@ -1991,18 +1853,16 @@ StackIter::scopeChain() const
if (data_.ionFrames_.isOptimizedJS())
return ionInlineFrames_.scopeChain();
return data_.ionFrames_.baselineFrame()->scopeChain();
#else
break;
#endif
case SCRIPTED:
return interpFrame()->scopeChain();
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return NULL;
}
CallObject &
StackIter::callObj() const
{
@@ -2024,18 +1884,16 @@ StackIter::hasArgsObj() const
return interpFrame()->hasArgsObj();
case ION:
#ifdef JS_ION
JS_ASSERT(data_.ionFrames_.isBaselineJS());
return data_.ionFrames_.baselineFrame()->hasArgsObj();
#else
break;
#endif
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return false;
}
ArgumentsObject &
StackIter::argsObj() const
{
@@ -2048,27 +1906,26 @@ StackIter::argsObj() const
#ifdef JS_ION
JS_ASSERT(data_.ionFrames_.isBaselineJS());
return data_.ionFrames_.baselineFrame()->argsObj();
#else
break;
#endif
case SCRIPTED:
return interpFrame()->argsObj();
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return interpFrame()->argsObj();
}
bool
StackIter::computeThis() const
{
- if (isScript() && !isIonOptimizedJS()) {
+ JS_ASSERT(!done());
+ if (!isIonOptimizedJS()) {
JS_ASSERT(data_.cx_);
return ComputeThis(data_.cx_, abstractFramePtr());
}
return true;
}
Value
StackIter::thisv() const
@@ -2080,17 +1937,16 @@ StackIter::thisv() const
#ifdef JS_ION
if (data_.ionFrames_.isOptimizedJS())
return ObjectValue(*ionInlineFrames_.thisObject());
return data_.ionFrames_.baselineFrame()->thisValue();
#else
break;
#endif
case SCRIPTED:
- case NATIVE:
return interpFrame()->thisValue();
}
JS_NOT_REACHED("Unexpected state");
return NullValue();
}
Value
StackIter::returnValue() const
@@ -2101,18 +1957,16 @@ StackIter::returnValue() const
case ION:
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS())
return *data_.ionFrames_.baselineFrame()->returnValue();
#endif
break;
case SCRIPTED:
return interpFrame()->returnValue();
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
return NullValue();
}
void
StackIter::setReturnValue(const Value &v)
{
@@ -2125,28 +1979,25 @@ StackIter::setReturnValue(const Value &v
data_.ionFrames_.baselineFrame()->setReturnValue(v);
return;
}
#endif
break;
case SCRIPTED:
interpFrame()->setReturnValue(v);
return;
- case NATIVE:
- break;
}
JS_NOT_REACHED("Unexpected state");
}
size_t
StackIter::numFrameSlots() const
{
switch (data_.state_) {
case DONE:
- case NATIVE:
break;
case ION: {
#ifdef JS_ION
if (data_.ionFrames_.isOptimizedJS())
return ionInlineFrames_.snapshotIterator().slots() - ionInlineFrames_.script()->nfixed;
ion::BaselineFrame *frame = data_.ionFrames_.baselineFrame();
return frame->numValueSlots() - data_.ionFrames_.script()->nfixed;
#else
@@ -2162,17 +2013,16 @@ StackIter::numFrameSlots() const
return 0;
}
Value
StackIter::frameSlotValue(size_t index) const
{
switch (data_.state_) {
case DONE:
- case NATIVE:
break;
case ION:
#ifdef JS_ION
if (data_.ionFrames_.isOptimizedJS()) {
ion::SnapshotIterator si(ionInlineFrames_.snapshotIterator());
index += ionInlineFrames_.script()->nfixed;
return si.maybeReadSlotByIndex(index);
}
@@ -2308,17 +2158,17 @@ AbstractFramePtr::evalPrevScopeChain(JSR
{
/* Find the stack segment containing this frame. */
AllFramesIter alliter(rt);
while (alliter.isIonOptimizedJS() || alliter.abstractFramePtr() != *this)
++alliter;
/* Eval frames are not compiled by Ion, though their caller might be. */
StackIter iter(rt, *alliter.seg());
- while (!iter.isScript() || iter.isIonOptimizedJS() || iter.abstractFramePtr() != *this)
+ while (iter.isIonOptimizedJS() || iter.abstractFramePtr() != *this)
++iter;
++iter;
return iter.scopeChain();
}
#ifdef DEBUG
void
js::CheckLocalUnaliased(MaybeCheckAliasing checkAliasing, JSScript *script,
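
The prev-linked CallArgsList chain is replaced by a single high-water mark:
pushInvokeArgs stashes the segment's previous invokeArgsEnd_ in the guard
itself, so nested pushes unwind LIFO. A sketch of the invariant for two
nested guards A and B:

    // push A: A.prevInvokeArgsEnd_ = NULL;    invokeArgsEnd_ = A.end()
    // push B: B.prevInvokeArgsEnd_ = A.end(); invokeArgsEnd_ = B.end()
    // pop  B: invokeArgsEnd_ = B.prevInvokeArgsEnd_   // back to A.end()
    // pop  A: invokeArgsEnd_ = A.prevInvokeArgsEnd_   // back to NULL
    //
    // StackSegment::end() is then just max(regs_->sp, invokeArgsEnd_),
    // replacing the old walk over the calls_ chain.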
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -106,48 +106,20 @@ namespace ion {
* the expression become the arguments of a call. There are also layout
* invariants concerning the arguments and StackFrame; see "Arguments" comment
* in StackFrame for more details.
*
* The top of a segment's current frame's expression stack is pointed to by the
* segment's "current regs", which contains the stack pointer 'sp'. In the
* interpreter, sp is adjusted as individual values are pushed and popped from
* the stack and the FrameRegs struct (pointed by the StackSegment) is a local
- * var of js::Interpret. JIT code simulates this by lazily updating FrameRegs
+ * var of js::Interpret. JM JIT code simulates this by lazily updating FrameRegs
* when calling from JIT code into the VM. Ideally, we'd like to remove all
* dependence on FrameRegs outside the interpreter.
*
- * A call to a native (C++) function does not push a frame. Instead, an array
- * of values is passed to the native. The layout of this array is abstracted by
- * JS::CallArgs. With respect to the StackSegment layout above, the args to a
- * native call are inserted anywhere there can be values. A sample memory layout
- * looks like:
- *
- * regs
- * .------------------------------------------.
- * | V
- * | fp .--FrameRegs--. sp
- * | V V
- * |StackSegment| native call | values |StackFrame| values | native call |
- * | vp <--argc--> end vp <--argc--> end
- * | CallArgs <------------------------------ CallArgs
- * | prev ^
- * `-------------------------------------------------------'
- * calls
- *
- * Here there are two native calls on the stack. The start of each native arg
- * range is recorded by a CallArgs element which is prev-linked like stack
- * frames. Note that, in full generality, native and scripted calls can
- * interleave arbitrarily. Thus, the end of a segment is the maximum of its
- * current frame and its current native call. Similarly, the top of the entire
- * thread stack is the end of its current segment.
- *
- * Note that, between any two StackFrames there may be any number
- * of native calls, so the meaning of 'prev' is not 'directly called by'.
- *
* An additional feature (perhaps not for much longer: bug 650361) is that
* multiple independent "contexts" can interleave (LIFO) on a single contiguous
* stack. "Independent" here means that each context has its own callstack.
* Note, though, that eval-in-frame allows one context's callstack to join
* another context's callstack. Thus, in general, the structure of calls in a
* StackSpace is a forest.
*
* More concretely, an embedding may enter the JS engine on cx1 and then, from
@@ -158,61 +130,16 @@ namespace ion {
* perspective of cx1 and cx2. Thus, each segment has two links: prevInMemory
* and prevInContext. Each independent stack is encapsulated and managed by
* the js::ContextStack object stored in JSContext. ContextStack is the primary
* interface to the rest of the engine for pushing and popping the stack.
*/
/*****************************************************************************/
-/*
- * For calls to natives, the InvokeArgsGuard object provides a record of the
- * call for the debugger's callstack. For this to work, the InvokeArgsGuard
- * record needs to know when the call is actually active (because the
- * InvokeArgsGuard can be pushed long before and popped long after the actual
- * call, during which time many stack-observing things can happen).
- */
-class MOZ_STACK_CLASS CallArgsList : public JS::CallArgs
-{
- friend class StackSegment;
- CallArgsList *prev_;
- bool active_;
- protected:
- CallArgsList() : prev_(NULL), active_(false) {}
- public:
- friend CallArgsList CallArgsListFromVp(unsigned, Value *, CallArgsList *);
- friend CallArgsList CallArgsListFromArgv(unsigned, Value *, CallArgsList *);
- CallArgsList *prev() const { return prev_; }
- bool active() const { return active_; }
- void setActive() { active_ = true; }
- void setInactive() { active_ = false; }
-};
-
-JS_ALWAYS_INLINE CallArgsList
-CallArgsListFromArgv(unsigned argc, Value *argv, CallArgsList *prev)
-{
- CallArgsList args;
-#ifdef DEBUG
- args.usedRval_ = false;
-#endif
- args.argv_ = argv;
- args.argc_ = argc;
- args.prev_ = prev;
- args.active_ = false;
- return args;
-}
-
-JS_ALWAYS_INLINE CallArgsList
-CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
-{
- return CallArgsListFromArgv(argc, vp + 2, prev);
-}
-
-/*****************************************************************************/
-
enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
/*****************************************************************************/
#ifdef DEBUG
extern void
CheckLocalUnaliased(MaybeCheckAliasing checkAliasing, JSScript *script,
StaticBlockObject *maybeBlock, unsigned i);
@@ -1383,39 +1310,38 @@ class StackSegment
StackSegment *const prevInContext_;
/* Previous segment sequentially in memory. */
StackSegment *const prevInMemory_;
/* Execution registers for most recent script in this segment (or null). */
FrameRegs *regs_;
- /* Call args for most recent native call in this segment (or null). */
- CallArgsList *calls_;
+ /* End of CallArgs pushed by pushInvokeArgs. */
+ Value *invokeArgsEnd_;
#if JS_BITS_PER_WORD == 32
/*
* Ensure StackSegment is Value-aligned. Protected to silence Clang warning
* about unused private fields.
*/
protected:
uint32_t padding_;
#endif
public:
StackSegment(JSContext *cx,
StackSegment *prevInContext,
StackSegment *prevInMemory,
- FrameRegs *regs,
- CallArgsList *calls)
+ FrameRegs *regs)
: cx_(cx),
prevInContext_(prevInContext),
prevInMemory_(prevInMemory),
regs_(regs),
- calls_(calls)
+ invokeArgsEnd_(NULL)
{}
/* A segment is followed in memory by the arguments of the first call. */
Value *slotsBegin() const {
return (Value *)(this + 1);
}
@@ -1437,33 +1363,16 @@ class StackSegment
StackFrame *maybefp() const {
return regs_ ? regs_->fp() : NULL;
}
jsbytecode *maybepc() const {
return regs_ ? regs_->pc : NULL;
}
- CallArgsList &calls() const {
- JS_ASSERT(calls_);
- return *calls_;
- }
-
- CallArgsList *maybeCalls() const {
- return calls_;
- }
-
- Value *callArgv() const {
- return calls_->array();
- }
-
- Value *maybeCallArgv() const {
- return calls_ ? calls_->array() : NULL;
- }
-
JSContext *cx() const {
return cx_;
}
StackSegment *prevInContext() const {
return prevInContext_;
}
@@ -1471,32 +1380,39 @@ class StackSegment
return prevInMemory_;
}
void repointRegs(FrameRegs *regs) {
regs_ = regs;
}
bool isEmpty() const {
- return !calls_ && !regs_;
+ return !regs_;
}
bool contains(const StackFrame *fp) const;
bool contains(const FrameRegs *regs) const;
- bool contains(const CallArgsList *call) const;
StackFrame *computeNextFrame(const StackFrame *fp, size_t maxDepth) const;
Value *end() const;
FrameRegs *pushRegs(FrameRegs &regs);
void popRegs(FrameRegs *regs);
- void pushCall(CallArgsList &callList);
- void pointAtCall(CallArgsList &callList);
- void popCall();
+
+ Value *invokeArgsEnd() const {
+ return invokeArgsEnd_;
+ }
+ void pushInvokeArgsEnd(Value *end, Value **prev) {
+ *prev = invokeArgsEnd_;
+ invokeArgsEnd_ = end;
+ }
+ void popInvokeArgsEnd(Value *prev) {
+ invokeArgsEnd_ = prev;
+ }
/* For jit access: */
static const size_t offsetOfRegs() { return offsetof(StackSegment, regs_); }
};
static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value);
JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0);
@@ -1794,24 +1710,25 @@ class ContextStack
* As an optimization, the interpreter/mjit can operate on a local
* FrameRegs instance and repoint the ContextStack to this local instance.
*/
inline void repointRegs(FrameRegs *regs) { JS_ASSERT(hasfp()); seg_->repointRegs(regs); }
};
/*****************************************************************************/
-class InvokeArgsGuard : public CallArgsList
+class InvokeArgsGuard : public JS::CallArgs
{
friend class ContextStack;
ContextStack *stack_;
+ Value *prevInvokeArgsEnd_;
bool pushedSeg_;
void setPushed(ContextStack &stack) { JS_ASSERT(!pushed()); stack_ = &stack; }
public:
- InvokeArgsGuard() : CallArgsList(), stack_(NULL), pushedSeg_(false) {}
+ InvokeArgsGuard() : CallArgs(), stack_(NULL), prevInvokeArgsEnd_(NULL), pushedSeg_(false) {}
~InvokeArgsGuard() { if (pushed()) stack_->popInvokeArgs(*this); }
bool pushed() const { return !!stack_; }
void pop() { stack_->popInvokeArgs(*this); stack_ = NULL; }
};
class FrameGuard
{
protected:
@@ -1883,38 +1800,34 @@ struct DefaultHasher<AbstractFramePtr> {
*
* The SavedOption parameter additionally lets the iterator continue through
* breaks in the callstack (from JS_SaveFrameChain). The default is to stop.
*/
class StackIter
{
public:
enum SavedOption { STOP_AT_SAVED, GO_THROUGH_SAVED };
- enum State { DONE, SCRIPTED, NATIVE, ION };
+ enum State { DONE, SCRIPTED, ION };
/*
* Unlike StackIter itself, StackIter::Data can be allocated on the heap,
* so this structure should not contain any GC things.
*/
struct Data
{
PerThreadData *perThread_;
JSContext *cx_;
SavedOption savedOption_;
State state_;
StackFrame *fp_;
- CallArgsList *calls_;
StackSegment *seg_;
jsbytecode *pc_;
- CallArgs args_;
-
- bool poppedCallDuringSettle_;
#ifdef JS_ION
ion::IonActivationIterator ionActivations_;
ion::IonFrameIterator ionFrames_;
#endif
Data(JSContext *cx, PerThreadData *perThread, SavedOption savedOption);
Data(JSContext *cx, JSRuntime *rt, StackSegment *seg);
@@ -1926,17 +1839,16 @@ class StackIter
private:
Data data_;
#ifdef JS_ION
ion::InlineFrameIterator ionInlineFrames_;
#endif
void poisonRegs();
void popFrame();
- void popCall();
#ifdef JS_ION
void nextIonFrame();
void popIonFrame();
void popBaselineDebuggerFrame();
#endif
void settleOnNewSegment();
void settleOnNewState();
void startOnSegment(StackSegment *seg);
@@ -1952,28 +1864,18 @@ class StackIter
Data *copyData() const;
bool operator==(const StackIter &rhs) const;
bool operator!=(const StackIter &rhs) const { return !(*this == rhs); }
JSCompartment *compartment() const;
- bool poppedCallDuringSettle() const { return data_.poppedCallDuringSettle_; }
-
- bool isScript() const {
+ JSScript *script() const {
JS_ASSERT(!done());
-#ifdef JS_ION
- if (data_.state_ == ION)
- return data_.ionFrames_.isScripted();
-#endif
- return data_.state_ == SCRIPTED;
- }
- JSScript *script() const {
- JS_ASSERT(isScript());
if (data_.state_ == SCRIPTED)
return interpFrame()->script();
#ifdef JS_ION
JS_ASSERT(data_.state_ == ION);
if (data_.ionFrames_.isOptimizedJS())
return ionInlineFrames_.script();
return data_.ionFrames_.script();
#else
@@ -1996,25 +1898,16 @@ class StackIter
bool isIonBaselineJS() const {
#ifdef JS_ION
return isIon() && data_.ionFrames_.isBaselineJS();
#else
return false;
#endif
}
- bool isNativeCall() const {
- JS_ASSERT(!done());
-#ifdef JS_ION
- if (data_.state_ == ION)
- return data_.ionFrames_.isNative();
-#endif
- return data_.state_ == NATIVE;
- }
-
bool isFunctionFrame() const;
bool isGlobalFrame() const;
bool isEvalFrame() const;
bool isNonEvalFunctionFrame() const;
bool isGeneratorFrame() const;
bool isConstructing() const;
bool hasArgs() const { return isNonEvalFunctionFrame(); }
@@ -2022,19 +1915,19 @@ class StackIter
AbstractFramePtr abstractFramePtr() const;
/*
* When entering IonMonkey, the top interpreter frame (pushed by the caller)
* is kept on the stack as bookkeeping (with runningInIon() set). The
* contents of the frame are ignored by Ion code (and GC) and thus
* immediately become garbage and must not be touched directly.
*/
- StackFrame *interpFrame() const { JS_ASSERT(isScript() && !isIon()); return data_.fp_; }
+ StackFrame *interpFrame() const { JS_ASSERT(data_.state_ == SCRIPTED); return data_.fp_; }
- jsbytecode *pc() const { JS_ASSERT(isScript()); return data_.pc_; }
+ jsbytecode *pc() const { JS_ASSERT(!done()); return data_.pc_; }
void updatePcQuadratic();
JSFunction *callee() const;
Value calleev() const;
unsigned numActualArgs() const;
unsigned numFormalArgs() const { return script()->function()->nargs; }
Value unaliasedActual(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
JSObject *scopeChain() const;
@@ -2053,46 +1946,37 @@ class StackIter
JSFunction *maybeCallee() const {
return isFunctionFrame() ? callee() : NULL;
}
// These are only valid for the top frame.
size_t numFrameSlots() const;
Value frameSlotValue(size_t index) const;
- CallArgs nativeArgs() const { JS_ASSERT(isNativeCall()); return data_.args_; }
-
template <class Op>
inline void ionForEachCanonicalActualArg(JSContext *cx, Op op);
};
/* A filtering of the StackIter to only stop at scripts. */
class ScriptFrameIter : public StackIter
{
- void settle() {
- while (!done() && !isScript())
- StackIter::operator++();
- }
-
public:
ScriptFrameIter(JSContext *cx, StackIter::SavedOption opt = StackIter::STOP_AT_SAVED)
- : StackIter(cx, opt) { settle(); }
+ : StackIter(cx, opt) { }
ScriptFrameIter(const StackIter::Data &data)
: StackIter(data)
{}
-
- ScriptFrameIter &operator++() { StackIter::operator++(); settle(); return *this; }
};
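
Every StackIter stop is now a script, so the settle() loop above is gone and
iteration clients simplify accordingly. A sketch ('use' is a placeholder):

    // Before: for (StackIter i(cx); !i.done(); ++i)
    //             if (i.isScript())
    //                 use(i.script());
    for (ScriptFrameIter i(cx); !i.done(); ++i)
        use(i.script());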
/* A filtering of the StackIter to only stop at non-self-hosted scripts. */
class NonBuiltinScriptFrameIter : public StackIter
{
void settle() {
- while (!done() && (!isScript() || script()->selfHosted))
+ while (!done() && script()->selfHosted)
StackIter::operator++();
}
public:
NonBuiltinScriptFrameIter(JSContext *cx, StackIter::SavedOption opt = StackIter::STOP_AT_SAVED)
: StackIter(cx, opt) { settle(); }
NonBuiltinScriptFrameIter(const StackIter::Data &data)