Bug 876064 - Replace HeapLabel with NonAssertingLabel, and don't allocate it on the heap. r=luke
author       Dan Gohman <sunfish@google.com>
Mon, 01 Jul 2013 21:23:00 -0700
changeset 137270 65b5396b440194975496d03c9b17a9b84582f5e9
parent 137269 73e06fde62e310f63e0ad0f64565b13b1264b706
child 137271 ccb0796f08694880e9f27cc8601ecdd57384cf27
push id      1824
push user    ryanvm@gmail.com
push date    Wed, 03 Jul 2013 18:16:56 +0000
treeherder   fx-team@dcbbfcdf7bb4
reviewers    luke
bugs         876064
milestone    25.0a1
Bug 876064 - Replace HeapLabel with NonAssertingLabel, and don't allocate it on the heap. r=luke
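
In outline, the compilers previously held heap-allocated HeapLabel pointers (return_, postBarrierSlot_, returnLabel_, deoptLabel_) precisely so that a failed compilation would never run Label's used-but-unbound destructor assertion; this patch instead stores the labels by value as NonAssertingLabel members and updates every bind/jump/call site to pass the label's address. A minimal sketch of the ownership pattern, with a stubbed-out assembler so it stands alone (Assembler, BaselineCompilerSketch, and the method bodies below are placeholders, not SpiderMonkey's actual API):

#include <cstdio>

struct Label { bool used = false, bound = false; };   // stand-in
struct NonAssertingLabel : Label {};                   // stand-in

// Placeholder assembler: real code emits branches and records offsets.
struct Assembler {
    void bind(Label *l) { l->bound = true; }
    void jump(Label *l) { l->used = true; }
};

// Before this patch the label was a heap pointer, roughly:
//     HeapLabel *return_;              // return_(new HeapLabel())
//     masm.bind(return_); masm.jump(return_);
// After: an ordinary by-value member, with call sites taking its address.
struct BaselineCompilerSketch {
    NonAssertingLabel return_;
    void emitReturn(Assembler &masm)   { masm.jump(&return_); }
    void emitEpilogue(Assembler &masm) { masm.bind(&return_); }
};

int main() {
    Assembler masm;
    BaselineCompilerSketch bc;
    bc.emitReturn(masm);
    bc.emitEpilogue(masm);
    std::printf("used=%d bound=%d\n", bc.return_.used, bc.return_.bound);
}
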
js/src/ion/BaselineCompiler.cpp
js/src/ion/BaselineCompiler.h
js/src/ion/CodeGenerator.cpp
js/src/ion/arm/CodeGenerator-arm.cpp
js/src/ion/arm/CodeGenerator-arm.h
js/src/ion/shared/Assembler-shared.h
js/src/ion/shared/CodeGenerator-x86-shared.cpp
js/src/ion/shared/CodeGenerator-x86-shared.h
--- a/js/src/ion/BaselineCompiler.cpp
+++ b/js/src/ion/BaselineCompiler.cpp
@@ -17,21 +17,17 @@
 #include "jsopcodeinlines.h"
 
 #include "vm/Interpreter-inl.h"
 
 using namespace js;
 using namespace js::ion;
 
 BaselineCompiler::BaselineCompiler(JSContext *cx, HandleScript script)
-  : BaselineCompilerSpecific(cx, script),
-    return_(new HeapLabel())
-#ifdef JSGC_GENERATIONAL
-    , postBarrierSlot_(new HeapLabel())
-#endif
+  : BaselineCompilerSpecific(cx, script)
 {
 }
 
 bool
 BaselineCompiler::init()
 {
     if (!analysis_.init())
         return false;
@@ -256,17 +252,17 @@ BaselineCompiler::emitPrologue()
         return false;
 
     return true;
 }
 
 bool
 BaselineCompiler::emitEpilogue()
 {
-    masm.bind(return_);
+    masm.bind(&return_);
 
     // Pop SPS frame if necessary
     emitSPSPop();
 
     masm.mov(BaselineFrameReg, BaselineStackReg);
     masm.pop(BaselineFrameReg);
 
     masm.ret();
@@ -278,17 +274,17 @@ BaselineCompiler::emitEpilogue()
 //  R2.scratchReg() contains object being written to.
 //  R1.scratchReg() contains slot index being written to.
 //  Otherwise, baseline stack will be synced, so all other registers are usable as scratch.
 // This calls:
 //    void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
 bool
 BaselineCompiler::emitOutOfLinePostBarrierSlot()
 {
-    masm.bind(postBarrierSlot_);
+    masm.bind(&postBarrierSlot_);
 
     Register objReg = R2.scratchReg();
     GeneralRegisterSet regs(GeneralRegisterSet::All());
     regs.take(objReg);
     regs.take(BaselineFrameReg);
     Register scratch = regs.takeAny();
 #if defined(JS_CPU_ARM)
     // On ARM, save the link register before calling.  It contains the return
@@ -341,17 +337,17 @@ BaselineCompiler::emitDebugPrologue()
         return false;
 
     // If the stub returns |true|, we have to return the value stored in the
     // frame's return value slot.
     Label done;
     masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &done);
     {
         masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
-        masm.jump(return_);
+        masm.jump(&return_);
     }
     masm.bind(&done);
     return true;
 }
 
 typedef bool (*StrictEvalPrologueFn)(JSContext *, BaselineFrame *);
 static const VMFunction StrictEvalPrologueInfo =
     FunctionInfo<StrictEvalPrologueFn>(ion::StrictEvalPrologue);
@@ -1819,17 +1815,17 @@ BaselineCompiler::emit_JSOP_SETALIASEDVA
     Nursery &nursery = cx->runtime()->gcNursery;
     Label skipBarrier;
     Label isTenured;
     masm.branchTestObject(Assembler::NotEqual, R0, &skipBarrier);
     masm.branchPtr(Assembler::Below, objReg, ImmWord(nursery.start()), &isTenured);
     masm.branchPtr(Assembler::Below, objReg, ImmWord(nursery.heapEnd()), &skipBarrier);
 
     masm.bind(&isTenured);
-    masm.call(postBarrierSlot_);
+    masm.call(&postBarrierSlot_);
 
     masm.bind(&skipBarrier);
 #endif
 
     return true;
 }
 
 bool
@@ -2484,17 +2480,17 @@ BaselineCompiler::emit_JSOP_DEBUGGER()
     if (!callVM(OnDebuggerStatementInfo))
         return false;
 
     // If the stub returns |true|, return the frame's return value.
     Label done;
     masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &done);
     {
         masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
-        masm.jump(return_);
+        masm.jump(&return_);
     }
     masm.bind(&done);
     return true;
 }
 
 typedef bool (*DebugEpilogueFn)(JSContext *, BaselineFrame *, JSBool);
 static const VMFunction DebugEpilogueInfo = FunctionInfo<DebugEpilogueFn>(ion::DebugEpilogue);
 
@@ -2517,17 +2513,17 @@ BaselineCompiler::emitReturn()
             return false;
 
         masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
     }
 
     if (JSOp(*pc) != JSOP_STOP) {
         // JSOP_STOP is immediately followed by the return label, so we don't
         // need a jump.
-        masm.jump(return_);
+        masm.jump(&return_);
     }
 
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_RETURN()
 {
--- a/js/src/ion/BaselineCompiler.h
+++ b/js/src/ion/BaselineCompiler.h
@@ -179,19 +179,19 @@ namespace ion {
     _(JSOP_SETRVAL)            \
     _(JSOP_RETURN)             \
     _(JSOP_STOP)               \
     _(JSOP_RETRVAL)
 
 class BaselineCompiler : public BaselineCompilerSpecific
 {
     FixedList<Label>            labels_;
-    HeapLabel *                 return_;
+    NonAssertingLabel           return_;
 #ifdef JSGC_GENERATIONAL
-    HeapLabel *                 postBarrierSlot_;
+    NonAssertingLabel           postBarrierSlot_;
 #endif
 
     // Native code offset right before the scope chain is initialized.
     CodeOffsetLabel prologueOffset_;
 
     Label *labelOf(jsbytecode *pc) {
         return &labels_[pc - script->code];
     }
--- a/js/src/ion/CodeGenerator.cpp
+++ b/js/src/ion/CodeGenerator.cpp
@@ -882,17 +882,17 @@ CodeGenerator::visitReturn(LReturn *lir)
     JS_ASSERT(ToRegister(type)    == JSReturnReg_Type);
     JS_ASSERT(ToRegister(payload) == JSReturnReg_Data);
 #elif defined(JS_PUNBOX64)
     DebugOnly<LAllocation *> result = lir->getOperand(0);
     JS_ASSERT(ToRegister(result) == JSReturnReg);
 #endif
     // Don't emit a jump to the return label if this is the last block.
     if (current->mir() != *gen->graph().poBegin())
-        masm.jump(returnLabel_);
+        masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitOsrEntry(LOsrEntry *lir)
 {
     // Remember the OSR entry offset into the code buffer.
     masm.flushBuffer();
@@ -6939,17 +6939,17 @@ CodeGenerator::visitOutOfLineParallelAbo
     masm.setupUnalignedABICall(4, CallTempReg4);
     masm.passABIArg(CallTempReg0);
     masm.passABIArg(CallTempReg1);
     masm.passABIArg(CallTempReg2);
     masm.passABIArg(CallTempReg3);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, ParallelAbort));
 
     masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
-    masm.jump(returnLabel_);
+    masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitIsCallable(LIsCallable *ins)
 {
     Register object = ToRegister(ins->object());
     Register output = ToRegister(ins->output());
@@ -6999,17 +6999,17 @@ CodeGenerator::visitOutOfLinePropagatePa
     loadJSScriptForBlock(ool->lir()->mirRaw()->block(), CallTempReg1);
 
     masm.setupUnalignedABICall(2, CallTempReg2);
     masm.passABIArg(CallTempReg0);
     masm.passABIArg(CallTempReg1);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PropagateParallelAbort));
 
     masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
-    masm.jump(returnLabel_);
+    masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitHaveSameClass(LHaveSameClass *ins)
 {
     Register lhs = ToRegister(ins->lhs());
     Register rhs = ToRegister(ins->rhs());
@@ -7092,26 +7092,26 @@ bool
 CodeGenerator::visitAsmJSReturn(LAsmJSReturn *lir)
 {
     // Don't emit a jump to the return label if this is the last block.
 #if defined(JS_CPU_ARM) && !defined(JS_CPU_ARM_HARDFP)
     if (lir->getOperand(0)->isFloatReg())
         masm.ma_vxfer(d0, r0, r1);
 #endif
     if (current->mir() != *gen->graph().poBegin())
-        masm.jump(returnLabel_);
+        masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitAsmJSVoidReturn(LAsmJSVoidReturn *lir)
 {
     // Don't emit a jump to the return label if this is the last block.
     if (current->mir() != *gen->graph().poBegin())
-        masm.jump(returnLabel_);
+        masm.jump(&returnLabel_);
     return true;
 }
 
 bool
 CodeGenerator::visitAsmJSCheckOverRecursed(LAsmJSCheckOverRecursed *lir)
 {
     uintptr_t *limitAddr = &gen->compartment->rt->mainThread.nativeStackLimit;
     masm.branchPtr(Assembler::AboveOrEqual,
--- a/js/src/ion/arm/CodeGenerator-arm.cpp
+++ b/js/src/ion/arm/CodeGenerator-arm.cpp
@@ -23,45 +23,40 @@
 
 #include "vm/Shape-inl.h"
 
 using namespace js;
 using namespace js::ion;
 
 // shared
 CodeGeneratorARM::CodeGeneratorARM(MIRGenerator *gen, LIRGraph *graph, MacroAssembler *masm)
-  : CodeGeneratorShared(gen, graph, masm),
-    deoptLabel_(NULL)
+  : CodeGeneratorShared(gen, graph, masm)
 {
 }
 
 bool
 CodeGeneratorARM::generatePrologue()
 {
     if (gen->compilingAsmJS()) {
         masm.Push(lr);
         // Note that this automatically sets MacroAssembler::framePushed().
         masm.reserveStack(frameDepth_);
     } else {
         // Note that this automatically sets MacroAssembler::framePushed().
         masm.reserveStack(frameSize());
         masm.checkStackAlignment();
     }
 
-    // Allocate returnLabel_ on the heap, so we don't run its destructor and
-    // assert-not-bound in debug mode on compilation failure.
-    returnLabel_ = new HeapLabel();
-
     return true;
 }
 
 bool
 CodeGeneratorARM::generateEpilogue()
 {
-    masm.bind(returnLabel_); 
+    masm.bind(&returnLabel_); 
     if (gen->compilingAsmJS()) {
         // Pop the stack we allocated at the start of the function.
         masm.freeStack(frameDepth_);
         masm.Pop(pc);
         JS_ASSERT(masm.framePushed() == 0);
         //masm.as_bkpt();
     } else {
         // Pop the stack we allocated at the start of the function.
@@ -147,19 +142,19 @@ CodeGeneratorARM::visitCompareAndBranch(
 }
 
 bool
 CodeGeneratorARM::generateOutOfLineCode()
 {
     if (!CodeGeneratorShared::generateOutOfLineCode())
         return false;
 
-    if (deoptLabel_) {
+    if (deoptLabel_.used()) {
         // All non-table-based bailouts will go here.
-        masm.bind(deoptLabel_);
+        masm.bind(&deoptLabel_);
 
         // Push the frame size, so the handler can recover the IonScript.
         masm.ma_mov(Imm32(frameSize()), lr);
 
         IonCompartment *ion = GetIonContext()->compartment->ionCompartment();
         IonCode *handler = ion->getGenericBailoutHandler();
 
         masm.branch(handler);
@@ -259,22 +254,20 @@ CodeGeneratorARM::bailout(LSnapshot *sna
     Label label;
     masm.ma_b(&label);
     return bailoutFrom(&label, snapshot);
 }
 
 bool
 CodeGeneratorARM::visitOutOfLineBailout(OutOfLineBailout *ool)
 {
-    if (!deoptLabel_)
-        deoptLabel_ = new HeapLabel();
     masm.ma_mov(Imm32(ool->snapshot()->snapshotOffset()), ScratchRegister);
     masm.ma_push(ScratchRegister);
     masm.ma_push(ScratchRegister);
-    masm.ma_b(deoptLabel_);
+    masm.ma_b(&deoptLabel_);
     return true;
 }
 
 bool
 CodeGeneratorARM::visitMinMaxD(LMinMaxD *ins)
 {
     FloatRegister first = ToFloatRegister(ins->first());
     FloatRegister second = ToFloatRegister(ins->second());
--- a/js/src/ion/arm/CodeGenerator-arm.h
+++ b/js/src/ion/arm/CodeGenerator-arm.h
@@ -19,18 +19,18 @@ class OutOfLineTableSwitch;
 class CodeGeneratorARM : public CodeGeneratorShared
 {
     friend class MoveResolverARM;
 
     CodeGeneratorARM *thisFromCtor() {return this;}
 
   protected:
     // Label for the common return path.
-    HeapLabel *returnLabel_;
-    HeapLabel *deoptLabel_;
+    NonAssertingLabel returnLabel_;
+    NonAssertingLabel deoptLabel_;
     // ugh.  this is not going to be pretty to move over.
     // stack slotted variables are not useful on arm.
     // it looks like this will need to return one of two types.
     inline Operand ToOperand(const LAllocation &a) {
         if (a.isGeneralReg())
             return Operand(a.toGeneralReg()->reg());
         if (a.isFloatReg())
             return Operand(a.toFloatReg()->reg());
--- a/js/src/ion/shared/Assembler-shared.h
+++ b/js/src/ion/shared/Assembler-shared.h
@@ -273,21 +273,29 @@ class Label : public LabelBase
         // Note: the condition is a hack to silence this assert when OOM testing,
         // see bug 756614.
         if (!js_IonOptions.parallelCompilation)
             JS_ASSERT_IF(MaybeGetIonContext() && !GetIonContext()->runtime->hadOutOfMemory, !used());
 #endif
     }
 };
 
-// Wrapper around Label, on the heap, to avoid a bogus assert with OOM.
-struct HeapLabel
-  : public TempObject,
-    public Label
+// Label's destructor asserts that if it has been used it has also been bound.
+// In the case of long-lived labels, however, failed compilation (e.g. OOM) will
+// trigger this failure innocuously. This Label silences the assertion.
+class NonAssertingLabel : public Label
 {
+  public:
+    ~NonAssertingLabel()
+    {
+#ifdef DEBUG
+        if (used())
+            bind(0);
+#endif
+    }
 };
 
 class RepatchLabel
 {
     static const int32_t INVALID_OFFSET = 0xC0000000;
     int32_t offset_ : 31;
     uint32_t bound_ : 1;
   public:
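
To make the new comment above concrete: Label's destructor asserts that a used label was also bound, which is exactly what fails to hold when a compilation bails out (e.g. on OOM) after a branch has referenced a long-lived label such as returnLabel_ but before the epilogue bound it. A self-contained sketch of that interplay, using a simplified Label and the standard assert macro rather than the real SpiderMonkey classes:

#include <cassert>

class Label {
    int offset_ = -1;
    bool used_ = false;
  public:
    bool used() const { return used_; }
    bool bound() const { return offset_ >= 0; }
    void use() { used_ = true; }                 // a jump/call referenced us
    void bind(int offset) { offset_ = offset; }  // the label's code position
    ~Label() { assert(!used() || bound()); }     // dangling-label check
};

class NonAssertingLabel : public Label {
  public:
    ~NonAssertingLabel() {
#ifndef NDEBUG
        if (used())
            bind(0);   // pacify the base-class assert on failed compiles
#endif
    }
};

int main() {
    {
        NonAssertingLabel returnLabel;   // by-value member in the real code
        returnLabel.use();               // some block jumped to the epilogue
        // Compilation "fails" here: the destructor runs with the label
        // unbound, but NonAssertingLabel binds it first, so the base-class
        // assertion stays quiet.
    }
    // With a plain Label the same sequence would abort under assertions:
    //     Label l; l.use();   // ~Label() fires assert(!used() || bound())
}
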
--- a/js/src/ion/shared/CodeGenerator-x86-shared.cpp
+++ b/js/src/ion/shared/CodeGenerator-x86-shared.cpp
@@ -17,44 +17,39 @@
 
 using namespace js;
 using namespace js::ion;
 
 namespace js {
 namespace ion {
 
 CodeGeneratorX86Shared::CodeGeneratorX86Shared(MIRGenerator *gen, LIRGraph *graph, MacroAssembler *masm)
-  : CodeGeneratorShared(gen, graph, masm),
-    deoptLabel_(NULL)
+  : CodeGeneratorShared(gen, graph, masm)
 {
 }
 
 double
 test(double x, double y)
 {
     return x + y;
 }
 
 bool
 CodeGeneratorX86Shared::generatePrologue()
 {
     // Note that this automatically sets MacroAssembler::framePushed().
     masm.reserveStack(frameSize());
 
-    // Allocate returnLabel_ on the heap, so we don't run its destructor and
-    // assert-not-bound in debug mode on compilation failure.
-    returnLabel_ = new HeapLabel();
-
     return true;
 }
 
 bool
 CodeGeneratorX86Shared::generateEpilogue()
 {
-    masm.bind(returnLabel_);
+    masm.bind(&returnLabel_);
 
     // Pop the stack we allocated at the start of the function.
     masm.freeStack(frameSize());
     JS_ASSERT(masm.framePushed() == 0);
 
     masm.ret();
     return true;
 }
@@ -223,19 +218,19 @@ CodeGeneratorX86Shared::visitAsmJSPassSt
 }
 
 bool
 CodeGeneratorX86Shared::generateOutOfLineCode()
 {
     if (!CodeGeneratorShared::generateOutOfLineCode())
         return false;
 
-    if (deoptLabel_) {
+    if (deoptLabel_.used()) {
         // All non-table-based bailouts will go here.
-        masm.bind(deoptLabel_);
+        masm.bind(&deoptLabel_);
 
         // Push the frame size, so the handler can recover the IonScript.
         masm.push(Imm32(frameSize()));
 
         IonCompartment *ion = GetIonContext()->compartment->ionCompartment();
         IonCode *handler = ion->getGenericBailoutHandler();
 
         masm.jmp(handler->raw(), Relocation::IONCODE);
@@ -344,21 +339,18 @@ CodeGeneratorX86Shared::bailout(LSnapsho
     Label label;
     masm.jump(&label);
     return bailoutFrom(&label, snapshot);
 }
 
 bool
 CodeGeneratorX86Shared::visitOutOfLineBailout(OutOfLineBailout *ool)
 {
-    if (!deoptLabel_)
-        deoptLabel_ = new HeapLabel();
-
     masm.push(Imm32(ool->snapshot()->snapshotOffset()));
-    masm.jmp(deoptLabel_);
+    masm.jmp(&deoptLabel_);
     return true;
 }
 
 
 bool
 CodeGeneratorX86Shared::visitMinMaxD(LMinMaxD *ins)
 {
     FloatRegister first = ToFloatRegister(ins->first());
--- a/js/src/ion/shared/CodeGenerator-x86-shared.h
+++ b/js/src/ion/shared/CodeGenerator-x86-shared.h
@@ -25,18 +25,18 @@ class CodeGeneratorX86Shared : public Co
         return this;
     }
 
     template <typename T>
     bool bailout(const T &t, LSnapshot *snapshot);
 
   protected:
     // Label for the common return path.
-    HeapLabel *returnLabel_;
-    HeapLabel *deoptLabel_;
+    NonAssertingLabel returnLabel_;
+    NonAssertingLabel deoptLabel_;
 
     inline Operand ToOperand(const LAllocation &a) {
         if (a.isGeneralReg())
             return Operand(a.toGeneralReg()->reg());
         if (a.isFloatReg())
             return Operand(a.toFloatReg()->reg());
         return Operand(StackPointer, ToStackOffset(&a));
     }