Correctly bound code blocks when using repatch buffers (bug 602333 part 1, r=sstangl).
author      David Anderson <danderson@mozilla.com>
date        Fri, 19 Nov 2010 15:40:16 -0800
changeset 58063 54fb9d61096ae7d2bbf90af66dbe05041a99d2d7
parent 58062 3abee1afa0662452c5d4d316a8efea4d31f3cdda
child 58064 bbc8aeb55bae780cc1297e3dc5f3029ef666d06a
push id     1
push user   shaver@mozilla.com
push date   Tue, 04 Jan 2011 17:58:04 +0000
reviewers   sstangl
bugs        602333
milestone   2.0b8pre
js/src/assembler/assembler/RepatchBuffer.h
js/src/assembler/moco/MocoStubs.h
js/src/jsinterp.h
js/src/methodjit/BaseCompiler.h
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
js/src/methodjit/PolyIC.h
--- a/js/src/assembler/assembler/RepatchBuffer.h
+++ b/js/src/assembler/assembler/RepatchBuffer.h
@@ -44,25 +44,28 @@ namespace JSC {
 // This class is used to modify code after code generation has been completed,
 // and after the code has potentially already been executed.  This mechanism is
 // used to apply optimizations to the code.
 //
 class RepatchBuffer {
     typedef MacroAssemblerCodePtr CodePtr;
 
 public:
-    RepatchBuffer(void *start, size_t size, bool mprot = true)
-    : m_start(start), m_size(size), mprot(mprot)
+    RepatchBuffer(const MacroAssemblerCodeRef &ref)
     {
-        ExecutableAllocator::makeWritable(m_start, m_size);
+        m_start = ref.m_code.executableAddress();
+        m_size = ref.m_size;
+        mprot = true;
+
+        if (mprot)
+            ExecutableAllocator::makeWritable(m_start, m_size);
     }
 
-    RepatchBuffer(CodeBlock* codeBlock)
+    RepatchBuffer(const JITCode &code)
     {
-        JITCode& code = codeBlock->getJITCode();
         m_start = code.start();
         m_size = code.size();
         mprot = true;
 
         if (mprot)
             ExecutableAllocator::makeWritable(m_start, m_size);
     }
 
--- a/js/src/assembler/moco/MocoStubs.h
+++ b/js/src/assembler/moco/MocoStubs.h
@@ -43,20 +43,20 @@
 #define _include_assembler_moco_stubs_h_
 
 namespace JSC {
 
 class JITCode {
 public:
   JITCode(void* start, size_t size)
     : m_start(start), m_size(size)
-  {
-  }
-  void*  start() { return m_start; }
-  size_t size()  { return m_size; }
+  { }
+  JITCode() { }
+  void*  start() const { return m_start; }
+  size_t size() const { return m_size; }
 private:
   void*  m_start;
   size_t m_size;
 };
 
 class CodeBlock {
 public:
   CodeBlock(JITCode& jc)
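
Taken together, the new RepatchBuffer constructors above and the const JITCode accessors mean a repatch region is now described by the code block it belongs to, rather than by a hand-picked pointer and length. A minimal sketch of the intended construction under this patch; 'ref' stands for a MacroAssemblerCodeRef returned by code generation, 'jump'/'target' for existing code locations, and 'stubStart'/'stubSize' for a recorded stub extent (illustrative names, not part of the patch):

    // The buffer's writable range is exactly the generated block:
    // [ref.m_code, ref.m_code + ref.m_size), nothing around it.
    JSC::RepatchBuffer repatch(ref);
    repatch.relink(jump, target);

    // Or, given a (start, size) pair already recorded as a JITCode:
    JSC::JITCode code(stubStart, stubSize);
    JSC::RepatchBuffer stubPatch(code);
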
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -98,16 +98,18 @@ enum JSFrameFlags
     JSFRAME_HAS_ARGS_OBJ       =  0x20000, /* frame has an argsobj in JSStackFrame::args */
     JSFRAME_HAS_HOOK_DATA      =  0x40000, /* frame has hookData_ set */
     JSFRAME_HAS_ANNOTATION     =  0x80000, /* frame has annotation_ set */
     JSFRAME_HAS_RVAL           = 0x100000, /* frame has rval_ set */
     JSFRAME_HAS_SCOPECHAIN     = 0x200000, /* frame has scopeChain_ set */
     JSFRAME_HAS_PREVPC         = 0x400000  /* frame has prevpc_ set */
 };
 
+namespace js { namespace mjit { struct JITScript; } }
+
 /*
  * A stack frame is a part of a stack segment (see js::StackSegment) which is
  * on the per-thread VM stack (see js::StackSpace).
  */
 struct JSStackFrame
 {
   private:
     mutable uint32      flags_;         /* bits described by JSFrameFlags */
@@ -766,16 +768,22 @@ struct JSStackFrame
 
     /* Workaround for static asserts on private members. */
 
     void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) % sizeof(js::Value) == 0);
         JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(js::Value) == 0);
     }
 
+#ifdef JS_METHODJIT
+    js::mjit::JITScript *jit() {
+        return script()->getJIT(isConstructing());
+    }
+#endif
+
     void methodjitStaticAsserts();
 
 #ifdef DEBUG
     /* Poison scopeChain value set before a frame is flushed. */
     static JSObject *const sInvalidScopeChain;
 #endif
 };
 
--- a/js/src/methodjit/BaseCompiler.h
+++ b/js/src/methodjit/BaseCompiler.h
@@ -39,16 +39,18 @@
  * ***** END LICENSE BLOCK ***** */
 #if !defined jsjaeger_compilerbase_h__ && defined JS_METHODJIT
 #define jsjaeger_compilerbase_h__
 
 #include "jscntxt.h"
 #include "jstl.h"
 #include "assembler/assembler/MacroAssembler.h"
 #include "assembler/assembler/LinkBuffer.h"
+#include "assembler/assembler/RepatchBuffer.h"
+#include "assembler/jit/ExecutableAllocator.h"
 
 namespace js {
 namespace mjit {
 
 struct MacroAssemblerTypedefs {
     typedef JSC::MacroAssembler::Label Label;
     typedef JSC::MacroAssembler::Imm32 Imm32;
     typedef JSC::MacroAssembler::ImmPtr ImmPtr;
@@ -64,16 +66,17 @@ struct MacroAssemblerTypedefs {
     typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
     typedef JSC::MacroAssembler::DataLabel32 DataLabel32;
     typedef JSC::FunctionPtr FunctionPtr;
     typedef JSC::RepatchBuffer RepatchBuffer;
     typedef JSC::CodeLocationLabel CodeLocationLabel;
     typedef JSC::CodeLocationCall CodeLocationCall;
     typedef JSC::ReturnAddressPtr ReturnAddressPtr;
     typedef JSC::MacroAssemblerCodePtr MacroAssemblerCodePtr;
+    typedef JSC::JITCode JITCode;
 };
 
 class BaseCompiler : public MacroAssemblerTypedefs
 {
   protected:
     JSContext *cx;
 
   public:
@@ -129,14 +132,28 @@ class LinkerHelper : public JSC::LinkBuf
         return ep;
     }
 
     void maybeLink(MaybeJump jump, JSC::CodeLocationLabel label) {
         if (!jump.isSet())
             return;
         link(jump.get(), label);
     }
+
+    size_t size() const {
+        return m_size;
+    }
+};
+
+class Repatcher : public JSC::RepatchBuffer
+{
+  public:
+    Repatcher(JITScript *jit) : JSC::RepatchBuffer(jit->code)
+    { }
+
+    Repatcher(const JSC::JITCode &code) : JSC::RepatchBuffer(code)
+    { }
 };
 
 } /* namespace js */
 } /* namespace mjit */
 
 #endif
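
Repatcher is the wrapper the rest of this patch migrates callers to: it is constructed from a JITScript (covering the whole method-JIT code block) or from a JITCode (for example a previously generated stub), and every relink/repatch then stays inside that block. A hedged sketch of the calling pattern, using f.jit() from the VMFrame/JSStackFrame changes and IC fields from the MonoIC changes below as stand-ins:

    // One Repatcher per code block; bounds come from the frame's JITScript.
    Repatcher repatcher(f.jit());
    repatcher.relink(ic.jumpToStub, cs);    // redirect the inline jump to the new stub
    repatcher.relink(ic.stubCall, fptr);    // retarget the out-of-line stub call
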
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -44,24 +44,24 @@
 #include "jslibmath.h"
 #include "jsiter.h"
 #include "jsnum.h"
 #include "jsxml.h"
 #include "jsstaticcheck.h"
 #include "jsbool.h"
 #include "assembler/assembler/MacroAssemblerCodeRef.h"
 #include "assembler/assembler/CodeLocation.h"
-#include "assembler/assembler/RepatchBuffer.h"
 #include "jsiter.h"
 #include "jstypes.h"
 #include "methodjit/StubCalls.h"
 #include "jstracer.h"
 #include "jspropertycache.h"
 #include "methodjit/MonoIC.h"
 #include "jsanalyze.h"
+#include "methodjit/BaseCompiler.h"
 
 #include "jsinterpinlines.h"
 #include "jspropertycacheinlines.h"
 #include "jsscopeinlines.h"
 #include "jsscriptinlines.h"
 #include "jsstrinlines.h"
 #include "jsobjinlines.h"
 #include "jscntxtinlines.h"
@@ -885,54 +885,55 @@ FinishExcessFrames(VMFrame &f, JSStackFr
         }
     }
 
     return true;
 }
 
 #if JS_MONOIC
 static void
-UpdateTraceHintSingle(JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
+UpdateTraceHintSingle(Repatcher &repatcher, JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
 {
     /*
      * Hack: The value that will be patched is before the executable address,
      * so to get protection right, just unprotect the general region around
      * the jump.
      */
-    uint8 *addr = (uint8 *)(jump.executableAddress());
-    JSC::RepatchBuffer repatch(addr - 64, 128);
-    repatch.relink(jump, target);
+    repatcher.relink(jump, target);
 
     JaegerSpew(JSpew_PICs, "relinking trace hint %p to %p\n",
                jump.executableAddress(), target.executableAddress());
 }
 
 static void
 DisableTraceHint(VMFrame &f, ic::TraceICInfo &tic)
 {
-    UpdateTraceHintSingle(tic.traceHint, tic.jumpTarget);
+    Repatcher repatcher(f.jit());
+    UpdateTraceHintSingle(repatcher, tic.traceHint, tic.jumpTarget);
 
     if (tic.hasSlowTraceHint)
-        UpdateTraceHintSingle(tic.slowTraceHint, tic.jumpTarget);
+        UpdateTraceHintSingle(repatcher, tic.slowTraceHint, tic.jumpTarget);
 }
 
 static void
 EnableTraceHintAt(JSScript *script, js::mjit::JITScript *jit, jsbytecode *pc, uint16_t index)
 {
     JS_ASSERT(index < jit->nTraceICs);
     ic::TraceICInfo &tic = jit->traceICs[index];
 
     JS_ASSERT(tic.jumpTargetPC == pc);
 
     JaegerSpew(JSpew_PICs, "Enabling trace IC %u in script %p\n", index, script);
 
-    UpdateTraceHintSingle(tic.traceHint, tic.stubEntry);
+    Repatcher repatcher(jit);
+
+    UpdateTraceHintSingle(repatcher, tic.traceHint, tic.stubEntry);
 
     if (tic.hasSlowTraceHint)
-        UpdateTraceHintSingle(tic.slowTraceHint, tic.stubEntry);
+        UpdateTraceHintSingle(repatcher, tic.slowTraceHint, tic.stubEntry);
 }
 #endif
 
 void
 js::mjit::EnableTraceHint(JSScript *script, jsbytecode *pc, uint16_t index)
 {
 #if JS_MONOIC
     if (script->jitNormal)
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -50,16 +50,18 @@
 #endif
 
 #if !defined(JS_NUNBOX32) && !defined(JS_PUNBOX64)
 # error "No boxing format selected."
 #endif
 
 namespace js {
 
+namespace mjit { struct JITScript; }
+
 struct VMFrame
 {
     union Arguments {
         struct {
             void *ptr;
             void *ptr2;
             void *ptr3;
         } x;
@@ -131,16 +133,17 @@ struct VMFrame
     }
 #else
 # error "The VMFrame layout isn't defined for your processor architecture!"
 #endif
 
     JSRuntime *runtime() { return cx->runtime; }
 
     JSStackFrame *&fp() { return regs.fp; }
+    mjit::JITScript *jit() { return fp()->jit(); }
 };
 
 #ifdef JS_CPU_ARM
 // WARNING: Do not call this function directly from C(++) code because it is not ABI-compliant.
 extern "C" void JaegerStubVeneer(void);
 #endif
 
 namespace mjit {
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -38,17 +38,16 @@
  *
  * ***** END LICENSE BLOCK ***** */
 #include "jsscope.h"
 #include "jsnum.h"
 #include "MonoIC.h"
 #include "StubCalls.h"
 #include "StubCalls-inl.h"
 #include "assembler/assembler/LinkBuffer.h"
-#include "assembler/assembler/RepatchBuffer.h"
 #include "assembler/assembler/MacroAssembler.h"
 #include "assembler/assembler/CodeLocation.h"
 #include "CodeGenIncludes.h"
 #include "methodjit/Compiler.h"
 #include "InlineFrameAssembler.h"
 #include "jsobj.h"
 
 #include "jsinterpinlines.h"
@@ -67,17 +66,17 @@ typedef JSC::MacroAssembler::Imm32 Imm32
 typedef JSC::MacroAssembler::ImmPtr ImmPtr;
 typedef JSC::MacroAssembler::Call Call;
 
 #if defined JS_MONOIC
 
 static void
 PatchGetFallback(VMFrame &f, ic::MICInfo *ic)
 {
-    JSC::RepatchBuffer repatch(ic->stubEntry.executableAddress(), 64);
+    Repatcher repatch(f.jit());
     JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::GetGlobalName));
     repatch.relink(ic->stubCall, fptr);
 }
 
 void JS_FASTCALL
 ic::GetGlobalName(VMFrame &f, ic::MICInfo *ic)
 {
     JSObject *obj = f.fp()->scopeChain().getGlobal();
@@ -96,31 +95,30 @@ ic::GetGlobalName(VMFrame &f, ic::MICInf
         stubs::GetGlobalName(f);
         return;
     }
     uint32 slot = shape->slot;
 
     ic->u.name.touched = true;
 
     /* Patch shape guard. */
-    JSC::RepatchBuffer repatch(ic->entry.executableAddress(), 50);
-    repatch.repatch(ic->shape, obj->shape());
+    Repatcher repatcher(f.jit());
+    repatcher.repatch(ic->shape, obj->shape());
 
     /* Patch loads. */
     slot *= sizeof(Value);
-    JSC::RepatchBuffer loads(ic->load.executableAddress(), 32, false);
 #if defined JS_CPU_X86
-    loads.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_DATA_OFFSET), slot);
-    loads.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_TYPE_OFFSET), slot + 4);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_DATA_OFFSET), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_TYPE_OFFSET), slot + 4);
 #elif defined JS_CPU_ARM
     // ic->load actually points to the LDR instruction which fetches the offset, but 'repatch'
     // knows how to dereference it to find the integer value.
-    loads.repatch(ic->load.dataLabel32AtOffset(0), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(0), slot);
 #elif defined JS_PUNBOX64
-    loads.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
 #endif
 
     /* Do load anyway... this time. */
     stubs::GetGlobalName(f);
 }
 
 template <JSBool strict>
 static void JS_FASTCALL
@@ -146,17 +144,17 @@ DisabledSetGlobalNoCache(VMFrame &f, ic:
 template void JS_FASTCALL DisabledSetGlobalNoCache<true>(VMFrame &f, ic::MICInfo *ic);
 template void JS_FASTCALL DisabledSetGlobalNoCache<false>(VMFrame &f, ic::MICInfo *ic);
 
 static void
 PatchSetFallback(VMFrame &f, ic::MICInfo *ic)
 {
     JSScript *script = f.fp()->script();
 
-    JSC::RepatchBuffer repatch(ic->stubEntry.executableAddress(), 64);
+    Repatcher repatch(f.jit());
     VoidStubMIC stub = ic->u.name.usePropertyCache
                        ? STRICT_VARIANT(DisabledSetGlobal)
                        : STRICT_VARIANT(DisabledSetGlobalNoCache);
     JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
     repatch.relink(ic->stubCall, fptr);
 }
 
 void JS_FASTCALL
@@ -183,38 +181,37 @@ ic::SetGlobalName(VMFrame &f, ic::MICInf
             STRICT_VARIANT(stubs::SetGlobalNameNoCache)(f, atom);
         return;
     }
     uint32 slot = shape->slot;
 
     ic->u.name.touched = true;
 
     /* Patch shape guard. */
-    JSC::RepatchBuffer repatch(ic->entry.executableAddress(), 50);
-    repatch.repatch(ic->shape, obj->shape());
+    Repatcher repatcher(f.jit());
+    repatcher.repatch(ic->shape, obj->shape());
 
     /* Patch loads. */
     slot *= sizeof(Value);
 
-    JSC::RepatchBuffer stores(ic->load.executableAddress(), 32, false);
 #if defined JS_CPU_X86
-    stores.repatch(ic->load.dataLabel32AtOffset(MICInfo::SET_TYPE_OFFSET), slot + 4);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::SET_TYPE_OFFSET), slot + 4);
 
     uint32 dataOffset;
     if (ic->u.name.typeConst)
         dataOffset = MICInfo::SET_DATA_CONST_TYPE_OFFSET;
     else
         dataOffset = MICInfo::SET_DATA_TYPE_OFFSET;
-    stores.repatch(ic->load.dataLabel32AtOffset(dataOffset), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(dataOffset), slot);
 #elif defined JS_CPU_ARM
     // ic->load actually points to the LDR instruction which fetches the offset, but 'repatch'
     // knows how to dereference it to find the integer value.
-    stores.repatch(ic->load.dataLabel32AtOffset(0), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(0), slot);
 #elif defined JS_PUNBOX64
-    stores.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
+    repatcher.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
 #endif
 
     if (ic->u.name.usePropertyCache)
         STRICT_VARIANT(stubs::SetGlobalName)(f, atom);
     else
         STRICT_VARIANT(stubs::SetGlobalNameNoCache)(f, atom);
 }
 
@@ -367,24 +364,24 @@ class EqualityCompiler : public BaseComp
         jumpList.clear();
 
         /* Set the targets for the success and failure of the actual equality test. */
         buffer.link(trueJump, ic.target);
         buffer.link(falseJump, ic.fallThrough);
 
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
 
+        Repatcher repatcher(f.jit());
+
         /* Jump to the newly generated code instead of to the IC. */
-        JSC::RepatchBuffer jumpRepatcher(ic.jumpToStub.executableAddress(), INLINE_PATH_LENGTH);
-        jumpRepatcher.relink(ic.jumpToStub, cs);
+        repatcher.relink(ic.jumpToStub, cs);
 
         /* Overwrite the call to the IC with a call to the stub. */
-        JSC::RepatchBuffer stubRepatcher(ic.stubCall.executableAddress(), INLINE_PATH_LENGTH);
         JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
-        stubRepatcher.relink(ic.stubCall, fptr);
+        repatcher.relink(ic.stubCall, fptr);
         
         return true;
     }
 
     bool update()
     {
         if (!ic.generated) {
             Assembler masm;
@@ -487,17 +484,17 @@ class CallCompiler : public BaseCompiler
         JSC::ExecutablePool *ep = getExecPool(size);
         if (!ep)
             return NULL;
         JS_ASSERT(!ic.pools[index]);
         ic.pools[index] = ep;
         return ep;
     }
 
-    bool generateFullCallStub(JSScript *script, uint32 flags)
+    bool generateFullCallStub(JITScript *from, JSScript *script, uint32 flags)
     {
         /*
          * Create a stub that works with arity mismatches. Like the fast-path,
          * this allocates a frame on the caller side, but also performs extra
          * checks for compilability. Perhaps this should be a separate, shared
          * trampoline, but for now we generate it dynamically.
          */
         Assembler masm;
@@ -557,43 +554,42 @@ class CallCompiler : public BaseCompiler
         buffer.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
         masm.finalize(buffer);
         JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();
 
         JaegerSpew(JSpew_PICs, "generated CALL stub %p (%d bytes)\n", cs.executableAddress(),
                    masm.size());
 
         JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
-        uint8 *start = (uint8 *)oolJump.executableAddress();
-        JSC::RepatchBuffer repatch(start - 32, 64);
+        Repatcher repatch(from);
         repatch.relink(oolJump, cs);
 
         return true;
     }
 
-    void patchInlinePath(JSScript *script, JSObject *obj)
+    void patchInlinePath(JITScript *from, JSScript *script, JSObject *obj)
     {
         JS_ASSERT(ic.frameSize.isStatic());
 
         /* Very fast path. */
-        uint8 *start = (uint8 *)ic.funGuard.executableAddress();
-        JSC::RepatchBuffer repatch(start - 32, 64);
+        Repatcher repatch(from);
 
         ic.fastGuardedObject = obj;
 
         JITScript *jit = script->getJIT(callingNew);
 
         repatch.repatch(ic.funGuard, obj);
         repatch.relink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                        JSC::CodeLocationLabel(jit->fastEntry));
 
-        JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n", start, ic.fastGuardedObject);
+        JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n",
+                   ic.funGuard.executableAddress(), ic.fastGuardedObject);
     }
 
-    bool generateStubForClosures(JSObject *obj)
+    bool generateStubForClosures(JITScript *from, JSObject *obj)
     {
         JS_ASSERT(ic.frameSize.isStatic());
 
         /* Slightly less fast path - guard on fun->getFunctionPrivate() instead. */
         Assembler masm;
 
         Registers tempRegs;
         tempRegs.takeReg(ic.funObjReg);
@@ -617,27 +613,28 @@ class CallCompiler : public BaseCompiler
         buffer.link(claspGuard, ic.slowPathStart);
         buffer.link(funGuard, ic.slowPathStart);
         buffer.link(done, ic.funGuard.labelAtOffset(ic.hotPathOffset));
         JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();
 
         JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%d bytes)\n",
                    cs.executableAddress(), masm.size());
 
-        uint8 *start = (uint8 *)ic.funJump.executableAddress();
-        JSC::RepatchBuffer repatch(start - 32, 64);
+        Repatcher repatch(from);
         repatch.relink(ic.funJump, cs);
 
         ic.hasJsFunCheck = true;
 
         return true;
     }
 
     bool generateNativeStub()
     {
+        JITScript *jit = f.jit();
+
         /* Snapshot the frameDepth before SplatApplyArgs modifies it. */
         uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots();
 
         /*
          * SplatApplyArgs has not been called, so we call it here before
          * potentially touching f.u.call.dynamicArgc.
          */
         Value *vp;
@@ -778,39 +775,39 @@ class CallCompiler : public BaseCompiler
         buffer.link(funGuard, ic.slowPathStart);
         masm.finalize(buffer);
         
         JSC::CodeLocationLabel cs = buffer.finalizeCodeAddendum();
 
         JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%d bytes)\n",
                    cs.executableAddress(), masm.size());
 
-        uint8 *start = (uint8 *)ic.funJump.executableAddress();
-        JSC::RepatchBuffer repatch(start - 32, 64);
+        Repatcher repatch(jit);
         repatch.relink(ic.funJump, cs);
 
         ic.fastGuardedNative = obj;
 
         return true;
     }
 
     void *update()
     {
+        JITScript *jit = f.jit();
+
         stubs::UncachedCallResult ucr;
         if (callingNew)
             stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
         else
             stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), &ucr);
 
         // If the function cannot be jitted (generally unjittable or empty script),
         // patch this site to go to a slow path always.
         if (!ucr.codeAddr) {
             JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
-            uint8 *start = (uint8 *)oolCall.executableAddress();
-            JSC::RepatchBuffer repatch(start - 32, 64);
+            Repatcher repatch(jit);
             JSC::FunctionPtr fptr = callingNew
                                     ? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
                                     : JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
             repatch.relink(oolCall, fptr);
             return NULL;
         }
             
         JSFunction *fun = ucr.fun;
@@ -823,32 +820,32 @@ class CallCompiler : public BaseCompiler
         uint32 flags = callingNew ? JSFRAME_CONSTRUCTING : 0;
 
         if (!ic.hit) {
             ic.hit = true;
             return ucr.codeAddr;
         }
 
         if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
-            if (!generateFullCallStub(script, flags))
+            if (!generateFullCallStub(jit, script, flags))
                 THROWV(NULL);
         } else {
             if (!ic.fastGuardedObject) {
-                patchInlinePath(script, callee);
+                patchInlinePath(jit, script, callee);
             } else if (!ic.hasJsFunCheck &&
                        !ic.fastGuardedNative &&
                        ic.fastGuardedObject->getFunctionPrivate() == fun) {
                 /*
                  * Note: Multiple "function guard" stubs are not yet
                  * supported, thus the fastGuardedNative check.
                  */
-                if (!generateStubForClosures(callee))
+                if (!generateStubForClosures(jit, callee))
                     THROWV(NULL);
             } else {
-                if (!generateFullCallStub(script, flags))
+                if (!generateFullCallStub(jit, script, flags))
                     THROWV(NULL);
             }
         }
 
         return ucr.codeAddr;
     }
 };
 
@@ -1028,24 +1025,28 @@ ic::SplatApplyArgs(VMFrame &f)
 
     f.u.call.dynamicArgc = n;
     return true;
 }
 
 void
 JITScript::purgeMICs()
 {
+    if (!nMICs)
+        return;
+
+    Repatcher repatch(this);
+
     for (uint32 i = 0; i < nMICs; i++) {
         ic::MICInfo &mic = mics[i];
         switch (mic.kind) {
           case ic::MICInfo::SET:
           case ic::MICInfo::GET:
           {
             /* Patch shape guard. */
-            JSC::RepatchBuffer repatch(mic.entry.executableAddress(), 50);
             repatch.repatch(mic.shape, int(JSObjectMap::INVALID_SHAPE));
 
             /* 
              * If the stub call was patched, leave it alone -- it probably will
              * just be invalidated again.
              */
             break;
           }
@@ -1066,46 +1067,48 @@ ic::PurgeMICs(JSContext *cx, JSScript *s
         script->jitNormal->purgeMICs();
     if (script->jitCtor)
         script->jitCtor->purgeMICs();
 }
 
 void
 JITScript::sweepCallICs()
 {
+    if (!nCallICs)
+        return;
+
+    Repatcher repatcher(this);
+
     for (uint32 i = 0; i < nCallICs; i++) {
         ic::CallICInfo &ic = callICs[i];
 
         /*
          * If the object is unreachable, we're guaranteed not to be currently
          * executing a stub generated by a guard on that object. This lets us
          * precisely GC call ICs while keeping the identity guard safe.
          */
         bool fastFunDead = ic.fastGuardedObject && IsAboutToBeFinalized(ic.fastGuardedObject);
         bool nativeDead = ic.fastGuardedNative && IsAboutToBeFinalized(ic.fastGuardedNative);
 
         if (!fastFunDead && !nativeDead)
             continue;
 
-        uint8 *start = (uint8 *)ic.funGuard.executableAddress();
-        JSC::RepatchBuffer repatch(start - 32, 64);
-
         if (fastFunDead) {
-            repatch.repatch(ic.funGuard, NULL);
+            repatcher.repatch(ic.funGuard, NULL);
             ic.releasePool(CallICInfo::Pool_ClosureStub);
             ic.hasJsFunCheck = false;
             ic.fastGuardedObject = NULL;
         }
 
         if (nativeDead) {
             ic.releasePool(CallICInfo::Pool_NativeStub);
             ic.fastGuardedNative = NULL;
         }
 
-        repatch.relink(ic.funJump, ic.slowPathStart);
+        repatcher.relink(ic.funJump, ic.slowPathStart);
 
         ic.hit = false;
     }
 }
 
 void
 ic::SweepCallICs(JSScript *script)
 {
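
Both purgeMICs() and sweepCallICs() now bail out early when the script has no ICs and otherwise build a single Repatcher for the whole JITScript, instead of opening a fresh 64- or 128-byte RepatchBuffer around each patch site. Reduced to a sketch ('nICs'/'ics' stand in for the real counters and arrays):

    // Purge/sweep pattern introduced in this file.
    if (!nICs)
        return;                    // nothing was generated, nothing to unprotect
    Repatcher repatcher(this);     // bounds = this JITScript's code block
    for (uint32 i = 0; i < nICs; i++)
        repatcher.relink(ics[i].funJump, ics[i].slowPathStart);
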
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -37,34 +37,32 @@
  *
  * ***** END LICENSE BLOCK ***** */
 #include "PolyIC.h"
 #include "StubCalls.h"
 #include "CodeGenIncludes.h"
 #include "StubCalls-inl.h"
 #include "BaseCompiler.h"
 #include "assembler/assembler/LinkBuffer.h"
-#include "assembler/assembler/RepatchBuffer.h"
 #include "jsscope.h"
 #include "jsnum.h"
 #include "jsatominlines.h"
 #include "jsobjinlines.h"
 #include "jsscopeinlines.h"
 #include "jspropertycache.h"
 #include "jspropertycacheinlines.h"
 #include "jsinterpinlines.h"
 #include "jsautooplen.h"
 
 #if defined JS_POLYIC
 
 using namespace js;
 using namespace js::mjit;
 using namespace js::mjit::ic;
 
-typedef JSC::RepatchBuffer RepatchBuffer;
 typedef JSC::FunctionPtr FunctionPtr;
 
 /* Rough over-estimate of how much memory we need to unprotect. */
 static const uint32 INLINE_PATH_LENGTH = 64;
 
 // Helper class to simplify LinkBuffer usage in PIC stub generators.
 // This guarantees correct OOM and refcount handling for buffers while they
 // are instantiated and rooted.
@@ -132,32 +130,16 @@ class PICStubCompiler : public BaseCompi
 #ifdef JS_METHODJIT_SPEW
         JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
                    type, event, op, script->filename,
                    js_FramePCToLineNumber(cx, f.fp()));
 #endif
     }
 };
 
-class PICRepatchBuffer : public JSC::RepatchBuffer
-{
-    ic::BaseIC &ic;
-    JSC::CodeLocationLabel label;
-
-  public:
-    PICRepatchBuffer(ic::BaseIC &ic, JSC::CodeLocationLabel path)
-      : JSC::RepatchBuffer(path.executableAddress(), INLINE_PATH_LENGTH),
-        ic(ic), label(path)
-    { }
-
-    void relink(int32 offset, JSC::CodeLocationLabel target) {
-        JSC::RepatchBuffer::relink(label.jumpAtOffset(offset), target);
-    }
-};
-
 class SetPropCompiler : public PICStubCompiler
 {
     JSObject *obj;
     JSAtom *atom;
     int lastStubSecondShapeGuard;
 
     static int32 dslotsLoadOffset(ic::PICInfo &pic) {
 #if defined JS_NUNBOX32
@@ -221,38 +203,36 @@ class SetPropCompiler : public PICStubCo
 
   public:
     SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, JSAtom *atom,
                     VoidStubPIC stub)
       : PICStubCompiler("setprop", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
         obj(obj), atom(atom), lastStubSecondShapeGuard(pic.secondShapeGuard)
     { }
 
-    static void reset(ic::PICInfo &pic)
+    static void reset(Repatcher &repatcher, ic::PICInfo &pic)
     {
-        RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
         repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset(pic)));
         repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
                            pic.shapeGuard + inlineShapeOffset(pic)),
                           int32(JSObjectMap::INVALID_SHAPE));
         repatcher.relink(pic.fastPathStart.jumpAtOffset(
                           pic.shapeGuard + inlineShapeJump(pic)),
                          pic.slowPathStart);
 
-        RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
         FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
         repatcher.relink(pic.slowPathCall, target);
     }
 
     LookupStatus patchInline(const Shape *shape, bool inlineSlot)
     {
         JS_ASSERT(!pic.inlinePathPatched);
         JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.fastPathStart);
+        Repatcher repatcher(f.jit());
 
         int32 offset;
         if (inlineSlot) {
             JSC::CodeLocationInstruction istr;
             istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset());
             repatcher.repatchLoadPtrToLEA(istr);
 
             // 
@@ -279,33 +259,36 @@ class SetPropCompiler : public PICStubCo
         repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(SETPROP_INLINE_STORE_VALUE), offset);
 #endif
 
         pic.inlinePathPatched = true;
 
         return Lookup_Cacheable;
     }
 
-    void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
+    void patchPreviousToHere(CodeLocationLabel cs)
     {
+        Repatcher repatcher(pic.lastCodeBlock(f.jit()));
+        CodeLocationLabel label = pic.lastPathStart();
+
         // Patch either the inline fast path or a generated stub. The stub
         // omits the prefix of the inline fast path that loads the shape, so
         // the offsets are different.
         int shapeGuardJumpOffset;
         if (pic.stubsGenerated)
 #if defined JS_NUNBOX32
             shapeGuardJumpOffset = SETPROP_STUB_SHAPE_JUMP;
 #elif defined JS_PUNBOX64
             shapeGuardJumpOffset = pic.labels.setprop.stubShapeJump;
 #endif
         else
             shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
-        repatcher.relink(shapeGuardJumpOffset, cs);
+        repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
         if (lastStubSecondShapeGuard)
-            repatcher.relink(lastStubSecondShapeGuard, cs);
+            repatcher.relink(label.jumpAtOffset(lastStubSecondShapeGuard), cs);
     }
 
     LookupStatus generateStub(uint32 initialShape, const Shape *shape, bool adding, bool inlineSlot)
     {
         /* Exits to the slow path. */
         Vector<Jump, 8> slowExits(cx);
         Vector<Jump, 8> otherGuards(cx);
 
@@ -483,25 +466,23 @@ class SetPropCompiler : public PICStubCo
             buffer.link(skipOver.get(), pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generate setprop stub %p %d %d at %p\n",
                    (void*)&pic,
                    initialShape,
                    pic.stubsGenerated,
                    cs.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
-
         // This function can patch either the inline fast path or a generated
         // stub. The stub omits the prefix of the inline fast path that loads
         // the shape, so the offsets are different.
-        patchPreviousToHere(repatcher, cs);
+        patchPreviousToHere(cs);
 
         pic.stubsGenerated++;
-        pic.lastStubStart = buffer.locationOf(start);
+        pic.updateLastPath(buffer, start);
 
 #if defined JS_PUNBOX64
         pic.labels.setprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
         JS_ASSERT(pic.labels.setprop.stubShapeJump == masm.differenceBetween(start, stubShapeJumpLabel));
 #endif
 
         if (pic.stubsGenerated == MAX_PIC_STUBS)
             disable("max stubs reached");
@@ -788,33 +769,30 @@ class GetPropCompiler : public PICStubCo
                     VoidStubPIC stub)
       : PICStubCompiler(pic.kind == ic::PICInfo::CALL ? "callprop" : "getprop", f, script, pic,
                         JS_FUNC_TO_DATA_PTR(void *, stub)),
         obj(obj),
         atom(atom),
         lastStubSecondShapeGuard(pic.secondShapeGuard)
     { }
 
-    static void reset(ic::PICInfo &pic)
+    static void reset(Repatcher &repatcher, ic::PICInfo &pic)
     {
-        RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
         repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoad(pic)));
         repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
                            pic.shapeGuard + inlineShapeOffset(pic)),
                           int32(JSObjectMap::INVALID_SHAPE));
         repatcher.relink(pic.fastPathStart.jumpAtOffset(pic.shapeGuard + inlineShapeJump(pic)),
                          pic.slowPathStart);
 
         if (pic.hasTypeCheck()) {
             repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD),
                              pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
         }
 
-        RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
-
         VoidStubPIC stub;
         switch (pic.kind) {
           case ic::PICInfo::GET:
             stub = ic::GetProp;
             break;
           case ic::PICInfo::CALL:
             stub = ic::CallProp;
             break;
@@ -850,18 +828,17 @@ class GetPropCompiler : public PICStubCo
         buffer.link(notArgs, pic.slowPathStart);
         buffer.link(overridden, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
 
         CodeLocationLabel start = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
                    start.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
-        patchPreviousToHere(repatcher, start);
+        patchPreviousToHere(start);
 
         disable("args length done");
 
         return Lookup_Cacheable;
     }
 
     LookupStatus generateArrayLengthStub()
     {
@@ -884,18 +861,17 @@ class GetPropCompiler : public PICStubCo
         buffer.link(notArray, pic.slowPathStart);
         buffer.link(oob, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
 
         CodeLocationLabel start = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
                    start.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
-        patchPreviousToHere(repatcher, start);
+        patchPreviousToHere(start);
 
         disable("array length done");
 
         return Lookup_Cacheable;
     }
 
     LookupStatus generateStringCallStub()
     {
@@ -953,17 +929,17 @@ class GetPropCompiler : public PICStubCo
         buffer.link(done, pic.fastPathRejoin);
 
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
                    cs.executableAddress());
 
         /* Patch the type check to jump here. */
         if (pic.hasTypeCheck()) {
-            RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
+            Repatcher repatcher(f.jit());
             repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), cs);
         }
 
         /* Disable the PIC so we don't keep generating stubs on the above shape mismatch. */
         disable("generated string call stub");
 
         return Lookup_Cacheable;
     }
@@ -988,29 +964,29 @@ class GetPropCompiler : public PICStubCo
         buffer.link(notString, pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
         buffer.link(done, pic.fastPathRejoin);
 
         CodeLocationLabel start = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
                    start.executableAddress());
 
         if (pic.hasTypeCheck()) {
-            RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
+            Repatcher repatcher(f.jit());
             repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), start);
         }
 
         disable("generated string length stub");
 
         return Lookup_Cacheable;
     }
 
     LookupStatus patchInline(JSObject *holder, const Shape *shape)
     {
         spew("patch", "inline");
-        PICRepatchBuffer repatcher(pic, pic.fastPathStart);
+        Repatcher repatcher(f.jit());
 
         int32 offset;
         if (!holder->hasSlotsArray()) {
             JSC::CodeLocationInstruction istr;
             istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoad());
             repatcher.repatchLoadPtrToLEA(istr);
 
             // 
@@ -1109,52 +1085,54 @@ class GetPropCompiler : public PICStubCo
         for (Jump *pj = shapeMismatches.begin(); pj != shapeMismatches.end(); ++pj)
             buffer.link(*pj, pic.slowPathStart);
 
         // The final exit jumps to the store-back in the inline stub.
         buffer.link(done, pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart()); 
-        patchPreviousToHere(repatcher, cs);
+        patchPreviousToHere(cs);
 
         pic.stubsGenerated++;
-        pic.lastStubStart = buffer.locationOf(start);
+        pic.updateLastPath(buffer, start);
 
 #if defined JS_PUNBOX64
         pic.labels.getprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
         JS_ASSERT(pic.labels.getprop.stubShapeJump == masm.differenceBetween(start, stubShapeJumpLabel));
 #endif
 
         if (pic.stubsGenerated == MAX_PIC_STUBS)
             disable("max stubs reached");
         if (obj->isDenseArray())
             disable("dense array");
 
         return Lookup_Cacheable;
     }
 
-    void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
+    void patchPreviousToHere(CodeLocationLabel cs)
     {
+        Repatcher repatcher(pic.lastCodeBlock(f.jit()));
+        CodeLocationLabel label = pic.lastPathStart();
+
         // Patch either the inline fast path or a generated stub. The stub
         // omits the prefix of the inline fast path that loads the shape, so
         // the offsets are different.
         int shapeGuardJumpOffset;
         if (pic.stubsGenerated)
 #if defined JS_NUNBOX32
             shapeGuardJumpOffset = GETPROP_STUB_SHAPE_JUMP;
 #elif defined JS_PUNBOX64
             shapeGuardJumpOffset = pic.labels.getprop.stubShapeJump;
 #endif
         else
             shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
-        repatcher.relink(shapeGuardJumpOffset, cs);
+        repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
         if (lastStubSecondShapeGuard)
-            repatcher.relink(lastStubSecondShapeGuard, cs);
+            repatcher.relink(label.jumpAtOffset(lastStubSecondShapeGuard), cs);
     }
 
     LookupStatus update()
     {
         JS_ASSERT(pic.hit);
 
         GetPropertyHelper<GetPropCompiler> getprop(cx, obj, atom, *this);
         LookupStatus status = getprop.lookupAndTest();
@@ -1179,23 +1157,21 @@ class ScopeNameCompiler : public PICStub
   public:
     ScopeNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
                       JSAtom *atom, VoidStubPIC stub)
       : PICStubCompiler("name", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
         scopeChain(scopeChain), atom(atom),
         getprop(f.cx, NULL, atom, *thisFromCtor())
     { }
 
-    static void reset(ic::PICInfo &pic)
+    static void reset(Repatcher &repatcher, ic::PICInfo &pic)
     {
-        RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
         repatcher.relink(pic.fastPathStart.jumpAtOffset(SCOPENAME_JUMP_OFFSET),
                          pic.slowPathStart);
 
-        RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
         VoidStubPIC stub = (pic.kind == ic::PICInfo::NAME) ? ic::Name : ic::XName;
         FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
         repatcher.relink(pic.slowPathCall, target);
     }
 
     typedef Vector<Jump, 8, ContextAllocPolicy> JumpList;
 
     LookupStatus walkScopeChain(Assembler &masm, JumpList &fails)
@@ -1281,21 +1257,22 @@ class ScopeNameCompiler : public PICStub
             return error();
 
         buffer.link(failJump, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
         spew("NAME stub", "global");
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart()); 
-        repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);
+        Repatcher repatcher(pic.lastCodeBlock(f.jit()));
+        CodeLocationLabel label = pic.lastPathStart();
+        repatcher.relink(label.jumpAtOffset(SCOPENAME_JUMP_OFFSET), cs);
 
         pic.stubsGenerated++;
-        pic.lastStubStart = buffer.locationOf(failLabel);
+        pic.updateLastPath(buffer, failLabel);
 
         if (pic.stubsGenerated == MAX_PIC_STUBS)
             disable("max stubs reached");
 
         return Lookup_Cacheable;
     }
 
     enum CallObjPropKind {
@@ -1383,21 +1360,22 @@ class ScopeNameCompiler : public PICStub
         if (!buffer.init(masm))
             return error();
 
         buffer.link(failJump, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart()); 
-        repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);
+        Repatcher repatcher(pic.lastCodeBlock(f.jit()));
+        CodeLocationLabel label = pic.lastPathStart();
+        repatcher.relink(label.jumpAtOffset(SCOPENAME_JUMP_OFFSET), cs);
 
         pic.stubsGenerated++;
-        pic.lastStubStart = buffer.locationOf(failLabel);
+        pic.updateLastPath(buffer, failLabel);
 
         if (pic.stubsGenerated == MAX_PIC_STUBS)
             disable("max stubs reached");
 
         return Lookup_Cacheable;
     }
 
     LookupStatus updateForName()
@@ -1494,24 +1472,24 @@ class BindNameCompiler : public PICStubC
 
   public:
     BindNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
                       JSAtom *atom, VoidStubPIC stub)
       : PICStubCompiler("bind", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
         scopeChain(scopeChain), atom(atom)
     { }
 
-    static void reset(ic::PICInfo &pic)
+    static void reset(Repatcher &repatcher, ic::PICInfo &pic)
     {
-        PICRepatchBuffer repatcher(pic, pic.fastPathStart); 
-        repatcher.relink(pic.shapeGuard + inlineJumpOffset(pic), pic.slowPathStart);
-
-        RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
+        int jumpOffset = pic.shapeGuard + inlineJumpOffset(pic);
+        JSC::CodeLocationJump jump = pic.fastPathStart.jumpAtOffset(jumpOffset);
+        repatcher.relink(jump, pic.slowPathStart);
+
         FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
-        repatcher2.relink(pic.slowPathCall, target);
+        repatcher.relink(pic.slowPathCall, target);
     }
 
     LookupStatus generateStub(JSObject *obj)
     {
         Assembler masm;
         js::Vector<Jump, 8, ContextAllocPolicy> fails(cx);
 
         /* Guard on the shape of the scope chain. */
@@ -1554,24 +1532,25 @@ class BindNameCompiler : public PICStubC
         if (!buffer.init(masm))
             return error();
 
         buffer.link(failJump, pic.slowPathStart);
         buffer.link(done, pic.fastPathRejoin);
         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
         JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
 
-        PICRepatchBuffer repatcher(pic, pic.lastPathStart()); 
+        Repatcher repatcher(pic.lastCodeBlock(f.jit()));
+        CodeLocationLabel label = pic.lastPathStart();
         if (!pic.stubsGenerated)
-            repatcher.relink(pic.shapeGuard + inlineJumpOffset(), cs);
+            repatcher.relink(label.jumpAtOffset(pic.shapeGuard + inlineJumpOffset()), cs);
         else
-            repatcher.relink(BINDNAME_STUB_JUMP_OFFSET, cs);
+            repatcher.relink(label.jumpAtOffset(BINDNAME_STUB_JUMP_OFFSET), cs);
 
         pic.stubsGenerated++;
-        pic.lastStubStart = buffer.locationOf(failLabel);
+        pic.updateLastPath(buffer, failLabel);
 
         if (pic.stubsGenerated == MAX_PIC_STUBS)
             disable("max stubs reached");
 
         return Lookup_Cacheable;
     }
 
     JSObject *update()
@@ -1930,17 +1909,17 @@ BaseIC::spew(JSContext *cx, const char *
                js_FramePCToLineNumber(cx, cx->fp()));
 #endif
 }
 
 LookupStatus
 BaseIC::disable(JSContext *cx, const char *reason, void *stub)
 {
     spew(cx, "disabled", reason);
-    RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
+    Repatcher repatcher(cx->fp()->jit());
     repatcher.relink(slowPathCall, FunctionPtr(stub));
     return Lookup_Uncacheable;
 }
 
 bool
 BaseIC::shouldUpdate(JSContext *cx)
 {
     if (!hit) {
@@ -1990,30 +1969,25 @@ GetElementIC::disable(JSContext *cx, con
 LookupStatus
 GetElementIC::error(JSContext *cx)
 {
     disable(cx, "error");
     return Lookup_Error;
 }
 
 void
-GetElementIC::purge()
+GetElementIC::purge(Repatcher &repatcher)
 {
-    if (inlineTypeGuardPatched || inlineClaspGuardPatched) {
-        RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
-
-        // Repatch the inline jumps.
-        if (inlineTypeGuardPatched)
-            repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
-        if (inlineClaspGuardPatched)
-            repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
-    }
+    // Repatch the inline jumps.
+    if (inlineTypeGuardPatched)
+        repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
+    if (inlineClaspGuardPatched)
+        repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
 
     if (slowCallPatched) {
-        RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
         if (op == JSOP_GETELEM)
             repatcher.relink(slowPathCall, FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
         else if (op == JSOP_CALLELEM)
             repatcher.relink(slowPathCall, FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::CallElement)));
     }
 
     reset();
 }
@@ -2109,53 +2083,54 @@ GetElementIC::attachGetProp(JSContext *c
     JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom 0x%x (\"%s\") shape 0x%x (%s: %d)\n",
                js_CodeName[op], cs.executableAddress(), id, chars, holder->shape(),
                cx->fp()->script()->filename, js_FramePCToLineNumber(cx, cx->fp()));
     cx->free(chars);
 #endif
 
     // Update the inline guards, if needed.
     if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalClaspGuard()) {
-        PICRepatchBuffer repatcher(*this, fastPathStart);
+        Repatcher repatcher(cx->fp()->jit());
 
         if (shouldPatchInlineTypeGuard()) {
             // A type guard is present in the inline path, and this is the
             // first string stub, so patch it now.
             JS_ASSERT(!inlineTypeGuardPatched);
             JS_ASSERT(atomTypeGuard.isSet());
 
-            repatcher.relink(inlineTypeGuard, cs);
+            repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), cs);
             inlineTypeGuardPatched = true;
         }
 
         if (shouldPatchUnconditionalClaspGuard()) {
             // The clasp guard is unconditional, meaning there is no type
             // check. This is the first stub, so it has to be patched. Note
             // that it is wrong to patch the inline clasp guard otherwise,
             // because it follows an integer-id guard.
             JS_ASSERT(!hasInlineTypeGuard());
 
-            repatcher.relink(inlineClaspGuard, cs);
+            repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs);
             inlineClaspGuardPatched = true;
         }
     }
 
     // If there were previous stub guards, patch them now.
     if (hasLastStringStub) {
-        PICRepatchBuffer repatcher(*this, lastStringStub);
+        Repatcher repatcher(lastStringStub);
+        CodeLocationLabel stub(lastStringStub.start());
         if (atomGuard)
-            repatcher.relink(atomGuard, cs);
-        repatcher.relink(firstShapeGuard, cs);
+            repatcher.relink(stub.jumpAtOffset(atomGuard), cs);
+        repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
         if (secondShapeGuard)
-            repatcher.relink(secondShapeGuard, cs);
+            repatcher.relink(stub.jumpAtOffset(secondShapeGuard), cs);
     }
 
     // Update state.
     hasLastStringStub = true;
-    lastStringStub = cs;
+    lastStringStub = JITCode(cs.executableAddress(), buffer.size());
     if (atomIdGuard.isSet()) {
         atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
         JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
         JS_ASSERT(atomGuard);
     } else {
         atomGuard = 0;
     }
     if (protoGuard.isSet()) {
@@ -2308,30 +2283,25 @@ SetElementIC::disable(JSContext *cx, con
 LookupStatus
 SetElementIC::error(JSContext *cx)
 {
     disable(cx, "error");
     return Lookup_Error;
 }
 
 void
-SetElementIC::purge()
+SetElementIC::purge(Repatcher &repatcher)
 {
-    if (inlineClaspGuardPatched || inlineHoleGuardPatched) {
-        RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
-
-        // Repatch the inline jumps.
-        if (inlineClaspGuardPatched)
-            repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
-        if (inlineHoleGuardPatched)
-            repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
-    }
+    // Repatch the inline jumps.
+    if (inlineClaspGuardPatched)
+        repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
+    if (inlineHoleGuardPatched)
+        repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
 
     if (slowCallPatched) {
-        RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
         void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
         repatcher.relink(slowPathCall, FunctionPtr(stub));
     }
 
     reset();
 }
 
 LookupStatus
@@ -2418,18 +2388,18 @@ SetElementIC::attachHoleStub(JSContext *
     buffer.link(extendedArray, slowPathStart);
     buffer.link(sameProto, slowPathStart);
     buffer.link(extendedObject, slowPathStart);
     buffer.link(done, fastPathRejoin);
 
     CodeLocationLabel cs = buffer.finalizeCodeAddendum();
     JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
 
-    PICRepatchBuffer repatcher(*this, fastPathStart);
-    repatcher.relink(inlineHoleGuard, cs);
+    Repatcher repatcher(cx->fp()->jit());
+    repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
     inlineHoleGuardPatched = true;
 
     disable(cx, "generated dense array hole stub");
 
     return Lookup_Cacheable;
 }
 
 LookupStatus
@@ -2465,45 +2435,50 @@ ic::SetElement(VMFrame &f, ic::SetElemen
 }
 
 template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
 template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
 
 void
 JITScript::purgePICs()
 {
+    if (!nPICs && !nGetElems && !nSetElems)
+        return;
+
+    Repatcher repatcher(this);
+
     for (uint32 i = 0; i < nPICs; i++) {
         ic::PICInfo &pic = pics[i];
         switch (pic.kind) {
           case ic::PICInfo::SET:
           case ic::PICInfo::SETMETHOD:
-            SetPropCompiler::reset(pic);
+            SetPropCompiler::reset(repatcher, pic);
             break;
           case ic::PICInfo::NAME:
           case ic::PICInfo::XNAME:
-            ScopeNameCompiler::reset(pic);
+            ScopeNameCompiler::reset(repatcher, pic);
             break;
           case ic::PICInfo::BIND:
-            BindNameCompiler::reset(pic);
+            BindNameCompiler::reset(repatcher, pic);
             break;
           case ic::PICInfo::CALL: /* fall-through */
           case ic::PICInfo::GET:
-            GetPropCompiler::reset(pic);
+            GetPropCompiler::reset(repatcher, pic);
             break;
           default:
             JS_NOT_REACHED("Unhandled PIC kind");
             break;
         }
         pic.reset();
     }
 
     for (uint32 i = 0; i < nGetElems; i++)
-        getElems[i].purge();
+        getElems[i].purge(repatcher);
     for (uint32 i = 0; i < nSetElems; i++)
-        setElems[i].purge();
+        setElems[i].purge(repatcher);
 }
 
 void
 ic::PurgePICs(JSContext *cx, JSScript *script)
 {
     if (script->jitNormal)
         script->jitNormal->purgePICs();
     if (script->jitCtor)
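
For element ICs the same idea covers stub-to-stub chaining: the last string stub is remembered as a JITCode (start plus size), so patching its guards opens a Repatcher over that stub alone rather than over a guessed window. Roughly (field names as in the patch, the rest illustrative):

    // Patch the previous string stub's guards to jump to the new stub 'cs'.
    Repatcher repatcher(lastStringStub);                 // lastStringStub is now a JITCode
    CodeLocationLabel stub(lastStringStub.start());
    repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
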
--- a/js/src/methodjit/PolyIC.h
+++ b/js/src/methodjit/PolyIC.h
@@ -44,16 +44,17 @@
 #include "jstl.h"
 #include "jsvector.h"
 #include "assembler/assembler/MacroAssembler.h"
 #include "assembler/assembler/CodeLocation.h"
 #include "methodjit/MethodJIT.h"
 #include "BaseAssembler.h"
 #include "RematInfo.h"
 #include "BaseCompiler.h"
+#include "assembler/moco/MocoStubs.h"
 
 namespace js {
 namespace mjit {
 namespace ic {
 
 /* Maximum number of stubs for a given callsite. */
 static const uint32 MAX_PIC_STUBS = 16;
 static const uint32 MAX_GETELEM_IC_STUBS = 17;
@@ -186,25 +187,16 @@ struct BaseIC : public MacroAssemblerTyp
     CodeLocationLabel fastPathRejoin;
 
     // Start of the slow path.
     CodeLocationLabel slowPathStart;
 
     // Slow path stub call.
     CodeLocationCall slowPathCall;
 
-    // Address of the start of the last generated stub, if any.
-    CodeLocationLabel lastStubStart;
-
-    // Return the start address of the last path in this PIC, which is the
-    // inline path if no stubs have been generated yet.
-    CodeLocationLabel lastPathStart() {
-        return stubsGenerated > 0 ? lastStubStart : fastPathStart;
-    }
-
     // Whether or not the callsite has been hit at least once.
     bool hit : 1;
     bool slowCallPatched : 1;
 
     // Number of stubs generated.
     uint32 stubsGenerated : 5;
 
     // Offset from start of stub to jump target of second shape guard as Nitro
@@ -301,17 +293,17 @@ struct GetElementIC : public BasePolyIC 
     // These offsets are used for string-key dependent stubs, such as named
     // property accesses. They are separated from the int-key dependent stubs,
     // in order to guarantee that the id type needs only one guard per type.
     int atomGuard : 8;          // optional, non-zero if present
     int firstShapeGuard : 8;    // always set
     int secondShapeGuard : 8;   // optional, non-zero if present
 
     bool hasLastStringStub : 1;
-    CodeLocationLabel lastStringStub;
+    JITCode lastStringStub;
 
     // A limited ValueRemat instance. It may contains either:
     //  1) A constant, or
     //  2) A known type and data reg, or
     //  3) A data reg.
     // The sync bits are not set, and the type reg is never set and should not
     // be used, as it is encapsulated more accurately in |typeReg|. Also, note
     // carefully that the data reg is immutable.
@@ -329,17 +321,17 @@ struct GetElementIC : public BasePolyIC 
 
     void reset() {
         BasePolyIC::reset();
         inlineTypeGuardPatched = false;
         inlineClaspGuardPatched = false;
         typeRegHasBaseShape = false;
         hasLastStringStub = false;
     }
-    void purge();
+    void purge(Repatcher &repatcher);
     LookupStatus update(JSContext *cx, JSObject *obj, const Value &v, jsid id, Value *vp);
     LookupStatus attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid id,
                                Value *vp);
     LookupStatus disable(JSContext *cx, const char *reason);
     LookupStatus error(JSContext *cx);
     bool shouldUpdate(JSContext *cx);
 };
 
@@ -391,17 +383,17 @@ struct SetElementIC : public BaseIC {
     void reset() {
         BaseIC::reset();
         if (execPool != NULL)
             execPool->release();
         execPool = NULL;
         inlineClaspGuardPatched = false;
         inlineHoleGuardPatched = false;
     }
-    void purge();
+    void purge(Repatcher &repatcher);
     LookupStatus attachHoleStub(JSContext *cx, JSObject *obj, int32 key);
     LookupStatus update(JSContext *cx, const Value &objval, const Value &idval);
     LookupStatus disable(JSContext *cx, const char *reason);
     LookupStatus error(JSContext *cx);
 };
 
 struct PICInfo : public BasePolyIC {
     PICInfo() { reset(); }
@@ -427,16 +419,42 @@ struct PICInfo : public BasePolyIC {
             bool hasTypeCheck   : 1;  // type check and reg are present
 
             // Reverse offset from slowPathStart to the type check slow path.
             int32 typeCheckOffset;
         } get;
         ValueRemat vr;
     } u;
 
+    // Address of the start of the last generated stub, if any. Note that this
+    // does not correctly overlay with the allocated memory; it does however
+    // overlay the portion that may need to be patched, which is good enough.
+    JITCode lastStubStart;
+
+    // Return the start address of the last path in this PIC, which is the
+    // inline path if no stubs have been generated yet.
+    CodeLocationLabel lastPathStart() {
+        if (!stubsGenerated)
+            return fastPathStart;
+        return CodeLocationLabel(lastStubStart.start());
+    }
+
+    // Return a JITCode block corresponding to the code memory to attach a
+    // new stub to.
+    JITCode lastCodeBlock(JITScript *jit) {
+        if (!stubsGenerated)
+            return JITCode(jit->code.m_code.executableAddress(), jit->code.m_size);
+        return lastStubStart;
+    }
+
+    void updateLastPath(LinkerHelper &linker, Label label) {
+        CodeLocationLabel loc = linker.locationOf(label);
+        lastStubStart = JITCode(loc.executableAddress(), linker.size());
+    }
+
     Kind kind : 3;
 
     // True if register R holds the base object shape along exits from the
     // last stub.
     bool shapeRegHasBaseShape : 1;
 
     // True if can use the property cache.
     bool usePropCache : 1;
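
Putting the new PICInfo pieces together: a stub compiler asks lastCodeBlock() for the block that currently ends the PIC's chain (the inline path if no stubs exist yet), patches the previous shape-guard jump inside that block, and then records the freshly linked stub with updateLastPath() so the next stub knows which block to open. A condensed sketch of that flow, with 'cs', 'start', 'buffer' and 'shapeGuardJumpOffset' as in the generateStub/patchPreviousToHere code above:

    Repatcher repatcher(pic.lastCodeBlock(f.jit()));   // inline path or last stub
    repatcher.relink(pic.lastPathStart().jumpAtOffset(shapeGuardJumpOffset), cs);

    pic.stubsGenerated++;
    pic.updateLastPath(buffer, start);                 // the new stub becomes the chain's tail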