/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/CodeGenerator.h"

#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/Casting.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/EnumeratedArray.h"
#include "mozilla/EnumeratedRange.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/Unused.h"

#include <type_traits>

#include "jslibmath.h"
#include "jsmath.h"
#include "jsnum.h"

#include "builtin/Eval.h"
#include "builtin/RegExp.h"
#include "builtin/SelfHostingDefines.h"
#include "builtin/String.h"
#include "builtin/TypedObject.h"
#include "gc/Nursery.h"
#include "irregexp/NativeRegExpMacroAssembler.h"
#include "jit/AtomicOperations.h"
#include "jit/BaselineCompiler.h"
#include "jit/IonBuilder.h"
#include "jit/IonIC.h"
#include "jit/IonOptimizationLevels.h"
#include "jit/JitcodeMap.h"
#include "jit/JitSpewer.h"
#include "jit/Linker.h"
#include "jit/Lowering.h"
#include "jit/MIRGenerator.h"
#include "jit/MoveEmitter.h"
#include "jit/RangeAnalysis.h"
#include "jit/SharedICHelpers.h"
#include "jit/StackSlotAllocator.h"
#include "util/Unicode.h"
#include "vm/AsyncFunction.h"
#include "vm/AsyncIteration.h"
#include "vm/MatchPairs.h"
#include "vm/RegExpObject.h"
#include "vm/RegExpStatics.h"
#include "vm/StringType.h"
#include "vm/TraceLogging.h"
#include "vm/TypedArrayObject.h"
#include "vtune/VTuneWrapper.h"

#include "builtin/Boolean-inl.h"
#include "jit/MacroAssembler-inl.h"
#include "jit/shared/CodeGenerator-shared-inl.h"
#include "jit/shared/Lowering-shared-inl.h"
#include "jit/TemplateObject-inl.h"
#include "vm/Interpreter-inl.h"

using namespace js;
using namespace js::jit;

using mozilla::AssertedCast;
using mozilla::DebugOnly;
using mozilla::FloatingPoint;
using mozilla::Maybe;
using mozilla::NegativeInfinity;
using mozilla::PositiveInfinity;
using JS::GenericNaN;

namespace js {
namespace jit {

class OutOfLineICFallback : public OutOfLineCodeBase<CodeGenerator>
{
  private:
    LInstruction* lir_;
    size_t cacheIndex_;
    size_t cacheInfoIndex_;

  public:
    OutOfLineICFallback(LInstruction* lir, size_t cacheIndex, size_t cacheInfoIndex)
      : lir_(lir),
        cacheIndex_(cacheIndex),
        cacheInfoIndex_(cacheInfoIndex)
    { }

    void bind(MacroAssembler* masm) override {
        // The binding of the initial jump is done in
        // CodeGenerator::visitOutOfLineICFallback.
    }

    size_t cacheIndex() const {
        return cacheIndex_;
    }
    size_t cacheInfoIndex() const {
        return cacheInfoIndex_;
    }
    LInstruction* lir() const {
        return lir_;
    }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineICFallback(this);
    }
};

void
CodeGeneratorShared::addIC(LInstruction* lir, size_t cacheIndex)
{
    if (cacheIndex == SIZE_MAX) {
        masm.setOOM();
        return;
    }

    DataPtr<IonIC> cache(this, cacheIndex);
    MInstruction* mir = lir->mirRaw()->toInstruction();
    if (mir->resumePoint()) {
        cache->setScriptedLocation(mir->block()->info().script(),
                                   mir->resumePoint()->pc());
    } else {
        cache->setIdempotent();
    }

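    // Emit the IC's entry jump: load a patchable pointer into the scratch
    // register and jump through it. The placeholder word is patched when the
    // IonScript is linked, so this indirect jump enters the IC's current code
    // (initially the out-of-line fallback generated below).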
    Register temp = cache->scratchRegisterForEntryJump();
    icInfo_.back().icOffsetForJump = masm.movWithPatch(ImmWord(-1), temp);
    masm.jump(Address(temp, 0));

    MOZ_ASSERT(!icInfo_.empty());

    OutOfLineICFallback* ool =
        new(alloc()) OutOfLineICFallback(lir, cacheIndex, icInfo_.length() - 1);
    addOutOfLineCode(ool, mir);

    masm.bind(ool->rejoin());
    cache->setRejoinLabel(CodeOffset(ool->rejoin()->offset()));
}

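// VM functions called from the out-of-line IC fallback paths below. Each IC
// kind has an update function that runs in the VM, attaching new stubs as
// needed and computing the result for the current operands.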
typedef bool (*IonGetPropertyICFn)(JSContext*, HandleScript, IonGetPropertyIC*, HandleValue, HandleValue,
                                   MutableHandleValue);
static const VMFunction IonGetPropertyICInfo =
    FunctionInfo<IonGetPropertyICFn>(IonGetPropertyIC::update, "IonGetPropertyIC::update");

typedef bool (*IonSetPropertyICFn)(JSContext*, HandleScript, IonSetPropertyIC*, HandleObject,
                                   HandleValue, HandleValue);
static const VMFunction IonSetPropertyICInfo =
    FunctionInfo<IonSetPropertyICFn>(IonSetPropertyIC::update, "IonSetPropertyIC::update");

typedef bool (*IonGetPropSuperICFn)(JSContext*, HandleScript, IonGetPropSuperIC*, HandleObject, HandleValue,
                                    HandleValue, MutableHandleValue);
static const VMFunction IonGetPropSuperICInfo =
    FunctionInfo<IonGetPropSuperICFn>(IonGetPropSuperIC::update, "IonGetPropSuperIC::update");

typedef bool (*IonGetNameICFn)(JSContext*, HandleScript, IonGetNameIC*, HandleObject,
                               MutableHandleValue);
static const VMFunction IonGetNameICInfo =
    FunctionInfo<IonGetNameICFn>(IonGetNameIC::update, "IonGetNameIC::update");

typedef bool (*IonHasOwnICFn)(JSContext*, HandleScript, IonHasOwnIC*, HandleValue, HandleValue,
                              int32_t*);
static const VMFunction IonHasOwnICInfo =
    FunctionInfo<IonHasOwnICFn>(IonHasOwnIC::update, "IonHasOwnIC::update");

typedef JSObject* (*IonBindNameICFn)(JSContext*, HandleScript, IonBindNameIC*, HandleObject);
static const VMFunction IonBindNameICInfo =
    FunctionInfo<IonBindNameICFn>(IonBindNameIC::update, "IonBindNameIC::update");

typedef JSObject* (*IonGetIteratorICFn)(JSContext*, HandleScript, IonGetIteratorIC*, HandleValue);
static const VMFunction IonGetIteratorICInfo =
    FunctionInfo<IonGetIteratorICFn>(IonGetIteratorIC::update, "IonGetIteratorIC::update");

typedef bool (*IonInICFn)(JSContext*, HandleScript, IonInIC*, HandleValue, HandleObject, bool*);
static const VMFunction IonInICInfo =
    FunctionInfo<IonInICFn>(IonInIC::update, "IonInIC::update");

typedef bool (*IonInstanceOfICFn)(JSContext*, HandleScript, IonInstanceOfIC*,
                                  HandleValue lhs, HandleObject rhs, bool* res);
static const VMFunction IonInstanceOfInfo =
    FunctionInfo<IonInstanceOfICFn>(IonInstanceOfIC::update, "IonInstanceOfIC::update");

typedef bool (*IonUnaryArithICFn)(JSContext* cx, HandleScript outerScript, IonUnaryArithIC* stub,
                                  HandleValue val, MutableHandleValue res);
static const VMFunction IonUnaryArithICInfo =
    FunctionInfo<IonUnaryArithICFn>(IonUnaryArithIC::update, "IonUnaryArithIC::update");

void
CodeGenerator::visitOutOfLineICFallback(OutOfLineICFallback* ool)
{
    LInstruction* lir = ool->lir();
    size_t cacheIndex = ool->cacheIndex();
    size_t cacheInfoIndex = ool->cacheInfoIndex();

    DataPtr<IonIC> ic(this, cacheIndex);

    // Register the location of the OOL path in the IC.
    ic->setFallbackLabel(masm.labelForPatch());

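    // Each case below follows the same pattern: save the live registers, push
    // the update function's arguments in reverse order (ending with a
    // patchable IC pointer and the outer script), call into the VM, store the
    // result if any, and restore registers before rejoining the inline path.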
    switch (ic->kind()) {
      case CacheKind::GetProp:
      case CacheKind::GetElem: {
        IonGetPropertyIC* getPropIC = ic->asGetPropertyIC();

        saveLive(lir);

        pushArg(getPropIC->id());
        pushArg(getPropIC->value());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonGetPropertyICInfo, lir);

        StoreValueTo(getPropIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreValueTo(getPropIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::GetPropSuper:
      case CacheKind::GetElemSuper: {
        IonGetPropSuperIC* getPropSuperIC = ic->asGetPropSuperIC();

        saveLive(lir);

        pushArg(getPropSuperIC->id());
        pushArg(getPropSuperIC->receiver());
        pushArg(getPropSuperIC->object());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonGetPropSuperICInfo, lir);

        StoreValueTo(getPropSuperIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreValueTo(getPropSuperIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::SetProp:
      case CacheKind::SetElem: {
        IonSetPropertyIC* setPropIC = ic->asSetPropertyIC();

        saveLive(lir);

        pushArg(setPropIC->rhs());
        pushArg(setPropIC->id());
        pushArg(setPropIC->object());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonSetPropertyICInfo, lir);

        restoreLive(lir);

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::GetName: {
        IonGetNameIC* getNameIC = ic->asGetNameIC();

        saveLive(lir);

        pushArg(getNameIC->environment());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonGetNameICInfo, lir);

        StoreValueTo(getNameIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreValueTo(getNameIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::BindName: {
        IonBindNameIC* bindNameIC = ic->asBindNameIC();

        saveLive(lir);

        pushArg(bindNameIC->environment());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonBindNameICInfo, lir);

        StoreRegisterTo(bindNameIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreRegisterTo(bindNameIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::GetIterator: {
        IonGetIteratorIC* getIteratorIC = ic->asGetIteratorIC();

        saveLive(lir);

        pushArg(getIteratorIC->value());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonGetIteratorICInfo, lir);

        StoreRegisterTo(getIteratorIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreRegisterTo(getIteratorIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::In: {
        IonInIC* inIC = ic->asInIC();

        saveLive(lir);

        pushArg(inIC->object());
        pushArg(inIC->key());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonInICInfo, lir);

        StoreRegisterTo(inIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreRegisterTo(inIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::HasOwn: {
        IonHasOwnIC* hasOwnIC = ic->asHasOwnIC();

        saveLive(lir);

        pushArg(hasOwnIC->id());
        pushArg(hasOwnIC->value());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonHasOwnICInfo, lir);

        StoreRegisterTo(hasOwnIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreRegisterTo(hasOwnIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::InstanceOf: {
        IonInstanceOfIC* hasInstanceOfIC = ic->asInstanceOfIC();

        saveLive(lir);

        pushArg(hasInstanceOfIC->rhs());
        pushArg(hasInstanceOfIC->lhs());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonInstanceOfInfo, lir);

        StoreRegisterTo(hasInstanceOfIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreRegisterTo(hasInstanceOfIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::UnaryArith: {
        IonUnaryArithIC* unaryArithIC = ic->asUnaryArithIC();

        saveLive(lir);

        pushArg(unaryArithIC->input());
        icInfo_[cacheInfoIndex].icOffsetForPush = pushArgWithPatch(ImmWord(-1));
        pushArg(ImmGCPtr(gen->info().script()));

        callVM(IonUnaryArithICInfo, lir);

        StoreValueTo(unaryArithIC->output()).generate(this);
        restoreLiveIgnore(lir, StoreValueTo(unaryArithIC->output()).clobbered());

        masm.jump(ool->rejoin());
        return;
      }
      case CacheKind::Call:
      case CacheKind::Compare:
      case CacheKind::TypeOf:
      case CacheKind::ToBool:
      case CacheKind::GetIntrinsic:
        MOZ_CRASH("Unsupported IC");
    }
    MOZ_CRASH();
}

StringObject*
MNewStringObject::templateObj() const
{
    return &templateObj_->as<StringObject>();
}

CodeGenerator::CodeGenerator(MIRGenerator* gen, LIRGraph* graph, MacroAssembler* masm)
  : CodeGeneratorSpecific(gen, graph, masm)
  , ionScriptLabels_(gen->alloc())
  , scriptCounts_(nullptr)
  , simdTemplatesToReadBarrier_(0)
  , realmStubsToReadBarrier_(0)
{
}

CodeGenerator::~CodeGenerator()
{
    js_delete(scriptCounts_);
}

typedef bool (*StringToNumberFn)(JSContext*, JSString*, double*);
static const VMFunction StringToNumberInfo =
    FunctionInfo<StringToNumberFn>(StringToNumber, "StringToNumber");

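// Convert a boxed Value to an int32. In TRUNCATE mode (used by bitwise
// operators) doubles and strings are handled via out-of-line paths; in NORMAL
// mode any value that cannot be converted to an int32 without loss bails out.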
void
CodeGenerator::visitValueToInt32(LValueToInt32* lir)
{
    ValueOperand operand = ToValue(lir, LValueToInt32::Input);
    Register output = ToRegister(lir->output());
    FloatRegister temp = ToFloatRegister(lir->tempFloat());

    MDefinition* input;
    if (lir->mode() == LValueToInt32::NORMAL)
        input = lir->mirNormal()->input();
    else
        input = lir->mirTruncate()->input();

    Label fails;
    if (lir->mode() == LValueToInt32::TRUNCATE) {
        OutOfLineCode* oolDouble = oolTruncateDouble(temp, output, lir->mir());

        // We can only handle strings in truncation contexts, like bitwise
        // operations.
        Label* stringEntry;
        Label* stringRejoin;
        Register stringReg;
        if (input->mightBeType(MIRType::String)) {
            stringReg = ToRegister(lir->temp());
            OutOfLineCode* oolString = oolCallVM(StringToNumberInfo, lir, ArgList(stringReg),
                                                 StoreFloatRegisterTo(temp));
            stringEntry = oolString->entry();
            stringRejoin = oolString->rejoin();
        } else {
            stringReg = InvalidReg;
            stringEntry = nullptr;
            stringRejoin = nullptr;
        }

        masm.truncateValueToInt32(operand, input, stringEntry, stringRejoin, oolDouble->entry(),
                                  stringReg, temp, output, &fails);
        masm.bind(oolDouble->rejoin());
    } else {
        masm.convertValueToInt32(operand, input, temp, output, &fails,
                                 lir->mirNormal()->canBeNegativeZero(),
                                 lir->mirNormal()->conversion());
    }

    bailoutFrom(&fails, lir->snapshot());
}

void
CodeGenerator::visitValueToDouble(LValueToDouble* lir)
{
    MToDouble* mir = lir->mir();
    ValueOperand operand = ToValue(lir, LValueToDouble::Input);
    FloatRegister output = ToFloatRegister(lir->output());

    Label isDouble, isInt32, isBool, isNull, isUndefined, done;
    bool hasBoolean = false, hasNull = false, hasUndefined = false;

    {
        ScratchTagScope tag(masm, operand);
        masm.splitTagForTest(operand, tag);

        masm.branchTestDouble(Assembler::Equal, tag, &isDouble);
        masm.branchTestInt32(Assembler::Equal, tag, &isInt32);

        if (mir->conversion() != MToFPInstruction::NumbersOnly) {
            masm.branchTestBoolean(Assembler::Equal, tag, &isBool);
            masm.branchTestUndefined(Assembler::Equal, tag, &isUndefined);
            hasBoolean = true;
            hasUndefined = true;
            if (mir->conversion() != MToFPInstruction::NonNullNonStringPrimitives) {
                masm.branchTestNull(Assembler::Equal, tag, &isNull);
                hasNull = true;
            }
        }
    }

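    // None of the branches above matched, so the value has a type this
    // conversion cannot handle; bail out.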
    bailout(lir->snapshot());

    if (hasNull) {
        masm.bind(&isNull);
        masm.loadConstantDouble(0.0, output);
        masm.jump(&done);
    }

    if (hasUndefined) {
        masm.bind(&isUndefined);
        masm.loadConstantDouble(GenericNaN(), output);
        masm.jump(&done);
    }

    if (hasBoolean) {
        masm.bind(&isBool);
        masm.boolValueToDouble(operand, output);
        masm.jump(&done);
    }

    masm.bind(&isInt32);
    masm.int32ValueToDouble(operand, output);
    masm.jump(&done);

    masm.bind(&isDouble);
    masm.unboxDouble(operand, output);
    masm.bind(&done);
}

void
CodeGenerator::visitValueToFloat32(LValueToFloat32* lir)
{
    MToFloat32* mir = lir->mir();
    ValueOperand operand = ToValue(lir, LValueToFloat32::Input);
    FloatRegister output = ToFloatRegister(lir->output());

    Label isDouble, isInt32, isBool, isNull, isUndefined, done;
    bool hasBoolean = false, hasNull = false, hasUndefined = false;

    {
        ScratchTagScope tag(masm, operand);
        masm.splitTagForTest(operand, tag);

        masm.branchTestDouble(Assembler::Equal, tag, &isDouble);
        masm.branchTestInt32(Assembler::Equal, tag, &isInt32);

        if (mir->conversion() != MToFPInstruction::NumbersOnly) {
            masm.branchTestBoolean(Assembler::Equal, tag, &isBool);
            masm.branchTestUndefined(Assembler::Equal, tag, &isUndefined);
            hasBoolean = true;
            hasUndefined = true;
            if (mir->conversion() != MToFPInstruction::NonNullNonStringPrimitives) {
                masm.branchTestNull(Assembler::Equal, tag, &isNull);
                hasNull = true;
            }
        }
    }

    bailout(lir->snapshot());

    if (hasNull) {
        masm.bind(&isNull);
        masm.loadConstantFloat32(0.0f, output);
        masm.jump(&done);
    }

    if (hasUndefined) {
        masm.bind(&isUndefined);
        masm.loadConstantFloat32(float(GenericNaN()), output);
        masm.jump(&done);
    }

    if (hasBoolean) {
        masm.bind(&isBool);
        masm.boolValueToFloat32(operand, output);
        masm.jump(&done);
    }

    masm.bind(&isInt32);
    masm.int32ValueToFloat32(operand, output);
    masm.jump(&done);

    masm.bind(&isDouble);
    // ARM and MIPS may not have a double register available if we've
    // allocated output as a float32.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32)
    masm.unboxDouble(operand, ScratchDoubleReg);
    masm.convertDoubleToFloat32(ScratchDoubleReg, output);
#else
    masm.unboxDouble(operand, output);
    masm.convertDoubleToFloat32(output, output);
#endif
    masm.bind(&done);
}

void
CodeGenerator::visitInt32ToDouble(LInt32ToDouble* lir)
{
    masm.convertInt32ToDouble(ToRegister(lir->input()), ToFloatRegister(lir->output()));
}

void
CodeGenerator::visitFloat32ToDouble(LFloat32ToDouble* lir)
{
    masm.convertFloat32ToDouble(ToFloatRegister(lir->input()), ToFloatRegister(lir->output()));
}

void
CodeGenerator::visitDoubleToFloat32(LDoubleToFloat32* lir)
{
    masm.convertDoubleToFloat32(ToFloatRegister(lir->input()), ToFloatRegister(lir->output()));
}

void
CodeGenerator::visitInt32ToFloat32(LInt32ToFloat32* lir)
{
    masm.convertInt32ToFloat32(ToRegister(lir->input()), ToFloatRegister(lir->output()));
}

void
CodeGenerator::visitDoubleToInt32(LDoubleToInt32* lir)
{
    Label fail;
    FloatRegister input = ToFloatRegister(lir->input());
    Register output = ToRegister(lir->output());
    masm.convertDoubleToInt32(input, output, &fail, lir->mir()->canBeNegativeZero());
    bailoutFrom(&fail, lir->snapshot());
}

void
CodeGenerator::visitFloat32ToInt32(LFloat32ToInt32* lir)
{
    Label fail;
    FloatRegister input = ToFloatRegister(lir->input());
    Register output = ToRegister(lir->output());
    masm.convertFloat32ToInt32(input, output, &fail, lir->mir()->canBeNegativeZero());
    bailoutFrom(&fail, lir->snapshot());
}

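// Out-of-line path for object truthiness tests: call js::EmulatesUndefined
// through the ABI and branch on the boolean result. Volatile registers are
// saved and restored around the call.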
void
CodeGenerator::emitOOLTestObject(Register objreg,
                                 Label* ifEmulatesUndefined,
                                 Label* ifDoesntEmulateUndefined,
                                 Register scratch)
{
    saveVolatile(scratch);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(objreg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, js::EmulatesUndefined));
    masm.storeCallBoolResult(scratch);
    restoreVolatile(scratch);

    masm.branchIfTrueBool(scratch, ifEmulatesUndefined);
    masm.jump(ifDoesntEmulateUndefined);
}

// Base out-of-line code generator for all tests of the truthiness of an
// object, where the object might not be truthy.  (Recall that per spec all
// objects are truthy, but we implement the JSCLASS_EMULATES_UNDEFINED class
// flag to permit objects to look like |undefined| in certain contexts,
// including in object truthiness testing.)  We check truthiness inline except
// when the object might be a proxy, in which case out-of-line code calls
// EmulatesUndefined for a conclusive answer.  (If TI guarantees that the
// object will never emulate |undefined|, no check is emitted at all.)
class OutOfLineTestObject : public OutOfLineCodeBase<CodeGenerator>
{
    Register objreg_;
    Register scratch_;

    Label* ifEmulatesUndefined_;
    Label* ifDoesntEmulateUndefined_;

#ifdef DEBUG
    bool initialized() { return ifEmulatesUndefined_ != nullptr; }
#endif

  public:
    OutOfLineTestObject()
#ifdef DEBUG
      : ifEmulatesUndefined_(nullptr), ifDoesntEmulateUndefined_(nullptr)
#endif
    { }

    void accept(CodeGenerator* codegen) final {
        MOZ_ASSERT(initialized());
        codegen->emitOOLTestObject(objreg_, ifEmulatesUndefined_, ifDoesntEmulateUndefined_,
                                   scratch_);
    }

    // Specify the register where the object to be tested is found, labels to
    // jump to if the object is truthy or falsy, and a scratch register for
    // use in the out-of-line path.
    void setInputAndTargets(Register objreg, Label* ifEmulatesUndefined, Label* ifDoesntEmulateUndefined,
                            Register scratch)
    {
        MOZ_ASSERT(!initialized());
        MOZ_ASSERT(ifEmulatesUndefined);
        objreg_ = objreg;
        scratch_ = scratch;
        ifEmulatesUndefined_ = ifEmulatesUndefined;
        ifDoesntEmulateUndefined_ = ifDoesntEmulateUndefined;
    }
};

// A subclass of OutOfLineTestObject containing two extra labels, for use when
// the ifTruthy/ifFalsy labels are needed in inline code as well as out-of-line
// code.  The user should bind these labels in inline code, and specify them as
// targets via setInputAndTargets, as appropriate.
class OutOfLineTestObjectWithLabels : public OutOfLineTestObject
{
    Label label1_;
    Label label2_;

  public:
    OutOfLineTestObjectWithLabels() { }

    Label* label1() { return &label1_; }
    Label* label2() { return &label2_; }
};

void
CodeGenerator::testObjectEmulatesUndefinedKernel(Register objreg,
                                                 Label* ifEmulatesUndefined,
                                                 Label* ifDoesntEmulateUndefined,
                                                 Register scratch, OutOfLineTestObject* ool)
{
    ool->setInputAndTargets(objreg, ifEmulatesUndefined, ifDoesntEmulateUndefined, scratch);

    // Perform a fast-path check of the object's class flags if the object's
    // not a proxy.  Let out-of-line code handle the slow cases that require
    // saving registers, making a function call, and restoring registers.
    masm.branchIfObjectEmulatesUndefined(objreg, scratch, ool->entry(), ifEmulatesUndefined);
}

void
CodeGenerator::branchTestObjectEmulatesUndefined(Register objreg,
                                                 Label* ifEmulatesUndefined,
                                                 Label* ifDoesntEmulateUndefined,
                                                 Register scratch, OutOfLineTestObject* ool)
{
    MOZ_ASSERT(!ifDoesntEmulateUndefined->bound(),
               "ifDoesntEmulateUndefined will be bound to the fallthrough path");

    testObjectEmulatesUndefinedKernel(objreg, ifEmulatesUndefined, ifDoesntEmulateUndefined,
                                      scratch, ool);
    masm.bind(ifDoesntEmulateUndefined);
}

void
CodeGenerator::testObjectEmulatesUndefined(Register objreg,
                                           Label* ifEmulatesUndefined,
                                           Label* ifDoesntEmulateUndefined,
                                           Register scratch, OutOfLineTestObject* ool)
{
    testObjectEmulatesUndefinedKernel(objreg, ifEmulatesUndefined, ifDoesntEmulateUndefined,
                                      scratch, ool);
    masm.jump(ifDoesntEmulateUndefined);
}

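// Emit a truthiness test for a boxed Value. Only the type tags the MIR says
// are possible are tested; falsy values jump to ifFalsy, truthy values either
// jump to ifTruthy or fall through at the end (testValueTruthy below adds the
// final jump to ifTruthy).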
void
CodeGenerator::testValueTruthyKernel(const ValueOperand& value,
                                     const LDefinition* scratch1, const LDefinition* scratch2,
                                     FloatRegister fr,
                                     Label* ifTruthy, Label* ifFalsy,
                                     OutOfLineTestObject* ool,
                                     MDefinition* valueMIR)
{
    // Count the number of possible type tags we might have, so we'll know when
    // we've checked them all and hence can avoid emitting a tag check for the
    // last one.  In particular, whenever tagCount is 1 that means we've tried
    // all but one of them already so we know exactly what's left based on the
    // mightBe* booleans.
    bool mightBeUndefined = valueMIR->mightBeType(MIRType::Undefined);
    bool mightBeNull = valueMIR->mightBeType(MIRType::Null);
    bool mightBeBoolean = valueMIR->mightBeType(MIRType::Boolean);
    bool mightBeInt32 = valueMIR->mightBeType(MIRType::Int32);
    bool mightBeObject = valueMIR->mightBeType(MIRType::Object);
    bool mightBeString = valueMIR->mightBeType(MIRType::String);
    bool mightBeSymbol = valueMIR->mightBeType(MIRType::Symbol);
    bool mightBeDouble = valueMIR->mightBeType(MIRType::Double);
    int tagCount = int(mightBeUndefined) + int(mightBeNull) +
        int(mightBeBoolean) + int(mightBeInt32) + int(mightBeObject) +
        int(mightBeString) + int(mightBeSymbol) + int(mightBeDouble);

    MOZ_ASSERT_IF(!valueMIR->emptyResultTypeSet(), tagCount > 0);

    // If we know we're null or undefined, we're definitely falsy, no
    // need to even check the tag.
    if (int(mightBeNull) + int(mightBeUndefined) == tagCount) {
        masm.jump(ifFalsy);
        return;
    }

    ScratchTagScope tag(masm, value);
    masm.splitTagForTest(value, tag);

    if (mightBeUndefined) {
        MOZ_ASSERT(tagCount > 1);
        masm.branchTestUndefined(Assembler::Equal, tag, ifFalsy);
        --tagCount;
    }

    if (mightBeNull) {
        MOZ_ASSERT(tagCount > 1);
        masm.branchTestNull(Assembler::Equal, tag, ifFalsy);
        --tagCount;
    }

    if (mightBeBoolean) {
        MOZ_ASSERT(tagCount != 0);
        Label notBoolean;
        if (tagCount != 1)
            masm.branchTestBoolean(Assembler::NotEqual, tag, &notBoolean);
        {
            ScratchTagScopeRelease _(&tag);
            masm.branchTestBooleanTruthy(false, value, ifFalsy);
        }
        if (tagCount != 1)
            masm.jump(ifTruthy);
        // Else just fall through to truthiness.
        masm.bind(&notBoolean);
        --tagCount;
    }

    if (mightBeInt32) {
        MOZ_ASSERT(tagCount != 0);
        Label notInt32;
        if (tagCount != 1)
            masm.branchTestInt32(Assembler::NotEqual, tag, &notInt32);
        {
            ScratchTagScopeRelease _(&tag);
            masm.branchTestInt32Truthy(false, value, ifFalsy);
        }
        if (tagCount != 1)
            masm.jump(ifTruthy);
        // Else just fall through to truthiness.
        masm.bind(&notInt32);
        --tagCount;
    }

    if (mightBeObject) {
        MOZ_ASSERT(tagCount != 0);
        if (ool) {
            Label notObject;

            if (tagCount != 1)
                masm.branchTestObject(Assembler::NotEqual, tag, &notObject);

            {
                ScratchTagScopeRelease _(&tag);
                Register objreg = masm.extractObject(value, ToRegister(scratch1));
                testObjectEmulatesUndefined(objreg, ifFalsy, ifTruthy, ToRegister(scratch2), ool);
            }

            masm.bind(&notObject);
        } else {
            if (tagCount != 1)
                masm.branchTestObject(Assembler::Equal, tag, ifTruthy);
            // Else just fall through to truthiness.
        }
        --tagCount;
    } else {
        MOZ_ASSERT(!ool,
                   "We better not have an unused OOL path, since the code generator will try to "
                   "generate code for it but we never set up its labels, which will cause null "
                   "derefs of those labels.");
    }

    if (mightBeString) {
        // Test if a string is non-empty.
        MOZ_ASSERT(tagCount != 0);
        Label notString;
        if (tagCount != 1)
            masm.branchTestString(Assembler::NotEqual, tag, &notString);
        {
            ScratchTagScopeRelease _(&tag);
            masm.branchTestStringTruthy(false, value, ifFalsy);
        }
        if (tagCount != 1)
            masm.jump(ifTruthy);
        // Else just fall through to truthiness.
        masm.bind(&notString);
        --tagCount;
    }

    if (mightBeSymbol) {
        // All symbols are truthy.
        MOZ_ASSERT(tagCount != 0);
        if (tagCount != 1)
            masm.branchTestSymbol(Assembler::Equal, tag, ifTruthy);
        // Else fall through to ifTruthy.
        --tagCount;
    }

    if (mightBeDouble) {
        MOZ_ASSERT(tagCount == 1);
        // If we reach here the value is a double.
        {
            ScratchTagScopeRelease _(&tag);
            masm.unboxDouble(value, fr);
            masm.branchTestDoubleTruthy(false, fr, ifFalsy);
        }
        --tagCount;
    }

    MOZ_ASSERT(tagCount == 0);

    // Fall through for truthy.
}

void
CodeGenerator::testValueTruthy(const ValueOperand& value,
                               const LDefinition* scratch1, const LDefinition* scratch2,
                               FloatRegister fr,
                               Label* ifTruthy, Label* ifFalsy,
                               OutOfLineTestObject* ool,
                               MDefinition* valueMIR)
{
    testValueTruthyKernel(value, scratch1, scratch2, fr, ifTruthy, ifFalsy, ool, valueMIR);
    masm.jump(ifTruthy);
}

void
CodeGenerator::visitTestOAndBranch(LTestOAndBranch* lir)
{
    MIRType inputType = lir->mir()->input()->type();
    MOZ_ASSERT(inputType == MIRType::ObjectOrNull || lir->mir()->operandMightEmulateUndefined(),
               "If the object couldn't emulate undefined, this should have been folded.");

    Label* truthy = getJumpLabelForBranch(lir->ifTruthy());
    Label* falsy = getJumpLabelForBranch(lir->ifFalsy());
    Register input = ToRegister(lir->input());

    if (lir->mir()->operandMightEmulateUndefined()) {
        if (inputType == MIRType::ObjectOrNull)
            masm.branchTestPtr(Assembler::Zero, input, input, falsy);

        OutOfLineTestObject* ool = new(alloc()) OutOfLineTestObject();
        addOutOfLineCode(ool, lir->mir());

        testObjectEmulatesUndefined(input, falsy, truthy, ToRegister(lir->temp()), ool);
    } else {
        MOZ_ASSERT(inputType == MIRType::ObjectOrNull);
        testZeroEmitBranch(Assembler::NotEqual, input, lir->ifTruthy(), lir->ifFalsy());
    }
}

void
CodeGenerator::visitTestVAndBranch(LTestVAndBranch* lir)
{
    OutOfLineTestObject* ool = nullptr;
    MDefinition* input = lir->mir()->input();
    // Unfortunately, it's possible that someone (e.g. phi elimination) switched
    // out our input after we did cacheOperandMightEmulateUndefined.  So we
    // might think it can emulate undefined _and_ know that it can't be an
    // object.
    if (lir->mir()->operandMightEmulateUndefined() && input->mightBeType(MIRType::Object)) {
        ool = new(alloc()) OutOfLineTestObject();
        addOutOfLineCode(ool, lir->mir());
    }

    Label* truthy = getJumpLabelForBranch(lir->ifTruthy());
    Label* falsy = getJumpLabelForBranch(lir->ifFalsy());

    testValueTruthy(ToValue(lir, LTestVAndBranch::Input),
                    lir->temp1(), lir->temp2(),
                    ToFloatRegister(lir->tempFloat()),
                    truthy, falsy, ool, input);
}

void
CodeGenerator::visitFunctionDispatch(LFunctionDispatch* lir)
{
    MFunctionDispatch* mir = lir->mir();
    Register input = ToRegister(lir->input());
    Label* lastLabel;
    size_t casesWithFallback;

    // Determine if the last case is fallback or an ordinary case.
    if (!mir->hasFallback()) {
        MOZ_ASSERT(mir->numCases() > 0);
        casesWithFallback = mir->numCases();
        lastLabel = skipTrivialBlocks(mir->getCaseBlock(mir->numCases() - 1))->lir()->label();
    } else {
        casesWithFallback = mir->numCases() + 1;
        lastLabel = skipTrivialBlocks(mir->getFallback())->lir()->label();
    }

    // Compare function pointers, except for the last case.
    for (size_t i = 0; i < casesWithFallback - 1; i++) {
        MOZ_ASSERT(i < mir->numCases());
        LBlock* target = skipTrivialBlocks(mir->getCaseBlock(i))->lir();
        if (ObjectGroup* funcGroup = mir->getCaseObjectGroup(i)) {
            masm.branchTestObjGroupUnsafe(Assembler::Equal, input, funcGroup, target->label());
        } else {
            JSFunction* func = mir->getCase(i);
            masm.branchPtr(Assembler::Equal, input, ImmGCPtr(func), target->label());
        }
    }

    // Jump to the last case.
    masm.jump(lastLabel);
}

void
CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch* lir)
{
    MObjectGroupDispatch* mir = lir->mir();
    Register input = ToRegister(lir->input());
    Register temp = ToRegister(lir->temp());

    // Load the incoming ObjectGroup in temp.
    masm.loadObjGroupUnsafe(input, temp);

    // Compare ObjectGroups.
    MacroAssembler::BranchGCPtr lastBranch;
    LBlock* lastBlock = nullptr;
    InlinePropertyTable* propTable = mir->propTable();
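    // Emit a chain of ObjectGroup comparisons. The most recent branch is kept
    // pending so that, when a fallback block exists, its condition can be
    // inverted at the end to jump to the fallback instead.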
    for (size_t i = 0; i < mir->numCases(); i++) {
        JSFunction* func = mir->getCase(i);
        LBlock* target = skipTrivialBlocks(mir->getCaseBlock(i))->lir();

        DebugOnly<bool> found = false;
        for (size_t j = 0; j < propTable->numEntries(); j++) {
            if (propTable->getFunction(j) != func)
                continue;

            if (lastBranch.isInitialized())
                lastBranch.emit(masm);

            ObjectGroup* group = propTable->getObjectGroup(j);
            lastBranch = MacroAssembler::BranchGCPtr(Assembler::Equal, temp, ImmGCPtr(group),
                                                     target->label());
            lastBlock = target;
            found = true;
        }
        MOZ_ASSERT(found);
    }

    // Jump to fallback block if we have an unknown ObjectGroup. If there's no
    // fallback block, we should have handled all cases.

    if (!mir->hasFallback()) {
        MOZ_ASSERT(lastBranch.isInitialized());
#ifdef DEBUG
        Label ok;
        lastBranch.relink(&ok);
        lastBranch.emit(masm);
        masm.assumeUnreachable("Unexpected ObjectGroup");
        masm.bind(&ok);
#endif
        if (!isNextBlock(lastBlock))
            masm.jump(lastBlock->label());
        return;
    }

    LBlock* fallback = skipTrivialBlocks(mir->getFallback())->lir();
    if (!lastBranch.isInitialized()) {
        if (!isNextBlock(fallback))
            masm.jump(fallback->label());
        return;
    }

    lastBranch.invertCondition();
    lastBranch.relink(fallback->label());
    lastBranch.emit(masm);

    if (!isNextBlock(lastBlock))
        masm.jump(lastBlock->label());
}

void
CodeGenerator::visitBooleanToString(LBooleanToString* lir)
{
    Register input = ToRegister(lir->input());
    Register output = ToRegister(lir->output());
    const JSAtomState& names = gen->runtime->names();
    Label true_, done;

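    // Pick the interned "true" or "false" atom based on the boolean input.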
    masm.branchTest32(Assembler::NonZero, input, input, &true_);
    masm.movePtr(ImmGCPtr(names.false_), output);
    masm.jump(&done);

    masm.bind(&true_);
    masm.movePtr(ImmGCPtr(names.true_), output);

    masm.bind(&done);
}

void
CodeGenerator::emitIntToString(Register input, Register output, Label* ool)
{
    masm.boundsCheck32PowerOfTwo(input, StaticStrings::INT_STATIC_LIMIT, ool);

    // Fast path for small integers.
    masm.movePtr(ImmPtr(&gen->runtime->staticStrings().intStaticTable), output);
    masm.loadPtr(BaseIndex(output, input, ScalePointer), output);
}

typedef JSFlatString* (*IntToStringFn)(JSContext*, int);
static const VMFunction IntToStringInfo =
    FunctionInfo<IntToStringFn>(Int32ToString<CanGC>, "Int32ToString");

void
CodeGenerator::visitIntToString(LIntToString* lir)
{
    Register input = ToRegister(lir->input());
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(IntToStringInfo, lir, ArgList(input),
                                   StoreRegisterTo(output));

    emitIntToString(input, output, ool->entry());

    masm.bind(ool->rejoin());
}

typedef JSString* (*DoubleToStringFn)(JSContext*, double);
static const VMFunction DoubleToStringInfo =
    FunctionInfo<DoubleToStringFn>(NumberToString<CanGC>, "NumberToString");

void
CodeGenerator::visitDoubleToString(LDoubleToString* lir)
{
    FloatRegister input = ToFloatRegister(lir->input());
    Register temp = ToRegister(lir->tempInt());
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(DoubleToStringInfo, lir, ArgList(input),
                                   StoreRegisterTo(output));

    // Try double to integer conversion and run integer to string code.
    masm.convertDoubleToInt32(input, temp, ool->entry(), true);
    emitIntToString(temp, output, ool->entry());

    masm.bind(ool->rejoin());
}

typedef JSString* (*PrimitiveToStringFn)(JSContext*, HandleValue);
static const VMFunction PrimitiveToStringInfo =
    FunctionInfo<PrimitiveToStringFn>(ToStringSlow, "ToStringSlow");

void
CodeGenerator::visitValueToString(LValueToString* lir)
{
    ValueOperand input = ToValue(lir, LValueToString::Input);
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(PrimitiveToStringInfo, lir, ArgList(input),
                                   StoreRegisterTo(output));

    Label done;
    Register tag = masm.extractTag(input, output);
    const JSAtomState& names = gen->runtime->names();

    // String
    if (lir->mir()->input()->mightBeType(MIRType::String)) {
        Label notString;
        masm.branchTestString(Assembler::NotEqual, tag, &notString);
        masm.unboxString(input, output);
        masm.jump(&done);
        masm.bind(&notString);
    }

    // Integer
    if (lir->mir()->input()->mightBeType(MIRType::Int32)) {
        Label notInteger;
        masm.branchTestInt32(Assembler::NotEqual, tag, &notInteger);
        Register unboxed = ToTempUnboxRegister(lir->tempToUnbox());
        unboxed = masm.extractInt32(input, unboxed);
        emitIntToString(unboxed, output, ool->entry());
        masm.jump(&done);
        masm.bind(&notInteger);
    }

    // Double
    if (lir->mir()->input()->mightBeType(MIRType::Double)) {
        // Note: there is no fast path here; it would need two extra registers
        // and could only convert doubles that fit in an int32 and are below
        // StaticStrings::INT_STATIC_LIMIT.
        masm.branchTestDouble(Assembler::Equal, tag, ool->entry());
    }

    // Undefined
    if (lir->mir()->input()->mightBeType(MIRType::Undefined)) {
        Label notUndefined;
        masm.branchTestUndefined(Assembler::NotEqual, tag, &notUndefined);
        masm.movePtr(ImmGCPtr(names.undefined), output);
        masm.jump(&done);
        masm.bind(&notUndefined);
    }

    // Null
    if (lir->mir()->input()->mightBeType(MIRType::Null)) {
        Label notNull;
        masm.branchTestNull(Assembler::NotEqual, tag, &notNull);
        masm.movePtr(ImmGCPtr(names.null), output);
        masm.jump(&done);
        masm.bind(&notNull);
    }

    // Boolean
    if (lir->mir()->input()->mightBeType(MIRType::Boolean)) {
        Label notBoolean, true_;
        masm.branchTestBoolean(Assembler::NotEqual, tag, &notBoolean);
        masm.branchTestBooleanTruthy(true, input, &true_);
        masm.movePtr(ImmGCPtr(names.false_), output);
        masm.jump(&done);
        masm.bind(&true_);
        masm.movePtr(ImmGCPtr(names.true_), output);
        masm.jump(&done);
        masm.bind(&notBoolean);
    }

    // Object
    if (lir->mir()->input()->mightBeType(MIRType::Object)) {
        // Bail.
        MOZ_ASSERT(lir->mir()->fallible());
        Label bail;
        masm.branchTestObject(Assembler::Equal, tag, &bail);
        bailoutFrom(&bail, lir->snapshot());
    }

    // Symbol
    if (lir->mir()->input()->mightBeType(MIRType::Symbol)) {
        // Bail.
        MOZ_ASSERT(lir->mir()->fallible());
        Label bail;
        masm.branchTestSymbol(Assembler::Equal, tag, &bail);
        bailoutFrom(&bail, lir->snapshot());
    }

#ifdef DEBUG
    masm.assumeUnreachable("Unexpected type for MValueToString.");
#endif

    masm.bind(&done);
    masm.bind(ool->rejoin());
}

typedef JSObject* (*ToObjectFn)(JSContext*, HandleValue, bool);
static const VMFunction ToObjectInfo =
    FunctionInfo<ToObjectFn>(ToObjectSlow, "ToObjectSlow");

void
CodeGenerator::visitValueToObject(LValueToObject* lir)
{
    ValueOperand input = ToValue(lir, LValueToObject::Input);
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(ToObjectInfo, lir, ArgList(input, Imm32(0)),
                                   StoreRegisterTo(output));

    masm.branchTestObject(Assembler::NotEqual, input, ool->entry());
    masm.unboxObject(input, output);

    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitValueToObjectOrNull(LValueToObjectOrNull* lir)
{
    ValueOperand input = ToValue(lir, LValueToObjectOrNull::Input);
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(ToObjectInfo, lir, ArgList(input, Imm32(0)),
                                   StoreRegisterTo(output));

    Label isObject;
    masm.branchTestObject(Assembler::Equal, input, &isObject);
    masm.branchTestNull(Assembler::NotEqual, input, ool->entry());

    masm.movePtr(ImmWord(0), output);
    masm.jump(ool->rejoin());

    masm.bind(&isObject);
    masm.unboxObject(input, output);

    masm.bind(ool->rejoin());
}

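// Helper for the string post-write barrier below: compute the address of the
// barriered slot (|holder + offset|) and call |fun| (a store buffer add or
// remove function) through the ABI, preserving the live volatile registers
// across the call.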
static void
EmitStoreBufferMutation(MacroAssembler& masm, Register holder, size_t offset,
                        Register buffer,
                        LiveGeneralRegisterSet& liveVolatiles,
                        void (*fun)(js::gc::StoreBuffer*, js::gc::Cell**))
{
    Label callVM;
    Label exit;

    // Call into the VM to barrier the write. The only registers that need to
    // be preserved are those in liveVolatiles, so once they are saved on the
    // stack all volatile registers are available for use.
    masm.bind(&callVM);
    masm.PushRegsInMask(liveVolatiles);

    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
    regs.takeUnchecked(buffer);
    regs.takeUnchecked(holder);
    Register addrReg = regs.takeAny();

    masm.computeEffectiveAddress(Address(holder, offset), addrReg);

    bool needExtraReg = !regs.hasAny<GeneralRegisterSet::DefaultType>();
    if (needExtraReg) {
        masm.push(holder);
        masm.setupUnalignedABICall(holder);
    } else {
        masm.setupUnalignedABICall(regs.takeAny());
    }
    masm.passABIArg(buffer);
    masm.passABIArg(addrReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, fun), MoveOp::GENERAL,
                     CheckUnsafeCallWithABI::DontCheckOther);

    if (needExtraReg)
        masm.pop(holder);
    masm.PopRegsInMask(liveVolatiles);
    masm.bind(&exit);
}

// Warning: this function modifies prev and next.
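// Emit a post-write barrier for storing a string |next| into the field at
// |holder + offset|, where |prev| is the field's previous value: if |next| is
// in the nursery and |prev| was not, the field's address is added to the
// store buffer; if |prev| was in the nursery and |next| is not, the stale
// entry is removed.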
static void
EmitPostWriteBarrierS(MacroAssembler& masm,
                      Register holder, size_t offset,
                      Register prev, Register next,
                      LiveGeneralRegisterSet& liveVolatiles)
{
    Label exit;
    Label checkRemove, putCell;

    // if (next && (buffer = next->storeBuffer()))
    // but we never pass in nullptr for next.
    Register storebuffer = next;
    masm.loadStoreBuffer(next, storebuffer);
    masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &checkRemove);

    // if (prev && prev->storeBuffer())
    masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &putCell);
    masm.loadStoreBuffer(prev, prev);
    masm.branchPtr(Assembler::NotEqual, prev, ImmWord(0), &exit);

    // buffer->putCell(cellp)
    masm.bind(&putCell);
    EmitStoreBufferMutation(masm, holder, offset, storebuffer, liveVolatiles,
                            JSString::addCellAddressToStoreBuffer);
    masm.jump(&exit);

    // if (prev && (buffer = prev->storeBuffer()))
    masm.bind(&checkRemove);
    masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &exit);
    masm.loadStoreBuffer(prev, storebuffer);
    masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &exit);
    EmitStoreBufferMutation(masm, holder, offset, storebuffer, liveVolatiles,
                            JSString::removeCellAddressFromStoreBuffer);

    masm.bind(&exit);
}

typedef JSObject* (*CloneRegExpObjectFn)(JSContext*, Handle<RegExpObject*>);
static const VMFunction CloneRegExpObjectInfo =
    FunctionInfo<CloneRegExpObjectFn>(CloneRegExpObject, "CloneRegExpObject");

void
CodeGenerator::visitRegExp(LRegExp* lir)
{
    Register output = ToRegister(lir->output());
    Register temp = ToRegister(lir->temp());
    JSObject* source = lir->mir()->source();

    OutOfLineCode* ool = oolCallVM(CloneRegExpObjectInfo, lir, ArgList(ImmGCPtr(source)),
                                   StoreRegisterTo(output));
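    // If the source RegExp already has a RegExpShared, allocate the clone
    // inline from the template object; otherwise (or if the inline allocation
    // fails) call CloneRegExpObject in the VM.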
    if (lir->mir()->hasShared()) {
        TemplateObject templateObject(source);
        masm.createGCObject(output, temp, templateObject, gc::DefaultHeap, ool->entry());
    } else {
        masm.jump(ool->entry());
    }
    masm.bind(ool->rejoin());
}

// Amount of space to reserve on the stack when executing RegExps inline.
static const size_t RegExpReservedStack = sizeof(irregexp::InputOutputData)
                                        + sizeof(MatchPairs)
                                        + RegExpObject::MaxPairCount * sizeof(MatchPair);
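// The reserved area is laid out as an InputOutputData, followed by a
// MatchPairs header, followed by the vector of MatchPair entries. The helpers
// below compute the addresses of these pieces from a given start offset.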

static size_t
RegExpPairsVectorStartOffset(size_t inputOutputDataStartOffset)
{
    return inputOutputDataStartOffset + sizeof(irregexp::InputOutputData) + sizeof(MatchPairs);
}

static Address
RegExpPairCountAddress(MacroAssembler& masm, size_t inputOutputDataStartOffset)
{
    return Address(masm.getStackPointer(), inputOutputDataStartOffset
                                           + sizeof(irregexp::InputOutputData)
                                           + MatchPairs::offsetOfPairCount());
}

// Prepare an InputOutputData and an optional MatchPairs for which space has
// already been allocated on the stack, and try to execute a RegExp on a
// string input. If the RegExp was executed successfully and matched the
// input, fall through; otherwise jump to notFound or failure.
static bool
PrepareAndExecuteRegExp(JSContext* cx, MacroAssembler& masm, Register regexp, Register input,
                        Register lastIndex,
                        Register temp1, Register temp2, Register temp3,
                        size_t inputOutputDataStartOffset,
                        RegExpShared::CompilationMode mode,
                        bool stringsCanBeInNursery,
                        Label* notFound, Label* failure)
{
    size_t matchPairsStartOffset = inputOutputDataStartOffset + sizeof(irregexp::InputOutputData);
    size_t pairsVectorStartOffset = RegExpPairsVectorStartOffset(inputOutputDataStartOffset);

    Address inputStartAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, inputStart));
    Address inputEndAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, inputEnd));
    Address matchesPointerAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, matches));
    Address startIndexAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, startIndex));
    Address endIndexAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, endIndex));
    Address matchResultAddress(masm.getStackPointer(),
        inputOutputDataStartOffset + offsetof(irregexp::InputOutputData, result));

    Address pairCountAddress = RegExpPairCountAddress(masm, inputOutputDataStartOffset);
    Address pairsPointerAddress(masm.getStackPointer(),
        matchPairsStartOffset + MatchPairs::offsetOfPairs());

    Address pairsVectorAddress(masm.getStackPointer(), pairsVectorStartOffset);

    RegExpStatics* res = GlobalObject::getRegExpStatics(cx, cx->global());
    if (!res)
        return false;
#ifdef JS_USE_LINK_REGISTER
    if (mode != RegExpShared::MatchOnly)
        masm.pushReturnAddress();
#endif
    if (mode == RegExpShared::Normal) {
        // First, fill in a skeletal MatchPairs instance on the stack. This will be
        // passed to the OOL stub in the caller if we aren't able to execute the
        // RegExp inline, and that stub needs to be able to determine whether the
        // execution finished successfully.
        masm.store32(Imm32(1), pairCountAddress);
        masm.store32(Imm32(-1), pairsVectorAddress);
        masm.computeEffectiveAddress(pairsVectorAddress, temp1);
        masm.storePtr(temp1, pairsPointerAddress);
    }

    // Check for a linear input string.
    masm.branchIfRopeOrExternal(input, temp1, failure);

    // Get the RegExpShared for the RegExp.
    masm.loadPtr(Address(regexp, NativeObject::getFixedSlotOffset(RegExpObject::PRIVATE_SLOT)), temp1);
    masm.branchPtr(Assembler::Equal, temp1, ImmWord(0), failure);

    // ES6 21.2.2.2 step 2.
    // See RegExp.cpp ExecuteRegExp for more detail.
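    // For unicode RegExps: if lastIndex points at a trail surrogate that is
    // preceded by a lead surrogate, move lastIndex back by one so the match
    // starts on the full code point.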
    {
        Label done;

        masm.branchTest32(Assembler::Zero, Address(temp1, RegExpShared::offsetOfFlags()),
                          Imm32(UnicodeFlag), &done);

        // If the input is Latin-1, it cannot contain surrogate pairs.
        masm.branchLatin1String(input, &done);

        // Check if |lastIndex > 0 && lastIndex < input->length()|.
        // lastIndex is already known to be non-negative here.
        masm.branchTest32(Assembler::Zero, lastIndex, lastIndex, &done);
        masm.loadStringLength(input, temp2);
        masm.branch32(Assembler::AboveOrEqual, lastIndex, temp2, &done);

        // Check if input[lastIndex] is a trail surrogate.
        masm.loadStringChars(input, temp2, CharEncoding::TwoByte);
        masm.computeEffectiveAddress(BaseIndex(temp2, lastIndex, TimesTwo), temp3);
        masm.load16ZeroExtend(Address(temp3, 0), temp3);

        masm.branch32(Assembler::Below, temp3, Imm32(unicode::TrailSurrogateMin), &done);
        masm.branch32(Assembler::Above, temp3, Imm32(unicode::TrailSurrogateMax), &done);

        // Check if input[lastIndex-1] is a lead surrogate.
        masm.move32(lastIndex, temp3);
        masm.sub32(Imm32(1), temp3);
        masm.computeEffectiveAddress(BaseIndex(temp2, temp3, TimesTwo), temp3);
        masm.load16ZeroExtend(Address(temp3, 0), temp3);

        masm.branch32(Assembler::Below, temp3, Imm32(unicode::LeadSurrogateMin), &done);
        masm.branch32(Assembler::Above, temp3, Imm32(unicode::LeadSurrogateMax), &done);

        // Move lastIndex to lead surrogate.
        masm.subPtr(Imm32(1), lastIndex);

        masm.bind(&done);
    }

    if (mode == RegExpShared::Normal) {
        // Don't handle RegExps with excessive parens.
        masm.load32(Address(temp1, RegExpShared::offsetOfParenCount()), temp2);
        masm.branch32(Assembler::AboveOrEqual, temp2, Imm32(RegExpObject::MaxPairCount), failure);

        // Fill in the paren count in the MatchPairs on the stack.
        masm.add32(Imm32(1), temp2);
        masm.store32(temp2, pairCountAddress);
    }

    // Load the code pointer for the type of input string we have, and compute
    // the input start/end pointers in the InputOutputData.
    Register codePointer = temp1;
    {
        masm.loadStringLength(input, temp3);

        Label isLatin1, done;
        masm.branchLatin1String(input, &isLatin1);
        {
            masm.loadStringChars(input, temp2, CharEncoding::TwoByte);
            masm.storePtr(temp2, inputStartAddress);
            masm.lshiftPtr(Imm32(1), temp3);
            masm.loadPtr(Address(temp1, RegExpShared::offsetOfTwoByteJitCode(mode)),
                         codePointer);
            masm.jump(&done);
        }
        masm.bind(&isLatin1);
        {
            masm.loadStringChars(input, temp2, CharEncoding::Latin1);
            masm.storePtr(temp2, inputStartAddress);
            masm.loadPtr(Address(temp1, RegExpShared::offsetOfLatin1JitCode(mode)),
                         codePointer);
        }
        masm.bind(&done);

        masm.addPtr(temp3, temp2);
        masm.storePtr(temp2, inputEndAddress);
    }

    // Check that the RegExpShared has been compiled for this type of input.
    masm.branchPtr(Assembler::Equal, codePointer, ImmWord(0), failure);
    masm.loadPtr(Address(codePointer, JitCode::offsetOfCode()), codePointer);

    // Finish filling in the InputOutputData instance on the stack.
    if (mode == RegExpShared::Normal) {
        masm.computeEffectiveAddress(Address(masm.getStackPointer(), matchPairsStartOffset), temp2);
        masm.storePtr(temp2, matchesPointerAddress);
    } else {
        // Use InputOutputData.endIndex itself for output.
        masm.computeEffectiveAddress(endIndexAddress, temp2);
        masm.storePtr(temp2, endIndexAddress);
    }
    masm.storePtr(lastIndex, startIndexAddress);
    masm.store32(Imm32(0), matchResultAddress);

    // Save any volatile inputs.
    LiveGeneralRegisterSet volatileRegs;
    if (lastIndex.volatile_())
        volatileRegs.add(lastIndex);
    if (input.volatile_())
        volatileRegs.add(input);
    if (regexp.volatile_())
        volatileRegs.add(regexp);

#ifdef JS_TRACE_LOGGING
    if (TraceLogTextIdEnabled(TraceLogger_IrregexpExecute)) {
        masm.push(temp1);
        masm.loadTraceLogger(temp1);
        masm.tracelogStartId(temp1, TraceLogger_IrregexpExecute);
        masm.pop(temp1);
    }
#endif

    // Execute the RegExp.
    masm.computeEffectiveAddress(Address(masm.getStackPointer(), inputOutputDataStartOffset), temp2);
    masm.PushRegsInMask(volatileRegs);
    masm.setupUnalignedABICall(temp3);
    masm.passABIArg(temp2);
    masm.callWithABI(codePointer);
    masm.PopRegsInMask(volatileRegs);

#ifdef JS_TRACE_LOGGING
    if (TraceLogTextIdEnabled(TraceLogger_IrregexpExecute)) {
        masm.loadTraceLogger(temp1);
        masm.tracelogStopId(temp1, TraceLogger_IrregexpExecute);
    }
#endif

    Label success;
    masm.branch32(Assembler::Equal, matchResultAddress,
                  Imm32(RegExpRunStatus_Success_NotFound), notFound);
    masm.branch32(Assembler::Equal, matchResultAddress,
                  Imm32(RegExpRunStatus_Error), failure);

    // Lazily update the RegExpStatics.
    masm.movePtr(ImmPtr(res), temp1);

    Address pendingInputAddress(temp1, RegExpStatics::offsetOfPendingInput());
    Address matchesInputAddress(temp1, RegExpStatics::offsetOfMatchesInput());
    Address lazySourceAddress(temp1, RegExpStatics::offsetOfLazySource());
    Address lazyIndexAddress(temp1, RegExpStatics::offsetOfLazyIndex());

    masm.guardedCallPreBarrier(pendingInputAddress, MIRType::String);
    masm.guardedCallPreBarrier(matchesInputAddress, MIRType::String);
    masm.guardedCallPreBarrier(lazySourceAddress, MIRType::String);

    if (stringsCanBeInNursery) {
        // Writing into RegExpStatics tenured memory; must post-barrier.
        if (temp1.volatile_())
            volatileRegs.add(temp1);

        masm.loadPtr(pendingInputAddress, temp2);
        masm.storePtr(input, pendingInputAddress);
        masm.movePtr(input, temp3);
        EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfPendingInput(),
                              temp2 /* prev */, temp3 /* next */, volatileRegs);

        masm.loadPtr(matchesInputAddress, temp2);
        masm.storePtr(input, matchesInputAddress);
        masm.movePtr(input, temp3);
        EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfMatchesInput(),
                              temp2 /* prev */, temp3 /* next */, volatileRegs);
    } else {
        masm.storePtr(input, pendingInputAddress);
        masm.storePtr(input, matchesInputAddress);
    }

    masm.storePtr(lastIndex, Address(temp1, RegExpStatics::offsetOfLazyIndex()));
    masm.store32(Imm32(1), Address(temp1, RegExpStatics::offsetOfPendingLazyEvaluation()));

    masm.loadPtr(Address(regexp, NativeObject::getFixedSlotOffset(RegExpObject::PRIVATE_SLOT)), temp2);
    masm.loadPtr(Address(temp2, RegExpShared::offsetOfSource()), temp3);
    masm.storePtr(temp3, lazySourceAddress);
    masm.load32(Address(temp2, RegExpShared::offsetOfFlags()), temp3);
    masm.store32(temp3, Address(temp1, RegExpStatics::offsetOfLazyFlags()));
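
    // With pendingLazyEvaluation set, the lazySource, lazyFlags and lazyIndex
    // stored above let the full match data be recomputed on demand the next
    // time the statics are read.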

    if (mode == RegExpShared::MatchOnly) {
        // endIndex is passed via temp3.
        masm.load32(endIndexAddress, temp3);
    }

    return true;
}

static void
CopyStringChars(MacroAssembler& masm, Register to, Register from, Register len,
                Register byteOpScratch, size_t fromWidth, size_t toWidth);

class CreateDependentString
{
    Register string_;
    Register temp_;
    Label* failure_;
    enum class FallbackKind : uint8_t {
        InlineString,
        FatInlineString,
        NotInlineString,
        Count
    };
    mozilla::EnumeratedArray<FallbackKind, FallbackKind::Count, Label> fallbacks_, joins_;

public:
    // Generate code that creates a DependentString.
    // The caller should call generateFallback after masm.ret() to generate
    // the fallback path.
    void generate(MacroAssembler& masm, const JSAtomState& names,
                  CompileRuntime* runtime,
                  bool latin1, Register string,
                  Register base, Register temp1, Register temp2,
                  BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
                  bool stringsCanBeInNursery,
                  Label* failure);

    // Generate fallback path for creating DependentString.
    void generateFallback(MacroAssembler& masm, LiveRegisterSet regsToSave);
};

void
CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
                                CompileRuntime* runtime,
                                bool latin1, Register string,
                                Register base, Register temp1, Register temp2,
                                BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
                                bool stringsCanBeInNursery,
                                Label* failure)
{
    string_ = string;
    temp_ = temp2;
    failure_ = failure;

    // Compute the string length.
    masm.load32(startIndexAddress, temp2);
    masm.load32(limitIndexAddress, temp1);
    masm.sub32(temp2, temp1);

    Label done, nonEmpty;

    // Zero-length matches use the empty string.
    masm.branchTest32(Assembler::NonZero, temp1, temp1, &nonEmpty);
    masm.movePtr(ImmGCPtr(names.empty), string);
    masm.jump(&done);

    masm.bind(&nonEmpty);

    Label notInline;

    int32_t maxInlineLength = latin1
                              ? (int32_t) JSFatInlineString::MAX_LENGTH_LATIN1
                              : (int32_t) JSFatInlineString::MAX_LENGTH_TWO_BYTE;
    masm.branch32(Assembler::Above, temp1, Imm32(maxInlineLength), &notInline);
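    // Matches short enough for a (fat) inline string are copied into a new
    // inline string below; longer matches branch to &notInline and become
    // dependent strings.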

    {
        // Make a thin or fat inline string.
        Label stringAllocated, fatInline;

        int32_t maxThinInlineLength = latin1
                                      ? (int32_t) JSThinInlineString::MAX_LENGTH_LATIN1
                                      : (int32_t) JSThinInlineString::MAX_LENGTH_TWO_BYTE;
        masm.branch32(Assembler::Above, temp1, Imm32(maxThinInlineLength), &fatInline);

        int32_t thinFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_THIN_INLINE_FLAGS;
        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::InlineString], stringsCanBeInNursery);
        masm.bind(&joins_[FallbackKind::InlineString]);
        masm.store32(Imm32(thinFlags), Address(string, JSString::offsetOfFlags()));
        masm.jump(&stringAllocated);

        masm.bind(&fatInline);

        int32_t fatFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_FAT_INLINE_FLAGS;
        masm.newGCFatInlineString(string, temp2, &fallbacks_[FallbackKind::FatInlineString], stringsCanBeInNursery);
        masm.bind(&joins_[FallbackKind::FatInlineString]);
        masm.store32(Imm32(fatFlags), Address(string, JSString::offsetOfFlags()));

        masm.bind(&stringAllocated);
        masm.store32(temp1, Address(string, JSString::offsetOfLength()));

        masm.push(string);
        masm.push(base);

        // Adjust the start index address for the above pushes.
        MOZ_ASSERT(startIndexAddress.base == masm.getStackPointer());
        BaseIndex newStartIndexAddress = startIndexAddress;
        newStartIndexAddress.offset += 2 * sizeof(void*);

        // Load chars pointer for the new string.
        masm.loadInlineStringCharsForStore(string, string);

        // Load the source characters pointer.
        masm.loadStringChars(base, temp2,
                             latin1 ? CharEncoding::Latin1 : CharEncoding::TwoByte);
        masm.load32(newStartIndexAddress, base);
        if (latin1)
            masm.addPtr(temp2, base);
        else
            masm.computeEffectiveAddress(BaseIndex(temp2, base, TimesTwo), base);

        CopyStringChars(masm, string, base, temp1, temp2, latin1 ? 1 : 2, latin1 ? 1 : 2);

        // Null-terminate.
        if (latin1)
            masm.store8(Imm32(0), Address(string, 0));
        else
            masm.store16(Imm32(0), Address(string, 0));

        masm.pop(base);
        masm.pop(string);
    }

    masm.jump(&done);
    masm.bind(&notInline);

    {
        // Make a dependent string.
        int32_t flags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::DEPENDENT_FLAGS;

        masm.newGCString(string, temp2, &fallbacks_[FallbackKind::NotInlineString], stringsCanBeInNursery);
        // Warning: string may be tenured (if the fallback case is hit), so
        // stores into it must be post-barriered.
        masm.bind(&joins_[FallbackKind::NotInlineString]);
        masm.store32(Imm32(flags), Address(string, JSString::offsetOfFlags()));
        masm.store32(temp1, Address(string, JSString::offsetOfLength()));

        masm.loadNonInlineStringChars(base, temp1,
                                      latin1 ? CharEncoding::Latin1 : CharEncoding::TwoByte);
        masm.load32(startIndexAddress, temp2);
        if (latin1)
            masm.addPtr(temp2, temp1);
        else
            masm.computeEffectiveAddress(BaseIndex(temp1, temp2, TimesTwo), temp1);
        masm.storeNonInlineStringChars(temp1, string);
        masm.storeDependentStringBase(base, string);
        masm.movePtr(base, temp1);

        // Follow any base pointer if the input is itself a dependent string.
        // Watch for undepended strings, which have a base pointer but don't
        // actually share their characters with it.
        Label noBase;
        masm.load32(Address(base, JSString::offsetOfFlags()), temp2);
        masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), temp2);
        masm.branch32(Assembler::NotEqual, temp2, Imm32(JSString::DEPENDENT_FLAGS), &noBase);
        masm.loadDependentStringBase(base, temp1);
        masm.storeDependentStringBase(temp1, string);
        masm.bind(&noBase);

        // Post-barrier the base store, whether it was the direct or indirect
        // base (both will end up in temp1 here).
        masm.branchPtrInNurseryChunk(Assembler::Equal, string, temp2, &done);
        masm.branchPtrInNurseryChunk(Assembler::NotEqual, temp1, temp2, &done);

        LiveRegisterSet regsToSave(RegisterSet::Volatile());
        regsToSave.takeUnchecked(temp1);
        regsToSave.takeUnchecked(temp2);
        regsToSave.addUnchecked(string);

        masm.PushRegsInMask(regsToSave);

        masm.mov(ImmPtr(runtime), temp1);

        masm.setupUnalignedABICall(temp2);
        masm.passABIArg(temp1);
        masm.passABIArg(string);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));

        masm.PopRegsInMask(regsToSave);
    }

    masm.bind(&done);
}

static void*
AllocateString(JSContext* cx)
{
    AutoUnsafeCallWithABI unsafe;
    return js::Allocate<JSString, NoGC>(cx, js::gc::TenuredHeap);
}

static void*
AllocateFatInlineString(JSContext* cx)
{
    AutoUnsafeCallWithABI unsafe;
    return js::Allocate<JSFatInlineString, NoGC>(cx, js::gc::TenuredHeap);
}

void
CreateDependentString::generateFallback(MacroAssembler& masm, LiveRegisterSet regsToSave)
{
    regsToSave.take(string_);
    regsToSave.take(temp_);
    for (FallbackKind kind : mozilla::MakeEnumeratedRange(FallbackKind::Count)) {
        masm.bind(&fallbacks_[kind]);

        masm.PushRegsInMask(regsToSave);

        masm.setupUnalignedABICall(string_);
        masm.loadJSContext(string_);
        masm.passABIArg(string_);
        masm.callWithABI(kind == FallbackKind::FatInlineString
                         ? JS_FUNC_TO_DATA_PTR(void*, AllocateFatInlineString)
                         : JS_FUNC_TO_DATA_PTR(void*, AllocateString));
        masm.storeCallPointerResult(string_);

        masm.PopRegsInMask(regsToSave);

        masm.branchPtr(Assembler::Equal, string_, ImmWord(0), failure_);

        masm.jump(&joins_[kind]);
    }
}

static void*
CreateMatchResultFallbackFunc(JSContext* cx, gc::AllocKind kind, size_t nDynamicSlots)
{
    AutoUnsafeCallWithABI unsafe;
    return js::Allocate<JSObject, NoGC>(cx, kind, nDynamicSlots, gc::DefaultHeap,
                                        &ArrayObject::class_);
}

static void
CreateMatchResultFallback(MacroAssembler& masm, LiveRegisterSet regsToSave,
                          Register object, Register temp2, Register temp5,
                          ArrayObject* templateObj, Label* fail)
{
    MOZ_ASSERT(templateObj->group()->clasp() == &ArrayObject::class_);

    regsToSave.take(object);
    regsToSave.take(temp2);
    regsToSave.take(temp5);
    masm.PushRegsInMask(regsToSave);

    masm.setupUnalignedABICall(object);

    masm.loadJSContext(object);
    masm.passABIArg(object);
    masm.move32(Imm32(int32_t(templateObj->asTenured().getAllocKind())), temp2);
    masm.passABIArg(temp2);
    masm.move32(Imm32(int32_t(templateObj->as<NativeObject>().numDynamicSlots())), temp5);
    masm.passABIArg(temp5);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, CreateMatchResultFallbackFunc));
    masm.storeCallPointerResult(object);

    masm.PopRegsInMask(regsToSave);

    masm.branchPtr(Assembler::Equal, object, ImmWord(0), fail);

    TemplateObject templateObject(templateObj);
    masm.initGCThing(object, temp2, templateObject, true);
}

JitCode*
JitRealm::generateRegExpMatcherStub(JSContext* cx)
{
    Register regexp = RegExpMatcherRegExpReg;
    Register input = RegExpMatcherStringReg;
    Register lastIndex = RegExpMatcherLastIndexReg;
    ValueOperand result = JSReturnOperand;

    // We are free to clobber all registers, as LRegExpMatcher is a call instruction.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    regs.take(input);
    regs.take(regexp);
    regs.take(lastIndex);

    // temp5 is used in single-byte instructions when creating dependent
    // strings; on some platforms only certain registers can be used in
    // such instructions.
    Register temp5;
    {
        AllocatableGeneralRegisterSet oregs = regs;
        do {
            temp5 = oregs.takeAny();
        } while (!MacroAssembler::canUseInSingleByteInstruction(temp5));
        regs.take(temp5);
    }

    Register temp1 = regs.takeAny();
    Register temp2 = regs.takeAny();
    Register temp3 = regs.takeAny();

    Register maybeTemp4 = InvalidReg;
    if (!regs.empty()) {
        // On x86 there are not enough registers for a fourth temp, so take
        // one only when it is available.
        maybeTemp4 = regs.takeAny();
    }

    ArrayObject* templateObject = cx->realm()->regExps.getOrCreateMatchResultTemplateObject(cx);
    if (!templateObject)
        return nullptr;

    // The template object should have enough space for the maximum number of
    // pairs this stub can handle.
    MOZ_ASSERT(ObjectElements::VALUES_PER_HEADER + RegExpObject::MaxPairCount ==
               gc::GetGCKindSlots(templateObject->asTenured().getAllocKind()));

    StackMacroAssembler masm(cx);

    // The InputOutputData is placed above the return address on the stack.
    size_t inputOutputDataStartOffset = sizeof(void*);

    Label notFound, oolEntry;
    if (!PrepareAndExecuteRegExp(cx, masm, regexp, input, lastIndex,
                                 temp1, temp2, temp5, inputOutputDataStartOffset,
                                 RegExpShared::Normal, stringsCanBeInNursery, &notFound, &oolEntry))
    {
        return nullptr;
    }

    // Construct the result.
    Register object = temp1;
    Label matchResultFallback, matchResultJoin;
    TemplateObject templateObj(templateObject);
    masm.createGCObject(object, temp2, templateObj, gc::DefaultHeap, &matchResultFallback);
    masm.bind(&matchResultJoin);

    // Initialize slots of result object.
    masm.loadPtr(Address(object, NativeObject::offsetOfSlots()), temp2);
    masm.storeValue(templateObject->getSlot(0), Address(temp2, 0));
    masm.storeValue(templateObject->getSlot(1), Address(temp2, sizeof(Value)));

    size_t elementsOffset = NativeObject::offsetOfFixedElements();

#ifdef DEBUG
    // Assert the initial values of initializedLength and length to make sure
    // the restoration in the failure case works.
    {
        Label initLengthOK, lengthOK;
        masm.branch32(Assembler::Equal,
                      Address(object, elementsOffset + ObjectElements::offsetOfInitializedLength()),
                      Imm32(templateObject->getDenseInitializedLength()),
                      &initLengthOK);
        masm.assumeUnreachable("Initial value of the match object's initializedLength does not match to restoration.");
        masm.bind(&initLengthOK);

        masm.branch32(Assembler::Equal,
                      Address(object, elementsOffset + ObjectElements::offsetOfLength()),
                      Imm32(templateObject->length()),
                      &lengthOK);
        masm.assumeUnreachable("Initial value of The match object's length does not match to restoration.");
        masm.bind(&lengthOK);
    }
#endif

    Register matchIndex = temp2;
    masm.move32(Imm32(0), matchIndex);

    size_t pairsVectorStartOffset = RegExpPairsVectorStartOffset(inputOutputDataStartOffset);
    Address pairsVectorAddress(masm.getStackPointer(), pairsVectorStartOffset);
    Address pairCountAddress = RegExpPairCountAddress(masm, inputOutputDataStartOffset);

    BaseIndex stringAddress(object, matchIndex, TimesEight, elementsOffset);

    JS_STATIC_ASSERT(sizeof(MatchPair) == 8);
    BaseIndex stringIndexAddress(masm.getStackPointer(), matchIndex, TimesEight,
                                 pairsVectorStartOffset + offsetof(MatchPair, start));
    BaseIndex stringLimitAddress(masm.getStackPointer(), matchIndex, TimesEight,
                                 pairsVectorStartOffset + offsetof(MatchPair, limit));
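
    // Each MatchPair is two int32s (start, limit), so matchIndex scales by
    // eight both into the pairs vector on the stack and into the result
    // object's dense elements (one Value per pair).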

    // Loop to construct the match strings. There are two different loops,
    // depending on whether the input is latin1.
    CreateDependentString depStr[2];

    // depStr may refer to failureRestore during generateFallback below,
    // so this variable must live outside of the block.
    Label failureRestore;
    {
        Label isLatin1, done;
        masm.branchLatin1String(input, &isLatin1);

        Label* failure = &oolEntry;
        Register temp4 = (maybeTemp4 == InvalidReg) ? lastIndex : maybeTemp4;

        if (maybeTemp4 == InvalidReg) {
            failure = &failureRestore;

            // Save the lastIndex value in the match object's length slot; the
            // slot is rewritten (or restored on failure) below, so it is free
            // to use as temporary space here.
            masm.store32(lastIndex, Address(object, elementsOffset + ObjectElements::offsetOfLength()));
        }

        for (int isLatin = 0; isLatin <= 1; isLatin++) {
            if (isLatin)
                masm.bind(&isLatin1);

            Label matchLoop;
            masm.bind(&matchLoop);

            Label isUndefined, storeDone;
            masm.branch32(Assembler::LessThan, stringIndexAddress, Imm32(0), &isUndefined);
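            // A negative start index means this capture did not match; store
            // |undefined| for it.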

            depStr[isLatin].generate(masm, cx->names(),
                                     CompileRuntime::get(cx->runtime()),
                                     isLatin, temp3, input, temp4, temp5,
                                     stringIndexAddress, stringLimitAddress,
                                     stringsCanBeInNursery,
                                     failure);

            masm.storeValue(JSVAL_TYPE_STRING, temp3, stringAddress);
            // Storing into the nursery-allocated result object's elements; no post barrier needed.
            masm.jump(&storeDone);
            masm.bind(&isUndefined);

            masm.storeValue(UndefinedValue(), stringAddress);
            masm.bind(&storeDone);

            masm.add32(Imm32(1), matchIndex);
            masm.branch32(Assembler::LessThanOrEqual, pairCountAddress, matchIndex, &done);
            masm.jump(&matchLoop);
        }

        if (maybeTemp4 == InvalidReg) {
            // Restore lastIndex value from temporary space, both for success
            // and failure cases.

            masm.load32(Address(object, elementsOffset + ObjectElements::offsetOfLength()), lastIndex);
            masm.jump(&done);

            masm.bind(&failureRestore);
            masm.load32(Address(object, elementsOffset + ObjectElements::offsetOfLength()), lastIndex);

            // Restore the match object for the failure case.
            masm.store32(Imm32(templateObject->getDenseInitializedLength()),
                         Address(object, elementsOffset + ObjectElements::offsetOfInitializedLength()));
            masm.store32(Imm32(templateObject->length()),
                         Address(object, elementsOffset + ObjectElements::offsetOfLength()));
            masm.jump(&oolEntry);
        }

        masm.bind(&done);
    }

    // Fill in the rest of the output object.
    masm.store32(matchIndex, Address(object, elementsOffset + ObjectElements::offsetOfInitializedLength()));
    masm.store32(matchIndex, Address(object, elementsOffset + ObjectElements::offsetOfLength()));

    masm.loadPtr(Address(object, NativeObject::offsetOfSlots()), temp2);

    MOZ_ASSERT(templateObject->numFixedSlots() == 0);
    MOZ_ASSERT(templateObject->lookupPure(cx->names().index)->slot() == 0);
    MOZ_ASSERT(templateObject->lookupPure(cx->names().input)->slot() == 1);

    masm.load32(pairsVectorAddress, temp3);
    masm.storeValue(JSVAL_TYPE_INT32, temp3, Address(temp2, 0));
    Address inputSlotAddress(temp2, sizeof(Value));
    masm.storeValue(JSVAL_TYPE_STRING, input, inputSlotAddress);
    // No post barrier needed (inputSlotAddress is within a nursery-allocated object).

    // All done!
    masm.tagValue(JSVAL_TYPE_OBJECT, object, result);
    masm.ret();

    masm.bind(&notFound);
    masm.moveValue(NullValue(), result);
    masm.ret();

    // Fallback paths for CreateDependentString and createGCObject.
    // All registers that were in use when they were called need to be saved.
    LiveRegisterSet regsToSave(RegisterSet::Volatile());
    regsToSave.addUnchecked(regexp);
    regsToSave.addUnchecked(input);
    regsToSave.addUnchecked(lastIndex);
    regsToSave.addUnchecked(temp1);
    regsToSave.addUnchecked(temp2);
    regsToSave.addUnchecked(temp3);
    if (maybeTemp4 != InvalidReg)
        regsToSave.addUnchecked(maybeTemp4);
    regsToSave.addUnchecked(temp5);

    for (int isLatin = 0; isLatin <= 1; isLatin++)
        depStr[isLatin].generateFallback(masm, regsToSave);

    masm.bind(&matchResultFallback);
    CreateMatchResultFallback(masm, regsToSave, object, temp2, temp5, templateObject, &oolEntry);
    masm.jump(&matchResultJoin);

    // Use an undefined value to signal to the caller that the OOL stub needs to be called.
    masm.bind(&oolEntry);
    masm.moveValue(UndefinedValue(), result);
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("RegExpMatcherStub");
    JitCode* code = linker.newCode(cx, CodeKind::Other);
    if (!code)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "RegExpMatcherStub");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "RegExpMatcherStub");
#endif

    return code;
}

class OutOfLineRegExpMatcher : public OutOfLineCodeBase<CodeGenerator>
{
    LRegExpMatcher* lir_;

  public:
    explicit OutOfLineRegExpMatcher(LRegExpMatcher* lir)
      : lir_(lir)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineRegExpMatcher(this);
    }

    LRegExpMatcher* lir() const {
        return lir_;
    }
};

typedef bool (*RegExpMatcherRawFn)(JSContext* cx, HandleObject regexp, HandleString input,
                                   int32_t lastIndex,
                                   MatchPairs* pairs, MutableHandleValue output);
static const VMFunction RegExpMatcherRawInfo =
    FunctionInfo<RegExpMatcherRawFn>(RegExpMatcherRaw, "RegExpMatcherRaw");

void
CodeGenerator::visitOutOfLineRegExpMatcher(OutOfLineRegExpMatcher* ool)
{
    LRegExpMatcher* lir = ool->lir();
    Register lastIndex = ToRegister(lir->lastIndex());
    Register input = ToRegister(lir->string());
    Register regexp = ToRegister(lir->regexp());

    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    regs.take(lastIndex);
    regs.take(input);
    regs.take(regexp);
    Register temp = regs.takeAny();

    masm.computeEffectiveAddress(Address(masm.getStackPointer(),
        sizeof(irregexp::InputOutputData)), temp);

    pushArg(temp);
    pushArg(lastIndex);
    pushArg(input);
    pushArg(regexp);

    // We are not using oolCallVM because we are in a Call, and the live
    // registers are already saved by the register allocator.
    callVM(RegExpMatcherRawInfo, lir);

    masm.jump(ool->rejoin());
}

void
CodeGenerator::visitRegExpMatcher(LRegExpMatcher* lir)
{
    MOZ_ASSERT(ToRegister(lir->regexp()) == RegExpMatcherRegExpReg);
    MOZ_ASSERT(ToRegister(lir->string()) == RegExpMatcherStringReg);
    MOZ_ASSERT(ToRegister(lir->lastIndex()) == RegExpMatcherLastIndexReg);
    MOZ_ASSERT(ToOutValue(lir) == JSReturnOperand);

#if defined(JS_NUNBOX32)
    MOZ_ASSERT(RegExpMatcherRegExpReg != JSReturnReg_Type);
    MOZ_ASSERT(RegExpMatcherRegExpReg != JSReturnReg_Data);
    MOZ_ASSERT(RegExpMatcherStringReg != JSReturnReg_Type);
    MOZ_ASSERT(RegExpMatcherStringReg != JSReturnReg_Data);
    MOZ_ASSERT(RegExpMatcherLastIndexReg != JSReturnReg_Type);
    MOZ_ASSERT(RegExpMatcherLastIndexReg != JSReturnReg_Data);
#elif defined(JS_PUNBOX64)
    MOZ_ASSERT(RegExpMatcherRegExpReg != JSReturnReg);
    MOZ_ASSERT(RegExpMatcherStringReg != JSReturnReg);
    MOZ_ASSERT(RegExpMatcherLastIndexReg != JSReturnReg);
#endif

    masm.reserveStack(RegExpReservedStack);
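
    // Reserve the scratch area the matcher stub expects above its return
    // address (the InputOutputData followed by the match pairs).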

    OutOfLineRegExpMatcher* ool = new(alloc()) OutOfLineRegExpMatcher(lir);
    addOutOfLineCode(ool, lir->mir());

    const JitRealm* jitRealm = gen->compartment->jitRealm();
    JitCode* regExpMatcherStub = jitRealm->regExpMatcherStubNoBarrier(&realmStubsToReadBarrier_);
    masm.call(regExpMatcherStub);
    masm.branchTestUndefined(Assembler::Equal, JSReturnOperand, ool->entry());
    masm.bind(ool->rejoin());

    masm.freeStack(RegExpReservedStack);
}

static const int32_t RegExpSearcherResultNotFound = -1;
static const int32_t RegExpSearcherResultFailed = -2;

JitCode*
JitRealm::generateRegExpSearcherStub(JSContext* cx)
{
    Register regexp = RegExpTesterRegExpReg;
    Register input = RegExpTesterStringReg;
    Register lastIndex = RegExpTesterLastIndexReg;
    Register result = ReturnReg;

    // We are free to clobber all registers, as LRegExpSearcher is a call instruction.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    regs.take(input);
    regs.take(regexp);
    regs.take(lastIndex);

    Register temp1 = regs.takeAny();
    Register temp2 = regs.takeAny();
    Register temp3 = regs.takeAny();

    StackMacroAssembler masm(cx);

    // The InputOutputData is placed above the return address on the stack.
    size_t inputOutputDataStartOffset = sizeof(void*);

    Label notFound, oolEntry;
    if (!PrepareAndExecuteRegExp(cx, masm, regexp, input, lastIndex,
                                 temp1, temp2, temp3, inputOutputDataStartOffset,
                                 RegExpShared::Normal, stringsCanBeInNursery,
                                 &notFound, &oolEntry))
    {
        return nullptr;
    }

    size_t pairsVectorStartOffset = RegExpPairsVectorStartOffset(inputOutputDataStartOffset);
    Address stringIndexAddress(masm.getStackPointer(),
                               pairsVectorStartOffset + offsetof(MatchPair, start));
    Address stringLimitAddress(masm.getStackPointer(),
                               pairsVectorStartOffset + offsetof(MatchPair, limit));
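
    // The start and limit of the first MatchPair are packed below into a
    // single int32 result: (limit << 15) | start.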

    masm.load32(stringIndexAddress, result);
    masm.load32(stringLimitAddress, input);
    masm.lshiftPtr(Imm32(15), input);
    masm.or32(input, result);
    masm.ret();

    masm.bind(&notFound);
    masm.move32(Imm32(RegExpSearcherResultNotFound), result);
    masm.ret();

    masm.bind(&oolEntry);
    masm.move32(Imm32(RegExpSearcherResultFailed), result);
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("RegExpSearcherStub");
    JitCode* code = linker.newCode(cx, CodeKind::Other);
    if (!code)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "RegExpSearcherStub");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "RegExpSearcherStub");
#endif

    return code;
}

class OutOfLineRegExpSearcher : public OutOfLineCodeBase<CodeGenerator>
{
    LRegExpSearcher* lir_;

  public:
    explicit OutOfLineRegExpSearcher(LRegExpSearcher* lir)
      : lir_(lir)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineRegExpSearcher(this);
    }

    LRegExpSearcher* lir() const {
        return lir_;
    }
};

typedef bool (*RegExpSearcherRawFn)(JSContext* cx, HandleObject regexp, HandleString input,
                                    int32_t lastIndex,
                                    MatchPairs* pairs, int32_t* result);
static const VMFunction RegExpSearcherRawInfo =
    FunctionInfo<RegExpSearcherRawFn>(RegExpSearcherRaw, "RegExpSearcherRaw");

void
CodeGenerator::visitOutOfLineRegExpSearcher(OutOfLineRegExpSearcher* ool)
{
    LRegExpSearcher* lir = ool->lir();
    Register lastIndex = ToRegister(lir->lastIndex());
    Register input = ToRegister(lir->string());
    Register regexp = ToRegister(lir->regexp());

    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    regs.take(lastIndex);
    regs.take(input);
    regs.take(regexp);
    Register temp = regs.takeAny();

    masm.computeEffectiveAddress(Address(masm.getStackPointer(),
        sizeof(irregexp::InputOutputData)), temp);

    pushArg(temp);
    pushArg(lastIndex);
    pushArg(input);
    pushArg(regexp);

    // We are not using oolCallVM because we are in a Call, and the live
    // registers are already saved by the register allocator.
    callVM(RegExpSearcherRawInfo, lir);

    masm.jump(ool->rejoin());
}

void
CodeGenerator::visitRegExpSearcher(LRegExpSearcher* lir)
{
    MOZ_ASSERT(ToRegister(lir->regexp()) == RegExpTesterRegExpReg);
    MOZ_ASSERT(ToRegister(lir->string()) == RegExpTesterStringReg);
    MOZ_ASSERT(ToRegister(lir->lastIndex()) == RegExpTesterLastIndexReg);
    MOZ_ASSERT(ToRegister(lir->output()) == ReturnReg);

    MOZ_ASSERT(RegExpTesterRegExpReg != ReturnReg);
    MOZ_ASSERT(RegExpTesterStringReg != ReturnReg);
    MOZ_ASSERT(RegExpTesterLastIndexReg != ReturnReg);

    masm.reserveStack(RegExpReservedStack);

    OutOfLineRegExpSearcher* ool = new(alloc()) OutOfLineRegExpSearcher(lir);
    addOutOfLineCode(ool, lir->mir());

    const JitRealm* jitRealm = gen->compartment->jitRealm();
    JitCode* regExpSearcherStub = jitRealm->regExpSearcherStubNoBarrier(&realmStubsToReadBarrier_);
    masm.call(regExpSearcherStub);
    masm.branch32(Assembler::Equal, ReturnReg, Imm32(RegExpSearcherResultFailed), ool->entry());
    masm.bind(ool->rejoin());

    masm.freeStack(RegExpReservedStack);
}

static const int32_t RegExpTesterResultNotFound = -1;
static const int32_t RegExpTesterResultFailed = -2;

JitCode*
JitRealm::generateRegExpTesterStub(JSContext* cx)
{
    Register regexp = RegExpTesterRegExpReg;
    Register input = RegExpTesterStringReg;
    Register lastIndex = RegExpTesterLastIndexReg;
    Register result = ReturnReg;

    StackMacroAssembler masm(cx);

#ifdef JS_USE_LINK_REGISTER
    masm.pushReturnAddress();
#endif

    // We are free to clobber all registers, as LRegExpTester is a call instruction.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    regs.take(input);
    regs.take(regexp);
    regs.take(lastIndex);

    Register temp1 = regs.takeAny();
    Register temp2 = regs.takeAny();
    Register temp3 = regs.takeAny();

    masm.reserveStack(sizeof(irregexp::InputOutputData));

    Label notFound, oolEntry;
    if (!PrepareAndExecuteRegExp(cx, masm, regexp, input, lastIndex,
                                 temp1, temp2, temp3, 0,
                                 RegExpShared::MatchOnly, stringsCanBeInNursery,
                                 &notFound, &oolEntry))
    {
        return nullptr;
    }

    Label done;

    // temp3 contains endIndex.
    masm.move32(temp3, result);
    masm.jump(&done);

    masm.bind(&notFound);
    masm.move32(Imm32(RegExpTesterResultNotFound), result);
    masm.jump(&done);

    masm.bind(&oolEntry);
    masm.move32(Imm32(RegExpTesterResultFailed), result);

    masm.bind(&done);
    masm.freeStack(sizeof(irregexp::InputOutputData));
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("RegExpTesterStub");
    JitCode* code = linker.newCode(cx, CodeKind::Other);
    if (!code)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "RegExpTesterStub");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "RegExpTesterStub");
#endif

    return code;
}

class OutOfLineRegExpTester : public OutOfLineCodeBase<CodeGenerator>
{
    LRegExpTester* lir_;

  public:
    explicit OutOfLineRegExpTester(LRegExpTester* lir)
      : lir_(lir)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineRegExpTester(this);
    }

    LRegExpTester* lir() const {
        return lir_;
    }
};

typedef bool (*RegExpTesterRawFn)(JSContext* cx, HandleObject regexp, HandleString input,
                                  int32_t lastIndex, int32_t* result);
static const VMFunction RegExpTesterRawInfo =
    FunctionInfo<RegExpTesterRawFn>(RegExpTesterRaw, "RegExpTesterRaw");

void
CodeGenerator::visitOutOfLineRegExpTester(OutOfLineRegExpTester* ool)
{
    LRegExpTester* lir = ool->lir();
    Register lastIndex = ToRegister(lir->lastIndex());
    Register input = ToRegister(lir->string());
    Register regexp = ToRegister(lir->regexp());

    pushArg(lastIndex);
    pushArg(input);
    pushArg(regexp);

    // We are not using oolCallVM because we are in a Call, and the live
    // registers are already saved by the register allocator.
    callVM(RegExpTesterRawInfo, lir);

    masm.jump(ool->rejoin());
}

void
CodeGenerator::visitRegExpTester(LRegExpTester* lir)
{
    MOZ_ASSERT(ToRegister(lir->regexp()) == RegExpTesterRegExpReg);
    MOZ_ASSERT(ToRegister(lir->string()) == RegExpTesterStringReg);
    MOZ_ASSERT(ToRegister(lir->lastIndex()) == RegExpTesterLastIndexReg);
    MOZ_ASSERT(ToRegister(lir->output()) == ReturnReg);

    MOZ_ASSERT(RegExpTesterRegExpReg != ReturnReg);
    MOZ_ASSERT(RegExpTesterStringReg != ReturnReg);
    MOZ_ASSERT(RegExpTesterLastIndexReg != ReturnReg);

    OutOfLineRegExpTester* ool = new(alloc()) OutOfLineRegExpTester(lir);
    addOutOfLineCode(ool, lir->mir());

    const JitRealm* jitRealm = gen->compartment->jitRealm();
    JitCode* regExpTesterStub = jitRealm->regExpTesterStubNoBarrier(&realmStubsToReadBarrier_);
    masm.call(regExpTesterStub);

    masm.branch32(Assembler::Equal, ReturnReg, Imm32(RegExpTesterResultFailed), ool->entry());
    masm.bind(ool->rejoin());
}

class OutOfLineRegExpPrototypeOptimizable : public OutOfLineCodeBase<CodeGenerator>
{
    LRegExpPrototypeOptimizable* ins_;

  public:
    explicit OutOfLineRegExpPrototypeOptimizable(LRegExpPrototypeOptimizable* ins)
      : ins_(ins)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineRegExpPrototypeOptimizable(this);
    }
    LRegExpPrototypeOptimizable* ins() const {
        return ins_;
    }
};

void
CodeGenerator::visitRegExpPrototypeOptimizable(LRegExpPrototypeOptimizable* ins)
{
    Register object = ToRegister(ins->object());
    Register output = ToRegister(ins->output());
    Register temp = ToRegister(ins->temp());

    OutOfLineRegExpPrototypeOptimizable* ool = new(alloc()) OutOfLineRegExpPrototypeOptimizable(ins);
    addOutOfLineCode(ool, ins->mir());

    masm.loadJSContext(temp);
    masm.loadPtr(Address(temp, JSContext::offsetOfRealm()), temp);
    size_t offset = Realm::offsetOfRegExps() +
                    RegExpRealm::offsetOfOptimizableRegExpPrototypeShape();
    masm.loadPtr(Address(temp, offset), temp);

    masm.branchTestObjShapeUnsafe(Assembler::NotEqual, object, temp, ool->entry());
    masm.move32(Imm32(0x1), output);
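
    // A matching shape means RegExp.prototype is still in its optimizable
    // state, so we can answer true without calling into the VM.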

    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitOutOfLineRegExpPrototypeOptimizable(OutOfLineRegExpPrototypeOptimizable* ool)
{
    LRegExpPrototypeOptimizable* ins = ool->ins();
    Register object = ToRegister(ins->object());
    Register output = ToRegister(ins->output());

    saveVolatile(output);

    masm.setupUnalignedABICall(output);
    masm.loadJSContext(output);
    masm.passABIArg(output);
    masm.passABIArg(object);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, RegExpPrototypeOptimizableRaw));
    masm.storeCallBoolResult(output);

    restoreVolatile(output);

    masm.jump(ool->rejoin());
}

class OutOfLineRegExpInstanceOptimizable : public OutOfLineCodeBase<CodeGenerator>
{
    LRegExpInstanceOptimizable* ins_;

  public:
    explicit OutOfLineRegExpInstanceOptimizable(LRegExpInstanceOptimizable* ins)
      : ins_(ins)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineRegExpInstanceOptimizable(this);
    }
    LRegExpInstanceOptimizable* ins() const {
        return ins_;
    }
};

void
CodeGenerator::visitRegExpInstanceOptimizable(LRegExpInstanceOptimizable* ins)
{
    Register object = ToRegister(ins->object());
    Register output = ToRegister(ins->output());
    Register temp = ToRegister(ins->temp());

    OutOfLineRegExpInstanceOptimizable* ool = new(alloc()) OutOfLineRegExpInstanceOptimizable(ins);
    addOutOfLineCode(ool, ins->mir());

    masm.loadJSContext(temp);
    masm.loadPtr(Address(temp, JSContext::offsetOfRealm()), temp);
    size_t offset = Realm::offsetOfRegExps() +
                    RegExpRealm::offsetOfOptimizableRegExpInstanceShape();
    masm.loadPtr(Address(temp, offset), temp);

    masm.branchTestObjShapeUnsafe(Assembler::NotEqual, object, temp, ool->entry());
    masm.move32(Imm32(0x1), output);

    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitOutOfLineRegExpInstanceOptimizable(OutOfLineRegExpInstanceOptimizable* ool)
{
    LRegExpInstanceOptimizable* ins = ool->ins();
    Register object = ToRegister(ins->object());
    Register proto = ToRegister(ins->proto());
    Register output = ToRegister(ins->output());

    saveVolatile(output);

    masm.setupUnalignedABICall(output);
    masm.loadJSContext(output);
    masm.passABIArg(output);
    masm.passABIArg(object);
    masm.passABIArg(proto);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, RegExpInstanceOptimizableRaw));
    masm.storeCallBoolResult(output);

    restoreVolatile(output);

    masm.jump(ool->rejoin());
}

static void
FindFirstDollarIndex(MacroAssembler& masm, Register len, Register chars,
                     Register temp, Register output, bool isLatin1)
{
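    // Scan the characters for the first '$'. On exit |output| holds its
    // index, or -1 if the string contains no '$'.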
    masm.move32(Imm32(0), output);

    Label start, done;
    masm.bind(&start);
    if (isLatin1)
        masm.load8ZeroExtend(BaseIndex(chars, output, TimesOne), temp);
    else
        masm.load16ZeroExtend(BaseIndex(chars, output, TimesTwo), temp);

    masm.branch32(Assembler::Equal, temp, Imm32('$'), &done);

    masm.add32(Imm32(1), output);
    masm.branch32(Assembler::NotEqual, output, len, &start);

    masm.move32(Imm32(-1), output);

    masm.bind(&done);
}

typedef bool (*GetFirstDollarIndexRawFn)(JSContext*, JSString*, int32_t*);
static const VMFunction GetFirstDollarIndexRawInfo =
    FunctionInfo<GetFirstDollarIndexRawFn>(GetFirstDollarIndexRaw, "GetFirstDollarIndexRaw");

void
CodeGenerator::visitGetFirstDollarIndex(LGetFirstDollarIndex* ins)
{
    Register str = ToRegister(ins->str());
    Register output = ToRegister(ins->output());
    Register temp0 = ToRegister(ins->temp0());
    Register temp1 = ToRegister(ins->temp1());
    Register len = ToRegister(ins->temp2());

    OutOfLineCode* ool = oolCallVM(GetFirstDollarIndexRawInfo, ins, ArgList(str),
                                   StoreRegisterTo(output));

    masm.branchIfRope(str, ool->entry());
    masm.loadStringLength(str, len);

    Label isLatin1, done;
    masm.branchLatin1String(str, &isLatin1);
    {
        masm.loadStringChars(str, temp0, CharEncoding::TwoByte);
        FindFirstDollarIndex(masm, len, temp0, temp1, output, /* isLatin1 = */ false);
        masm.jump(&done);
    }
    masm.bind(&isLatin1);
    {
        masm.loadStringChars(str, temp0, CharEncoding::Latin1);
        FindFirstDollarIndex(masm, len, temp0, temp1, output, /* isLatin1 = */ true);
    }
    masm.bind(&done);
    masm.bind(ool->rejoin());
}

typedef JSString* (*StringReplaceFn)(JSContext*, HandleString, HandleString, HandleString);
static const VMFunction StringFlatReplaceInfo =
    FunctionInfo<StringReplaceFn>(js::str_flat_replace_string, "str_flat_replace_string");
static const VMFunction StringReplaceInfo =
    FunctionInfo<StringReplaceFn>(StringReplace, "StringReplace");

void
CodeGenerator::visitStringReplace(LStringReplace* lir)
{
    if (lir->replacement()->isConstant())
        pushArg(ImmGCPtr(lir->replacement()->toConstant()->toString()));
    else
        pushArg(ToRegister(lir->replacement()));

    if (lir->pattern()->isConstant())
        pushArg(ImmGCPtr(lir->pattern()->toConstant()->toString()));
    else
        pushArg(ToRegister(lir->pattern()));

    if (lir->string()->isConstant())
        pushArg(ImmGCPtr(lir->string()->toConstant()->toString()));
    else
        pushArg(ToRegister(lir->string()));

    if (lir->mir()->isFlatReplacement())
        callVM(StringFlatReplaceInfo, lir);
    else
        callVM(StringReplaceInfo, lir);
}

void
CodeGenerator::emitSharedStub(ICStub::Kind kind, LInstruction* lir)
{
    JSScript* script = lir->mirRaw()->block()->info().script();
    jsbytecode* pc = lir->mirRaw()->toInstruction()->resumePoint()->pc();

#ifdef JS_USE_LINK_REGISTER
    // Some architectures don't push the return address on the stack but
    // use the link register. In that case the stack isn't aligned. Push
    // to make sure we are aligned.
    masm.Push(Imm32(0));
#endif

    // Create descriptor signifying end of Ion frame.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              JitStubFrameLayout::Size());
    masm.Push(Imm32(descriptor));

    // Call into the stubcode.
    CodeOffset patchOffset;
    IonICEntry entry(script->pcToOffset(pc), ICEntry::Kind_Op, script);
    EmitCallIC(&patchOffset, masm);
    entry.setReturnOffset(CodeOffset(masm.currentOffset()));

    SharedStub sharedStub(kind, entry, patchOffset);
    masm.propagateOOM(sharedStubs_.append(sharedStub));

    // Fix up upon return.
    uint32_t callOffset = masm.currentOffset();
#ifdef JS_USE_LINK_REGISTER
    masm.freeStack(sizeof(intptr_t) * 2);
#else
    masm.freeStack(sizeof(intptr_t));
#endif
    markSafepointAt(callOffset, lir);
}

void
CodeGenerator::visitBinarySharedStub(LBinarySharedStub* lir)
{
    JSOp jsop = JSOp(*lir->mirRaw()->toInstruction()->resumePoint()->pc());
    switch (jsop) {
      case JSOP_ADD:
      case JSOP_SUB:
      case JSOP_MUL:
      case JSOP_DIV:
      case JSOP_MOD:
      case JSOP_POW:
        emitSharedStub(ICStub::Kind::BinaryArith_Fallback, lir);
        break;
      case JSOP_LT:
      case JSOP_LE:
      case JSOP_GT:
      case JSOP_GE:
      case JSOP_EQ:
      case JSOP_NE:
      case JSOP_STRICTEQ:
      case JSOP_STRICTNE:
        emitSharedStub(ICStub::Kind::Compare_Fallback, lir);
        break;
      default:
        MOZ_CRASH("Unsupported jsop in shared stubs.");
    }
}

void
CodeGenerator::visitUnaryCache(LUnaryCache* lir)
{
    LiveRegisterSet liveRegs = lir->safepoint()->liveRegs();
    TypedOrValueRegister input = TypedOrValueRegister(ToValue(lir, LUnaryCache::Input));
    ValueOperand output = ToOutValue(lir);

    IonUnaryArithIC ic(liveRegs, input, output);
    addIC(lir, allocateIC(ic));
}

void
CodeGenerator::visitNullarySharedStub(LNullarySharedStub* lir)
{
    jsbytecode* pc = lir->mir()->resumePoint()->pc();
    JSOp jsop = JSOp(*pc);
    switch (jsop) {
      case JSOP_NEWARRAY: {
        uint32_t length = GET_UINT32(pc);
        MOZ_ASSERT(length <= INT32_MAX,
                   "the bytecode emitter must fail to compile code that would "
                   "produce JSOP_NEWARRAY with a length exceeding int32_t range");

        // Pass length in R0.
        masm.move32(Imm32(AssertedCast<int32_t>(length)), R0.scratchReg());
        emitSharedStub(ICStub::Kind::NewArray_Fallback, lir);
        break;
      }
      case JSOP_NEWOBJECT:
        emitSharedStub(ICStub::Kind::NewObject_Fallback, lir);
        break;
      case JSOP_NEWINIT: {
        JSProtoKey key = JSProtoKey(GET_UINT8(pc));
        if (key == JSProto_Array) {
            masm.move32(Imm32(0), R0.scratchReg());
            emitSharedStub(ICStub::Kind::NewArray_Fallback, lir);
        } else {
            emitSharedStub(ICStub::Kind::NewObject_Fallback, lir);
        }
        break;
      }
      default:
        MOZ_CRASH("Unsupported jsop in shared stubs.");
    }
}

typedef JSFunction* (*MakeDefaultConstructorFn)(JSContext*, HandleScript,
                                                jsbytecode*, HandleObject);
static const VMFunction MakeDefaultConstructorInfo =
    FunctionInfo<MakeDefaultConstructorFn>(js::MakeDefaultConstructor,
                                           "MakeDefaultConstructor");

void
CodeGenerator::visitClassConstructor(LClassConstructor* lir)
{
    pushArg(ImmPtr(nullptr));
    pushArg(ImmPtr(lir->mir()->pc()));
    pushArg(ImmGCPtr(current->mir()->info().script()));
    callVM(MakeDefaultConstructorInfo, lir);
}

typedef JSObject* (*LambdaFn)(JSContext*, HandleFunction, HandleObject);
static const VMFunction LambdaInfo = FunctionInfo<LambdaFn>(js::Lambda, "Lambda");

void
CodeGenerator::visitLambdaForSingleton(LLambdaForSingleton* lir)
{
    pushArg(ToRegister(lir->environmentChain()));
    pushArg(ImmGCPtr(lir->mir()->info().funUnsafe()));
    callVM(LambdaInfo, lir);
}

void
CodeGenerator::visitLambda(LLambda* lir)
{
    Register envChain = ToRegister(lir->environmentChain());
    Register output = ToRegister(lir->output());
    Register tempReg = ToRegister(lir->temp());
    const LambdaFunctionInfo& info = lir->mir()->info();

    OutOfLineCode* ool = oolCallVM(LambdaInfo, lir, ArgList(ImmGCPtr(info.funUnsafe()), envChain),
                                   StoreRegisterTo(output));

    MOZ_ASSERT(!info.singletonType);

    TemplateObject templateObject(info.funUnsafe());
    masm.createGCObject(output, tempReg, templateObject, gc::DefaultHeap, ool->entry());

    emitLambdaInit(output, envChain, info);

    if (info.flags & JSFunction::EXTENDED) {
        static_assert(FunctionExtended::NUM_EXTENDED_SLOTS == 2, "All slots must be initialized");
        masm.storeValue(UndefinedValue(), Address(output, FunctionExtended::offsetOfExtendedSlot(0)));
        masm.storeValue(UndefinedValue(), Address(output, FunctionExtended::offsetOfExtendedSlot(1)));
    }

    masm.bind(ool->rejoin());
}

class OutOfLineLambdaArrow : public OutOfLineCodeBase<CodeGenerator>
{
  public:
    LLambdaArrow* lir;
    Label entryNoPop_;

    explicit OutOfLineLambdaArrow(LLambdaArrow* lir)
      : lir(lir)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineLambdaArrow(this);
    }

    Label* entryNoPop() {
        return &entryNoPop_;
    }
};

typedef JSObject* (*LambdaArrowFn)(JSContext*, HandleFunction, HandleObject, HandleValue);
static const VMFunction LambdaArrowInfo =
    FunctionInfo<LambdaArrowFn>(js::LambdaArrow, "LambdaArrow");

void
CodeGenerator::visitOutOfLineLambdaArrow(OutOfLineLambdaArrow* ool)
{
    Register envChain = ToRegister(ool->lir->environmentChain());
    ValueOperand newTarget = ToValue(ool->lir, LLambdaArrow::NewTargetValue);
    Register output = ToRegister(ool->lir->output());
    const LambdaFunctionInfo& info = ool->lir->mir()->info();

    // When we get here, we may need to restore part of the newTarget,
    // which has been conscripted into service as a temp register.
    masm.pop(newTarget.scratchReg());

    masm.bind(ool->entryNoPop());

    saveLive(ool->lir);

    pushArg(newTarget);
    pushArg(envChain);
    pushArg(ImmGCPtr(info.funUnsafe()));

    callVM(LambdaArrowInfo, ool->lir);
    StoreRegisterTo(output).generate(this);

    restoreLiveIgnore(ool->lir, StoreRegisterTo(output).clobbered());

    masm.jump(ool->rejoin());
}

void
CodeGenerator::visitLambdaArrow(LLambdaArrow* lir)
{
    Register envChain = ToRegister(lir->environmentChain());
    ValueOperand newTarget = ToValue(lir, LLambdaArrow::NewTargetValue);
    Register output = ToRegister(lir->output());
    const LambdaFunctionInfo& info = lir->mir()->info();

    OutOfLineLambdaArrow* ool = new (alloc()) OutOfLineLambdaArrow(lir);
    addOutOfLineCode(ool, lir->mir());

    MOZ_ASSERT(!info.useSingletonForClone);

    if (info.singletonType) {
        // If the function has a singleton type, this instruction will only be
        // executed once, so we don't bother inlining it.
        masm.jump(ool->entryNoPop());
        masm.bind(ool->rejoin());
        return;
    }

    // There aren't enough registers on x86 with the profiler enabled to
    // request a temp. Instead, spill part of one of the values and be
    // prepared to restore it if necessary on the out-of-line path.
    Register tempReg = newTarget.scratchReg();
    masm.push(newTarget.scratchReg());

    TemplateObject templateObject(info.funUnsafe());
    masm.createGCObject(output, tempReg, templateObject, gc::DefaultHeap, ool->entry());

    masm.pop(newTarget.scratchReg());

    emitLambdaInit(output, envChain, info);

    // Initialize the extended slots. |new.target| is stored in the first one.
    MOZ_ASSERT(info.flags & JSFunction::EXTENDED);
    static_assert(FunctionExtended::NUM_EXTENDED_SLOTS == 2, "All slots must be initialized");
    static_assert(FunctionExtended::ARROW_NEWTARGET_SLOT == 0,
                  "|new.target| must be stored in first slot");
    masm.storeValue(newTarget, Address(output, FunctionExtended::offsetOfExtendedSlot(0)));
    masm.storeValue(UndefinedValue(), Address(output, FunctionExtended::offsetOfExtendedSlot(1)));

    masm.bind(ool->rejoin());
}

void
CodeGenerator::emitLambdaInit(Register output, Register envChain,
                              const LambdaFunctionInfo& info)
{
    // Initialize nargs and flags. We do this with a single uint32 to avoid
    // 16-bit writes.
    union {
        struct S {
            uint16_t nargs;
            uint16_t flags;
        } s;
        uint32_t word;
    } u;
    u.s.nargs = info.nargs;
    u.s.flags = info.flags;

    static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
                  "the code below needs to be adapted");
    masm.store32(Imm32(u.word), Address(output, JSFunction::offsetOfNargs()));
    masm.storePtr(ImmGCPtr(info.scriptOrLazyScript),
                  Address(output, JSFunction::offsetOfScriptOrLazyScript()));
    masm.storePtr(envChain, Address(output, JSFunction::offsetOfEnvironment()));
    // No post barrier needed because output is guaranteed to be allocated in
    // the nursery.
    masm.storePtr(ImmGCPtr(info.funUnsafe()->displayAtom()),
                  Address(output, JSFunction::offsetOfAtom()));
}

typedef bool (*SetFunNameFn)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
static const VMFunction SetFunNameInfo =
    FunctionInfo<SetFunNameFn>(js::SetFunctionNameIfNoOwnName, "SetFunName");

void
CodeGenerator::visitSetFunName(LSetFunName* lir)
{
    pushArg(Imm32(lir->mir()->prefixKind()));
    pushArg(ToValue(lir, LSetFunName::NameValue));
    pushArg(ToRegister(lir->fun()));

    callVM(SetFunNameInfo, lir);
}

void
CodeGenerator::visitOsiPoint(LOsiPoint* lir)
{
    // Note: markOsiPoint ensures enough space exists between the last
    // LOsiPoint and this one to patch adjacent call instructions.

    MOZ_ASSERT(masm.framePushed() == frameSize());

    uint32_t osiCallPointOffset = markOsiPoint(lir);

    LSafepoint* safepoint = lir->associatedSafepoint();
    MOZ_ASSERT(!safepoint->osiCallPointOffset());
    safepoint->setOsiCallPointOffset(osiCallPointOffset);

#ifdef DEBUG
    // There should be no movegroups or other instructions between
    // an instruction and its OsiPoint. This is necessary because
    // we use the OsiPoint's snapshot from within VM calls.
    for (LInstructionReverseIterator iter(current->rbegin(lir)); iter != current->rend(); iter++) {
        if (*iter == lir)
            continue;
        MOZ_ASSERT(!iter->isMoveGroup());
        MOZ_ASSERT(iter->safepoint() == safepoint);
        break;
    }
#endif

#ifdef CHECK_OSIPOINT_REGISTERS
    if (shouldVerifyOsiPointRegs(safepoint))
        verifyOsiPointRegs(safepoint);
#endif
}

void
CodeGenerator::visitPhi(LPhi* lir)
{
    MOZ_CRASH("Unexpected LPhi in CodeGenerator");
}

void
CodeGenerator::visitGoto(LGoto* lir)
{
    jumpToBlock(lir->target());
}

typedef bool (*InterruptCheckFn)(JSContext*);
static const VMFunction InterruptCheckInfo =
    FunctionInfo<InterruptCheckFn>(InterruptCheck, "InterruptCheck");

void
CodeGenerator::visitTableSwitch(LTableSwitch* ins)
{
    MTableSwitch* mir = ins->mir();
    Label* defaultcase = skipTrivialBlocks(mir->getDefault())->lir()->label();
    const LAllocation* temp;

    if (mir->getOperand(0)->type() != MIRType::Int32) {
        temp = ins->tempInt()->output();

        // The input is a double, so try to convert it to an integer.
        // If it does not fit in an int32, take the default case.
        masm.convertDoubleToInt32(ToFloatRegister(ins->index()), ToRegister(temp), defaultcase, false);
    } else {
        temp = ins->index();
    }

    emitTableSwitchDispatch(mir, ToRegister(temp), ToRegisterOrInvalid(ins->tempPointer()));
}

void
CodeGenerator::visitTableSwitchV(LTableSwitchV* ins)
{
    MTableSwitch* mir = ins->mir();
    Label* defaultcase = skipTrivialBlocks(mir->getDefault())->lir()->label();

    Register index = ToRegister(ins->tempInt());
    ValueOperand value = ToValue(ins, LTableSwitchV::InputValue);
    Register tag = masm.extractTag(value, index);
    masm.branchTestNumber(Assembler::NotEqual, tag, defaultcase);
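
    // Only int32 and double inputs are handled inline; any other tag takes
    // the default case.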

    Label unboxInt, isInt;
    masm.branchTestInt32(Assembler::Equal, tag, &unboxInt);
    {
        FloatRegister floatIndex = ToFloatRegister(ins->tempFloat());
        masm.unboxDouble(value, floatIndex);
        masm.convertDoubleToInt32(floatIndex, index, defaultcase, false);
        masm.jump(&isInt);
    }

    masm.bind(&unboxInt);
    masm.unboxInt32(value, index);

    masm.bind(&isInt);

    emitTableSwitchDispatch(mir, index, ToRegisterOrInvalid(ins->tempPointer()));
}

typedef JSObject* (*DeepCloneObjectLiteralFn)(JSContext*, HandleObject, NewObjectKind);
static const VMFunction DeepCloneObjectLiteralInfo =
    FunctionInfo<DeepCloneObjectLiteralFn>(DeepCloneObjectLiteral, "DeepCloneObjectLiteral");

void
CodeGenerator::visitCloneLiteral(LCloneLiteral* lir)
{
    pushArg(ImmWord(TenuredObject));
    pushArg(ToRegister(lir->getObjectLiteral()));
    callVM(DeepCloneObjectLiteralInfo, lir);
}

void
CodeGenerator::visitParameter(LParameter* lir)
{
}

void
CodeGenerator::visitCallee(LCallee* lir)
{
    Register callee = ToRegister(lir->output());
    Address ptr(masm.getStackPointer(), frameSize() + JitFrameLayout::offsetOfCalleeToken());

    masm.loadFunctionFromCalleeToken(ptr, callee);
}

void
CodeGenerator::visitIsConstructing(LIsConstructing* lir)
{
    Register output = ToRegister(lir->output());
    Address calleeToken(masm.getStackPointer(), frameSize() + JitFrameLayout::offsetOfCalleeToken());
    masm.loadPtr(calleeToken, output);

    // We must be inside a function.
    MOZ_ASSERT(current->mir()->info().script()->functionNonDelazifying());

    // The low bit indicates whether this call is constructing; just clear the
    // other bits.
    static_assert(CalleeToken_Function == 0x0, "CalleeTokenTag value should match");
    static_assert(CalleeToken_FunctionConstructing == 0x1, "CalleeTokenTag value should match");
    masm.andPtr(Imm32(0x1), output);
}

void
CodeGenerator::visitStart(LStart* lir)
{
}

void
CodeGenerator::visitReturn(LReturn* lir)
{
#if defined(JS_NUNBOX32)
    DebugOnly<LAllocation*> type    = lir->getOperand(TYPE_INDEX);
    DebugOnly<LAllocation*> payload = lir->getOperand(PAYLOAD_INDEX);
    MOZ_ASSERT(ToRegister(type)    == JSReturnReg_Type);
    MOZ_ASSERT(ToRegister(payload) == JSReturnReg_Data);
#elif defined(JS_PUNBOX64)
    DebugOnly<LAllocation*> result = lir->getOperand(0);
    MOZ_ASSERT(ToRegister(result) == JSReturnReg);
#endif
    // Don't emit a jump to the return label if this is the last block.
    if (current->mir() != *gen->graph().poBegin())
        masm.jump(&returnLabel_);
}

void
CodeGenerator::visitOsrEntry(LOsrEntry* lir)
{
    Register temp = ToRegister(lir->temp());

    // Remember the OSR entry offset into the code buffer.
    masm.flushBuffer();
    setOsrEntryOffset(masm.size());

#ifdef JS_TRACE_LOGGING
    emitTracelogStopEvent(TraceLogger_Baseline);
    emitTracelogStartEvent(TraceLogger_IonMonkey);
#endif

    // If profiling, save the current frame pointer to a per-thread global field.
    if (isProfilerInstrumentationEnabled())
        masm.profilerEnterFrame(masm.getStackPointer(), temp);

    // Allocate the full frame for this function.
    // Note that we have a new entry here, so we reset
    // MacroAssembler::framePushed() to 0 before reserving the stack.
    MOZ_ASSERT(masm.framePushed() == frameSize());
    masm.setFramePushed(0);

    // Ensure that the Ion frame is properly aligned.
    masm.assertStackAlignment(JitStackAlignment, 0);

    masm.reserveStack(frameSize());
}

void
CodeGenerator::visitOsrEnvironmentChain(LOsrEnvironmentChain* lir)
{
    const LAllocation* frame   = lir->getOperand(0);
    const LDefinition* object  = lir->getDef(0);

    const ptrdiff_t frameOffset = BaselineFrame::reverseOffsetOfEnvironmentChain();

    masm.loadPtr(Address(ToRegister(frame), frameOffset), ToRegister(object));
}

void
CodeGenerator::visitOsrArgumentsObject(LOsrArgumentsObject* lir)
{
    const LAllocation* frame   = lir->getOperand(0);
    const LDefinition* object  = lir->getDef(0);

    const ptrdiff_t frameOffset = BaselineFrame::reverseOffsetOfArgsObj();

    masm.loadPtr(Address(ToRegister(frame), frameOffset), ToRegister(object));
}

void
CodeGenerator::visitOsrValue(LOsrValue* value)
{
    const LAllocation* frame   = value->getOperand(0);
    const ValueOperand out     = ToOutValue(value);

    const ptrdiff_t frameOffset = value->mir()->frameOffset();

    masm.loadValue(Address(ToRegister(frame), frameOffset), out);
}

void
CodeGenerator::visitOsrReturnValue(LOsrReturnValue* lir)
{
    const LAllocation* frame   = lir->getOperand(0);
    const ValueOperand out     = ToOutValue(lir);

    Address flags = Address(ToRegister(frame), BaselineFrame::reverseOffsetOfFlags());
    Address retval = Address(ToRegister(frame), BaselineFrame::reverseOffsetOfReturnValue());

    masm.moveValue(UndefinedValue(), out);

    Label done;
    masm.branchTest32(Assembler::Zero, flags, Imm32(BaselineFrame::HAS_RVAL), &done);
    masm.loadValue(retval, out);
    masm.bind(&done);
}

void
CodeGenerator::visitStackArgT(LStackArgT* lir)
{
    const LAllocation* arg = lir->getArgument();
    MIRType argType = lir->type();
    uint32_t argslot = lir->argslot();
    MOZ_ASSERT(argslot - 1u < graph.argumentSlotCount());

    int32_t stack_offset = StackOffsetOfPassedArg(argslot);
    Address dest(masm.getStackPointer(), stack_offset);

    if (arg->isFloatReg())
        masm.storeDouble(ToFloatRegister(arg), dest);
    else if (arg->isRegister())
        masm.storeValue(ValueTypeFromMIRType(argType), ToRegister(arg), dest);
    else
        masm.storeValue(arg->toConstant()->toJSValue(), dest);
}

void
CodeGenerator::visitStackArgV(LStackArgV* lir)
{
    ValueOperand val = ToValue(lir, 0);
    uint32_t argslot = lir->argslot();
    MOZ_ASSERT(argslot - 1u < graph.argumentSlotCount());

    int32_t stack_offset = StackOffsetOfPassedArg(argslot);

    masm.storeValue(val, Address(masm.getStackPointer(), stack_offset));
}

void
CodeGenerator::visitMoveGroup(LMoveGroup* group)
{
    if (!group->numMoves())
        return;

    MoveResolver& resolver = masm.moveResolver();

    for (size_t i = 0; i < group->numMoves(); i++) {
        const LMove& move = group->getMove(i);

        LAllocation from = move.from();
        LAllocation to = move.to();
        LDefinition::Type type = move.type();

        // No bogus moves.
        MOZ_ASSERT(from != to);
        MOZ_ASSERT(!from.isConstant());
        MoveOp::Type moveType;
        switch (type) {
          case LDefinition::OBJECT:
          case LDefinition::SLOTS:
#ifdef JS_NUNBOX32
          case LDefinition::TYPE:
          case LDefinition::PAYLOAD:
#else
          case LDefinition::BOX:
#endif
          case LDefinition::GENERAL:      moveType = MoveOp::GENERAL;      break;
          case LDefinition::INT32:        moveType = MoveOp::INT32;        break;
          case LDefinition::FLOAT32:      moveType = MoveOp::FLOAT32;      break;
          case LDefinition::DOUBLE:       moveType = MoveOp::DOUBLE;       break;
          case LDefinition::SIMD128INT:   moveType = MoveOp::SIMD128INT;   break;
          case LDefinition::SIMD128FLOAT: moveType = MoveOp::SIMD128FLOAT; break;
          default: MOZ_CRASH("Unexpected move type");
        }

        masm.propagateOOM(resolver.addMove(toMoveOperand(from), toMoveOperand(to), moveType));
    }

    masm.propagateOOM(resolver.resolve());
    if (masm.oom())
        return;

    MoveEmitter emitter(masm);

#ifdef JS_CODEGEN_X86
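    // Memory-to-memory moves on x86 may need a scratch register: use the
    // group's scratch register if one was allocated, otherwise sort the
    // memory-to-memory moves so the emitter can handle them without one.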
    if (group->maybeScratchRegister().isGeneralReg())
        emitter.setScratchRegister(group->maybeScratchRegister().toGeneralReg()->reg());
    else
        resolver.sortMemoryToMemoryMoves();
#endif

    emitter.emit(resolver);
    emitter.finish();
}

void
CodeGenerator::visitInteger(LInteger* lir)
{
    masm.move32(Imm32(lir->getValue()), ToRegister(lir->output()));
}

void
CodeGenerator::visitInteger64(LInteger64* lir)
{
    masm.move64(Imm64(lir->getValue()), ToOutRegister64(lir));
}

void
CodeGenerator::visitPointer(LPointer* lir)
{
    if (lir->kind() == LPointer::GC_THING)
        masm.movePtr(ImmGCPtr(lir->gcptr()), ToRegister(lir->output()));
    else
        masm.movePtr(ImmPtr(lir->ptr()), ToRegister(lir->output()));
}

void
CodeGenerator::visitKeepAliveObject(LKeepAliveObject* lir)
{
    // No-op.
}

void
CodeGenerator::visitSlots(LSlots* lir)
{
    Address slots(ToRegister(lir->object()), NativeObject::offsetOfSlots());
    masm.loadPtr(slots, ToRegister(lir->output()));
}

void
CodeGenerator::visitLoadSlotT(LLoadSlotT* lir)
{
    Register base = ToRegister(lir->slots());
    int32_t offset = lir->mir()->slot() * sizeof(js::Value);
    AnyRegister result = ToAnyRegister(lir->output());

    masm.loadUnboxedValue(Address(base, offset), lir->mir()->type(), result);
}

void
CodeGenerator::visitLoadSlotV(LLoadSlotV* lir)
{
    ValueOperand dest = ToOutValue(lir);
    Register base = ToRegister(lir->input());
    int32_t offset = lir->mir()->slot() * sizeof(js::Value);

    masm.loadValue(Address(base, offset), dest);
}

void
CodeGenerator::visitStoreSlotT(LStoreSlotT* lir)
{
    Register base = ToRegister(lir->slots());
    int32_t offset = lir->mir()->slot() * sizeof(js::Value);
    Address dest(base, offset);

    if (lir->mir()->needsBarrier())
        emitPreBarrier(dest);

    MIRType valueType = lir->mir()->value()->type();

    if (valueType == MIRType::ObjectOrNull) {
        masm.storeObjectOrNull(ToRegister(lir->value()), dest);
    } else {
        ConstantOrRegister value;
        if (lir->value()->isConstant())
            value = ConstantOrRegister(lir->value()->toConstant()->toJSValue());
        else
            value = TypedOrValueRegister(valueType, ToAnyRegister(lir->value()));
        masm.storeUnboxedValue(value, valueType, dest, lir->mir()->slotType());
    }
}

void
CodeGenerator::visitStoreSlotV(LStoreSlotV* lir)
{
    Register base = ToRegister(lir->slots());
    int32_t offset = lir->mir()->slot() * sizeof(Value);

    const ValueOperand value = ToValue(lir, LStoreSlotV::Value);

    if (lir->mir()->needsBarrier())
       emitPreBarrier(Address(base, offset));

    masm.storeValue(value, Address(base, offset));
}

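// Branch to |miss| unless |obj| matches |guard|. For unboxed objects
// (guard.group is set) the group is checked first; if the guard also has a
// shape, the expando object is loaded into |expandoScratch| (missing if there
// is no expando) and its shape is checked, while with |checkNullExpando| we
// instead miss when an expando is present. For other objects only the shape
// is checked.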
static void
GuardReceiver(MacroAssembler& masm, const ReceiverGuard& guard,
              Register obj, Register expandoScratch, Register scratch, Label* miss,
              bool checkNullExpando)
{
    if (guard.group) {
        masm.branchTestObjGroup(Assembler::NotEqual, obj, guard.group, scratch, obj, miss);

        Address expandoAddress(obj, UnboxedPlainObject::offsetOfExpando());
        if (guard.shape) {
            masm.loadPtr(expandoAddress, expandoScratch);
            masm.branchPtr(Assembler::Equal, expandoScratch, ImmWord(0), miss);
            masm.branchTestObjShape(Assembler::NotEqual, expandoScratch, guard.shape, scratch,
                                    expandoScratch, miss);
        } else if (checkNullExpando) {
            masm.branchPtr(Assembler::NotEqual, expandoAddress, ImmWord(0), miss);
        }
    } else {
        masm.branchTestObjShape(Assembler::NotEqual, obj, guard.shape, scratch, obj, miss);
    }
}

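// Emit a chain of receiver guards for MGetPropertyPolymorphic: each receiver
// is tested in turn with GuardReceiver and, on a match, the property is
// loaded from a fixed slot, a dynamic slot, or an unboxed property. Falling
// past the last guard bails out.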
void
CodeGenerator::emitGetPropertyPolymorphic(LInstruction* ins, Register obj, Register expandoScratch,
                                          Register scratch,
                                          const TypedOrValueRegister& output)
{
    MGetPropertyPolymorphic* mir = ins->mirRaw()->toGetPropertyPolymorphic();

    Label done;

    for (size_t i = 0; i < mir->numReceivers(); i++) {
        ReceiverGuard receiver = mir->receiver(i);

        Label next;
        masm.comment("GuardReceiver");
        GuardReceiver(masm, receiver, obj, expandoScratch, scratch, &next,
                      /* checkNullExpando = */ false);

        if (receiver.shape) {
            masm.comment("loadTypedOrValue");
            // If this is an unboxed expando access, GuardReceiver loaded the
            // expando object into expandoScratch.
            Register target = receiver.group ? expandoScratch : obj;

            Shape* shape = mir->shape(i);
            if (shape->slot() < shape->numFixedSlots()) {
                // Fixed slot.
                masm.loadTypedOrValue(Address(target, NativeObject::getFixedSlotOffset(shape->slot())),
                                      output);
            } else {
                // Dynamic slot.
                uint32_t offset = (shape->slot() - shape->numFixedSlots()) * sizeof(js::Value);
                masm.loadPtr(Address(target, NativeObject::offsetOfSlots()), scratch);
                masm.loadTypedOrValue(Address(scratch, offset), output);
            }
        } else {
            masm.comment("loadUnboxedProperty");
            const UnboxedLayout::Property* property =
                receiver.group->unboxedLayoutDontCheckGeneration().lookup(mir->name());
            Address propertyAddr(obj, UnboxedPlainObject::offsetOfData() + property->offset);

            masm.loadUnboxedProperty(propertyAddr, property->type, output);
        }

        if (i == mir->numReceivers() - 1) {
            bailoutFrom(&next, ins->snapshot());
        } else {
            masm.jump(&done);
            masm.bind(&next);
        }
    }

    masm.bind(&done);
}

void
CodeGenerator::visitGetPropertyPolymorphicV(LGetPropertyPolymorphicV* ins)
{
    Register obj = ToRegister(ins->obj());
    ValueOperand output = ToOutValue(ins);
    Register temp = ToRegister(ins->temp());
    emitGetPropertyPolymorphic(ins, obj, output.scratchReg(), temp, output);
}

void
CodeGenerator::visitGetPropertyPolymorphicT(LGetPropertyPolymorphicT* ins)
{
    Register obj = ToRegister(ins->obj());
    TypedOrValueRegister output(ins->mir()->type(), ToAnyRegister(ins->output()));
    Register temp1 = ToRegister(ins->temp1());
    Register temp2 = (output.type() == MIRType::Double)
                     ? ToRegister(ins->temp2())
                     : output.typedReg().gpr();
    emitGetPropertyPolymorphic(ins, obj, temp1, temp2, output);
}

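// Emit a pre-barrier for a store to an unboxed property. Only object- and
// string-typed properties hold GC pointers that need a pre-barrier; any other
// unboxed type is asserted not to need one.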
template <typename T>
static void
EmitUnboxedPreBarrier(MacroAssembler &masm, T address, JSValueType type)
{
    if (type == JSVAL_TYPE_OBJECT)
        masm.guardedCallPreBarrier(address, MIRType::Object);
    else if (type == JSVAL_TYPE_STRING)
        masm.guardedCallPreBarrier(address, MIRType::String);
    else
        MOZ_ASSERT(!UnboxedTypeNeedsPreBarrier(type));
}

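// Same structure as emitGetPropertyPolymorphic, but stores |value| into the
// matched receiver's fixed slot, dynamic slot or unboxed property, emitting
// pre-barriers as needed. Falling past the last guard bails out.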
void
CodeGenerator::emitSetPropertyPolymorphic(LInstruction* ins, Register obj, Register expandoScratch,
                                          Register scratch, const ConstantOrRegister& value)
{
    MSetPropertyPolymorphic* mir = ins->mirRaw()->toSetPropertyPolymorphic();

    Label done;
    for (size_t i = 0; i < mir->numReceivers(); i++) {
        ReceiverGuard receiver = mir->receiver(i);

        Label next;
        GuardReceiver(masm, receiver, obj, expandoScratch, scratch, &next,
                      /* checkNullExpando = */ false);

        if (receiver.shape) {
            // If this is an unboxed expando access, GuardReceiver loaded the
            // expando object into expandoScratch.
            Register target = receiver.group ? expandoScratch : obj;

            Shape* shape = mir->shape(i);
            if (shape->slot() < shape->numFixedSlots()) {
                // Fixed slot.
                Address addr(target, NativeObject::getFixedSlotOffset(shape->slot()));
                if (mir->needsBarrier())
                    emitPreBarrier(addr);
                masm.storeConstantOrRegister(value, addr);
            } else {
                // Dynamic slot.
                masm.loadPtr(Address(target, NativeObject::offsetOfSlots()), scratch);
                Address addr(scratch, (shape->slot() - shape->numFixedSlots()) * sizeof(js::Value));
                if (mir->needsBarrier())
                    emitPreBarrier(addr);
                masm.storeConstantOrRegister(value, addr);
            }
        } else {
            const UnboxedLayout::Property* property =
                receiver.group->unboxedLayoutDontCheckGeneration().lookup(mir->name());
            Address propertyAddr(obj, UnboxedPlainObject::offsetOfData() + property->offset);

            EmitUnboxedPreBarrier(masm, propertyAddr, property->type);
            masm.storeUnboxedProperty(propertyAddr, property->type, value, nullptr);
        }

        if (i == mir->numReceivers() - 1) {
            bailoutFrom(&next, ins->snapshot());
        } else {
            masm.jump(&done);
            masm.bind(&next);
        }
    }

    masm.bind(&done);
}

void
CodeGenerator::visitSetPropertyPolymorphicV(LSetPropertyPolymorphicV* ins)
{
    Register obj = ToRegister(ins->obj());
    Register temp1 = ToRegister(ins->temp1());
    Register temp2 = ToRegister(ins->temp2());
    ValueOperand value = ToValue(ins, LSetPropertyPolymorphicV::Value);
    emitSetPropertyPolymorphic(ins, obj, temp1, temp2, TypedOrValueRegister(value));
}

void
CodeGenerator::visitSetPropertyPolymorphicT(LSetPropertyPolymorphicT* ins)
{
    Register obj = ToRegister(ins->obj());
    Register temp1 = ToRegister(ins->temp1());
    Register temp2 = ToRegister(ins->temp2());

    ConstantOrRegister value;
    if (ins->mir()->value()->isConstant())
        value = ConstantOrRegister(ins->mir()->value()->toConstant()->toJSValue());
    else
        value = TypedOrValueRegister(ins->mir()->value()->type(), ToAnyRegister(ins->value()));

    emitSetPropertyPolymorphic(ins, obj, temp1, temp2, value);
}

void
CodeGenerator::visitElements(LElements* lir)
{
    Address elements(ToRegister(lir->object()), NativeObject::offsetOfElements());
    masm.loadPtr(elements, ToRegister(lir->output()));
}

typedef bool (*ConvertElementsToDoublesFn)(JSContext*, uintptr_t);
static const VMFunction ConvertElementsToDoublesInfo =
    FunctionInfo<ConvertElementsToDoublesFn>(ObjectElements::ConvertElementsToDoubles,
                                             "ObjectElements::ConvertElementsToDoubles");

void
CodeGenerator::visitConvertElementsToDoubles(LConvertElementsToDoubles* lir)
{
    Register elements = ToRegister(lir->elements());

    OutOfLineCode* ool = oolCallVM(ConvertElementsToDoublesInfo, lir,
                                   ArgList(elements), StoreNothing());

    Address convertedAddress(elements, ObjectElements::offsetOfFlags());
    Imm32 bit(ObjectElements::CONVERT_DOUBLE_ELEMENTS);
    masm.branchTest32(Assembler::Zero, convertedAddress, bit, ool->entry());
    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitMaybeToDoubleElement(LMaybeToDoubleElement* lir)
{
    Register elements = ToRegister(lir->elements());
    Register value = ToRegister(lir->value());
    ValueOperand out = ToOutValue(lir);

    FloatRegister temp = ToFloatRegister(lir->tempFloat());
    Label convert, done;

    // If the CONVERT_DOUBLE_ELEMENTS flag is set, convert the int32
    // value to double. Else, just box it.
    masm.branchTest32(Assembler::NonZero,
                      Address(elements, ObjectElements::offsetOfFlags()),
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &convert);

    masm.tagValue(JSVAL_TYPE_INT32, value, out);
    masm.jump(&done);

    masm.bind(&convert);
    masm.convertInt32ToDouble(value, temp);
    masm.boxDouble(temp, out, temp);

    masm.bind(&done);
}

typedef bool (*CopyElementsForWriteFn)(JSContext*, NativeObject*);
static const VMFunction CopyElementsForWriteInfo =
    FunctionInfo<CopyElementsForWriteFn>(NativeObject::CopyElementsForWrite,
                                         "NativeObject::CopyElementsForWrite");

void
CodeGenerator::visitMaybeCopyElementsForWrite(LMaybeCopyElementsForWrite* lir)
{
    Register object = ToRegister(lir->object());
    Register temp = ToRegister(lir->temp());

    OutOfLineCode* ool = oolCallVM(CopyElementsForWriteInfo, lir,
                                   ArgList(object), StoreNothing());

    if (lir->mir()->checkNative())
        masm.branchIfNonNativeObj(object, temp, ool->rejoin());

    masm.loadPtr(Address(object, NativeObject::offsetOfElements()), temp);
    masm.branchTest32(Assembler::NonZero,
                      Address(temp, ObjectElements::offsetOfFlags()),
                      Imm32(ObjectElements::COPY_ON_WRITE),
                      ool->entry());
    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitFunctionEnvironment(LFunctionEnvironment* lir)
{
    Address environment(ToRegister(lir->function()), JSFunction::offsetOfEnvironment());
    masm.loadPtr(environment, ToRegister(lir->output()));
}

void
CodeGenerator::visitHomeObject(LHomeObject* lir)
{
    Address homeObject(ToRegister(lir->function()), FunctionExtended::offsetOfMethodHomeObjectSlot());
#ifdef DEBUG
    Label isObject;
    masm.branchTestObject(Assembler::Equal, homeObject, &isObject);
    masm.assumeUnreachable("[[HomeObject]] must be Object");
    masm.bind(&isObject);
#endif
    masm.unboxObject(homeObject, ToRegister(lir->output()));
}

typedef JSObject* (*HomeObjectSuperBaseFn)(JSContext*, HandleObject);
static const VMFunction HomeObjectSuperBaseInfo =
    FunctionInfo<HomeObjectSuperBaseFn>(HomeObjectSuperBase, "HomeObjectSuperBase");

void
CodeGenerator::visitHomeObjectSuperBase(LHomeObjectSuperBase* lir)
{
    Register homeObject = ToRegister(lir->homeObject());
    Register output = ToRegister(lir->output());

    OutOfLineCode* ool = oolCallVM(HomeObjectSuperBaseInfo, lir, ArgList(homeObject),
                                   StoreRegisterTo(output));

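    // Load the prototype of the home object and take the out-of-line VM path
    // if it is null or lazy (the proto word is 0 or, presumably, the 0x1
    // lazy-proto sentinel, hence the BelowOrEqual check against 1).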
    masm.loadObjProto(homeObject, output);
    masm.branchPtr(Assembler::BelowOrEqual, output, ImmWord(1), ool->entry());
    masm.bind(ool->rejoin());
}

typedef LexicalEnvironmentObject* (*NewLexicalEnvironmentObjectFn)(JSContext*,
                                                                   Handle<LexicalScope*>,
                                                                   HandleObject, gc::InitialHeap);
static const VMFunction NewLexicalEnvironmentObjectInfo =
    FunctionInfo<NewLexicalEnvironmentObjectFn>(LexicalEnvironmentObject::create,
                                                "LexicalEnvironmentObject::create");

void
CodeGenerator::visitNewLexicalEnvironmentObject(LNewLexicalEnvironmentObject* lir)
{
    pushArg(Imm32(gc::DefaultHeap));
    pushArg(ToRegister(lir->enclosing()));
    pushArg(ImmGCPtr(lir->mir()->scope()));
    callVM(NewLexicalEnvironmentObjectInfo, lir);
}

typedef JSObject* (*CopyLexicalEnvironmentObjectFn)(JSContext*, HandleObject, bool);
static const VMFunction CopyLexicalEnvironmentObjectInfo =
    FunctionInfo<CopyLexicalEnvironmentObjectFn>(js::jit::CopyLexicalEnvironmentObject,
                                                "js::jit::CopyLexicalEnvironmentObject");

void
CodeGenerator::visitCopyLexicalEnvironmentObject(LCopyLexicalEnvironmentObject* lir)
{
    pushArg(Imm32(lir->mir()->copySlots()));
    pushArg(ToRegister(lir->env()));
    callVM(CopyLexicalEnvironmentObjectInfo, lir);
}

void
CodeGenerator::visitGuardShape(LGuardShape* guard)
{
    Register obj = ToRegister(guard->input());
    Register temp = ToTempRegisterOrInvalid(guard->temp());
    Label bail;
    masm.branchTestObjShape(Assembler::NotEqual, obj, guard->mir()->shape(), temp, obj, &bail);
    bailoutFrom(&bail, guard->snapshot());
}

void
CodeGenerator::visitGuardObjectGroup(LGuardObjectGroup* guard)
{
    Register obj = ToRegister(guard->input());
    Register temp = ToTempRegisterOrInvalid(guard->temp());
    Assembler::Condition cond =
        guard->mir()->bailOnEquality() ? Assembler::Equal : Assembler::NotEqual;
    Label bail;
    masm.branchTestObjGroup(cond, obj, guard->mir()->group(), temp, obj, &bail);
    bailoutFrom(&bail, guard->snapshot());
}

void
CodeGenerator::visitGuardObjectIdentity(LGuardObjectIdentity* guard)
{
    Register input = ToRegister(guard->input());
    Register expected = ToRegister(guard->expected());

    Assembler::Condition cond =
        guard->mir()->bailOnEquality() ? Assembler::Equal : Assembler::NotEqual;
    bailoutCmpPtr(cond, input, expected, guard->snapshot());
}

void
CodeGenerator::visitGuardReceiverPolymorphic(LGuardReceiverPolymorphic* lir)
{
    const MGuardReceiverPolymorphic* mir = lir->mir();
    Register obj = ToRegister(lir->object());
    Register temp1 = ToRegister(lir->temp1());
    Register temp2 = ToRegister(lir->temp2());

    Label done;

    for (size_t i = 0; i < mir->numReceivers(); i++) {
        const ReceiverGuard& receiver = mir->receiver(i);

        Label next;
        GuardReceiver(masm, receiver, obj, temp1, temp2, &next, /* checkNullExpando = */ true);

        if (i == mir->numReceivers() - 1) {
            bailoutFrom(&next, lir->snapshot());
        } else {
            masm.jump(&done);
            masm.bind(&next);
        }
    }

    masm.bind(&done);
}

void
CodeGenerator::visitGuardUnboxedExpando(LGuardUnboxedExpando* lir)
{
    Label miss;

    Register obj = ToRegister(lir->object());
    masm.branchPtr(lir->mir()->requireExpando() ? Assembler::Equal : Assembler::NotEqual,
                   Address(obj, UnboxedPlainObject::offsetOfExpando()), ImmWord(0), &miss);

    bailoutFrom(&miss, lir->snapshot());
}

void
CodeGenerator::visitLoadUnboxedExpando(LLoadUnboxedExpando* lir)
{
    Register obj = ToRegister(lir->object());
    Register result = ToRegister(lir->getDef(0));

    masm.loadPtr(Address(obj, UnboxedPlainObject::offsetOfExpando()), result);
}

void
CodeGenerator::visitTypeBarrierV(LTypeBarrierV* lir)
{
    ValueOperand operand = ToValue(lir, LTypeBarrierV::Input);
    Register unboxScratch = ToTempRegisterOrInvalid(lir->unboxTemp());
    Register objScratch = ToTempRegisterOrInvalid(lir->objTemp());

    // guardObjectType may zero the payload/Value register on speculative paths
    // (we should have a defineReuseInput allocation in this case).
    Register spectreRegToZero = operand.payloadOrValueReg();

    Label miss;
    masm.guardTypeSet(operand, lir->mir()->resultTypeSet(), lir->mir()->barrierKind(),
                      unboxScratch, objScratch, spectreRegToZero, &miss);
    bailoutFrom(&miss, lir->snapshot());
}

void
CodeGenerator::visitTypeBarrierO(LTypeBarrierO* lir)
{
    Register obj = ToRegister(lir->object());
    Register scratch = ToTempRegisterOrInvalid(lir->temp());
    Label miss, ok;

    if (lir->mir()->type() == MIRType::ObjectOrNull) {
        masm.comment("Object or Null");
        Label* nullTarget = lir->mir()->resultTypeSet()->mightBeMIRType(MIRType::Null) ? &ok : &miss;
        masm.branchTestPtr(Assembler::Zero, obj, obj, nullTarget);
    } else {
        MOZ_ASSERT(lir->mir()->type() == MIRType::Object);
        MOZ_ASSERT(lir->mir()->barrierKind() != BarrierKind::TypeTagOnly);
    }

    if (lir->mir()->barrierKind() != BarrierKind::TypeTagOnly) {
        masm.comment("Type tag only");
        // guardObjectType may zero the object register on speculative paths
        // (we should have a defineReuseInput allocation in this case).
        Register spectreRegToZero = obj;
        masm.guardObjectType(obj, lir->mir()->resultTypeSet(), scratch, spectreRegToZero, &miss);
    }

    bailoutFrom(&miss, lir->snapshot());
    masm.bind(&ok);
}

// Out-of-line path to update the store buffer.
class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
{
    LInstruction* lir_;
    const LAllocation* object_;

  public:
    OutOfLineCallPostWriteBarrier(LInstruction* lir, const LAllocation* object)
      : lir_(lir), object_(object)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineCallPostWriteBarrier(this);
    }

    LInstruction* lir() const {
        return lir_;
    }
    const LAllocation* object() const {
        return object_;
    }
};

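// Fast path used when the barriered object is a known tenured constant: check
// the arena's bufferedCells bitmap. If the cell's bit is already set, jump to
// |exit|; if the arena still has the sentinel (empty) cell set, jump to
// |callVM| to allocate a real one; otherwise set the bit and jump to |exit|.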
static void
EmitStoreBufferCheckForConstant(MacroAssembler& masm, const gc::TenuredCell* cell,
                                AllocatableGeneralRegisterSet& regs, Label* exit, Label* callVM)
{
    Register temp = regs.takeAny();

    gc::Arena* arena = cell->arena();

    Register cells = temp;
    masm.loadPtr(AbsoluteAddress(&arena->bufferedCells()), cells);

    size_t index = gc::ArenaCellSet::getCellIndex(cell);
    size_t word;
    uint32_t mask;
    gc::ArenaCellSet::getWordIndexAndMask(index, &word, &mask);
    size_t offset = gc::ArenaCellSet::offsetOfBits() + word * sizeof(uint32_t);

    masm.branchTest32(Assembler::NonZero, Address(cells, offset), Imm32(mask), exit);

    // Check whether this is the sentinel set and if so call the VM to allocate
    // one for this arena.
    masm.branchPtr(Assembler::Equal, Address(cells, gc::ArenaCellSet::offsetOfArena()),
                   ImmPtr(nullptr), callVM);

    // Add the cell to the set.
    masm.or32(Imm32(mask), Address(cells, offset));
    masm.jump(exit);

    regs.add(temp);
}

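// Emit the generic post-write barrier: optionally try the constant-cell fast
// path above, then call PostWriteBarrier (or PostGlobalWriteBarrier for the
// global object) through the ABI with the runtime and the object.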
static void
EmitPostWriteBarrier(MacroAssembler& masm, CompileRuntime* runtime, Register objreg,
                     JSObject* maybeConstant, bool isGlobal, AllocatableGeneralRegisterSet& regs)
{
    MOZ_ASSERT_IF(isGlobal, maybeConstant);

    Label callVM;
    Label exit;

    // We already have a fast path to check whether a global is in the store
    // buffer.
    if (!isGlobal && maybeConstant)
        EmitStoreBufferCheckForConstant(masm, &maybeConstant->asTenured(), regs, &exit, &callVM);

    // Call into the VM to barrier the write.
    masm.bind(&callVM);

    Register runtimereg = regs.takeAny();
    masm.mov(ImmPtr(runtime), runtimereg);

    masm.setupUnalignedABICall(regs.takeAny());
    masm.passABIArg(runtimereg);
    masm.passABIArg(objreg);
    if (isGlobal)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostGlobalWriteBarrier));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));

    masm.bind(&exit);
}

void
CodeGenerator::emitPostWriteBarrier(const LAllocation* obj)
{
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());

    Register objreg;
    JSObject* object = nullptr;
    bool isGlobal = false;
    if (obj->isConstant()) {
        object = &obj->toConstant()->toObject();
        isGlobal = isGlobalObject(object);
        objreg = regs.takeAny();
        masm.movePtr(ImmGCPtr(object), objreg);
    } else {
        objreg = ToRegister(obj);
        regs.takeUnchecked(objreg);
    }

    EmitPostWriteBarrier(masm, gen->runtime, objreg, object, isGlobal, regs);
}

void
CodeGenerator::emitPostWriteBarrier(Register objreg)
{
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
    regs.takeUnchecked(objreg);
    EmitPostWriteBarrier(masm, gen->runtime, objreg, nullptr, false, regs);
}

void
CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier* ool)
{
    saveLiveVolatile(ool->lir());
    const LAllocation* obj = ool->object();
    emitPostWriteBarrier(obj);
    restoreLiveVolatile(ool->lir());

    masm.jump(ool->rejoin());
}

void
CodeGenerator::maybeEmitGlobalBarrierCheck(const LAllocation* maybeGlobal, OutOfLineCode* ool)
{
    // Check whether an object is a global that we have already barriered before
    // calling into the VM.
    //
    // We only check for the script's global, not other globals within the same
    // compartment, because we bake in a pointer to realm->globalWriteBarriered
    // and doing that would be invalid for other realms because they could be
    // collected before the Ion code is discarded.

    if (!maybeGlobal->isConstant())
        return;

    JSObject* obj = &maybeGlobal->toConstant()->toObject();
    if (gen->compartment->maybeGlobal() != obj)
        return;

    auto addr = AbsoluteAddress(gen->compartment->addressOfGlobalWriteBarriered());
    masm.branch32(Assembler::NotEqual, addr, Imm32(0), ool->rejoin());
}

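// Shared code for the object and string post-write barriers. The barrier is
// skipped when the written-to object is itself in the nursery or is a global
// that has already been barriered; the out-of-line path is only taken when
// the stored value is a nursery cell.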
template <class LPostBarrierType, MIRType nurseryType>
void
CodeGenerator::visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode* ool)
{
    addOutOfLineCode(ool, lir->mir());

    Register temp = ToTempRegisterOrInvalid(lir->temp());

    if (lir->object()->isConstant()) {
        // Constant nursery objects cannot appear here, see
        // LIRGenerator::visitPostWriteElementBarrier.
        MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
    } else {
        masm.branchPtrInNurseryChunk(Assembler::Equal, ToRegister(lir->object()), temp,
                                     ool->rejoin());
    }

    maybeEmitGlobalBarrierCheck(lir->object(), ool);

    Register value = ToRegister(lir->value());
    if (nurseryType == MIRType::Object) {
        if (lir->mir()->value()->type() == MIRType::ObjectOrNull)
            masm.branchTestPtr(Assembler::Zero, value, value, ool->rejoin());
        else
            MOZ_ASSERT(lir->mir()->value()->type() == MIRType::Object);
    } else {
        MOZ_ASSERT(nurseryType == MIRType::String);
        MOZ_ASSERT(lir->mir()->value()->type() == MIRType::String);
    }
    masm.branchPtrInNurseryChunk(Assembler::Equal, value, temp, ool->entry());

    masm.bind(ool->rejoin());
}

template <class LPostBarrierType>
void
CodeGenerator::visitPostWriteBarrierCommonV(LPostBarrierType* lir, OutOfLineCode* ool)
{
    addOutOfLineCode(ool, lir->mir());

    Register temp = ToTempRegisterOrInvalid(lir->temp());

    if (lir->object()->isConstant()) {
        // Constant nursery objects cannot appear here, see LIRGenerator::visitPostWriteElementBarrier.
        MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
    } else {
        masm.branchPtrInNurseryChunk(Assembler::Equal, ToRegister(lir->object()), temp,
                                     ool->rejoin());
    }

    maybeEmitGlobalBarrierCheck(lir->object(), ool);

    ValueOperand value = ToValue(lir, LPostBarrierType::Input);
    // Bug 1386094 - most callers only need to check for object or string, not
    // both.
    masm.branchValueIsNurseryCell(Assembler::Equal, value, temp, ool->entry());

    masm.bind(ool->rejoin());
}

void
CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
    visitPostWriteBarrierCommon<LPostWriteBarrierO, MIRType::Object>(lir, ool);
}

void
CodeGenerator::visitPostWriteBarrierS(LPostWriteBarrierS* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
    visitPostWriteBarrierCommon<LPostWriteBarrierS, MIRType::String>(lir, ool);
}

void
CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
    visitPostWriteBarrierCommonV(lir, ool);
}

// Out-of-line path to update the store buffer.
class OutOfLineCallPostWriteElementBarrier : public OutOfLineCodeBase<CodeGenerator>
{
    LInstruction* lir_;
    const LAllocation* object_;
    const LAllocation* index_;

  public:
    OutOfLineCallPostWriteElementBarrier(LInstruction* lir, const LAllocation* object,
                                         const LAllocation* index)
      : lir_(lir),
        object_(object),
        index_(index)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitOutOfLineCallPostWriteElementBarrier(this);
    }

    LInstruction* lir() const {
        return lir_;
    }

    const LAllocation* object() const {
        return object_;
    }

    const LAllocation* index() const {
        return index_;
    }
};

void
CodeGenerator::visitOutOfLineCallPostWriteElementBarrier(OutOfLineCallPostWriteElementBarrier* ool)
{
    saveLiveVolatile(ool->lir());

    const LAllocation* obj = ool->object();
    const LAllocation* index = ool->index();

    Register objreg = obj->isConstant() ? InvalidReg : ToRegister(obj);
    Register indexreg = ToRegister(index);

    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
    regs.takeUnchecked(indexreg);

    if (obj->isConstant()) {
        objreg = regs.takeAny();
        masm.movePtr(ImmGCPtr(&obj->toConstant()->toObject()), objreg);
    } else {
        regs.takeUnchecked(objreg);
    }

    Register runtimereg = regs.takeAny();
    masm.setupUnalignedABICall(runtimereg);
    masm.mov(ImmPtr(gen->runtime), runtimereg);
    masm.passABIArg(runtimereg);
    masm.passABIArg(objreg);
    masm.passABIArg(indexreg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (PostWriteElementBarrier<IndexInBounds::Maybe>)));

    restoreLiveVolatile(ool->lir());

    masm.jump(ool->rejoin());
}

void
CodeGenerator::visitPostWriteElementBarrierO(LPostWriteElementBarrierO* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
    visitPostWriteBarrierCommon<LPostWriteElementBarrierO, MIRType::Object>(lir, ool);
}

void
CodeGenerator::visitPostWriteElementBarrierS(LPostWriteElementBarrierS* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
    visitPostWriteBarrierCommon<LPostWriteElementBarrierS, MIRType::String>(lir, ool);
}

void
CodeGenerator::visitPostWriteElementBarrierV(LPostWriteElementBarrierV* lir)
{
    auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
    visitPostWriteBarrierCommonV(lir, ool);
}

void
CodeGenerator::visitCallNative(LCallNative* call)
{
    WrappedFunction* target = call->getSingleTarget();
    MOZ_ASSERT(target);
    MOZ_ASSERT(target->isNativeWithCppEntry());

    int callargslot = call->argslot();
    int unusedStack = StackOffsetOfPassedArg(callargslot);

    // Registers used for callWithABI() argument-passing.
    const Register argContextReg   = ToRegister(call->getArgContextReg());
    const Register argUintNReg     = ToRegister(call->getArgUintNReg());
    const Register argVpReg        = ToRegister(call->getArgVpReg());

    // Misc. temporary registers.
    const Register tempReg = ToRegister(call->getTempReg());

    DebugOnly<uint32_t> initialStack = masm.framePushed();

    masm.checkStackAlignment();

    // Native functions have the signature:
    //  bool (*)(JSContext*, unsigned, Value* vp)
    // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Allocate space for the outparam, moving the StackPointer to what will be &vp[1].
    masm.adjustStack(unusedStack);

    // Push a Value containing the callee object: natives are allowed to access
    // their callee before setting the return value. The StackPointer is moved
    // to &vp[0].
    masm.Push(ObjectValue(*target->rawJSFunction()));

    // Preload arguments into registers.
    masm.loadJSContext(argContextReg);
    masm.move32(Imm32(call->numActualArgs()), argUintNReg);
    masm.moveStackPtrTo(argVpReg);

    masm.Push(argUintNReg);

    // Construct native exit frame.
    uint32_t safepointOffset = masm.buildFakeExitFrame(tempReg);
    masm.enterFakeExitFrameForNative(argContextReg, tempReg, call->mir()->isConstructing());

    markSafepointAt(safepointOffset, call);

    emitTracelogStartEvent(TraceLogger_Call);

    // Construct and execute call.
    masm.setupUnalignedABICall(tempReg);
    masm.passABIArg(argContextReg);
    masm.passABIArg(argUintNReg);
    masm.passABIArg(argVpReg);
    JSNative native = target->native();
    if (call->ignoresReturnValue() && target->hasJitInfo()) {
        const JSJitInfo* jitInfo = target->jitInfo();
        if (jitInfo->type() == JSJitInfo::IgnoresReturnValueNative)
            native = jitInfo->ignoresReturnValueMethod;
    }
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, native), MoveOp::GENERAL,
                     CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    emitTracelogStopEvent(TraceLogger_Call);

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.failureLabel());

    // Load the outparam vp[0] into output register(s).
    masm.loadValue(Address(masm.getStackPointer(), NativeExitFrameLayout::offsetOfResult()), JSReturnOperand);

    // Until C++ code is instrumented against Spectre, prevent speculative
    // execution from returning any private data.
    if (JitOptions.spectreJitToCxxCalls && !call->mir()->ignoresReturnValue() &&
        call->mir()->hasLiveDefUses())
    {
        masm.speculationBarrier();
    }

    // The next instruction is removing the footer of the exit frame, so there
    // is no need for leaveFakeExitFrame.

    // Move the StackPointer back to its original location, unwinding the native exit frame.
    masm.adjustStack(NativeExitFrameLayout::Size() - unusedStack);
    MOZ_ASSERT(masm.framePushed() == initialStack);
}

static void
LoadDOMPrivate(MacroAssembler& masm, Register obj, Register priv, DOMObjectKind kind)
{
    // Load the value in DOM_OBJECT_SLOT for a native or proxy DOM object. This
    // will be in the first slot but may be fixed or non-fixed.
    MOZ_ASSERT(obj != priv);

    // Check if it's a proxy.
    Label isProxy, done;
    if (kind == DOMObjectKind::Unknown)
        masm.branchTestObjectIsProxy(true, obj, priv, &isProxy);

    if (kind != DOMObjectKind::Proxy) {
        // If it's a native object, the value must be in a fixed slot.
        masm.debugAssertObjHasFixedSlots(obj, priv);
        masm.loadPrivate(Address(obj, NativeObject::getFixedSlotOffset(0)), priv);
        if (kind == DOMObjectKind::Unknown)
            masm.jump(&done);
    }

    if (kind != DOMObjectKind::Native) {
        masm.bind(&isProxy);
#ifdef DEBUG
        // Sanity check: it must be a DOM proxy.
        Label isDOMProxy;
        masm.branchTestProxyHandlerFamily(Assembler::Equal, obj, priv,
                                          GetDOMProxyHandlerFamily(), &isDOMProxy);
        masm.assumeUnreachable("Expected a DOM proxy");
        masm.bind(&isDOMProxy);
#endif
        masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), priv);
        masm.loadPrivate(Address(priv, detail::ProxyReservedSlots::offsetOfSlot(0)), priv);
    }

    masm.bind(&done);
}

void
CodeGenerator::visitCallDOMNative(LCallDOMNative* call)
{
    WrappedFunction* target = call->getSingleTarget();
    MOZ_ASSERT(target);
    MOZ_ASSERT(target->isNative());
    MOZ_ASSERT(target->hasJitInfo());
    MOZ_ASSERT(call->mir()->isCallDOMNative());

    int callargslot = call->argslot();
    int unusedStack = StackOffsetOfPassedArg(callargslot);

    // Registers used for callWithABI() argument-passing.
    const Register argJSContext = ToRegister(call->getArgJSContext());
    const Register argObj       = ToRegister(call->getArgObj());
    const Register argPrivate   = ToRegister(call->getArgPrivate());
    const Register argArgs      = ToRegister(call->getArgArgs());

    DebugOnly<uint32_t> initialStack = masm.framePushed();

    masm.checkStackAlignment();

    // DOM methods have the signature:
    //  bool (*)(JSContext*, HandleObject, void* private, const JSJitMethodCallArgs& args)
    // Where args is initialized from an argc and a vp, vp[0] is space for an
    // outparam and the callee, vp[1] is |this|, and vp[2] onward are the
    // function arguments.  Note that args stores the argv, not the vp, and
    // argv == vp + 2.

    // Nestle the stack up against the pushed arguments, leaving StackPointer at
    // &vp[1]
    masm.adjustStack(unusedStack);
    // argObj is filled with the extracted object, then returned.
    Register obj = masm.extractObject(Address(masm.getStackPointer(), 0), argObj);
    MOZ_ASSERT(obj == argObj);

    // Push a Value containing the callee object: natives are allowed to access their callee before
    // setting the return value. After this the StackPointer points to &vp[0].
    masm.Push(ObjectValue(*target->rawJSFunction()));

    // Now compute the argv value.  Since StackPointer is pointing to &vp[0] and
    // argv is &vp[2] we just need to add 2*sizeof(Value) to the current
    // StackPointer.
    JS_STATIC_ASSERT(JSJitMethodCallArgsTraits::offsetOfArgv == 0);
    JS_STATIC_ASSERT(JSJitMethodCallArgsTraits::offsetOfArgc ==
                     IonDOMMethodExitFrameLayoutTraits::offsetOfArgcFromArgv);
    masm.computeEffectiveAddress(Address(masm.getStackPointer(), 2 * sizeof(Value)), argArgs);

    LoadDOMPrivate(masm, obj, argPrivate, static_cast<MCallDOMNative*>(call->mir())->objectKind());

    // Push argc from the call instruction into what will become the IonExitFrame
    masm.Push(Imm32(call->numActualArgs()));

    // Push our argv onto the stack
    masm.Push(argArgs);
    // And store our JSJitMethodCallArgs* in argArgs.
    masm.moveStackPtrTo(argArgs);

    // Push |this| object for passing HandleObject. We push after argc to
    // maintain the same sp-relative location of the object pointer with other
    // DOMExitFrames.
    masm.Push(argObj);
    masm.moveStackPtrTo(argObj);

    // Construct native exit frame.
    uint32_t safepointOffset = masm.buildFakeExitFrame(argJSContext);
    masm.loadJSContext(argJSContext);
    masm.enterFakeExitFrame(argJSContext, argJSContext, ExitFrameType::IonDOMMethod);

    markSafepointAt(safepointOffset, call);

    // Construct and execute call.
    masm.setupUnalignedABICall(argJSContext);
    masm.loadJSContext(argJSContext);
    masm.passABIArg(argJSContext);
    masm.passABIArg(argObj);
    masm.passABIArg(argPrivate);
    masm.passABIArg(argArgs);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, target->jitInfo()->method), MoveOp::GENERAL,
                     CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    if (target->jitInfo()->isInfallible) {
        masm.loadValue(Address(masm.getStackPointer(), IonDOMMethodExitFrameLayout::offsetOfResult()),
                       JSReturnOperand);
    } else {
        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // Load the outparam vp[0] into output register(s).
        masm.loadValue(Address(masm.getStackPointer(), IonDOMMethodExitFrameLayout::offsetOfResult()),
                       JSReturnOperand);
    }

    // Until C++ code is instrumented against Spectre, prevent speculative
    // execution from returning any private data.
    if (JitOptions.spectreJitToCxxCalls && call->mir()->hasLiveDefUses())
        masm.speculationBarrier();

    // The next instruction is removing the footer of the exit frame, so there
    // is no need for leaveFakeExitFrame.

    // Move the StackPointer back to its original location, unwinding the native exit frame.
    masm.adjustStack(IonDOMMethodExitFrameLayout::Size() - unusedStack);
    MOZ_ASSERT(masm.framePushed() == initialStack);
}

typedef bool (*GetIntrinsicValueFn)(JSContext* cx, HandlePropertyName, MutableHandleValue);
static const VMFunction GetIntrinsicValueInfo =
    FunctionInfo<GetIntrinsicValueFn>(GetIntrinsicValue, "GetIntrinsicValue");

void
CodeGenerator::visitCallGetIntrinsicValue(LCallGetIntrinsicValue* lir)
{
    pushArg(ImmGCPtr(lir->mir()->name()));
    callVM(GetIntrinsicValueInfo, lir);
}

typedef bool (*InvokeFunctionFn)(JSContext*, HandleObject, bool, bool, uint32_t, Value*,
                                 MutableHandleValue);
static const VMFunction InvokeFunctionInfo =
    FunctionInfo<InvokeFunctionFn>(InvokeFunction, "InvokeFunction");

void
CodeGenerator::emitCallInvokeFunction(LInstruction* call, Register calleereg,
                                      bool constructing, bool ignoresReturnValue,
                                      uint32_t argc, uint32_t unusedStack)
{
    // Nestle %esp up to the argument vector.
    // Each path must account for framePushed_ separately, for callVM to be valid.
    masm.freeStack(unusedStack);

    pushArg(masm.getStackPointer()); // argv.
    pushArg(Imm32(argc));            // argc.
    pushArg(Imm32(ignoresReturnValue));
    pushArg(Imm32(constructing));    // constructing.
    pushArg(calleereg);              // JSFunction*.

    callVM(InvokeFunctionInfo, call);

    // Un-nestle %esp from the argument vector. No prefix was pushed.
    masm.reserveStack(unusedStack);
}

void
CodeGenerator::visitCallGeneric(LCallGeneric* call)
{
    Register calleereg = ToRegister(call->getFunction());
    Register objreg    = ToRegister(call->getTempObject());
    Register nargsreg  = ToRegister(call->getNargsReg());
    uint32_t unusedStack = StackOffsetOfPassedArg(call->argslot());
    Label invoke, thunk, makeCall, end;

    // Known-target case is handled by LCallKnown.
    MOZ_ASSERT(!call->hasSingleTarget());

    masm.checkStackAlignment();

    // Guard that calleereg is actually a function object.
    if (call->mir()->needsClassCheck()) {
        masm.branchTestObjClass(Assembler::NotEqual, calleereg, &JSFunction::class_, nargsreg,
                                calleereg, &invoke);
    }

    // Guard that calleereg is an interpreted function with a JSScript or a
    // wasm function.
    // If we are constructing, also ensure the callee is a constructor.
    if (call->mir()->isConstructing()) {
        masm.branchIfNotInterpretedConstructor(calleereg, nargsreg, &invoke);
    } else {
        masm.branchIfFunctionHasNoJitEntry(calleereg, /* isConstructing */ false, &invoke);
        masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, calleereg, objreg,
                                &invoke);
    }

    if (call->mir()->needsArgCheck())
        masm.loadJitCodeRaw(calleereg, objreg);
    else
        masm.loadJitCodeNoArgCheck(calleereg, objreg);

    // Nestle the StackPointer up to the argument vector.
    masm.freeStack(unusedStack);

    // Construct the IonFramePrefix.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              JitFrameLayout::Size());
    masm.Push(Imm32(call->numActualArgs()));
    masm.PushCalleeToken(calleereg, call->mir()->isConstructing());
    masm.Push(Imm32(descriptor));

    // Check whether the provided arguments satisfy target argc.
    // We cannot have lowered to LCallGeneric with a known target. Assert that we didn't
    // add any undefineds in IonBuilder. NB: MCall::numStackArgs includes |this|.
    DebugOnly<unsigned> numNonArgsOnStack = 1 + call->isConstructing();
    MOZ_ASSERT(call->numActualArgs() == call->mir()->numStackArgs() - numNonArgsOnStack);
    masm.load16ZeroExtend(Address(calleereg, JSFunction::offsetOfNargs()), nargsreg);
    masm.branch32(Assembler::Above, nargsreg, Imm32(call->numActualArgs()), &thunk);
    masm.jump(&makeCall);

    // Argument fixup needed. Load the ArgumentsRectifier.
    masm.bind(&thunk);
    {
        TrampolinePtr argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
        masm.movePtr(argumentsRectifier, objreg);
    }

    // Finally call the function in objreg.
    masm.bind(&makeCall);
    uint32_t callOffset = masm.callJit(objreg);
    markSafepointAt(callOffset, call);

    // Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
    // The return address has already been removed from the Ion frame.
    int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
    masm.adjustStack(prefixGarbage - unusedStack);
    masm.jump(&end);

    // Handle uncompiled or native functions.
    masm.bind(&invoke);
    emitCallInvokeFunction(call, calleereg, call->isConstructing(), call->ignoresReturnValue(),
                           call->numActualArgs(), unusedStack);

    masm.bind(&end);

    // If the return value of the constructing function is Primitive,
    // replace the return value with the Object from CreateThis.
    if (call->mir()->isConstructing()) {
        Label notPrimitive;
        masm.branchTestPrimitive(Assembler::NotEqual, JSReturnOperand, &notPrimitive);
        masm.loadValue(Address(masm.getStackPointer(), unusedStack), JSReturnOperand);
        masm.bind(&notPrimitive);
    }
}

typedef bool (*InvokeFunctionShuffleFn)(JSContext*, HandleObject, uint32_t, uint32_t, Value*,
                                        MutableHandleValue);
static const VMFunction InvokeFunctionShuffleInfo =
    FunctionInfo<InvokeFunctionShuffleFn>(InvokeFunctionShuffleNewTarget,
                                          "InvokeFunctionShuffleNewTarget");
void
CodeGenerator::emitCallInvokeFunctionShuffleNewTarget(LCallKnown* call, Register calleeReg,
                                                      uint32_t numFormals, uint32_t unusedStack)
{
    masm.freeStack(unusedStack);

    pushArg(masm.getStackPointer());
    pushArg(Imm32(numFormals));
    pushArg(Imm32(call->numActualArgs()));
    pushArg(calleeReg);

    callVM(InvokeFunctionShuffleInfo, call);

    masm.reserveStack(unusedStack);
}

void
CodeGenerator::visitCallKnown(LCallKnown* call)
{
    Register calleereg = ToRegister(call->getFunction());
    Register objreg    = ToRegister(call->getTempObject());
    uint32_t unusedStack = StackOffsetOfPassedArg(call->argslot());
    WrappedFunction* target = call->getSingleTarget();

    // Native single targets (except wasm) are handled by LCallNative.
    MOZ_ASSERT(!target->isNativeWithCppEntry());
    // Missing arguments must have been explicitly appended by the IonBuilder.
    DebugOnly<unsigned> numNonArgsOnStack = 1 + call->isConstructing();
    MOZ_ASSERT(target->nargs() <= call->mir()->numStackArgs() - numNonArgsOnStack);

    MOZ_ASSERT_IF(call->isConstructing(), target->isConstructor());

    masm.checkStackAlignment();

    if (target->isClassConstructor() && !call->isConstructing()) {
        emitCallInvokeFunction(call, calleereg, call->isConstructing(), call->ignoresReturnValue(),
                               call->numActualArgs(), unusedStack);
        return;
    }

    MOZ_ASSERT_IF(target->isClassConstructor(), call->isConstructing());

    Label uncompiled;
    if (!target->isNativeWithJitEntry()) {
        // The calleereg is known to be a non-native function, but might point
        // to a LazyScript instead of a JSScript.
        masm.branchIfFunctionHasNoJitEntry(calleereg, call->isConstructing(), &uncompiled);
    }

    if (call->mir()->needsArgCheck())
        masm.loadJitCodeRaw(calleereg, objreg);
    else
        masm.loadJitCodeNoArgCheck(calleereg, objreg);

    // Nestle the StackPointer up to the argument vector.
    masm.freeStack(unusedStack);

    // Construct the IonFramePrefix.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              JitFrameLayout::Size());
    masm.Push(Imm32(call->numActualArgs()));
    masm.PushCalleeToken(calleereg, call->mir()->isConstructing());
    masm.Push(Imm32(descriptor));

    // Finally call the function in objreg.
    uint32_t callOffset = masm.callJit(objreg);
    markSafepointAt(callOffset, call);

    // Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
    // The return address has already been removed from the Ion frame.
    int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
    masm.adjustStack(prefixGarbage - unusedStack);

    if (uncompiled.used()) {
        Label end;
        masm.jump(&end);

        // Handle uncompiled functions.
        masm.bind(&uncompiled);
        if (call->isConstructing() && target->nargs() > call->numActualArgs()) {
            emitCallInvokeFunctionShuffleNewTarget(call, calleereg, target->nargs(), unusedStack);
        } else {
            emitCallInvokeFunction(call, calleereg, call->isConstructing(),
                                   call->ignoresReturnValue(), call->numActualArgs(), unusedStack);
        }

        masm.bind(&end);
    }

    // If the return value of the constructing function is Primitive,
    // replace the return value with the Object from CreateThis.
    if (call->mir()->isConstructing()) {
        Label notPrimitive;
        masm.branchTestPrimitive(Assembler::NotEqual, JSReturnOperand, &notPrimitive);
        masm.loadValue(Address(masm.getStackPointer(), unusedStack), JSReturnOperand);
        masm.bind(&notPrimitive);
    }
}

template<typename T>
void
CodeGenerator::emitCallInvokeFunction(T* apply, Register extraStackSize)
{
    Register objreg = ToRegister(apply->getTempObject());
    MOZ_ASSERT(objreg != extraStackSize);

    // Push the space used by the arguments.
    masm.moveStackPtrTo(objreg);
    masm.Push(extraStackSize);

    pushArg(objreg);                           // argv.
    pushArg(ToRegister(apply->getArgc()));     // argc.
    pushArg(Imm32(false));                     // ignoresReturnValue.
    pushArg(Imm32(false));                     // isConstructing.
    pushArg(ToRegister(apply->getFunction())); // JSFunction*.

    // This specialization of callVM restores the extraStackSize after the call.
    callVM(InvokeFunctionInfo, apply, &extraStackSize);

    masm.Pop(extraStackSize);
}

// Do not bail out after the execution of this function, since the stack no
// longer corresponds to what is expected by the snapshots.
void
CodeGenerator::emitAllocateSpaceForApply(Register argcreg, Register extraStackSpace, Label* end)
{
    // Initialize the loop counter and the stack usage from argc.
    masm.movePtr(argcreg, extraStackSpace);

    // Align the JitFrameLayout on the JitStackAlignment.
    if (JitStackValueAlignment > 1) {
        MOZ_ASSERT(frameSize() % JitStackAlignment == 0,
            "Stack padding assumes that the frameSize is correct");
        MOZ_ASSERT(JitStackValueAlignment == 2);
        Label noPaddingNeeded;
        // if the number of arguments is odd, then we do not need any padding.
        masm.branchTestPtr(Assembler::NonZero, argcreg, Imm32(1), &noPaddingNeeded);
        masm.addPtr(Imm32(1), extraStackSpace);
        masm.bind(&noPaddingNeeded);
    }

    // Reserve space for copying the arguments.
    NativeObject::elementsSizeMustNotOverflow();
    masm.lshiftPtr(Imm32(ValueShift), extraStackSpace);
    masm.subFromStackPtr(extraStackSpace);

#ifdef DEBUG
    // Put a magic value in the space reserved for padding. Note, this code
    // cannot be merged with the previous test, as not all architectures can
    // write below their stack pointers.
    if (JitStackValueAlignment > 1) {
        MOZ_ASSERT(JitStackValueAlignment == 2);
        Label noPaddingNeeded;
        // if the number of arguments is odd, then we do not need any padding.
        masm.branchTestPtr(Assembler::NonZero, argcreg, Imm32(1), &noPaddingNeeded);
        BaseValueIndex dstPtr(masm.getStackPointer(), argcreg);
        masm.storeValue(MagicValue(JS_ARG_POISON), dstPtr);
        masm.bind(&noPaddingNeeded);
    }
#endif

    // Skip the copy of arguments if there are none.
    masm.branchTestPtr(Assembler::Zero, argcreg, argcreg, end);
}

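// Copy |argvIndex| Values from argvSrcBase (at argvSrcOffset) to the stack
// (at argvDstOffset), one Value per iteration, counting argvIndex down to
// zero.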
// Destroys argvIndex and copyreg.
void
CodeGenerator::emitCopyValuesForApply(Register argvSrcBase, Register argvIndex, Register copyreg,
                                      size_t argvSrcOffset, size_t argvDstOffset)
{
    Label loop;
    masm.bind(&loop);

    // As argvIndex is off by 1, and we use the decBranchPtr instruction
    // to loop back, we have to subtract the size of the word that is
    // copied.
    BaseValueIndex srcPtr(argvSrcBase, argvIndex, argvSrcOffset - sizeof(void*));
    BaseValueIndex dstPtr(masm.getStackPointer(), argvIndex, argvDstOffset - sizeof(void*));
    masm.loadPtr(srcPtr, copyreg);
    masm.storePtr(copyreg, dstPtr);

    // Handle 32 bits architectures.
    if (sizeof(Value) == 2 * sizeof(void*)) {
        BaseValueIndex srcPtrLow(argvSrcBase, argvIndex, argvSrcOffset - 2 * sizeof(void*));
        BaseValueIndex dstPtrLow(masm.getStackPointer(), argvIndex, argvDstOffset - 2 * sizeof(void*));
        masm.loadPtr(srcPtrLow, copyreg);
        masm.storePtr(copyreg, dstPtrLow);
    }

    masm.decBranchPtr(Assembler::NonZero, argvIndex, Imm32(1), &loop);
}

void
CodeGenerator::emitPopArguments(Register extraStackSpace)
{
    // Pop |this| and Arguments.
    masm.freeStack(extraStackSpace);
}

void
CodeGenerator::emitPushArguments(LApplyArgsGeneric* apply, Register extraStackSpace)
{
    // Holds the function nargs. Initially the number of args to the caller.
    Register argcreg = ToRegister(apply->getArgc());
    Register copyreg = ToRegister(apply->getTempObject());

    Label end;
    emitAllocateSpaceForApply(argcreg, extraStackSpace, &end);

    // We are making a copy of the arguments which are above the JitFrameLayout
    // of the current Ion frame.
    //
    // [arg1] [arg0] <- src [this] [JitFrameLayout] [.. frameSize ..] [pad] [arg1] [arg0] <- dst

    // Compute the source and destination offsets into the stack.
    size_t argvSrcOffset = frameSize() + JitFrameLayout::offsetOfActualArgs();
    size_t argvDstOffset = 0;

    // Save the extra stack space, and re-use the register as a base.
    masm.push(extraStackSpace);
    Register argvSrcBase = extraStackSpace;
    argvSrcOffset += sizeof(void*);
    argvDstOffset += sizeof(void*);

    // Save the actual number of arguments, and re-use the register as an index register.
    masm.push(argcreg);
    Register argvIndex = argcreg;
    argvSrcOffset += sizeof(void*);
    argvDstOffset += sizeof(void*);

    // srcPtr = (StackPointer + extraStackSpace) + argvSrcOffset
    // dstPtr = (StackPointer                  ) + argvDstOffset
    masm.addStackPtrTo(argvSrcBase);

    // Copy arguments.
    emitCopyValuesForApply(argvSrcBase, argvIndex, copyreg, argvSrcOffset, argvDstOffset);

    // Restore argcreg and the extra stack space counter.
    masm.pop(argcreg);
    masm.pop(extraStackSpace);

    // Join with all arguments copied and the extra stack usage computed.
    masm.bind(&end);

    // Push |this|.
    masm.addPtr(Imm32(sizeof(Value)), extraStackSpace);
    masm.pushValue(ToValue(apply, LApplyArgsGeneric::ThisIndex));
}

void
CodeGenerator::emitPushArguments(LApplyArrayGeneric* apply, Register extraStackSpace)
{
    Label noCopy, epilogue;
    Register tmpArgc = ToRegister(apply->getTempObject());
    Register elementsAndArgc = ToRegister(apply->getElements());

    // Invariants guarded in the caller:
    //  - the array is not too long
    //  - the array length equals its initialized length

    // The array length is our argc for the purposes of allocating space.
    Address length(ToRegister(apply->getElements()), ObjectElements::offsetOfLength());
    masm.load32(length, tmpArgc);

    // Allocate space for the values.
    emitAllocateSpaceForApply(tmpArgc, extraStackSpace, &noCopy);

    // Copy the values.  This code is skipped entirely if there are
    // no values.
    size_t argvDstOffset = 0;

    Register argvSrcBase = elementsAndArgc; // Elements value

    masm.push(extraStackSpace);
    Register copyreg = extraStackSpace;
    argvDstOffset += sizeof(void*);

    masm.push(tmpArgc);
    Register argvIndex = tmpArgc;
    argvDstOffset += sizeof(void*);

    // Copy the array elements onto the reserved stack space.
    emitCopyValuesForApply(argvSrcBase, argvIndex, copyreg, 0, argvDstOffset);

    // Restore.
    masm.pop(elementsAndArgc);
    masm.pop(extraStackSpace);
    masm.jump(&epilogue);

    // Clear argc if we skipped the copy step.
    masm.bind(&noCopy);
    masm.movePtr(ImmPtr(0), elementsAndArgc);

    // Join with all arguments copied and the extra stack usage computed.
    // Note, "elements" has become "argc".
    masm.bind(&epilogue);

    // Push |this|.
    masm.addPtr(Imm32(sizeof(Value)), extraStackSpace);
    masm.pushValue(ToValue(apply, LApplyArrayGeneric::ThisIndex));
}

template<typename T>
void
CodeGenerator::emitApplyGeneric(T* apply)
{
    // Holds the function object.
    Register calleereg = ToRegister(apply->getFunction());

    // Temporary register for modifying the function object.
    Register objreg = ToRegister(apply->getTempObject());
    Register extraStackSpace = ToRegister(apply->getTempStackCounter());

    // Holds the number of arguments, either provided as an operand or (for
    // ApplyArray) computed in the argument pusher.
    Register argcreg = ToRegister(apply->getArgc());

    // Unless already known, guard that calleereg is actually a function object.
    if (!apply->hasSingleTarget()) {
        Label bail;
        masm.branchTestObjClass(Assembler::NotEqual, calleereg, &JSFunction::class_, objreg,
                                calleereg, &bail);
        bailoutFrom(&bail, apply->snapshot());
    }

    // Copy the arguments of the current function.
    //
    // In the case of ApplyArray, also compute argc: the argc register
    // and the elements register are the same; argc must not be
    // referenced before the call to emitPushArguments() and elements
    // must not be referenced after it returns.
    //
    // objreg is dead across this call.
    //
    // extraStackSpace is garbage on entry and defined on exit.
    emitPushArguments(apply, extraStackSpace);

    masm.checkStackAlignment();

    // If the function is native, only emit the call to InvokeFunction.
    if (apply->hasSingleTarget() && apply->getSingleTarget()->isNativeWithCppEntry()) {
        emitCallInvokeFunction(apply, extraStackSpace);
        emitPopArguments(extraStackSpace);
        return;
    }

    Label end, invoke;

    // Guard that calleereg is an interpreted function with a JSScript.
    masm.branchIfFunctionHasNoJitEntry(calleereg, /* constructing */ false, &invoke);

    // Guard that calleereg is not a class constructor.
    masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
                            calleereg, objreg, &invoke);

    // Knowing that calleereg is a non-native function, load jitcode.
    masm.loadJitCodeRaw(calleereg, objreg);

    // Call with an Ion frame or a rectifier frame.
    {
        // Create the frame descriptor.
        unsigned pushed = masm.framePushed();
        Register stackSpace = extraStackSpace;
        masm.addPtr(Imm32(pushed), stackSpace);
        masm.makeFrameDescriptor(stackSpace, JitFrame_IonJS, JitFrameLayout::Size());
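        // The descriptor packs the frame size (now in stackSpace) together
        // with the frame type and header size; the size is recovered after
        // the call by shifting right by FRAMESIZE_SHIFT.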

        masm.Push(argcreg);
        masm.Push(calleereg);
        masm.Push(stackSpace); // descriptor
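        // Together with the return address pushed by callJit below, these form
        // the callee's JitFrameLayout (from higher to lower addresses: argc,
        // callee token, descriptor, return address).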

        Label underflow, rejoin;

        // Check whether the number of provided arguments covers the target's
        // formal argument count (nargs); if not, the rectifier is needed.
        if (!apply->hasSingleTarget()) {
            Register nformals = extraStackSpace;
            masm.load16ZeroExtend(Address(calleereg, JSFunction::offsetOfNargs()), nformals);
            masm.branch32(Assembler::Below, argcreg, nformals, &underflow);
        } else {
            masm.branch32(Assembler::Below, argcreg, Imm32(apply->getSingleTarget()->nargs()),
                          &underflow);
        }

        // Skip the construction of the rectifier frame because we have no
        // underflow.
        masm.jump(&rejoin);

        // Argument fixup needed. Get ready to call the argumentsRectifier.
        {
            masm.bind(&underflow);

            // Hardcode the address of the argumentsRectifier code.
            TrampolinePtr argumentsRectifier = gen->jitRuntime()->getArgumentsRectifier();
            masm.movePtr(argumentsRectifier, objreg);
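            // The rectifier frame will pad the missing formal arguments with
            // |undefined| before invoking the function.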
        }

        masm.bind(&rejoin);

        // Finally call the function in objreg, as assigned by one of the paths above.
        uint32_t callOffset = masm.callJit(objreg);
        markSafepointAt(callOffset, apply);

        // Recover the size of the pushed arguments (the extra stack space)
        // from the frame descriptor.
        masm.loadPtr(Address(masm.getStackPointer(), 0), stackSpace);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), stackSpace);
        masm.subPtr(Imm32(pushed), stackSpace);

        // Increment to remove IonFramePrefix; decrement to fill FrameSizeClass.
        // The return address has already been removed from the Ion frame.
        int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
        masm.adjustStack(prefixGarbage);
        masm.jump(&end);
    }

    // Handle uncompiled or native functions.
    {
        masm.bind(&invoke);
        emitCallInvokeFunction(apply, extraStackSpace);
    }

    // Pop arguments and continue.
    masm.bind(&end);
    emitPopArguments(extraStackSpace);
}

void
CodeGenerator::visitApplyArgsGeneric(LApplyArgsGeneric* apply)
{
    // Limit the number of parameters we can handle to a number that does not risk
    // us allocating too much stack, notably on Windows where there is a 4K guard page
    // that has to be touched to extend the stack.  See bug 1351278.  The value "3000"
    // is the size of the guard page minus an arbitrary, but large, safety margin.
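    // With 8-byte Values, this works out to a cap of 3000 / 8 = 375 arguments.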

    LSnapshot* snapshot = apply->snapshot();
    Register argcreg = ToRegister(apply->getArgc());

    uint32_t limit = 3000 / sizeof(Value);
    bailoutCmp32(Assembler::Above, argcreg, Imm32(limit), snapshot);

    emitApplyGeneric(apply);
}

void
CodeGenerator::visitApplyArrayGeneric(LApplyArrayGeneric* apply)
{
    LSnapshot* snapshot = apply->snapshot();
    Register tmp = ToRegister(apply->getTempObject());

    Address length(ToRegister(apply->getElements()), ObjectElements::offsetOfLength());
    masm.load32(length, tmp);

    // See comment in visitApplyArgsGeneric, above.

    uint32_t limit = 3000 / sizeof(Value);
    bailoutCmp32(Assembler::Above, tmp, Imm32(limit), snapshot);

    // Ensure that the array does not contain an uninitialized tail: bail out
    // unless the length (still in tmp) equals the initialized length.

    Address initializedLength(ToRegister(apply->getElements()),
                              ObjectElements::offsetOfInitializedLength());
    masm.sub32(initializedLength, tmp);
    bailoutCmp32(Assembler::NotEqual, tmp, Imm32(0), snapshot);

    emitApplyGeneric(apply);
}

void
CodeGenerator::visitBail(LBail* lir)
{
    bailout(lir->snapshot());
}

void
CodeGenerator::visitUnreachable(LUnreachable* lir)
{
    masm.assumeUnreachable("end-of-block assumed unreachable");
}

void
CodeGenerator::visitEncodeSnapshot(LEncodeSnapshot* lir)
{
    encode(lir->snapshot());
}

void
CodeGenerator::visitGetDynamicName(LGetDynamicName* lir)
{
    Register envChain = ToRegister(lir->getEnvironmentChain());
    Register name = ToRegister(lir->getName());
    Register temp1 = ToRegister(lir->temp1());
    Register temp2 = ToRegister(lir->temp2());
    Register temp3 = ToRegister(lir->temp3());

    masm.loadJSContext(temp3);

    /* Make space for the outparam. */
    masm.adjustStack(-int32_t(sizeof(Value)));
    masm.moveStackPtrTo(temp2);
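    // temp2 now holds the address of the reserved outparam slot.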

    masm.setupUnalignedABICall(temp1);
    masm.passABIArg(temp3);
    masm.passABIArg(envChain);
    masm.passABIArg(name);
    masm.passABIArg(temp2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, GetDynamicName));

    const ValueOperand out = ToOutValue(lir);

    masm.loadValue(Address(masm.getStackPointer(), 0), out);
    masm.adjustStack(sizeof(Value));

    Label undefined;
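    // GetDynamicName stores |undefined| when it cannot resolve the name on
    // this fast path; bail out in that case.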
    masm.branchTestUndefined(Assembler::Equal, out, &undefined);
    bailoutFrom(&undefined, lir->snapshot());
}

typedef bool (*DirectEvalSFn)(JSContext*, HandleObject, HandleScript, HandleValue,
                              HandleString, jsbytecode*, MutableHandleValue);
static const VMFunction DirectEvalStringInfo =
    FunctionInfo<DirectEvalSFn>(DirectEvalStringFromIon, "DirectEvalStringFromIon");

void
CodeGenerator::visitCallDirectEval(LCallDirectEval* lir)
{
    Register envChain = ToRegister(lir->getEnvironmentChain());
    Register string = ToRegister(lir->getString());

    pushArg(ImmPtr(lir->mir()->pc()));
    pushArg(string);
    pushArg(ToValue(lir, LCallDirectEval::NewTarget));
    pushArg(ImmGCPtr(current->mir()->info().script()));
    pushArg(envChain);

    callVM(DirectEvalStringInfo, lir);
}

void
CodeGenerator::generateArgumentsChecks(bool assert)
{
    // This function can be used in the normal way, to check the argument types
    // before entering the function and bail out when the arguments don't match.
    // For debugging purposes, it can also be used to assert that the arguments
    // are correct: on failure it will hit a breakpoint.

    MIRGraph& mir = gen->graph();
    MResumePoint* rp = mir.entryResumePoint();

    // No registers are allocated yet, so it's safe to grab anything.
    AllocatableGeneralRegisterSet temps(GeneralRegisterSet::All());
    Register temp1 = temps.takeAny();
    Register temp2 = temps.takeAny();

    const CompileInfo& info = gen->info();

    Label miss;
    for (uint32_t i = info.startArgSlot(); i < info.endArgSlot(); i++) {
        // All initial parameters are guaranteed to be MParameters.
        MParameter* param = rp->getOperand(i)->toParameter();
        const TypeSet* types = param->resultTypeSet();
        if (!types || types->unknown())
            continue;

#ifndef JS_CODEGEN_ARM64
        // Calculate the offset on the stack of the argument.
        // (i - info.startArgSlot())    - Compute index of arg within arg vector.
        // ... * sizeof(Value)          - Scale by value size.
        // ArgToStackOffset(...)        - Compute displacement within arg vector.
        int32_t offset = ArgToStackOffset((i - info.startArgSlot()) * sizeof(Value));
        Address argAddr(masm.getStackPointer(), offset);

        // guardTypeSet (via guardObjectType) will zero the stack pointer
        // register on speculative paths.
        Register spectreRegToZero = masm.getStackPointer();
        masm.guardTypeSet(argAddr, types, BarrierKind::TypeSet, temp1, temp2,
                          spectreRegToZero, &miss);
#else
        // On ARM64, the stack pointer situation is more complicated. When we
        // enable Ion, we should figure out how to mitigate Spectre there.
        mozilla::Unused << temp1;
        mozilla::Unused << temp2;
        MOZ_CRASH("NYI");
#endif
    }

    if (miss.used()) {
        if (assert) {
#ifdef DEBUG
            Label success;
            masm.jump(&success);
            masm.bind(&miss);

            // Check for cases where the type set guard might have missed due to
            // changing object groups.
            for (uint32_t i = info.startArgSlot(); i < info.endArgSlot(); i++) {
                MParameter* param = rp->getOperand(i)->toParameter();
                const TemporaryTypeSet* types = param->resultTypeSet();
                if (!types || types->unknown())
                    continue;

                Label skip;
                Address addr(masm.getStackPointer(), ArgToStackOffset((i - info.startArgSlot()) * sizeof(Value)));
                masm.branchTestObject(Assembler::NotEqual, addr, &skip);
                Register obj = masm.extractObject(addr, temp1);
                masm.guardTypeSetMightBeIncomplete(types, obj, temp1, &success);
                masm.bind(&skip);
            }

            masm.assumeUnreachable("Argument check fail.");
            masm.bind(&success);
#else
            MOZ_CRASH("Shouldn't get here in opt builds");
#endif
        } else {
            bailoutFrom(&miss, graph.entrySnapshot());
        }
    }
}

// Out-of-line path to report over-recursed error and fail.
class CheckOverRecursedFailure : public OutOfLineCodeBase<CodeGenerator>
{
    LInstruction* lir_;

  public:
    explicit CheckOverRecursedFailure(LInstruction* lir)
      : lir_(lir)
    { }

    void accept(CodeGenerator* codegen) override {
        codegen->visitCheckOverRecursedFailure(this);
    }

    LInstruction* lir() const {
        return lir_;
    }
};

void
CodeGenerator::visitCheckOverRecursed(LCheckOverRecursed* lir)
{
    // If we don't push anything on the stack, skip the check.
    if (omitOverRecursedCheck())
        return;

    // Ensure that this frame will not cross the stack limit.
    // This is a weak check, justified by Ion using the C stack: we must always
    // be some distance away from the actual limit, since if the limit is
    // crossed, an error must be thrown, which requires more frames.
    //
    // It must always be possible to trespass past the stack limit.
    // Ion may legally place frames very close to the limit. Calling additional
    // C functions may then violate the limit without any checking.
    //
    // Since Ion frames exist on the C stack, the stack limit may be
    // dynamically set by JS_SetThreadStackLimit() and JS_SetNativeStackQuota().

    CheckOverRecursedFailure* ool = new(alloc()) CheckOverRecursedFailure(lir);
    addOutOfLineCode(ool, lir->mir());

    // Conditional forward (unlikely) branch to failure.
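    // The stack pointer is the right-hand side of the comparison below, so we
    // jump to the out-of-line path when limit >= sp, i.e. when this frame has
    // grown down past the jit stack limit.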
    const void* limitAddr = gen->runtime->addressOfJitStackLimit();
    masm.branchStackPtrRhs(Assembler::AboveOrEqual, AbsoluteAddress(limitAddr), ool->entry());
    masm.bind(ool->rejoin());
}

typedef bool (*DefVarFn)(JSContext*, HandlePropertyName, unsigned, HandleObject);
static const VMFunction DefVarInfo = FunctionInfo<DefVarFn>(DefVar, "DefVar");

void
CodeGenerator::visitDefVar(LDefVar* lir)
{
    Register envChain = ToRegister(lir->environmentChain());

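    // Arguments to the VM call are pushed in reverse order of the DefVarFn
    // signature (last C++ argument first); the JSContext is supplied by callVM.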
    pushArg(envChain); // JSObject*
    pushArg(Imm32(lir->mir()->attrs())); // unsigned
    pushArg(ImmGCPtr(lir->mir()->name())); // PropertyName*

    callVM(DefVarInfo, lir);
}

typedef bool (*DefLexicalFn)(JSContext*, HandlePropertyName, unsigned);
static const VMFunction DefLexicalInfo =
    FunctionInfo<DefLexicalFn>(DefGlobalLexical, "DefGlobalLexical");

void
CodeGenerator::visitDefLexical(LDefLexical* lir)
{
    pushArg(Imm32(lir->mir()->attrs())); // unsigned
    pushArg(ImmGCPtr(lir->mir()->name())); // PropertyName*

    callVM(DefLexicalInfo, lir);
}

typedef bool (*DefFunOperationFn)(JSContext*, HandleScript, HandleObject, HandleFunction);
static const VMFunction DefFunOperationInfo =
    FunctionInfo<DefFunOperationFn>(DefFunOperation, "DefFunOperation");

void
CodeGenerator::visitDefFun(LDefFun* lir)
{
    Register envChain = ToRegister(lir->environmentChain());

    Register fun = ToRegister(lir->fun());
    pushArg(fun);
    pushArg(envChain);
    pushArg(ImmGCPtr(current->mir()->info().script()));

    callVM(DefFunOperationInfo, lir);
}

typedef bool (*CheckOverRecursedFn)(JSContext*);
static const VM