Bug 1534840 part 2 - Prevent ARM64 from generating constant pools within jump tables. r=sstangl
author: Nicolas B. Pierron <nicolas.b.pierron@nbp.name>
Tue, 16 Apr 2019 13:56:50 +0000
changeset 469671 e024cb135284b83edb799f6f8cf84a4f054d7e35
parent 469670 4f54d68ba18e0e74beac705ae76c7f9b7263f994
child 469672 e55ace0633daf9eb7bd5f260300fdbd770834bb3
push id: 35879
push user: nerli@mozilla.com
push date: Tue, 16 Apr 2019 22:01:48 +0000
treeherder: mozilla-central@12a60898fdc1 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: sstangl
bugs: 1534840
milestone: 68.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1534840 part 2 - Prevent ARM64 from generating constant pools within jump tables. r=sstangl Depends on D26521 Differential Revision: https://phabricator.services.mozilla.com/D26522
js/src/jit/CodeGenerator.cpp
js/src/jit/arm64/Assembler-arm64.h
js/src/jit/arm64/CodeGenerator-arm64.cpp
js/src/jit/arm64/MacroAssembler-arm64-inl.h
js/src/jit/arm64/MacroAssembler-arm64.cpp
js/src/wasm/WasmBaselineCompile.cpp
js/src/wasm/WasmFrameIter.cpp
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -12050,28 +12050,32 @@ class OutOfLineSwitch : public OutOfLine
   void setOutOfLine() { isOutOfLine_ = true; }
 };
 
 template <SwitchTableType tableType>
 void CodeGenerator::visitOutOfLineSwitch(
     OutOfLineSwitch<tableType>* jumpTable) {
   jumpTable->setOutOfLine();
   auto& labels = jumpTable->labels();
-#if defined(JS_CODEGEN_ARM64)
-  AutoForbidPools afp(
-      &masm, (labels.length() + 1) * (sizeof(void*) / vixl::kInstructionSize));
-#endif
 
   if (tableType == SwitchTableType::OutOfLine) {
 #if defined(JS_CODEGEN_ARM)
     MOZ_CRASH("NYI: SwitchTableType::OutOfLine");
 #elif defined(JS_CODEGEN_NONE)
     MOZ_CRASH();
 #else
+
+#  if defined(JS_CODEGEN_ARM64)
+    AutoForbidPoolsAndNops afp(
+        &masm,
+        (labels.length() + 1) * (sizeof(void*) / vixl::kInstructionSize));
+#  endif
+
     masm.haltingAlign(sizeof(void*));
+
     // Bind the address of the jump table and reserve the space for code
     // pointers to jump in the newly generated code.
     masm.bind(jumpTable->start());
     masm.addCodeLabel(*jumpTable->start());
     for (size_t i = 0, e = labels.length(); i < e; i++) {
       jumpTable->addTableEntry(masm);
     }
 #endif
--- a/js/src/jit/arm64/Assembler-arm64.h
+++ b/js/src/jit/arm64/Assembler-arm64.h
@@ -375,16 +375,17 @@ class Assembler : public vixl::Assembler
     Instruction* getLdr() { return reinterpret_cast<Instruction*>(&ldr); }
   };
 
   // Offset of the patchable target for the given entry.
   static const size_t OffsetOfJumpTableEntryPointer = 8;
 
  public:
   void writeCodePointer(CodeLabel* label) {
+    armbuffer_.assertNoPoolAndNoNops();
     uintptr_t x = uintptr_t(-1);
     BufferOffset off = EmitData(&x, sizeof(uintptr_t));
     label->patchAt()->bind(off.getOffset());
   }
 
   void verifyHeapAccessDisassembly(uint32_t begin, uint32_t end,
                                    const Disassembler::HeapAccess& heapAccess) {
     MOZ_CRASH("verifyHeapAccessDisassembly");
@@ -518,32 +519,31 @@ static inline bool GetTempRegForIntArg(u
 }
 
 inline Imm32 Imm64::firstHalf() const { return low(); }
 
 inline Imm32 Imm64::secondHalf() const { return hi(); }
 
 void PatchJump(CodeLocationJump& jump_, CodeLocationLabel label);
 
-// Forbids pool generation during a specified interval. Not nestable.
-class AutoForbidPools {
-  Assembler* asm_;
-
- public:
-  AutoForbidPools(Assembler* asm_, size_t maxInst) : asm_(asm_) {
-    asm_->enterNoPool(maxInst);
-  }
-  ~AutoForbidPools() { asm_->leaveNoPool(); }
-};
-
 // Forbids nop filling for testing purposes. Not nestable.
 class AutoForbidNops {
+ protected:
   Assembler* asm_;
 
  public:
   explicit AutoForbidNops(Assembler* asm_) : asm_(asm_) { asm_->enterNoNops(); }
   ~AutoForbidNops() { asm_->leaveNoNops(); }
 };
 
+// Forbids pool generation during a specified interval. Not nestable.
+class AutoForbidPoolsAndNops : public AutoForbidNops {
+ public:
+  AutoForbidPoolsAndNops(Assembler* asm_, size_t maxInst) : AutoForbidNops(asm_) {
+    asm_->enterNoPool(maxInst);
+  }
+  ~AutoForbidPoolsAndNops() { asm_->leaveNoPool(); }
+};
+
 }  // namespace jit
 }  // namespace js
 
 #endif  // A64_ASSEMBLER_A64_H_
--- a/js/src/jit/arm64/CodeGenerator-arm64.cpp
+++ b/js/src/jit/arm64/CodeGenerator-arm64.cpp
@@ -806,17 +806,18 @@ class js::jit::OutOfLineTableSwitch
   MTableSwitch* mir() const { return mir_; }
 
   CodeLabel* jumpLabel() { return &jumpLabel_; }
 };
 
 void CodeGeneratorARM64::visitOutOfLineTableSwitch(OutOfLineTableSwitch* ool) {
   MTableSwitch* mir = ool->mir();
 
-  AutoForbidPools afp(
+  // Prevent nop and pools sequences to appear in the jump table.
+  AutoForbidPoolsAndNops afp(
       &masm, (mir->numCases() + 1) * (sizeof(void*) / vixl::kInstructionSize));
   masm.haltingAlign(sizeof(void*));
   masm.bind(ool->jumpLabel());
   masm.addCodeLabel(*ool->jumpLabel());
 
   for (size_t i = 0; i < mir->numCases(); i++) {
     LBlock* caseblock = skipTrivialBlocks(mir->getCase(i))->lir();
     Label* caseheader = caseblock->label();
--- a/js/src/jit/arm64/MacroAssembler-arm64-inl.h
+++ b/js/src/jit/arm64/MacroAssembler-arm64-inl.h
@@ -271,17 +271,17 @@ void MacroAssembler::add64(Imm32 imm, Re
 
 void MacroAssembler::add64(Imm64 imm, Register64 dest) {
   Add(ARMRegister(dest.reg, 64), ARMRegister(dest.reg, 64), Operand(imm.value));
 }
 
 CodeOffset MacroAssembler::sub32FromStackPtrWithPatch(Register dest) {
   vixl::UseScratchRegisterScope temps(this);
   const ARMRegister scratch = temps.AcquireX();
-  AutoForbidPools afp(this, /* max number of instructions in scope = */ 3);
+  AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 3);
   CodeOffset offs = CodeOffset(currentOffset());
   movz(scratch, 0, 0);
   movk(scratch, 0, 16);
   Sub(ARMRegister(dest, 64), sp, scratch);
   return offs;
 }
 
 void MacroAssembler::patchSub32FromStackPtr(CodeOffset offset, Imm32 imm) {
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -304,22 +304,22 @@ void MacroAssemblerCompat::wasmLoadImpl(
   ARMRegister ptr(ptr_, 64);
   if (offset) {
     Add(ptr, ptr, Operand(offset));
   }
 
   asMasm().memoryBarrierBefore(access.sync());
 
   // Reg+Reg addressing is directly encodable in one Load instruction, hence
-  // the AutoForbidPools will ensure that the access metadata is emitted at
+  // the AutoForbidPoolsAndNops will ensure that the access metadata is emitted at
   // the address of the Load.
   MemOperand srcAddr(memoryBase, ptr);
 
   {
-    AutoForbidPools afp(this, /* max number of instructions in scope = */ 1);
+    AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 1);
     append(access, asMasm().currentOffset());
     switch (access.type()) {
       case Scalar::Int8:
         Ldrsb(SelectGPReg(outany, out64), srcAddr);
         break;
       case Scalar::Uint8:
         Ldrb(SelectGPReg(outany, out64), srcAddr);
         break;
@@ -372,22 +372,22 @@ void MacroAssemblerCompat::wasmStoreImpl
   ARMRegister ptr(ptr_, 64);
   if (offset) {
     Add(ptr, ptr, Operand(offset));
   }
 
   asMasm().memoryBarrierBefore(access.sync());
 
   // Reg+Reg addressing is directly encodable in one Store instruction, hence
-  // the AutoForbidPools will ensure that the access metadata is emitted at
+  // the AutoForbidPoolsAndNops will ensure that the access metadata is emitted at
   // the address of the Store.
   MemOperand dstAddr(memoryBase, ptr);
 
   {
-    AutoForbidPools afp(this, /* max number of instructions in scope = */ 1);
+    AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 1);
     append(access, asMasm().currentOffset());
     switch (access.type()) {
       case Scalar::Int8:
       case Scalar::Uint8:
         Strb(SelectGPReg(valany, val64), dstAddr);
         break;
       case Scalar::Int16:
       case Scalar::Uint16:
@@ -678,17 +678,17 @@ void MacroAssembler::patchCall(uint32_t 
   AutoFlushICache::flush(uintptr_t(inst), 4);
 }
 
 CodeOffset MacroAssembler::farJumpWithPatch() {
   vixl::UseScratchRegisterScope temps(this);
   const ARMRegister scratch = temps.AcquireX();
   const ARMRegister scratch2 = temps.AcquireX();
 
-  AutoForbidPools afp(this, /* max number of instructions in scope = */ 7);
+  AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 7);
 
   mozilla::DebugOnly<uint32_t> before = currentOffset();
 
   align(8);  // At most one nop
 
   Label branch;
   adr(scratch2, &branch);
   ldr(scratch, vixl::MemOperand(scratch2, 4));
@@ -715,17 +715,17 @@ void MacroAssembler::patchFarJump(CodeOf
   MOZ_ASSERT(inst1->InstructionBits() == UINT32_MAX);
   MOZ_ASSERT(inst2->InstructionBits() == UINT32_MAX);
 
   inst1->SetInstructionBits((uint32_t)distance);
   inst2->SetInstructionBits((uint32_t)(distance >> 32));
 }
 
 CodeOffset MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc) {
-  AutoForbidPools afp(this, /* max number of instructions in scope = */ 1);
+  AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 1);
   CodeOffset offset(currentOffset());
   Nop();
   append(desc, CodeOffset(currentOffset()));
   return offset;
 }
 
 void MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target) {
   uint8_t* inst = call - 4;
@@ -1070,17 +1070,17 @@ template void MacroAssembler::storeUnbox
     const BaseObjectElementIndex& dest, MIRType slotType);
 
 void MacroAssembler::comment(const char* msg) { Assembler::comment(msg); }
 
 // ========================================================================
 // wasm support
 
 CodeOffset MacroAssembler::wasmTrapInstruction() {
-  AutoForbidPools afp(this, /* max number of instructions in scope = */ 1);
+  AutoForbidPoolsAndNops afp(this, /* max number of instructions in scope = */ 1);
   CodeOffset offs(currentOffset());
   Unreachable();
   return offs;
 }
 
 void MacroAssembler::wasmBoundsCheck(Condition cond, Register index,
                                      Register boundsCheckLimit, Label* label) {
   // Not used on ARM64, we rely on signal handling instead
@@ -1510,66 +1510,66 @@ static void SignOrZeroExtend(MacroAssemb
 
 static void LoadExclusive(MacroAssembler& masm,
                           const wasm::MemoryAccessDesc* access,
                           Scalar::Type srcType, Width targetWidth,
                           MemOperand ptr, Register dest) {
   bool signExtend = Scalar::isSignedIntType(srcType);
 
   // With this address form, a single native ldxr* will be emitted, and the
-  // AutoForbidPools ensures that the metadata is emitted at the address of
+  // AutoForbidPoolsAndNops ensures that the metadata is emitted at the address of
   // the ldxr*.
   MOZ_ASSERT(ptr.IsImmediateOffset() && ptr.offset() == 0);
 
   switch (Scalar::byteSize(srcType)) {
     case 1: {
       {
-        AutoForbidPools afp(&masm,
+        AutoForbidPoolsAndNops afp(&masm,
                             /* max number of instructions in scope = */ 1);
         if (access) {
           masm.append(*access, masm.currentOffset());
         }
         masm.Ldxrb(W(dest), ptr);
       }
       if (signExtend) {
         masm.Sbfm(R(dest, targetWidth), R(dest, targetWidth), 0, 7);
       }
       break;
     }
     case 2: {
       {
-        AutoForbidPools afp(&masm,
+        AutoForbidPoolsAndNops afp(&masm,
                             /* max number of instructions in scope = */ 1);
         if (access) {
           masm.append(*access, masm.currentOffset());
         }
         masm.Ldxrh(W(dest), ptr);
       }
       if (signExtend) {
         masm.Sbfm(R(dest, targetWidth), R(dest, targetWidth), 0, 15);
       }
       break;
     }
     case 4: {
       {
-        AutoForbidPools afp(&masm,
+        AutoForbidPoolsAndNops afp(&masm,
                             /* max number of instructions in scope = */ 1);
         if (access) {
           masm.append(*access, masm.currentOffset());
         }
         masm.Ldxr(W(dest), ptr);
       }
       if (targetWidth == Width::_64 && signExtend) {
         masm.Sbfm(X(dest), X(dest), 0, 31);
       }
       break;
     }
     case 8: {
       {
-        AutoForbidPools afp(&masm,
+        AutoForbidPoolsAndNops afp(&masm,
                             /* max number of instructions in scope = */ 1);
         if (access) {
           masm.append(*access, masm.currentOffset());
         }
         masm.Ldxr(X(dest), ptr);
       }
       break;
     }
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -4723,17 +4723,17 @@ class BaseCompiler final : public BaseCo
 
     masm.ma_li(scratch, &tableCl);
 
     tableCl.target()->bind(theTable->offset());
     masm.addCodeLabel(tableCl);
 
     masm.branchToComputedAddress(BaseIndex(scratch, switchValue, ScalePointer));
 #elif defined(JS_CODEGEN_ARM64)
-    AutoForbidPools afp(&masm, /* number of instructions in scope = */ 4);
+    AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 4);
 
     ScratchI32 scratch(*this);
 
     ARMRegister s(scratch, 64);
     ARMRegister v(switchValue, 64);
     masm.Adr(s, theTable);
     masm.Add(s, s, Operand(v, vixl::LSL, 3));
     masm.Ldr(s, MemOperand(s, 0));
--- a/js/src/wasm/WasmFrameIter.cpp
+++ b/js/src/wasm/WasmFrameIter.cpp
@@ -434,17 +434,17 @@ static void GenerateCallablePrologue(Mac
     masm.moveStackPtrTo(FramePointer);
     MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
   }
 #elif defined(JS_CODEGEN_ARM64)
   {
     // We do not use the PseudoStackPointer.
     MOZ_ASSERT(masm.GetStackPointer64().code() == sp.code());
 
-    AutoForbidPools afp(&masm, /* number of instructions in scope = */ 5);
+    AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 5);
 
     *entry = masm.currentOffset();
 
     masm.Sub(sp, sp, sizeof(Frame));
     masm.Str(ARMRegister(lr, 64),
              MemOperand(sp, offsetof(Frame, returnAddress)));
     MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
     masm.Str(ARMRegister(WasmTlsReg, 64), MemOperand(sp, offsetof(Frame, tls)));
@@ -505,17 +505,17 @@ static void GenerateCallableEpilogue(Mac
   masm.as_jr(ra);
   masm.addToStackPtr(Imm32(sizeof(Frame)));
 
 #elif defined(JS_CODEGEN_ARM64)
 
   // We do not use the PseudoStackPointer.
   MOZ_ASSERT(masm.GetStackPointer64().code() == sp.code());
 
-  AutoForbidPools afp(&masm, /* number of instructions in scope = */ 5);
+  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 5);
 
   masm.Ldr(ARMRegister(FramePointer, 64),
            MemOperand(sp, offsetof(Frame, callerFP)));
   poppedFP = masm.currentOffset();
 
   masm.Ldr(ARMRegister(WasmTlsReg, 64), MemOperand(sp, offsetof(Frame, tls)));
   poppedTlsReg = masm.currentOffset();