Index: src/mips/full-codegen-mips.cc
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index b042a3eca36cdf64d27c538519e300951852944e..ad2a5c75e2a7bebd4a0ad9bcbb54606fc0e3c701 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -62,9 +62,11 @@ static unsigned GetPropertyId(Property* property) {
 // A patch site is a location in the code which it is possible to patch. This
 // class has a number of methods to emit the code which is patchable and the
 // method EmitPatchInfo to record a marker back to the patchable code. This
-// marker is a andi at, rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16
-// bit immediate value is used) is the delta from the pc to the first
+// marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
+// (raw 16 bit immediate value is used) is the delta from the pc to the first
 // instruction of the patchable code.
+// The marker instruction is effectively a NOP (dest is zero_reg) and will
+// never be emitted by normal code.
 class JumpPatchSite BASE_EMBEDDED {
  public:
   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
@@ -103,7 +105,7 @@ class JumpPatchSite BASE_EMBEDDED {
     if (patch_site_.is_bound()) {
       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
       Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
-      __ andi(at, reg, delta_to_patch_site % kImm16Mask);
+      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
 #ifdef DEBUG
       info_emitted_ = true;
 #endif
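
A note on the marker encoding above (this note is not part of the patch): the rx register code and the raw 16-bit immediate of the "andi zero_reg, rx, #yyyy" marker together carry the delta recorded by EmitPatchInfo, with kImm16Mask (0xffff in V8's MIPS constants) as the base, matching the "rx * 0x0000ffff + yyyy" formula in the comment. Below is a minimal standalone sketch of that round trip; the helper names and the driver are illustrative only and do not appear in V8.

// Standalone sketch: how EmitPatchInfo splits the delta across the marker
// instruction and how the patching side can reconstruct it.  Helper names
// are hypothetical; only the arithmetic mirrors the code in the hunk above.
#include <cassert>

static const int kImm16Mask = 0xffff;  // same value as V8's MIPS kImm16Mask

// Encoding (EmitPatchInfo): the quotient selects the rx register code, the
// remainder becomes the raw immediate #yyyy of "andi zero_reg, rx, #yyyy".
static void EncodeDelta(int delta, int* rx_code, int* imm16) {
  *rx_code = delta / kImm16Mask;
  *imm16 = delta % kImm16Mask;
}

// Decoding (the patching side): delta = rx_code * 0x0000ffff + yyyy.
static int DecodeDelta(int rx_code, int imm16) {
  return rx_code * kImm16Mask + imm16;
}

int main() {
  for (int delta = 0; delta < 32 * kImm16Mask; delta += 13) {
    int rx_code = 0, imm16 = 0;
    EncodeDelta(delta, &rx_code, &imm16);
    assert(DecodeDelta(rx_code, imm16) == delta);
  }
  return 0;
}
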
@@ -310,17 +312,25 @@ void FullCodeGenerator::ClearAccumulator() {
 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
+  // to make sure it is constant. Branch may emit a skip-or-jump sequence
+  // instead of the normal Branch. It seems that the "skip" part of that
+  // sequence is about as long as this Branch would be so it is safe to ignore
+  // that.
+  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
   Comment cmnt(masm_, "[ Stack check");
   Label ok;
   __ LoadRoot(t0, Heap::kStackLimitRootIndex);
-  __ Branch(&ok, hs, sp, Operand(t0));
+  __ sltu(at, sp, t0);
+  __ beq(at, zero_reg, &ok);
+  // CallStub will emit a li t9, ... first, so it is safe to use the delay slot.
   StackCheckStub stub;
+  __ CallStub(&stub);
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
   // the deoptimization input data found in the optimized code.
   RecordStackCheck(stmt->OsrEntryId());
-  __ CallStub(&stub);
   __ bind(&ok);
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   // Record a mapping of the OSR id to this PC. This is used if the OSR
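
For context on the hunk above (this note is not part of the patch): Deoptimizer::PatchStackCheckCodeAt later rewrites the call emitted here, so the sequence must come out at a fixed size and shape; that is why the trampoline pool is blocked and the Branch macro is replaced with an explicit sltu/beq pair. The listing below is only an approximate sketch of the resulting MIPS code, reconstructed from the macro-assembler calls rather than taken from a disassembly; the expansion of CallStub into a two-instruction li t9 followed by jalr, and the trailing nop, are assumptions based on the "li t9, ..." comment in the patch.

  lw    t0, <stack limit slot>(roots)  # LoadRoot(t0, Heap::kStackLimitRootIndex)
  sltu  at, sp, t0                     # at = 1 if sp is below the stack limit
  beq   at, zero_reg, ok               # limit not reached: skip the stub call
  lui   t9, <stub address hi>          # delay slot: first instruction of CallStub's
  ori   t9, t9, <stub address lo>      #   li t9, ...; clobbering t9 is harmless
  jalr  t9                             # call the StackCheckStub
  nop                                  # jalr delay slot (assumed)
ok:

Note also that RecordStackCheck(stmt->OsrEntryId()) now runs after __ CallStub(&stub), so the recorded offset points just past the call, which appears to be the pc the patching code keys off.
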