Index: src/arm/full-codegen-arm.cc
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index f1e6380a5cf55eaf2668632eabfecb54496d6999..302055173b700f724020f972ae14a81dd2bee90d 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -346,13 +346,27 @@ void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
 }
+static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
+
+
 void FullCodeGenerator::EmitProfilingCounterReset() {
+  Assembler::BlockConstPoolScope block_const_pool(masm_);
+  PredictableCodeSizeScope predictable_code_size_scope(
+      masm_, kProfileCounterResetSequenceLength);
+  Label start;
+  __ bind(&start);
   int reset_value = FLAG_interrupt_budget;
   if (info_->is_debug()) {
     // Detect debug break requests as soon as possible.
     reset_value = FLAG_interrupt_budget >> 4;
   }
   __ mov(r2, Operand(profiling_counter_));
+  // The mov instruction above can take 1, 2 or 3 instructions depending on
+  // whether an extended constant pool is used - insert nops to compensate.
+  ASSERT(masm_->InstructionsGeneratedSince(&start) <= 3);
+  while (masm_->InstructionsGeneratedSince(&start) != 3) {
+    __ nop();
+  }
   __ mov(r3, Operand(Smi::FromInt(reset_value)));
   __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
 }
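Note on the padding above: since the mov of profiling_counter_ can expand to one, two or three instructions, the nop loop pins the front of the sequence at exactly three instructions, so the whole reset sequence (including the following mov/str pair) always spans kProfileCounterResetSequenceLength bytes. A minimal standalone sketch of that pad-to-fixed-length idea (not V8 code; the NOP encoding and the pretend load word are purely illustrative):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main() {
      const size_t kPrefixInstrs = 3;            // the "<= 3" bound asserted above
      const uint32_t kArmNop = 0xE320F000;       // ARM NOP encoding, for illustration
      std::vector<uint32_t> seq = {0xE59F2000};  // pretend the mov took one instruction
      assert(seq.size() <= kPrefixInstrs);
      while (seq.size() != kPrefixInstrs) {
        seq.push_back(kArmNop);                  // pad, mirroring the while loop above
      }
      // The mov/str pair then follows, for a fixed total of five instructions.
      return seq.size() == kPrefixInstrs ? 0 : 1;
    }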
@@ -4753,10 +4767,18 @@ static Address GetInterruptImmediateLoadAddress(Address pc) {
   Address load_address = pc - 2 * Assembler::kInstrSize;
   if (!FLAG_enable_ool_constant_pool) {
     ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
+  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
+    // This is an extended constant pool lookup.
+    load_address -= 2 * Assembler::kInstrSize;
+    ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
+    ASSERT(Assembler::IsMovT(
+        Memory::int32_at(load_address + Assembler::kInstrSize)));
   } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
+    // This is a movw_movt immediate load.
     load_address -= Assembler::kInstrSize;
     ASSERT(Assembler::IsMovW(Memory::int32_at(load_address)));
   } else {
+    // This is a small constant pool lookup.
     ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
   }
   return load_address;
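For orientation, the three branches above walk back different distances from |pc| to the first instruction of the ip-load sequence: two instructions for a single ldr from the constant pool (pc- or pp-relative), three for a movw/movt pair, and four for movw/movt followed by ldr ip, [pp, ip]. A small sketch of just that arithmetic (not V8 code; the enum and function names are illustrative):

    #include <cassert>

    enum LoadShape { kLdrImmediateOffset, kMovwMovt, kMovwMovtLdrPp };

    // Mirrors the arithmetic of GetInterruptImmediateLoadAddress: how many
    // bytes before |pc| the first instruction of the ip-load sits for each
    // of the three shapes handled above.
    int LoadStartDelta(LoadShape shape) {
      const int kInstrSize = 4;  // Assembler::kInstrSize on ARM
      switch (shape) {
        case kLdrImmediateOffset: return 2 * kInstrSize;  // ldr ip, [pc/pp, #offset]
        case kMovwMovt:           return 3 * kInstrSize;  // movw ip, ... ; movt ip, ...
        case kMovwMovtLdrPp:      return 4 * kInstrSize;  // movw ; movt ; ldr ip, [pp, ip]
      }
      return 0;
    }

    int main() {
      assert(LoadStartDelta(kLdrImmediateOffset) == 8);
      assert(LoadStartDelta(kMovwMovt) == 12);
      assert(LoadStartDelta(kMovwMovtLdrPp) == 16);
      return 0;
    }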
@@ -4778,12 +4800,16 @@ void BackEdgeTable::PatchAt(Code* unoptimized_code,
   //  ; load interrupt stub address into ip - either of:
   //  ldr ip, [pc/pp, <constant pool offset>]  |   movw ip, <immed low>
ulan 2014/07/01 13:29:47: This comment suggests only two cases, but there are three now.
rmcilroy 2014/07/02 16:35:54: Done.
   //                                           |   movt ip, <immed high>
+  //                                           |   ldr ip, [pp, ip]
   //  blx ip
+  //  <reset profiling counter>
   //  ok-label
-  // Calculate branch offet to the ok-label - this is the difference between
-  // the branch address and |pc| (which points at <blx ip>) plus one instr.
-  int branch_offset = pc + Assembler::kInstrSize - branch_address;
+  // Calculate branch offset to the ok-label - this is the difference
+  // between the branch address and |pc| (which points at <blx ip>) plus
+  // kProfileCounterResetSequence instructions.
+  int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
ulan 2014/07/01 13:29:48: Where does Instruction::kPCReadOffset come from?
rmcilroy 2014/07/02 16:35:54: If you look at Assembler::branch_offset() it applies Instruction::kPCReadOffset to the offset as well.
ulan 2014/07/03 09:19:55: Thanks, now I got it. Nice catch!
+                          kProfileCounterResetSequenceLength;
   patcher.masm()->b(branch_offset, pl);
   break;
 }
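Background on the Instruction::kPCReadOffset term discussed in the review exchange above: on ARM, reading the PC yields the address of the current instruction plus 8, so a branch at address A with encoded byte offset O lands at A + 8 + O. Passing a raw offset to b() (rather than a Label, where Assembler::branch_offset() makes this adjustment) therefore has to subtract the read-ahead itself, which appears to be what the kPCReadOffset term in the patched line does. A tiny sketch of the arithmetic (not V8 code; the addresses are hypothetical):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int32_t kPCReadOffset = 8;   // ARM: PC reads as current instruction + 8
      uint32_t branch_address = 0x8000;  // hypothetical address of the patched b instruction
      uint32_t ok_label = 0x8028;        // hypothetical target (the ok-label)
      // Offset handed to the encoder so that the hardware lands on ok_label:
      int32_t raw_offset =
          static_cast<int32_t>(ok_label - branch_address) - kPCReadOffset;
      // The hardware computes: branch_address + 8 + raw_offset.
      assert(branch_address + kPCReadOffset + raw_offset == ok_label);
      return 0;
    }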
@@ -4795,6 +4821,7 @@ void BackEdgeTable::PatchAt(Code* unoptimized_code,
   //  ldr ip, [pc/pp, <constant pool offset>]  |   movw ip, <immed low>
   //                                           |   movt ip, <immed high>
ulan 2014/07/01 13:29:47: Update the comment?
rmcilroy 2014/07/02 16:35:54: Done.
   //  blx ip
+  //  <reset profiling counter>
   //  ok-label
   patcher.masm()->nop();
   break;