OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 328 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
339 | 339 |
340 | 340 |
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
342 __ mov(r2, Operand(profiling_counter_)); | 342 __ mov(r2, Operand(profiling_counter_)); |
343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); | 344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); |
345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
346 } | 346 } |
347 | 347 |
348 | 348 |
349 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize; | |
350 | |
351 | |
349 void FullCodeGenerator::EmitProfilingCounterReset() { | 352 void FullCodeGenerator::EmitProfilingCounterReset() { |
353 Assembler::BlockConstPoolScope block_const_pool(masm_); | |
354 PredictableCodeSizeScope predictable_code_size_scope( | |
355 masm_, kProfileCounterResetSequenceLength); | |
356 Label start; | |
357 __ bind(&start); | |
350 int reset_value = FLAG_interrupt_budget; | 358 int reset_value = FLAG_interrupt_budget; |
351 if (info_->is_debug()) { | 359 if (info_->is_debug()) { |
352 // Detect debug break requests as soon as possible. | 360 // Detect debug break requests as soon as possible. |
353 reset_value = FLAG_interrupt_budget >> 4; | 361 reset_value = FLAG_interrupt_budget >> 4; |
354 } | 362 } |
355 __ mov(r2, Operand(profiling_counter_)); | 363 __ mov(r2, Operand(profiling_counter_)); |
364 // The mov instruction above can be either 1, 2 or 3 instructions depending | |
365 // upon whether it is an extended constant pool - insert nop to compensate. | |
366 ASSERT(masm_->InstructionsGeneratedSince(&start) <= 3); | |
367 while (masm_->InstructionsGeneratedSince(&start) != 3) { | |
368 __ nop(); | |
369 } | |
356 __ mov(r3, Operand(Smi::FromInt(reset_value))); | 370 __ mov(r3, Operand(Smi::FromInt(reset_value))); |
357 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 371 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
358 } | 372 } |
359 | 373 |
360 | 374 |
361 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 375 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
362 Label* back_edge_target) { | 376 Label* back_edge_target) { |
363 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 377 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
364 // Block literal pools whilst emitting back edge code. | 378 // Block literal pools whilst emitting back edge code. |
365 Assembler::BlockConstPoolScope block_const_pool(masm_); | 379 Assembler::BlockConstPoolScope block_const_pool(masm_); |
(...skipping 4380 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4746 } | 4760 } |
4747 | 4761 |
4748 | 4762 |
4749 #undef __ | 4763 #undef __ |
4750 | 4764 |
4751 | 4765 |
4752 static Address GetInterruptImmediateLoadAddress(Address pc) { | 4766 static Address GetInterruptImmediateLoadAddress(Address pc) { |
4753 Address load_address = pc - 2 * Assembler::kInstrSize; | 4767 Address load_address = pc - 2 * Assembler::kInstrSize; |
4754 if (!FLAG_enable_ool_constant_pool) { | 4768 if (!FLAG_enable_ool_constant_pool) { |
4755 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); | 4769 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); |
4770 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { | |
4771 // This is an extended constant pool lookup. | |
4772 load_address -= 2 * Assembler::kInstrSize; | |
4773 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); | |
4774 ASSERT(Assembler::IsMovT( | |
4775 Memory::int32_at(load_address + Assembler::kInstrSize))); | |
4756 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { | 4776 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { |
4777 // This is a movw_movt immediate load. | |
4757 load_address -= Assembler::kInstrSize; | 4778 load_address -= Assembler::kInstrSize; |
4758 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); | 4779 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); |
4759 } else { | 4780 } else { |
4781 // This is a small constant pool lookup. | |
4760 ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); | 4782 ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); |
4761 } | 4783 } |
4762 return load_address; | 4784 return load_address; |
4763 } | 4785 } |
4764 | 4786 |
4765 | 4787 |
4766 void BackEdgeTable::PatchAt(Code* unoptimized_code, | 4788 void BackEdgeTable::PatchAt(Code* unoptimized_code, |
4767 Address pc, | 4789 Address pc, |
4768 BackEdgeState target_state, | 4790 BackEdgeState target_state, |
4769 Code* replacement_code) { | 4791 Code* replacement_code) { |
4770 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); | 4792 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); |
4771 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; | 4793 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; |
4772 CodePatcher patcher(branch_address, 1); | 4794 CodePatcher patcher(branch_address, 1); |
4773 switch (target_state) { | 4795 switch (target_state) { |
4774 case INTERRUPT: | 4796 case INTERRUPT: |
4775 { | 4797 { |
4776 // <decrement profiling counter> | 4798 // <decrement profiling counter> |
4777 // bpl ok | 4799 // bpl ok |
4778 // ; load interrupt stub address into ip - either of: | 4800 // ; load interrupt stub address into ip - either of: |
4779 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> | 4801 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> |
ulan
2014/07/01 13:29:47
This comment suggests only two cases, but there are three.
rmcilroy
2014/07/02 16:35:54
Done.
| |
4780 // | movt ip, <immed high> | 4802 // | movt ip, <immed high> |
4803 // | ldr ip, [pp, ip] | |
4781 // blx ip | 4804 // blx ip |
4805 // <reset profiling counter> | |
4782 // ok-label | 4806 // ok-label |
4783 | 4807 |
4784 // Calculate branch offet to the ok-label - this is the difference between | 4808 // Calculate branch offset to the ok-label - this is the difference |
4785 // the branch address and |pc| (which points at <blx ip>) plus one instr. | 4809 // between the branch address and |pc| (which points at <blx ip>) plus |
4786 int branch_offset = pc + Assembler::kInstrSize - branch_address; | 4810 // kProfileCounterResetSequence instructions |
4811 int branch_offset = pc - Instruction::kPCReadOffset - branch_address + | |
ulan
2014/07/01 13:29:48
Where does Instruction::kPCReadOffset come from?
rmcilroy
2014/07/02 16:35:54
If you look at Assembler::branch_offset() it applies Instruction::kPCReadOffset to account for the ARM PC read-ahead when computing branch offsets.
ulan
2014/07/03 09:19:55
Thanks, now I got it. Nice catch!
| |
4812 kProfileCounterResetSequenceLength; | |
4787 patcher.masm()->b(branch_offset, pl); | 4813 patcher.masm()->b(branch_offset, pl); |
4788 break; | 4814 break; |
4789 } | 4815 } |
4790 case ON_STACK_REPLACEMENT: | 4816 case ON_STACK_REPLACEMENT: |
4791 case OSR_AFTER_STACK_CHECK: | 4817 case OSR_AFTER_STACK_CHECK: |
4792 // <decrement profiling counter> | 4818 // <decrement profiling counter> |
4793 // mov r0, r0 (NOP) | 4819 // mov r0, r0 (NOP) |
4794 // ; load on-stack replacement address into ip - either of: | 4820 // ; load on-stack replacement address into ip - either of: |
4795 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> | 4821 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> |
4796 // | movt ip, <immed high> | 4822 // | movt ip, <immed high> |
ulan
2014/07/01 13:29:47
Update the comment?
rmcilroy
2014/07/02 16:35:54
Done.
| |
4797 // blx ip | 4823 // blx ip |
4824 // <reset profiling counter> | |
4798 // ok-label | 4825 // ok-label |
4799 patcher.masm()->nop(); | 4826 patcher.masm()->nop(); |
4800 break; | 4827 break; |
4801 } | 4828 } |
4802 | 4829 |
4803 // Replace the call address. | 4830 // Replace the call address. |
4804 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, | 4831 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, |
4805 replacement_code->entry()); | 4832 replacement_code->entry()); |
4806 | 4833 |
4807 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4834 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
(...skipping 27 matching lines...) Expand all Loading... | |
4835 | 4862 |
4836 ASSERT(interrupt_address == | 4863 ASSERT(interrupt_address == |
4837 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4864 isolate->builtins()->OsrAfterStackCheck()->entry()); |
4838 return OSR_AFTER_STACK_CHECK; | 4865 return OSR_AFTER_STACK_CHECK; |
4839 } | 4866 } |
4840 | 4867 |
4841 | 4868 |
4842 } } // namespace v8::internal | 4869 } } // namespace v8::internal |
4843 | 4870 |
4844 #endif // V8_TARGET_ARCH_ARM | 4871 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |