OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 328 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
339 | 339 |
340 | 340 |
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
342 __ mov(r2, Operand(profiling_counter_)); | 342 __ mov(r2, Operand(profiling_counter_)); |
343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); | 344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); |
345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
346 } | 346 } |
347 | 347 |
348 | 348 |
| 349 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize; |
| 350 |
| 351 |
349 void FullCodeGenerator::EmitProfilingCounterReset() { | 352 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 353 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 354 PredictableCodeSizeScope predictable_code_size_scope( |
| 355 masm_, kProfileCounterResetSequenceLength); |
| 356 Label start; |
| 357 __ bind(&start); |
350 int reset_value = FLAG_interrupt_budget; | 358 int reset_value = FLAG_interrupt_budget; |
351 if (info_->is_debug()) { | 359 if (info_->is_debug()) { |
352 // Detect debug break requests as soon as possible. | 360 // Detect debug break requests as soon as possible. |
353 reset_value = FLAG_interrupt_budget >> 4; | 361 reset_value = FLAG_interrupt_budget >> 4; |
354 } | 362 } |
355 __ mov(r2, Operand(profiling_counter_)); | 363 __ mov(r2, Operand(profiling_counter_)); |
| 364 // The mov instruction above can be either 1, 2 or 3 instructions depending |
| 365 // upon whether it is an extended constant pool - insert nop to compensate. |
| 366 ASSERT(masm_->InstructionsGeneratedSince(&start) <= 3); |
| 367 while (masm_->InstructionsGeneratedSince(&start) != 3) { |
| 368 __ nop(); |
| 369 } |
356 __ mov(r3, Operand(Smi::FromInt(reset_value))); | 370 __ mov(r3, Operand(Smi::FromInt(reset_value))); |
357 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 371 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
358 } | 372 } |
359 | 373 |
360 | 374 |
361 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 375 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
362 Label* back_edge_target) { | 376 Label* back_edge_target) { |
363 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 377 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
364 // Block literal pools whilst emitting back edge code. | 378 // Block literal pools whilst emitting back edge code. |
365 Assembler::BlockConstPoolScope block_const_pool(masm_); | 379 Assembler::BlockConstPoolScope block_const_pool(masm_); |
(...skipping 4374 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4740 } | 4754 } |
4741 | 4755 |
4742 | 4756 |
4743 #undef __ | 4757 #undef __ |
4744 | 4758 |
4745 | 4759 |
4746 static Address GetInterruptImmediateLoadAddress(Address pc) { | 4760 static Address GetInterruptImmediateLoadAddress(Address pc) { |
4747 Address load_address = pc - 2 * Assembler::kInstrSize; | 4761 Address load_address = pc - 2 * Assembler::kInstrSize; |
4748 if (!FLAG_enable_ool_constant_pool) { | 4762 if (!FLAG_enable_ool_constant_pool) { |
4749 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); | 4763 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); |
| 4764 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { |
| 4765 // This is an extended constant pool lookup. |
| 4766 load_address -= 2 * Assembler::kInstrSize; |
| 4767 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); |
| 4768 ASSERT(Assembler::IsMovT( |
| 4769 Memory::int32_at(load_address + Assembler::kInstrSize))); |
4750 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { | 4770 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { |
| 4771 // This is a movw_movt immediate load. |
4751 load_address -= Assembler::kInstrSize; | 4772 load_address -= Assembler::kInstrSize; |
4752 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); | 4773 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); |
4753 } else { | 4774 } else { |
| 4775 // This is a small constant pool lookup. |
4754 ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); | 4776 ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); |
4755 } | 4777 } |
4756 return load_address; | 4778 return load_address; |
4757 } | 4779 } |
4758 | 4780 |
4759 | 4781 |
4760 void BackEdgeTable::PatchAt(Code* unoptimized_code, | 4782 void BackEdgeTable::PatchAt(Code* unoptimized_code, |
4761 Address pc, | 4783 Address pc, |
4762 BackEdgeState target_state, | 4784 BackEdgeState target_state, |
4763 Code* replacement_code) { | 4785 Code* replacement_code) { |
4764 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); | 4786 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); |
4765 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; | 4787 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; |
4766 CodePatcher patcher(branch_address, 1); | 4788 CodePatcher patcher(branch_address, 1); |
4767 switch (target_state) { | 4789 switch (target_state) { |
4768 case INTERRUPT: | 4790 case INTERRUPT: |
4769 { | 4791 { |
4770 // <decrement profiling counter> | 4792 // <decrement profiling counter> |
4771 // bpl ok | 4793 // bpl ok |
4772 // ; load interrupt stub address into ip - either of: | 4794 // ; load interrupt stub address into ip - either of: |
4773 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> | 4795 // ; <small cp load> | <extended cp load> | <immediate load> |
4774 // | movt ip, <immed high> | 4796 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm |
| 4797 // | movt ip, #imm> | movw ip, #imm |
| 4798 // | ldr ip, [pp, ip] |
4775 // blx ip | 4799 // blx ip |
| 4800 // <reset profiling counter> |
4776 // ok-label | 4801 // ok-label |
4777 | 4802 |
4778 // Calculate branch offet to the ok-label - this is the difference between | 4803 // Calculate branch offset to the ok-label - this is the difference |
4779 // the branch address and |pc| (which points at <blx ip>) plus one instr. | 4804 // between the branch address and |pc| (which points at <blx ip>) plus |
 4780 int branch_offset = pc + Assembler::kInstrSize - branch_address; | 4805 // kProfileCounterResetSequenceLength instructions. |
| 4806 int branch_offset = pc - Instruction::kPCReadOffset - branch_address + |
| 4807 kProfileCounterResetSequenceLength; |
4781 patcher.masm()->b(branch_offset, pl); | 4808 patcher.masm()->b(branch_offset, pl); |
4782 break; | 4809 break; |
4783 } | 4810 } |
4784 case ON_STACK_REPLACEMENT: | 4811 case ON_STACK_REPLACEMENT: |
4785 case OSR_AFTER_STACK_CHECK: | 4812 case OSR_AFTER_STACK_CHECK: |
4786 // <decrement profiling counter> | 4813 // <decrement profiling counter> |
4787 // mov r0, r0 (NOP) | 4814 // mov r0, r0 (NOP) |
4788 // ; load on-stack replacement address into ip - either of: | 4815 // ; load on-stack replacement address into ip - either of: |
4789 // ldr ip, [pc/pp, <constant pool offset>] | movw ip, <immed low> | 4816 // ; <small cp load> | <extended cp load> | <immediate load> |
4790 // | movt ip, <immed high> | 4817 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm |
| 4818 // | movt ip, #imm> | movw ip, #imm |
| 4819 // | ldr ip, [pp, ip] |
4791 // blx ip | 4820 // blx ip |
| 4821 // <reset profiling counter> |
4792 // ok-label | 4822 // ok-label |
4793 patcher.masm()->nop(); | 4823 patcher.masm()->nop(); |
4794 break; | 4824 break; |
4795 } | 4825 } |
4796 | 4826 |
4797 // Replace the call address. | 4827 // Replace the call address. |
4798 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, | 4828 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, |
4799 replacement_code->entry()); | 4829 replacement_code->entry()); |
4800 | 4830 |
4801 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4831 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
(...skipping 27 matching lines...) Expand all Loading... |
4829 | 4859 |
4830 ASSERT(interrupt_address == | 4860 ASSERT(interrupt_address == |
4831 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4861 isolate->builtins()->OsrAfterStackCheck()->entry()); |
4832 return OSR_AFTER_STACK_CHECK; | 4862 return OSR_AFTER_STACK_CHECK; |
4833 } | 4863 } |
4834 | 4864 |
4835 | 4865 |
4836 } } // namespace v8::internal | 4866 } } // namespace v8::internal |
4837 | 4867 |
4838 #endif // V8_TARGET_ARCH_ARM | 4868 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |