OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 328 matching lines...)
339 | 339 |
340 | 340 |
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
342 __ mov(r2, Operand(profiling_counter_)); | 342 __ mov(r2, Operand(profiling_counter_)); |
343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); | 344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); |
345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
346 } | 346 } |
347 | 347 |
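The decrement above works directly on the tagged Smi in the counter cell. A minimal sketch of the equivalent arithmetic, assuming 32-bit Smis tagged as value << 1 (the helper name is illustrative, not V8's):

    #include <cstdint>

    // Smi-tagged decrement, as the ldr/sub/str sequence performs it: a 32-bit
    // Smi stores its value shifted left by one, so subtracting a tagged delta
    // from a tagged counter yields a correctly tagged result, and the sign of
    // the raw word (the N flag left by SetCC) matches the Smi's sign.
    int32_t SmiTaggedDecrement(int32_t tagged_counter, int delta) {
      int32_t tagged_delta = delta << 1;     // Smi::FromInt(delta)
      return tagged_counter - tagged_delta;  // sub r3, r3, ..., SetCC
    }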
348 | 348 |
| 349 #ifdef CAN_USE_ARMV7_INSTRUCTIONS |
349 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize; | 350 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize; |
| 351 #else |
| 352 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize; |
| 353 #endif |
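Rough accounting for the two constants above, under the assumption that the worst-case load of the counter cell address is the extended constant pool form (the names below are illustrative stand-ins, not V8 declarations):

    // ARMv7: movw + movt + ldr  = 3 instructions worst case.
    // ARMv6: mov + 3x orr + ldr = 5 instructions worst case.
    // Both are followed by "mov r3, #reset_value" and "str r3, [r2, #offset]".
    constexpr int kInstrSize = 4;  // stand-in for Assembler::kInstrSize
    constexpr int kWorstCaseAddressLoadARMv7 = 3;
    constexpr int kWorstCaseAddressLoadARMv6 = 5;
    static_assert((kWorstCaseAddressLoadARMv7 + 2) * kInstrSize == 5 * kInstrSize,
                  "matches the ARMv7 constant above");
    static_assert((kWorstCaseAddressLoadARMv6 + 2) * kInstrSize == 7 * kInstrSize,
                  "matches the ARMv6 constant above");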
350 | 354 |
351 | 355 |
352 void FullCodeGenerator::EmitProfilingCounterReset() { | 356 void FullCodeGenerator::EmitProfilingCounterReset() { |
353 Assembler::BlockConstPoolScope block_const_pool(masm_); | 357 Assembler::BlockConstPoolScope block_const_pool(masm_); |
354 PredictableCodeSizeScope predictable_code_size_scope( | 358 PredictableCodeSizeScope predictable_code_size_scope( |
355 masm_, kProfileCounterResetSequenceLength); | 359 masm_, kProfileCounterResetSequenceLength); |
356 Label start; | 360 Label start; |
357 __ bind(&start); | 361 __ bind(&start); |
358 int reset_value = FLAG_interrupt_budget; | 362 int reset_value = FLAG_interrupt_budget; |
359 if (info_->is_debug()) { | 363 if (info_->is_debug()) { |
360 // Detect debug break requests as soon as possible. | 364 // Detect debug break requests as soon as possible. |
361 reset_value = FLAG_interrupt_budget >> 4; | 365 reset_value = FLAG_interrupt_budget >> 4; |
362 } | 366 } |
363 __ mov(r2, Operand(profiling_counter_)); | 367 __ mov(r2, Operand(profiling_counter_)); |
364 // The mov instruction above can be either 1, 2 or 3 instructions depending | 368 // The mov instruction above can take 1 to 3 (ARMv7) or 1 to 5 (ARMv6) |
365 // upon whether it is an extended constant pool - insert nop to compensate. | 369 // instructions, depending on whether the address is loaded from an |
366 DCHECK(masm_->InstructionsGeneratedSince(&start) <= 3); | 370 // extended constant pool - insert nops to compensate. |
367 while (masm_->InstructionsGeneratedSince(&start) != 3) { | 371 int expected_instr_count = |
| 372 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2; |
| 373 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count); |
| 374 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) { |
368 __ nop(); | 375 __ nop(); |
369 } | 376 } |
370 __ mov(r3, Operand(Smi::FromInt(reset_value))); | 377 __ mov(r3, Operand(Smi::FromInt(reset_value))); |
371 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 378 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
372 } | 379 } |
373 | 380 |
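The nop padding in EmitProfilingCounterReset is what keeps the sequence patchable: BackEdgeTable::PatchAt assumes a fixed byte distance between <blx ip> and the ok-label. A minimal sketch of the idea against a plain instruction buffer (hypothetical helper, not the MacroAssembler API):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kNopEncoding = 0xE1A00000;  // mov r0, r0

    // Pad the address load out to its worst case so every reset sequence
    // occupies exactly kProfileCounterResetSequenceLength bytes, whichever
    // form the constant load actually took.
    void PadToInstructionCount(std::vector<uint32_t>* buffer,
                               size_t sequence_start, size_t expected_instrs) {
      while (buffer->size() - sequence_start < expected_instrs)
        buffer->push_back(kNopEncoding);
    }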
374 | 381 |
375 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 382 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
376 Label* back_edge_target) { | 383 Label* back_edge_target) { |
377 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 384 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
(...skipping 4400 matching lines...)
4778 | 4785 |
4779 #undef __ | 4786 #undef __ |
4780 | 4787 |
4781 | 4788 |
4782 static Address GetInterruptImmediateLoadAddress(Address pc) { | 4789 static Address GetInterruptImmediateLoadAddress(Address pc) { |
4783 Address load_address = pc - 2 * Assembler::kInstrSize; | 4790 Address load_address = pc - 2 * Assembler::kInstrSize; |
4784 if (!FLAG_enable_ool_constant_pool) { | 4791 if (!FLAG_enable_ool_constant_pool) { |
4785 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); | 4792 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); |
4786 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { | 4793 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { |
4787 // This is an extended constant pool lookup. | 4794 // This is an extended constant pool lookup. |
4788 load_address -= 2 * Assembler::kInstrSize; | 4795 if (CpuFeatures::IsSupported(ARMv7)) { |
4789 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address))); | 4796 load_address -= 2 * Assembler::kInstrSize; |
4790 DCHECK(Assembler::IsMovT( | 4797 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address))); |
4791 Memory::int32_at(load_address + Assembler::kInstrSize))); | 4798 DCHECK(Assembler::IsMovT( |
4792 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { | 4799 Memory::int32_at(load_address + Assembler::kInstrSize))); |
4793 // This is a movw_movt immediate load. | 4800 } else { |
| 4801 load_address -= 4 * Assembler::kInstrSize; |
| 4802 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address))); |
| 4803 DCHECK(Assembler::IsOrrImmed( |
| 4804 Memory::int32_at(load_address + Assembler::kInstrSize))); |
| 4805 DCHECK(Assembler::IsOrrImmed( |
| 4806 Memory::int32_at(load_address + 2 * Assembler::kInstrSize))); |
| 4807 DCHECK(Assembler::IsOrrImmed( |
| 4808 Memory::int32_at(load_address + 3 * Assembler::kInstrSize))); |
| 4809 } |
| 4810 } else if (CpuFeatures::IsSupported(ARMv7) && |
| 4811 Assembler::IsMovT(Memory::int32_at(load_address))) { |
| 4812 // This is a movw / movt immediate load. |
4794 load_address -= Assembler::kInstrSize; | 4813 load_address -= Assembler::kInstrSize; |
4795 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address))); | 4814 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address))); |
| 4815 } else if (!CpuFeatures::IsSupported(ARMv7) && |
| 4816 Assembler::IsOrrImmed(Memory::int32_at(load_address))) { |
| 4817 // This is a mov / orr immediate load. |
| 4818 load_address -= 3 * Assembler::kInstrSize; |
| 4819 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address))); |
| 4820 DCHECK(Assembler::IsOrrImmed( |
| 4821 Memory::int32_at(load_address + Assembler::kInstrSize))); |
| 4822 DCHECK(Assembler::IsOrrImmed( |
| 4823 Memory::int32_at(load_address + 2 * Assembler::kInstrSize))); |
4796 } else { | 4824 } else { |
4797 // This is a small constant pool lookup. | 4825 // This is a small constant pool lookup. |
4798 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); | 4826 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); |
4799 } | 4827 } |
4800 return load_address; | 4828 return load_address; |
4801 } | 4829 } |
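The walk-back distances above can be summarized in one place: every recognized sequence ends two instructions before |pc|, and the function returns pc - (length + 1) * kInstrSize, the sequence's first instruction. A sketch mirroring those offsets (the enum and helper are illustrative only, not V8 declarations):

    // Length, in instructions, of each address-load shape the lookup accepts.
    enum class LoadKind { kSmallConstantPool, kExtendedConstantPool, kImmediate };

    int LoadSequenceLength(LoadKind kind, bool armv7) {
      switch (kind) {
        case LoadKind::kSmallConstantPool:
          return 1;              // ldr ip, [pc/pp, #imm]
        case LoadKind::kExtendedConstantPool:
          return armv7 ? 3 : 5;  // movw/movt + ldr  vs  mov + 3x orr + ldr
        case LoadKind::kImmediate:
          return armv7 ? 2 : 4;  // movw/movt        vs  mov + 3x orr
      }
      return 0;
    }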
4802 | 4830 |
4803 | 4831 |
4804 void BackEdgeTable::PatchAt(Code* unoptimized_code, | 4832 void BackEdgeTable::PatchAt(Code* unoptimized_code, |
4805 Address pc, | 4833 Address pc, |
4806 BackEdgeState target_state, | 4834 BackEdgeState target_state, |
4807 Code* replacement_code) { | 4835 Code* replacement_code) { |
4808 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); | 4836 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); |
4809 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; | 4837 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; |
4810 CodePatcher patcher(branch_address, 1); | 4838 CodePatcher patcher(branch_address, 1); |
4811 switch (target_state) { | 4839 switch (target_state) { |
4812 case INTERRUPT: | 4840 case INTERRUPT: |
4813 { | 4841 { |
4814 // <decrement profiling counter> | 4842 // <decrement profiling counter> |
4815 // bpl ok | 4843 // bpl ok |
4816 // ; load interrupt stub address into ip - either of: | 4844 // ; load interrupt stub address into ip - either of (for ARMv7): |
4817 // ; <small cp load> | <extended cp load> | <immediate load> | 4845 // ; <small cp load> | <extended cp load> | <immediate load> |
4818 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm | 4846 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm |
4819 // | movt ip, #imm> | movw ip, #imm | 4847 // | movt ip, #imm | movw ip, #imm |
4820 // | ldr ip, [pp, ip] | 4848 // | ldr ip, [pp, ip] |
| 4849 // ; or (for ARMv6): |
| 4850 // ; <small cp load> | <extended cp load> | <immediate load> |
| 4851 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm |
| 4852 // | orr ip, ip, #imm | orr ip, ip, #imm |
| 4853 // | orr ip, ip, #imm | orr ip, ip, #imm |
| 4854 // | orr ip, ip, #imm | orr ip, ip, #imm |
4821 // blx ip | 4855 // blx ip |
4822 // <reset profiling counter> | 4856 // <reset profiling counter> |
4823 // ok-label | 4857 // ok-label |
4824 | 4858 |
4825 // Calculate branch offset to the ok-label - this is the difference | 4859 // Calculate branch offset to the ok-label - this is the difference |
4826 // between the branch address and |pc| (which points at <blx ip>) plus | 4860 // between the branch address and |pc| (which points at <blx ip>) plus |
4827 // kProfileCounterResetSequence instructions | 4861 // kProfileCounterResetSequenceLength bytes. |
4828 int branch_offset = pc - Instruction::kPCReadOffset - branch_address + | 4862 int branch_offset = pc - Instruction::kPCReadOffset - branch_address + |
4829 kProfileCounterResetSequenceLength; | 4863 kProfileCounterResetSequenceLength; |
4830 patcher.masm()->b(branch_offset, pl); | 4864 patcher.masm()->b(branch_offset, pl); |
4831 break; | 4865 break; |
4832 } | 4866 } |
4833 case ON_STACK_REPLACEMENT: | 4867 case ON_STACK_REPLACEMENT: |
4834 case OSR_AFTER_STACK_CHECK: | 4868 case OSR_AFTER_STACK_CHECK: |
4835 // <decrement profiling counter> | 4869 // <decrement profiling counter> |
4836 // mov r0, r0 (NOP) | 4870 // mov r0, r0 (NOP) |
4837 // ; load on-stack replacement address into ip - either of: | 4871 // ; load on-stack replacement address into ip - either of (for ARMv7): |
4838 // ; <small cp load> | <extended cp load> | <immediate load> | 4872 // ; <small cp load> | <extended cp load> | <immediate load> |
4839 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm | 4873 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm |
4840 // | movt ip, #imm> | movw ip, #imm | 4874 // | movt ip, #imm | movw ip, #imm |
4841 // | ldr ip, [pp, ip] | 4875 // | ldr ip, [pp, ip] |
| 4876 // ; or (for ARMv6): |
| 4877 // ; <small cp load> | <extended cp load> | <immediate load> |
| 4878 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm |
| 4879 // | orr ip, ip, #imm | orr ip, ip, #imm |
| 4880 // | orr ip, ip, #imm | orr ip, ip, #imm |
| 4881 // | orr ip, ip, #imm | orr ip, ip, #imm |
4842 // blx ip | 4882 // blx ip |
4843 // <reset profiling counter> | 4883 // <reset profiling counter> |
4844 // ok-label | 4884 // ok-label |
4845 patcher.masm()->nop(); | 4885 patcher.masm()->nop(); |
4846 break; | 4886 break; |
4847 } | 4887 } |
4848 | 4888 |
4849 // Replace the call address. | 4889 // Replace the call address. |
4850 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, | 4890 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, |
4851 replacement_code->entry()); | 4891 replacement_code->entry()); |
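For reference, the single word PatchAt rewrites at branch_address is either a conditional branch back to the ok-label or a nop. A hedged sketch of the two raw ARM encodings involved (illustrative helpers, not V8's CodePatcher API; PatchAt has already folded the 8-byte pc read-ahead into the offset it passes to b()):

    #include <cstdint>

    // b<pl> #offset: cond PL (0101) | 101 | L=0 | signed 24-bit word offset.
    uint32_t EncodeBranchPL(int32_t branch_offset_bytes) {
      uint32_t imm24 =
          (static_cast<uint32_t>(branch_offset_bytes) >> 2) & 0x00FFFFFF;
      return 0x5A000000u | imm24;
    }

    constexpr uint32_t kNopEncoding = 0xE1A00000;  // mov r0, r0 - the OSR marker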
(...skipping 29 matching lines...)
4881 | 4921 |
4882 DCHECK(interrupt_address == | 4922 DCHECK(interrupt_address == |
4883 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4923 isolate->builtins()->OsrAfterStackCheck()->entry()); |
4884 return OSR_AFTER_STACK_CHECK; | 4924 return OSR_AFTER_STACK_CHECK; |
4885 } | 4925 } |
4886 | 4926 |
4887 | 4927 |
4888 } } // namespace v8::internal | 4928 } } // namespace v8::internal |
4889 | 4929 |
4890 #endif // V8_TARGET_ARCH_ARM | 4930 #endif // V8_TARGET_ARCH_ARM |