| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 556 matching lines...) |
| 567 Memory::int32_at(pc + 2 * Assembler::kInstrSize))); | 567 Memory::int32_at(pc + 2 * Assembler::kInstrSize))); |
| 568 } else { | 568 } else { |
| 569 return !Assembler::IsMovImmed(Memory::int32_at(pc)) || | 569 return !Assembler::IsMovImmed(Memory::int32_at(pc)) || |
| 570 (FLAG_enable_ool_constant_pool && | 570 (FLAG_enable_ool_constant_pool && |
| 571 Assembler::IsLdrPpRegOffset( | 571 Assembler::IsLdrPpRegOffset( |
| 572 Memory::int32_at(pc + 4 * Assembler::kInstrSize))); | 572 Memory::int32_at(pc + 4 * Assembler::kInstrSize))); |
| 573 } | 573 } |
| 574 } | 574 } |
| 575 | 575 |
| 576 | 576 |
| 577 Address Assembler::constant_pool_entry_address( | 577 Address Assembler::constant_pool_entry_address(Address pc, |
| 578 Address pc, ConstantPoolArray* constant_pool) { | 578 Address constant_pool) { |
| 579 if (FLAG_enable_ool_constant_pool) { | 579 if (FLAG_enable_ool_constant_pool) { |
| 580 DCHECK(constant_pool != NULL); | 580 DCHECK(constant_pool != NULL); |
| 581 int cp_offset; | 581 int cp_offset; |
| 582 if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) { | 582 if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) { |
| 583 DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && | 583 DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && |
| 584 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && | 584 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && |
| 585 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) && | 585 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) && |
| 586 IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))); | 586 IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))); |
| 587 // This is an extended constant pool lookup (ARMv6). | 587 // This is an extended constant pool lookup (ARMv6). |
| 588 Instr mov_instr = instr_at(pc); | 588 Instr mov_instr = instr_at(pc); |
| 589 Instr orr_instr_1 = instr_at(pc + kInstrSize); | 589 Instr orr_instr_1 = instr_at(pc + kInstrSize); |
| 590 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); | 590 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); |
| 591 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); | 591 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); |
| 592 cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | | 592 cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | |
| 593 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3); | 593 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3); |
| 594 } else if (IsMovW(Memory::int32_at(pc))) { | 594 } else if (IsMovW(Memory::int32_at(pc))) { |
| 595 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) && | 595 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) && |
| 596 IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))); | 596 IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))); |
| 597 // This is an extended constant pool lookup (ARMv7). | 597 // This is an extended constant pool lookup (ARMv7). |
| 598 Instruction* movw_instr = Instruction::At(pc); | 598 Instruction* movw_instr = Instruction::At(pc); |
| 599 Instruction* movt_instr = Instruction::At(pc + kInstrSize); | 599 Instruction* movt_instr = Instruction::At(pc + kInstrSize); |
| 600 cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) | | 600 cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) | |
| 601 movw_instr->ImmedMovwMovtValue(); | 601 movw_instr->ImmedMovwMovtValue(); |
| 602 } else { | 602 } else { |
| 603 // This is a small constant pool lookup. | 603 // This is a small constant pool lookup. |
| 604 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc))); | 604 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc))); |
| 605 cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc)); | 605 cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc)); |
| 606 } | 606 } |
| 607 return reinterpret_cast<Address>(constant_pool) + cp_offset; | 607 return constant_pool + cp_offset; |
| 608 } else { | 608 } else { |
| 609 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc))); | 609 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc))); |
| 610 Instr instr = Memory::int32_at(pc); | 610 Instr instr = Memory::int32_at(pc); |
| 611 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta; | 611 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta; |
| 612 } | 612 } |
| 613 } | 613 } |
| 614 | 614 |
| 615 | 615 |
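Editor's note on the decode paths in constant_pool_entry_address above: the ARMv6 extended lookup reassembles the pool offset by OR-ing the rotated 8-bit immediates of one mov and three orr instructions, while the ARMv7 path combines the movw/movt halves. Below is a minimal standalone sketch of that reassembly; the helper names are hypothetical and are not V8's actual DecodeShiftImm / ImmedMovwMovtValue implementations.

```cpp
#include <cstdint>

// Hypothetical stand-in for the rotated-immediate decode: an ARM
// data-processing instruction carries an 8-bit value rotated right by
// twice its 4-bit rotate field.
uint32_t DecodeRotatedImm8(uint32_t instr) {
  uint32_t rot = ((instr >> 8) & 0xF) * 2;  // rotation amount in bits
  uint32_t imm8 = instr & 0xFF;
  return rot == 0 ? imm8 : ((imm8 >> rot) | (imm8 << (32 - rot)));
}

// ARMv6 extended lookup: each of the four instructions contributes one
// byte-sized piece of the offset; OR-ing them reproduces the full value.
uint32_t ExtendedOffsetArmV6(uint32_t mov, uint32_t orr1, uint32_t orr2,
                             uint32_t orr3) {
  return DecodeRotatedImm8(mov) | DecodeRotatedImm8(orr1) |
         DecodeRotatedImm8(orr2) | DecodeRotatedImm8(orr3);
}

// ARMv7 extended lookup: movw holds the low 16 bits, movt the high 16 bits.
uint32_t ExtendedOffsetArmV7(uint32_t movw_imm16, uint32_t movt_imm16) {
  return (movt_imm16 << 16) | movw_imm16;
}
```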
| 616 Address Assembler::target_address_at(Address pc, | 616 Address Assembler::target_address_at(Address pc, Address constant_pool) { |
| 617 ConstantPoolArray* constant_pool) { | |
| 618 if (is_constant_pool_load(pc)) { | 617 if (is_constant_pool_load(pc)) { |
| 619 // This is a constant pool lookup. Return the value in the constant pool. | 618 // This is a constant pool lookup. Return the value in the constant pool. |
| 620 return Memory::Address_at(constant_pool_entry_address(pc, constant_pool)); | 619 return Memory::Address_at(constant_pool_entry_address(pc, constant_pool)); |
| 621 } else if (CpuFeatures::IsSupported(ARMv7)) { | 620 } else if (CpuFeatures::IsSupported(ARMv7)) { |
| 622 // This is a movw / movt immediate load. Return the immediate. | 621 // This is a movw / movt immediate load. Return the immediate. |
| 623 DCHECK(IsMovW(Memory::int32_at(pc)) && | 622 DCHECK(IsMovW(Memory::int32_at(pc)) && |
| 624 IsMovT(Memory::int32_at(pc + kInstrSize))); | 623 IsMovT(Memory::int32_at(pc + kInstrSize))); |
| 625 Instruction* movw_instr = Instruction::At(pc); | 624 Instruction* movw_instr = Instruction::At(pc); |
| 626 Instruction* movt_instr = Instruction::At(pc + kInstrSize); | 625 Instruction* movt_instr = Instruction::At(pc + kInstrSize); |
| 627 return reinterpret_cast<Address>( | 626 return reinterpret_cast<Address>( |
| (...skipping 10 matching lines...) |
| 638 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); | 637 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); |
| 639 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); | 638 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); |
| 640 Address ret = reinterpret_cast<Address>( | 639 Address ret = reinterpret_cast<Address>( |
| 641 DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | | 640 DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | |
| 642 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3)); | 641 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3)); |
| 643 return ret; | 642 return ret; |
| 644 } | 643 } |
| 645 } | 644 } |
| 646 | 645 |
| 647 | 646 |
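Editor's note on the ARMv7 branch of target_address_at: ImmedMovwMovtValue corresponds to the standard ARM MOVW/MOVT encoding, where the 16-bit immediate is split into an imm4 field (instruction bits 19:16) and an imm12 field (bits 11:0). The sketch below shows that extraction under this assumption; it is based on the ARM encoding, not copied from V8's Instruction class.

```cpp
#include <cstdint>

// Assumed bit layout: MOVW/MOVT store their 16-bit immediate as imm4 in
// instruction bits 19:16 and imm12 in bits 11:0.
uint32_t MovwMovtImmediate(uint32_t instr) {
  return ((instr >> 4) & 0xF000) |  // imm4 -> immediate bits 15:12
         (instr & 0x0FFF);          // imm12 -> immediate bits 11:0
}

// target_address_at then combines the two halves exactly as in the diff:
// (movt immediate << 16) | movw immediate.
uint32_t TargetFromMovwMovt(uint32_t movw_instr, uint32_t movt_instr) {
  return (MovwMovtImmediate(movt_instr) << 16) | MovwMovtImmediate(movw_instr);
}
```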
| 648 void Assembler::set_target_address_at(Address pc, | 647 void Assembler::set_target_address_at(Address pc, Address constant_pool, |
| 649 ConstantPoolArray* constant_pool, | |
| 650 Address target, | 648 Address target, |
| 651 ICacheFlushMode icache_flush_mode) { | 649 ICacheFlushMode icache_flush_mode) { |
| 652 if (is_constant_pool_load(pc)) { | 650 if (is_constant_pool_load(pc)) { |
| 653 // This is a constant pool lookup. Update the entry in the constant pool. | 651 // This is a constant pool lookup. Update the entry in the constant pool. |
| 654 Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target; | 652 Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target; |
| 655 // Intuitively, we would think it is necessary to always flush the | 653 // Intuitively, we would think it is necessary to always flush the |
| 656 // instruction cache after patching a target address in the code as follows: | 654 // instruction cache after patching a target address in the code as follows: |
| 657 // CpuFeatures::FlushICache(pc, sizeof(target)); | 655 // CpuFeatures::FlushICache(pc, sizeof(target)); |
| 658 // However, on ARM, no instruction is actually patched in the case | 656 // However, on ARM, no instruction is actually patched in the case |
| 659 // of embedded constants of the form: | 657 // of embedded constants of the form: |
| (...skipping 34 matching lines...) |
| 694 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { | 692 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { |
| 695 CpuFeatures::FlushICache(pc, 4 * kInstrSize); | 693 CpuFeatures::FlushICache(pc, 4 * kInstrSize); |
| 696 } | 694 } |
| 697 } | 695 } |
| 698 } | 696 } |
| 699 | 697 |
| 700 | 698 |
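Editor's note on set_target_address_at: the long comment explains that patching a constant-pool slot only changes data, so no instruction-cache flush is needed there, while rewriting the immediate-load instruction sequence changes code and the patched range (4 * kInstrSize on the visible ARMv6 mov/orr path) must be flushed unless the caller opted out. A small sketch of that rule, with illustrative names that are not V8's API:

```cpp
#include <cstddef>

// Illustrative sketch of the flush decision: only flush when instructions
// (not constant-pool data) were rewritten and the caller did not ask to
// skip the flush.
enum class FlushMode { kFlush, kSkip };

void MaybeFlushAfterPatch(bool patched_instructions, char* pc,
                          size_t patched_bytes, FlushMode mode) {
  if (patched_instructions && mode == FlushMode::kFlush) {
    // A platform i-cache flush over [pc, pc + patched_bytes); here the
    // GCC/Clang builtin stands in for V8's CpuFeatures::FlushICache.
    __builtin___clear_cache(pc, pc + patched_bytes);
  }
}
```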
| 701 } } // namespace v8::internal | 699 } } // namespace v8::internal |
| 702 | 700 |
| 703 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ | 701 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ |