| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 16 matching lines...) |
| 27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING | 27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
| 28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | 28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS |
| 29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 30 | 30 |
| 31 // The original source code covered by the above license above has been | 31 // The original source code covered by the above license above has been |
| 32 // modified significantly by Google Inc. | 32 // modified significantly by Google Inc. |
| 33 // Copyright 2012 the V8 project authors. All rights reserved. | 33 // Copyright 2012 the V8 project authors. All rights reserved. |
| 34 | 34 |
| 35 // A light-weight IA32 Assembler. | 35 // A light-weight IA32 Assembler. |
| 36 | 36 |
| 37 #ifndef V8_IA32_ASSEMBLER_IA32_INL_H_ | 37 #ifndef V8_X87_ASSEMBLER_X87_INL_H_ |
| 38 #define V8_IA32_ASSEMBLER_IA32_INL_H_ | 38 #define V8_X87_ASSEMBLER_X87_INL_H_ |
| 39 | 39 |
| 40 #include "ia32/assembler-ia32.h" | 40 #include "x87/assembler-x87.h" |
| 41 | 41 |
| 42 #include "cpu.h" | 42 #include "cpu.h" |
| 43 #include "debug.h" | 43 #include "debug.h" |
| 44 | 44 |
| 45 namespace v8 { | 45 namespace v8 { |
| 46 namespace internal { | 46 namespace internal { |
| 47 | 47 |
| 48 bool CpuFeatures::SupportsCrankshaft() { return true; } | 48 bool CpuFeatures::SupportsCrankshaft() { return false; } |
| 49 | 49 |
| 50 | 50 |
| 51 static const byte kCallOpcode = 0xE8; | 51 static const byte kCallOpcode = 0xE8; |
| 52 static const int kNoCodeAgeSequenceLength = 5; | 52 static const int kNoCodeAgeSequenceLength = 5; |
| 53 | 53 |
| 54 | 54 |
| 55 // The modes possibly affected by apply must be in kApplyMask. | 55 // The modes possibly affected by apply must be in kApplyMask. |
| 56 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) { | 56 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) { |
| 57 bool flush_icache = icache_flush_mode != SKIP_ICACHE_FLUSH; | 57 bool flush_icache = icache_flush_mode != SKIP_ICACHE_FLUSH; |
| 58 if (IsRuntimeEntry(rmode_) || IsCodeTarget(rmode_)) { | 58 if (IsRuntimeEntry(rmode_) || IsCodeTarget(rmode_)) { |
| 59 int32_t* p = reinterpret_cast<int32_t*>(pc_); | 59 int32_t* p = reinterpret_cast<int32_t*>(pc_); |
| 60 *p -= delta; // Relocate entry. | 60 *p -= delta; // Relocate entry. |
| 61 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); | 61 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); |
| 62 } else if (rmode_ == CODE_AGE_SEQUENCE) { | 62 } else if (rmode_ == CODE_AGE_SEQUENCE) { |
| 63 if (*pc_ == kCallOpcode) { | 63 if (*pc_ == kCallOpcode) { |
| 64 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1); | 64 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1); |
| 65 *p -= delta; // Relocate entry. | 65 *p -= delta; // Relocate entry. |
| 66 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); | 66 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); |
| 67 } | 67 } |
| 68 } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) { | 68 } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) { |
| 69 // Special handling of js_return when a break point is set (call | 69 // Special handling of js_return when a break point is set (call |
| 70 // instruction has been inserted). | 70 // instruction has been inserted). |
| 71 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1); | 71 int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1); |
| 72 *p -= delta; // Relocate entry. | 72 *p -= delta; // Relocate entry. |
| 73 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); | 73 if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t)); |
| 74 } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) { | 74 } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) { |
| 75 // Special handling of a debug break slot when a break point is set (call | 75 // Special handling of a debug break slot when a break point is set (call |
| 76 // instruction has been inserted). | 76 // instruction has been inserted). |
| (...skipping 31 matching lines...) |
| 108 | 108 |
| 109 int RelocInfo::target_address_size() { | 109 int RelocInfo::target_address_size() { |
| 110 return Assembler::kSpecialTargetSize; | 110 return Assembler::kSpecialTargetSize; |
| 111 } | 111 } |
| 112 | 112 |
| 113 | 113 |
| 114 void RelocInfo::set_target_address(Address target, | 114 void RelocInfo::set_target_address(Address target, |
| 115 WriteBarrierMode write_barrier_mode, | 115 WriteBarrierMode write_barrier_mode, |
| 116 ICacheFlushMode icache_flush_mode) { | 116 ICacheFlushMode icache_flush_mode) { |
| 117 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode); | 117 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode); |
| | 118 Assembler::set_target_address_at(pc_, host_, target); |
| 118 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); | 119 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); |
| 119 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL && | 120 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL && |
| 120 IsCodeTarget(rmode_)) { | 121 IsCodeTarget(rmode_)) { |
| 121 Object* target_code = Code::GetCodeFromTargetAddress(target); | 122 Object* target_code = Code::GetCodeFromTargetAddress(target); |
| 122 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( | 123 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( |
| 123 host(), this, HeapObject::cast(target_code)); | 124 host(), this, HeapObject::cast(target_code)); |
| 124 } | 125 } |
| 125 } | 126 } |
| 126 | 127 |
| 127 | 128 |
| (...skipping 414 matching lines...) |
| 542 len_ += sizeof(int32_t); | 543 len_ += sizeof(int32_t); |
| 543 rmode_ = rmode; | 544 rmode_ = rmode; |
| 544 } | 545 } |
| 545 | 546 |
| 546 Operand::Operand(Register reg) { | 547 Operand::Operand(Register reg) { |
| 547 // reg | 548 // reg |
| 548 set_modrm(3, reg); | 549 set_modrm(3, reg); |
| 549 } | 550 } |
| 550 | 551 |
| 551 | 552 |
| 552 Operand::Operand(XMMRegister xmm_reg) { | |
| 553 Register reg = { xmm_reg.code() }; | |
| 554 set_modrm(3, reg); | |
| 555 } | |
| 556 | |
| 557 | |
| 558 Operand::Operand(int32_t disp, RelocInfo::Mode rmode) { | 553 Operand::Operand(int32_t disp, RelocInfo::Mode rmode) { |
| 559 // [disp/r] | 554 // [disp/r] |
| 560 set_modrm(0, ebp); | 555 set_modrm(0, ebp); |
| 561 set_dispr(disp, rmode); | 556 set_dispr(disp, rmode); |
| 562 } | 557 } |
| 563 | 558 |
| 564 } } // namespace v8::internal | 559 } } // namespace v8::internal |
| 565 | 560 |
| 566 #endif // V8_IA32_ASSEMBLER_IA32_INL_H_ | 561 #endif // V8_X87_ASSEMBLER_X87_INL_H_ |
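A note on the `*p -= delta` fix-ups in RelocInfo::apply above: on IA-32/x87 a call or code target is stored as a 32-bit offset relative to the end of the operand, so when the containing code object moves by `delta` the stored offset must shrink by `delta` for the absolute target to stay the same. A minimal standalone sketch of that arithmetic (hypothetical helper name, not part of V8):

#include <cassert>
#include <cstdint>

// Absolute target of a call whose rel32 operand lives at `operand_address`.
// On x86 the offset is relative to the end of the operand (the next instruction).
static uint32_t AbsoluteTarget(uint32_t operand_address, int32_t rel32) {
  return operand_address + sizeof(int32_t) + rel32;
}

int main() {
  uint32_t old_operand_address = 0x1000;  // where the rel32 bytes live
  int32_t rel32 = 0x0200;                 // encoded relative offset
  uint32_t target = AbsoluteTarget(old_operand_address, rel32);

  // The code object moves by `delta`; the operand bytes move with it.
  int32_t delta = 0x3000;
  uint32_t new_operand_address = old_operand_address + delta;

  // The same fix-up RelocInfo::apply performs: shrink the offset by delta.
  int32_t new_rel32 = rel32 - delta;

  // The absolute target is unchanged, which is what the fix-up guarantees.
  assert(AbsoluteTarget(new_operand_address, new_rel32) == target);
  return 0;
}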
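And on the `Operand(Register reg)` constructor near the end of the diff: `set_modrm(3, reg)` emits an x86 ModR/M byte with mod = 3, i.e. register-direct addressing. The ModR/M layout puts mod in bits 7-6, reg/opcode in bits 5-3, and r/m in bits 2-0; here only mod and r/m are filled in, and the reg field is supplied later by the instruction encoder. A hedged sketch of the byte layout (standalone illustration, not V8's set_modrm):

#include <cstdint>
#include <cstdio>

// Assemble an x86 ModR/M byte: mod (2 bits), reg/opcode (3 bits), r/m (3 bits).
static uint8_t ModRM(uint8_t mod, uint8_t reg, uint8_t rm) {
  return static_cast<uint8_t>((mod << 6) | ((reg & 7) << 3) | (rm & 7));
}

int main() {
  // mod = 3 selects register-direct addressing; with reg = 0 (to be filled in
  // by the instruction encoder) and r/m = 0 (eax), the byte is 0xC0.
  std::printf("modrm = 0x%02X\n", ModRM(3, 0, 0));
  return 0;
}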