OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // | 2 // |
3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
5 // met: | 5 // met: |
6 // | 6 // |
7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
(...skipping 21 matching lines...) Expand all Loading... |
32 | 32 |
33 #include "src/arm64/assembler-arm64-inl.h" | 33 #include "src/arm64/assembler-arm64-inl.h" |
34 #include "src/arm64/frames-arm64.h" | 34 #include "src/arm64/frames-arm64.h" |
35 #include "src/base/bits.h" | 35 #include "src/base/bits.h" |
36 #include "src/base/cpu.h" | 36 #include "src/base/cpu.h" |
37 #include "src/register-configuration.h" | 37 #include "src/register-configuration.h" |
38 | 38 |
39 namespace v8 { | 39 namespace v8 { |
40 namespace internal { | 40 namespace internal { |
41 | 41 |
42 | |
43 // ----------------------------------------------------------------------------- | 42 // ----------------------------------------------------------------------------- |
44 // CpuFeatures implementation. | 43 // CpuFeatures implementation. |
45 | 44 |
46 void CpuFeatures::ProbeImpl(bool cross_compile) { | 45 void CpuFeatures::ProbeImpl(bool cross_compile) { |
47 // AArch64 has no configuration options, no further probing is required. | 46 // AArch64 has no configuration options, no further probing is required. |
48 supported_ = 0; | 47 supported_ = 0; |
49 | 48 |
50 // Only use statically determined features for cross compile (snapshot). | 49 // Only use statically determined features for cross compile (snapshot). |
51 if (cross_compile) return; | 50 if (cross_compile) return; |
52 | 51 |
(...skipping 528 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
581 pc_ = buffer_; | 580 pc_ = buffer_; |
582 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_), | 581 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_), |
583 reinterpret_cast<byte*>(pc_)); | 582 reinterpret_cast<byte*>(pc_)); |
584 constpool_.Clear(); | 583 constpool_.Clear(); |
585 next_constant_pool_check_ = 0; | 584 next_constant_pool_check_ = 0; |
586 next_veneer_pool_check_ = kMaxInt; | 585 next_veneer_pool_check_ = kMaxInt; |
587 no_const_pool_before_ = 0; | 586 no_const_pool_before_ = 0; |
588 ClearRecordedAstId(); | 587 ClearRecordedAstId(); |
589 } | 588 } |
590 | 589 |
| 590 void Assembler::set_heap_number(Handle<HeapObject> number, Address pc) { |
| 591 Memory::Address_at(target_pointer_address_at(pc)) = |
| 592 reinterpret_cast<Address>(number.location()); |
| 593 } |
591 | 594 |
592 void Assembler::GetCode(CodeDesc* desc) { | 595 void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { |
593 // Emit constant pool if necessary. | 596 // Emit constant pool if necessary. |
594 CheckConstPool(true, false); | 597 CheckConstPool(true, false); |
595 DCHECK(constpool_.IsEmpty()); | 598 DCHECK(constpool_.IsEmpty()); |
596 | 599 |
| 600 AllocateRequestedHeapNumbers(isolate); |
| 601 |
597 // Set up code descriptor. | 602 // Set up code descriptor. |
598 if (desc) { | 603 if (desc) { |
599 desc->buffer = reinterpret_cast<byte*>(buffer_); | 604 desc->buffer = reinterpret_cast<byte*>(buffer_); |
600 desc->buffer_size = buffer_size_; | 605 desc->buffer_size = buffer_size_; |
601 desc->instr_size = pc_offset(); | 606 desc->instr_size = pc_offset(); |
602 desc->reloc_size = | 607 desc->reloc_size = |
603 static_cast<int>((reinterpret_cast<byte*>(buffer_) + buffer_size_) - | 608 static_cast<int>((reinterpret_cast<byte*>(buffer_) + buffer_size_) - |
604 reloc_info_writer.pos()); | 609 reloc_info_writer.pos()); |
605 desc->origin = this; | 610 desc->origin = this; |
606 desc->constant_pool_size = 0; | 611 desc->constant_pool_size = 0; |
(...skipping 1068 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1675 } | 1680 } |
1676 | 1681 |
1677 | 1682 |
1678 void Assembler::ldr_pcrel(const CPURegister& rt, int imm19) { | 1683 void Assembler::ldr_pcrel(const CPURegister& rt, int imm19) { |
1679 // The pattern 'ldr xzr, #offset' is used to indicate the beginning of a | 1684 // The pattern 'ldr xzr, #offset' is used to indicate the beginning of a |
1680 // constant pool. It should not be emitted. | 1685 // constant pool. It should not be emitted. |
1681 DCHECK(!rt.IsZero()); | 1686 DCHECK(!rt.IsZero()); |
1682 Emit(LoadLiteralOpFor(rt) | ImmLLiteral(imm19) | Rt(rt)); | 1687 Emit(LoadLiteralOpFor(rt) | ImmLLiteral(imm19) | Rt(rt)); |
1683 } | 1688 } |
1684 | 1689 |
| 1690 Operand Operand::EmbeddedNumber(double value) { |
| 1691 int32_t smi; |
| 1692 if (DoubleToSmiInteger(value, &smi)) { |
| 1693 return Operand(Immediate(Smi::FromInt(smi))); |
| 1694 } |
| 1695 Operand result(bit_cast<int64_t>(value), RelocInfo::EMBEDDED_OBJECT); |
| 1696 result.is_heap_number_ = true; |
| 1697 return result; |
| 1698 } |
| 1699 |
| 1700 void Assembler::ldr(const CPURegister& rt, const Operand& operand) { |
| 1701 if (operand.is_heap_number()) { |
| 1702 RequestHeapNumber(operand.heap_number()); |
| 1703 ldr(rt, Immediate(0, RelocInfo::EMBEDDED_OBJECT)); |
| 1704 } else { |
| 1705 ldr(rt, operand.immediate()); |
| 1706 } |
| 1707 } |
1685 | 1708 |
1686 void Assembler::ldr(const CPURegister& rt, const Immediate& imm) { | 1709 void Assembler::ldr(const CPURegister& rt, const Immediate& imm) { |
1687 // Currently we only support 64-bit literals. | 1710 // Currently we only support 64-bit literals. |
1688 DCHECK(rt.Is64Bits()); | 1711 DCHECK(rt.Is64Bits()); |
1689 | 1712 |
1690 RecordRelocInfo(imm.rmode(), imm.value()); | 1713 RecordRelocInfo(imm.rmode(), imm.value()); |
1691 BlockConstPoolFor(1); | 1714 BlockConstPoolFor(1); |
1692 // The load will be patched when the constpool is emitted, patching code | 1715 // The load will be patched when the constpool is emitted, patching code |
1693 // expects a load literal with offset 0. | 1716 // expects a load literal with offset 0. |
1694 ldr_pcrel(rt, 0); | 1717 ldr_pcrel(rt, 0); |
(...skipping 1529 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3224 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); | 3247 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); |
3225 DCHECK((target_offset >> 48) == 0); | 3248 DCHECK((target_offset >> 48) == 0); |
3226 add(rd, rd, scratch); | 3249 add(rd, rd, scratch); |
3227 } | 3250 } |
3228 | 3251 |
3229 | 3252 |
3230 } // namespace internal | 3253 } // namespace internal |
3231 } // namespace v8 | 3254 } // namespace v8 |
3232 | 3255 |
3233 #endif // V8_TARGET_ARCH_ARM64 | 3256 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |