OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 13 matching lines...) |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #if defined(V8_TARGET_ARCH_ARM) | 30 #if defined(V8_TARGET_ARCH_ARM) |
31 | 31 |
32 #include "codegen.h" | 32 #include "codegen.h" |
33 #include "macro-assembler.h" | 33 #include "macro-assembler.h" |
| 34 #include "simulator-arm.h" |
34 | 35 |
35 namespace v8 { | 36 namespace v8 { |
36 namespace internal { | 37 namespace internal { |
37 | 38 |
38 #define __ ACCESS_MASM(masm) | |
39 | 39 |
40 UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) { | 40 UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) { |
41 switch (type) { | 41 switch (type) { |
42 case TranscendentalCache::SIN: return &sin; | 42 case TranscendentalCache::SIN: return &sin; |
43 case TranscendentalCache::COS: return &cos; | 43 case TranscendentalCache::COS: return &cos; |
44 case TranscendentalCache::TAN: return &tan; | 44 case TranscendentalCache::TAN: return &tan; |
45 case TranscendentalCache::LOG: return &log; | 45 case TranscendentalCache::LOG: return &log; |
46 default: UNIMPLEMENTED(); | 46 default: UNIMPLEMENTED(); |
47 } | 47 } |
48 return NULL; | 48 return NULL; |
49 } | 49 } |
50 | 50 |
51 | 51 |
| 52 #define __ masm. |
| 53 |
| 54 |
| 55 #if defined(USE_SIMULATOR) |
| 56 byte* fast_exp_arm_machine_code = NULL; |
| 57 double fast_exp_simulator(double x) { |
| 58 return Simulator::current(Isolate::Current())->CallFP( |
| 59 fast_exp_arm_machine_code, x, 0); |
| 60 } |
| 61 #endif |
| 62 |
| 63 |
| 64 UnaryMathFunction CreateExpFunction() { |
| 65 if (!CpuFeatures::IsSupported(VFP2)) return &exp; |
| 66 if (!FLAG_fast_math) return &exp; |
| 67 size_t actual_size; |
| 68 byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB, &actual_size, true)); |
| 69 if (buffer == NULL) return &exp; |
| 70 ExternalReference::InitializeMathExpData(); |
| 71 |
| 72 MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size)); |
| 73 |
| 74 { |
| 75 CpuFeatures::Scope use_vfp(VFP2); |
| 76 DoubleRegister input = d0; |
| 77 DoubleRegister result = d1; |
| 78 DoubleRegister double_scratch1 = d2; |
| 79 DoubleRegister double_scratch2 = d3; |
| 80 Register temp1 = r4; |
| 81 Register temp2 = r5; |
| 82 Register temp3 = r6; |
| 83 |
| 84 if (masm.use_eabi_hardfloat()) { |
| 85 // Input value is in d0 anyway, nothing to do. |
| 86 } else { |
| 87 __ vmov(input, r0, r1); |
| 88 } |
| 89 __ Push(temp3, temp2, temp1); |
| 90 MathExpGenerator::EmitMathExp( |
| 91 &masm, input, result, double_scratch1, double_scratch2, |
| 92 temp1, temp2, temp3); |
| 93 __ Pop(temp3, temp2, temp1); |
| 94 if (masm.use_eabi_hardfloat()) { |
| 95 __ vmov(d0, result); |
| 96 } else { |
| 97 __ vmov(r0, r1, result); |
| 98 } |
| 99 __ Ret(); |
| 100 } |
| 101 |
| 102 CodeDesc desc; |
| 103 masm.GetCode(&desc); |
| 104 |
| 105 CPU::FlushICache(buffer, actual_size); |
| 106 OS::ProtectCode(buffer, actual_size); |
| 107 |
| 108 #if !defined(USE_SIMULATOR) |
| 109 return FUNCTION_CAST<UnaryMathFunction>(buffer); |
| 110 #else |
| 111 fast_exp_arm_machine_code = buffer; |
| 112 return &fast_exp_simulator; |
| 113 #endif |
| 114 } |
| 115 |
| 116 |
| 117 #undef __ |
| 118 |
| 119 |
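Caller's view of the new CreateExpFunction() above: it returns a UnaryMathFunction (a plain double-to-double function pointer) and falls back to the C library's exp() whenever VFP2 is unsupported, --fast-math is off, or the code buffer cannot be allocated, so call sites can cache the result unconditionally. A minimal sketch of such a call site, assuming only the UnaryMathFunction typedef and CreateExpFunction() as used above; fast_exp_function and FastExp() are illustrative names, not part of this change:

    // Hypothetical caller that caches whatever CreateExpFunction() returns.
    // fast_exp_function and FastExp() are made-up names for this sketch.
    static UnaryMathFunction fast_exp_function = NULL;

    double FastExp(double x) {
      if (fast_exp_function == NULL) {
        // CreateExpFunction() returns &exp when the stub cannot be generated
        // (no VFP2, --fast-math disabled, or OS::Allocate() failure), so no
        // extra fallback handling is needed here.
        fast_exp_function = CreateExpFunction();
      }
      return fast_exp_function(x);
    }
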
52 UnaryMathFunction CreateSqrtFunction() { | 120 UnaryMathFunction CreateSqrtFunction() { |
53 return &sqrt; | 121 return &sqrt; |
54 } | 122 } |
55 | 123 |
56 // ------------------------------------------------------------------------- | 124 // ------------------------------------------------------------------------- |
57 // Platform-specific RuntimeCallHelper functions. | 125 // Platform-specific RuntimeCallHelper functions. |
58 | 126 |
59 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const { | 127 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const { |
60 masm->EnterFrame(StackFrame::INTERNAL); | 128 masm->EnterFrame(StackFrame::INTERNAL); |
61 ASSERT(!masm->has_frame()); | 129 ASSERT(!masm->has_frame()); |
62 masm->set_has_frame(true); | 130 masm->set_has_frame(true); |
63 } | 131 } |
64 | 132 |
65 | 133 |
66 void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const { | 134 void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const { |
67 masm->LeaveFrame(StackFrame::INTERNAL); | 135 masm->LeaveFrame(StackFrame::INTERNAL); |
68 ASSERT(masm->has_frame()); | 136 ASSERT(masm->has_frame()); |
69 masm->set_has_frame(false); | 137 masm->set_has_frame(false); |
70 } | 138 } |
71 | 139 |
72 | 140 |
73 // ------------------------------------------------------------------------- | 141 // ------------------------------------------------------------------------- |
74 // Code generators | 142 // Code generators |
75 | 143 |
| 144 #define __ ACCESS_MASM(masm) |
| 145 |
76 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | 146 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( |
77 MacroAssembler* masm) { | 147 MacroAssembler* masm) { |
78 // ----------- S t a t e ------------- | 148 // ----------- S t a t e ------------- |
79 // -- r0 : value | 149 // -- r0 : value |
80 // -- r1 : key | 150 // -- r1 : key |
81 // -- r2 : receiver | 151 // -- r2 : receiver |
82 // -- lr : return address | 152 // -- lr : return address |
83 // -- r3 : target map, scratch for subsequent call | 153 // -- r3 : target map, scratch for subsequent call |
84 // -- r4 : scratch (elements) | 154 // -- r4 : scratch (elements) |
85 // ----------------------------------- | 155 // ----------------------------------- |
(...skipping 357 matching lines...) |
443 __ b(ne, &ascii); | 513 __ b(ne, &ascii); |
444 // Two-byte string. | 514 // Two-byte string. |
445 __ ldrh(result, MemOperand(string, index, LSL, 1)); | 515 __ ldrh(result, MemOperand(string, index, LSL, 1)); |
446 __ jmp(&done); | 516 __ jmp(&done); |
447 __ bind(&ascii); | 517 __ bind(&ascii); |
448 // Ascii string. | 518 // Ascii string. |
449 __ ldrb(result, MemOperand(string, index)); | 519 __ ldrb(result, MemOperand(string, index)); |
450 __ bind(&done); | 520 __ bind(&done); |
451 } | 521 } |
452 | 522 |
| 523 |
| 524 static MemOperand ExpConstant(int index, Register base) { |
| 525 return MemOperand(base, index * kDoubleSize); |
| 526 } |
| 527 |
| 528 |
| 529 void MathExpGenerator::EmitMathExp(MacroAssembler* masm, |
| 530 DoubleRegister input, |
| 531 DoubleRegister result, |
| 532 DoubleRegister double_scratch1, |
| 533 DoubleRegister double_scratch2, |
| 534 Register temp1, |
| 535 Register temp2, |
| 536 Register temp3) { |
| 537 ASSERT(!input.is(result)); |
| 538 ASSERT(!input.is(double_scratch1)); |
| 539 ASSERT(!input.is(double_scratch2)); |
| 540 ASSERT(!result.is(double_scratch1)); |
| 541 ASSERT(!result.is(double_scratch2)); |
| 542 ASSERT(!double_scratch1.is(double_scratch2)); |
| 543 ASSERT(!temp1.is(temp2)); |
| 544 ASSERT(!temp1.is(temp3)); |
| 545 ASSERT(!temp2.is(temp3)); |
| 546 ASSERT(ExternalReference::math_exp_constants(0).address() != NULL); |
| 547 |
| 548 Label done; |
| 549 |
| 550 __ mov(temp3, Operand(ExternalReference::math_exp_constants(0))); |
| 551 |
| 552 __ vldr(double_scratch1, ExpConstant(0, temp3)); |
| 553 __ vmov(result, kDoubleRegZero); |
| 554 __ VFPCompareAndSetFlags(double_scratch1, input); |
| 555 __ b(ge, &done); |
| 556 __ vldr(double_scratch2, ExpConstant(1, temp3)); |
| 557 __ VFPCompareAndSetFlags(input, double_scratch2); |
| 558 __ vldr(result, ExpConstant(2, temp3)); |
| 559 __ b(ge, &done); |
| 560 __ vldr(double_scratch1, ExpConstant(3, temp3)); |
| 561 __ vldr(result, ExpConstant(4, temp3)); |
| 562 __ vmul(double_scratch1, double_scratch1, input); |
| 563 __ vadd(double_scratch1, double_scratch1, result); |
| 564 __ vmov(temp2, temp1, double_scratch1); |
| 565 __ vsub(double_scratch1, double_scratch1, result); |
| 566 __ vldr(result, ExpConstant(6, temp3)); |
| 567 __ vldr(double_scratch2, ExpConstant(5, temp3)); |
| 568 __ vmul(double_scratch1, double_scratch1, double_scratch2); |
| 569 __ vsub(double_scratch1, double_scratch1, input); |
| 570 __ vsub(result, result, double_scratch1); |
| 571 __ vmul(input, double_scratch1, double_scratch1); |
| 572 __ vmul(result, result, input); |
| 573 __ mov(temp1, Operand(temp2, LSR, 11)); |
| 574 __ vldr(double_scratch2, ExpConstant(7, temp3)); |
| 575 __ vmul(result, result, double_scratch2); |
| 576 __ vsub(result, result, double_scratch1); |
| 577 __ vldr(double_scratch2, ExpConstant(8, temp3)); |
| 578 __ vadd(result, result, double_scratch2); |
| 579 __ movw(ip, 0x7ff); |
| 580 __ and_(temp2, temp2, Operand(ip)); |
| 581 __ add(temp1, temp1, Operand(0x3ff)); |
| 582 __ mov(temp1, Operand(temp1, LSL, 20)); |
| 583 |
| 584 // Must not call ExpConstant() after overwriting temp3! |
| 585 __ mov(temp3, Operand(ExternalReference::math_exp_log_table())); |
| 586 __ ldr(ip, MemOperand(temp3, temp2, LSL, 3)); |
| 587 __ add(temp3, temp3, Operand(kPointerSize)); |
| 588 __ ldr(temp2, MemOperand(temp3, temp2, LSL, 3)); |
| 589 __ orr(temp1, temp1, temp2); |
| 590 __ vmov(input, ip, temp1); |
| 591 __ vmul(result, result, input); |
| 592 __ bind(&done); |
| 593 } |
| 594 |
453 #undef __ | 595 #undef __ |
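A note on what EmitMathExp() emits: after the range checks against ExpConstant(0)/(1)/(2), the stub performs a table-based range reduction; the mask with 0x7ff and the LSR #11 above split a scaled value into an 11-bit index into math_exp_log_table() (2048 entries) and a binary exponent that is packed into the high word of a double. The table contents and the remaining constants are set up in ExternalReference::InitializeMathExpData(), which is outside this diff, so the sketch below only illustrates the general idea, exp(x) = 2^e * 2^(k/2048) * exp(r), with its own table and a first-order correction; it is not the stub's exact polynomial, constants, or bit packing.

    // Plain C++ illustration of table-based exp range reduction (sketch only;
    // not the constants or accuracy of the generated ARM stub).
    #include <cmath>
    #include <cstdint>

    static const int kTableBits = 11;               // matches LSR #11 / 0x7ff above
    static const int kTableSize = 1 << kTableBits;  // 2048 table entries
    static double kPow2Table[kTableSize];           // kPow2Table[i] = 2^(i/2048)

    static void InitPow2Table() {
      for (int i = 0; i < kTableSize; i++) {
        kPow2Table[i] = std::pow(2.0, static_cast<double>(i) / kTableSize);
      }
    }

    static double TableExp(double x) {
      // Same clamping idea as ExpConstant(0)/(1)/(2): underflow to 0.0,
      // overflow to +infinity.
      if (x < -708.0) return 0.0;
      if (x > 709.0) return INFINITY;
      // x / ln(2) == (e * 2048 + k + f) / 2048 with |f| <= 0.5.
      const double t = x * kTableSize / std::log(2.0);
      const int64_t n = static_cast<int64_t>(std::llround(t));
      const double r = (t - static_cast<double>(n)) * std::log(2.0) / kTableSize;
      const int64_t k = n & (kTableSize - 1);  // low 11 bits -> table index
      const int64_t e = n >> kTableBits;       // rest -> binary exponent
      // (Relies on two's-complement arithmetic shift for negative n, as on
      // GCC/Clang.)  exp(x) = 2^e * 2^(k/2048) * exp(r), with exp(r) ~= 1 + r.
      return std::ldexp(kPow2Table[k] * (1.0 + r), static_cast<int>(e));
    }

With InitPow2Table() called once up front, TableExp() tracks exp() to roughly eight significant digits; the generated stub uses its own precomputed correction constants, so its accuracy is not implied by this sketch.
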
454 | 596 |
455 // add(r0, pc, Operand(-8)) | 597 // add(r0, pc, Operand(-8)) |
456 static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008; | 598 static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008; |
457 | 599 |
458 static byte* GetNoCodeAgeSequence(uint32_t* length) { | 600 static byte* GetNoCodeAgeSequence(uint32_t* length) { |
459 // The sequence of instructions that is patched out for aging code is the | 601 // The sequence of instructions that is patched out for aging code is the |
460 // following boilerplate stack-building prologue that is found in FUNCTIONS | 602 // following boilerplate stack-building prologue that is found in FUNCTIONS |
461 static bool initialized = false; | 603 static bool initialized = false; |
462 static uint32_t sequence[kNoCodeAgeSequenceLength]; | 604 static uint32_t sequence[kNoCodeAgeSequenceLength]; |
(...skipping 71 matching lines...) |
534 patcher.masm()->add(r0, pc, Operand(-8)); | 676 patcher.masm()->add(r0, pc, Operand(-8)); |
535 patcher.masm()->ldr(pc, MemOperand(pc, -4)); | 677 patcher.masm()->ldr(pc, MemOperand(pc, -4)); |
536 patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start())); | 678 patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start())); |
537 } | 679 } |
538 } | 680 } |
539 | 681 |
540 | 682 |
541 } } // namespace v8::internal | 683 } } // namespace v8::internal |
542 | 684 |
543 #endif // V8_TARGET_ARCH_ARM | 685 #endif // V8_TARGET_ARCH_ARM |