| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Rrdistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Rrdistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 51 &actual_size, | 51 &actual_size, |
| 52 true)); | 52 true)); |
| 53 CHECK(buffer); | 53 CHECK(buffer); |
| 54 HandleScope handles(isolate); | 54 HandleScope handles(isolate); |
| 55 MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size)); | 55 MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size)); |
| 56 DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath); | 56 DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath); |
| 57 | 57 |
| 58 byte* start = stub.GetCode(isolate)->instruction_start(); | 58 byte* start = stub.GetCode(isolate)->instruction_start(); |
| 59 Label done; | 59 Label done; |
| 60 | 60 |
| 61 // Save callee save registers. | 61 __ SetStackPointer(csp); |
| 62 __ Push(r7, r6, r5, r4); | 62 __ PushCalleeSavedRegisters(); |
| 63 __ Push(lr); | 63 __ Mov(jssp, csp); |
| 64 __ SetStackPointer(jssp); |
| 64 | 65 |
| 65 // For softfp, move the input value into d0. | |
| 66 if (!masm.use_eabi_hardfloat()) { | |
| 67 __ vmov(d0, r0, r1); | |
| 68 } | |
| 69 // Push the double argument. | 66 // Push the double argument. |
| 70 __ sub(sp, sp, Operand(kDoubleSize)); | 67 __ Push(d0); |
| 71 __ vstr(d0, sp, 0); | 68 if (!source_reg.is(jssp)) { |
| 72 if (!source_reg.is(sp)) { | 69 __ Mov(source_reg, jssp); |
| 73 __ mov(source_reg, sp); | |
| 74 } | 70 } |
| 75 | 71 |
| 76 // Save registers make sure they don't get clobbered. | 72 // Save registers make sure they don't get clobbered. |
| 77 int source_reg_offset = kDoubleSize; | 73 int source_reg_offset = kDoubleSize; |
| 78 int reg_num = 0; | 74 int reg_num = 0; |
| 79 for (;reg_num < Register::NumAllocatableRegisters(); ++reg_num) { | 75 for (;reg_num < Register::NumAllocatableRegisters(); ++reg_num) { |
| 80 Register reg = Register::from_code(reg_num); | 76 Register reg = Register::from_code(reg_num); |
| 81 if (!reg.is(destination_reg)) { | 77 if (!reg.is(destination_reg)) { |
| 82 __ push(reg); | 78 __ Push(reg); |
| 83 source_reg_offset += kPointerSize; | 79 source_reg_offset += kPointerSize; |
| 84 } | 80 } |
| 85 } | 81 } |
| 86 | 82 |
| 87 // Re-push the double argument. | 83 // Re-push the double argument. |
| 88 __ sub(sp, sp, Operand(kDoubleSize)); | 84 __ Push(d0); |
| 89 __ vstr(d0, sp, 0); | |
| 90 | 85 |
| 91 // Call through to the actual stub | 86 // Call through to the actual stub |
| 92 if (inline_fastpath) { | 87 if (inline_fastpath) { |
| 93 __ vldr(d0, MemOperand(source_reg)); | 88 __ Ldr(d0, MemOperand(source_reg)); |
| 94 __ TryInlineTruncateDoubleToI(destination_reg, d0, &done); | 89 __ TryInlineTruncateDoubleToI(destination_reg, d0, &done); |
| 95 if (destination_reg.is(source_reg) && !source_reg.is(sp)) { | 90 if (destination_reg.is(source_reg)) { |
| 96 // Restore clobbered source_reg. | 91 // Restore clobbered source_reg. |
| 97 __ add(source_reg, sp, Operand(source_reg_offset)); | 92 __ add(source_reg, jssp, Operand(source_reg_offset)); |
| 98 } | 93 } |
| 99 } | 94 } |
| 100 __ Call(start, RelocInfo::EXTERNAL_REFERENCE); | 95 __ Call(start, RelocInfo::EXTERNAL_REFERENCE); |
| 101 __ bind(&done); | 96 __ bind(&done); |
| 102 | 97 |
| 103 __ add(sp, sp, Operand(kDoubleSize)); | 98 __ Drop(1, kDoubleSize); |
| 104 | 99 |
| 105 // Make sure no registers have been unexpectedly clobbered | 100 // Make sure no registers have been unexpectedly clobbered |
| 106 for (--reg_num; reg_num >= 0; --reg_num) { | 101 for (--reg_num; reg_num >= 0; --reg_num) { |
| 107 Register reg = Register::from_code(reg_num); | 102 Register reg = Register::from_code(reg_num); |
| 108 if (!reg.is(destination_reg)) { | 103 if (!reg.is(destination_reg)) { |
| 109 __ ldr(ip, MemOperand(sp, 0)); | 104 __ Pop(ip0); |
| 110 __ cmp(reg, ip); | 105 __ cmp(reg, ip0); |
| 111 __ Assert(eq, kRegisterWasClobbered); | 106 __ Assert(eq, kRegisterWasClobbered); |
| 112 __ add(sp, sp, Operand(kPointerSize)); | |
| 113 } | 107 } |
| 114 } | 108 } |
| 115 | 109 |
| 116 __ add(sp, sp, Operand(kDoubleSize)); | 110 __ Drop(1, kDoubleSize); |
| 117 | 111 |
| 118 if (!destination_reg.is(r0)) | 112 if (!destination_reg.is(x0)) |
| 119 __ mov(r0, destination_reg); | 113 __ Mov(x0, destination_reg); |
| 120 | 114 |
| 121 // Restore callee save registers. | 115 // Restore callee save registers. |
| 122 __ Pop(lr); | 116 __ Mov(csp, jssp); |
| 123 __ Pop(r7, r6, r5, r4); | 117 __ SetStackPointer(csp); |
| 118 __ PopCalleeSavedRegisters(); |
| 124 | 119 |
| 125 __ Ret(0); | 120 __ Ret(); |
| 126 | 121 |
| 127 CodeDesc desc; | 122 CodeDesc desc; |
| 128 masm.GetCode(&desc); | 123 masm.GetCode(&desc); |
| 129 CPU::FlushICache(buffer, actual_size); | 124 CPU::FlushICache(buffer, actual_size); |
| 130 return (reinterpret_cast<ConvertDToIFunc>( | 125 return (reinterpret_cast<ConvertDToIFunc>( |
| 131 reinterpret_cast<intptr_t>(buffer))); | 126 reinterpret_cast<intptr_t>(buffer))); |
| 132 } | 127 } |
| 133 | 128 |
| 134 #undef __ | 129 #undef __ |
| 135 | 130 |
| 136 | 131 |
| 137 static Isolate* GetIsolateFrom(LocalContext* context) { | 132 static Isolate* GetIsolateFrom(LocalContext* context) { |
| 138 return reinterpret_cast<Isolate*>((*context)->GetIsolate()); | 133 return reinterpret_cast<Isolate*>((*context)->GetIsolate()); |
| 139 } | 134 } |
| 140 | 135 |
| 141 | 136 |
| 142 int32_t RunGeneratedCodeCallWrapper(ConvertDToIFunc func, | 137 int32_t RunGeneratedCodeCallWrapper(ConvertDToIFunc func, |
| 143 double from) { | 138 double from) { |
| 144 #ifdef USE_SIMULATOR | 139 #ifdef USE_SIMULATOR |
| 145 return CALL_GENERATED_FP_INT(func, from, 0); | 140 return Simulator::current(Isolate::Current())->CallInt64( |
| 141 FUNCTION_ADDR(func), Simulator::CallArgument(from), |
| 142 Simulator::CallArgument::End()); |
| 146 #else | 143 #else |
| 147 return (*func)(from); | 144 return (*func)(from); |
| 148 #endif | 145 #endif |
| 149 } | 146 } |
| 150 | 147 |
| 151 | 148 |
| 152 TEST(ConvertDToI) { | 149 TEST(ConvertDToI) { |
| 153 CcTest::InitializeVM(); | 150 CcTest::InitializeVM(); |
| 154 LocalContext context; | 151 LocalContext context; |
| 155 Isolate* isolate = GetIsolateFrom(&context); | 152 Isolate* isolate = GetIsolateFrom(&context); |
| 156 HandleScope scope(isolate); | 153 HandleScope scope(isolate); |
| 157 | 154 |
| 158 #if DEBUG | 155 #if DEBUG |
| 159 // Verify that the tests actually work with the C version. In the release | 156 // Verify that the tests actually work with the C version. In the release |
| 160 // code, the compiler optimizes it away because it's all constant, but does it | 157 // code, the compiler optimizes it away because it's all constant, but does it |
| 161 // wrong, triggering an assert on gcc. | 158 // wrong, triggering an assert on gcc. |
| 162 RunAllTruncationTests(&ConvertDToICVersion); | 159 RunAllTruncationTests(&ConvertDToICVersion); |
| 163 #endif | 160 #endif |
| 164 | 161 |
| 165 Register source_registers[] = {sp, r0, r1, r2, r3, r4, r5, r6, r7}; | 162 Register source_registers[] = {jssp, x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, |
| 166 Register dest_registers[] = {r0, r1, r2, r3, r4, r5, r6, r7}; | 163 x10, x11, x12, x13, x14, x15, x18, x19, x20, |
| 164 x21, x22, x23, x24}; |
| 165 Register dest_registers[] = {x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, |
| 166 x12, x13, x14, x15, x18, x19, x20, x21, x22, x23, |
| 167 x24}; |
| 167 | 168 |
| 168 for (size_t s = 0; s < sizeof(source_registers) / sizeof(Register); s++) { | 169 for (size_t s = 0; s < sizeof(source_registers) / sizeof(Register); s++) { |
| 169 for (size_t d = 0; d < sizeof(dest_registers) / sizeof(Register); d++) { | 170 for (size_t d = 0; d < sizeof(dest_registers) / sizeof(Register); d++) { |
| 170 RunAllTruncationTests( | 171 RunAllTruncationTests( |
| 171 RunGeneratedCodeCallWrapper, | 172 RunGeneratedCodeCallWrapper, |
| 172 MakeConvertDToIFuncTrampoline(isolate, | 173 MakeConvertDToIFuncTrampoline(isolate, |
| 173 source_registers[s], | 174 source_registers[s], |
| 174 dest_registers[d], | 175 dest_registers[d], |
| 175 false)); | 176 false)); |
| 176 RunAllTruncationTests( | 177 RunAllTruncationTests( |
| 177 RunGeneratedCodeCallWrapper, | 178 RunGeneratedCodeCallWrapper, |
| 178 MakeConvertDToIFuncTrampoline(isolate, | 179 MakeConvertDToIFuncTrampoline(isolate, |
| 179 source_registers[s], | 180 source_registers[s], |
| 180 dest_registers[d], | 181 dest_registers[d], |
| 181 true)); | 182 true)); |
| 182 } | 183 } |
| 183 } | 184 } |
| 184 } | 185 } |
| OLD | NEW |