| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 57 matching lines...) |
| 68 __ MultiPushFPU(kCalleeSavedFPU); | 68 __ MultiPushFPU(kCalleeSavedFPU); |
| 69 // Set up the reserved register for 0.0. | 69 // Set up the reserved register for 0.0. |
| 70 __ Move(kDoubleRegZero, 0.0); | 70 __ Move(kDoubleRegZero, 0.0); |
| 71 | 71 |
| 72 // For softfp, move the input value into f12. | 72 // For softfp, move the input value into f12. |
| 73 if (IsMipsSoftFloatABI) { | 73 if (IsMipsSoftFloatABI) { |
| 74 __ Move(f12, a0, a1); | 74 __ Move(f12, a0, a1); |
| 75 } | 75 } |
| 76 // Push the double argument. | 76 // Push the double argument. |
| 77 __ Dsubu(sp, sp, Operand(kDoubleSize)); | 77 __ Dsubu(sp, sp, Operand(kDoubleSize)); |
| 78 __ sdc1(f12, MemOperand(sp)); | 78 __ Sdc1(f12, MemOperand(sp)); |
| 79 __ Move(source_reg, sp); | 79 __ Move(source_reg, sp); |
| 80 | 80 |
| 81 // Save registers to make sure they don't get clobbered. | 81 // Save registers to make sure they don't get clobbered. |
| 82 int source_reg_offset = kDoubleSize; | 82 int source_reg_offset = kDoubleSize; |
| 83 int reg_num = 2; | 83 int reg_num = 2; |
| 84 const RegisterConfiguration* config = RegisterConfiguration::Crankshaft(); | 84 const RegisterConfiguration* config = RegisterConfiguration::Crankshaft(); |
| 85 for (; reg_num < config->num_allocatable_general_registers(); ++reg_num) { | 85 for (; reg_num < config->num_allocatable_general_registers(); ++reg_num) { |
| 86 Register reg = Register::from_code(reg_num); | 86 Register reg = Register::from_code(reg_num); |
| 87 if (!reg.is(destination_reg)) { | 87 if (!reg.is(destination_reg)) { |
| 88 __ push(reg); | 88 __ push(reg); |
| 89 source_reg_offset += kPointerSize; | 89 source_reg_offset += kPointerSize; |
| 90 } | 90 } |
| 91 } | 91 } |
| 92 | 92 |
| 93 // Re-push the double argument. | 93 // Re-push the double argument. |
| 94 __ Dsubu(sp, sp, Operand(kDoubleSize)); | 94 __ Dsubu(sp, sp, Operand(kDoubleSize)); |
| 95 __ sdc1(f12, MemOperand(sp)); | 95 __ Sdc1(f12, MemOperand(sp)); |
| 96 | 96 |
| 97 // Call through to the actual stub | 97 // Call through to the actual stub |
| 98 if (inline_fastpath) { | 98 if (inline_fastpath) { |
| 99 __ ldc1(f12, MemOperand(source_reg)); | 99 __ Ldc1(f12, MemOperand(source_reg)); |
| 100 __ TryInlineTruncateDoubleToI(destination_reg, f12, &done); | 100 __ TryInlineTruncateDoubleToI(destination_reg, f12, &done); |
| 101 if (destination_reg.is(source_reg) && !source_reg.is(sp)) { | 101 if (destination_reg.is(source_reg) && !source_reg.is(sp)) { |
| 102 // Restore clobbered source_reg. | 102 // Restore clobbered source_reg. |
| 103 __ Daddu(source_reg, sp, Operand(source_reg_offset)); | 103 __ Daddu(source_reg, sp, Operand(source_reg_offset)); |
| 104 } | 104 } |
| 105 } | 105 } |
| 106 __ Call(start, RelocInfo::EXTERNAL_REFERENCE); | 106 __ Call(start, RelocInfo::EXTERNAL_REFERENCE); |
| 107 __ bind(&done); | 107 __ bind(&done); |
| 108 | 108 |
| 109 __ Daddu(sp, sp, Operand(kDoubleSize)); | 109 __ Daddu(sp, sp, Operand(kDoubleSize)); |
| 110 | 110 |
| 111 // Make sure no registers have been unexpectedly clobbered | 111 // Make sure no registers have been unexpectedly clobbered |
| 112 for (--reg_num; reg_num >= 2; --reg_num) { | 112 for (--reg_num; reg_num >= 2; --reg_num) { |
| 113 Register reg = Register::from_code(reg_num); | 113 Register reg = Register::from_code(reg_num); |
| 114 if (!reg.is(destination_reg)) { | 114 if (!reg.is(destination_reg)) { |
| 115 __ ld(at, MemOperand(sp, 0)); | 115 __ Ld(at, MemOperand(sp, 0)); |
| 116 __ Assert(eq, kRegisterWasClobbered, reg, Operand(at)); | 116 __ Assert(eq, kRegisterWasClobbered, reg, Operand(at)); |
| 117 __ Daddu(sp, sp, Operand(kPointerSize)); | 117 __ Daddu(sp, sp, Operand(kPointerSize)); |
| 118 } | 118 } |
| 119 } | 119 } |
| 120 | 120 |
| 121 __ Daddu(sp, sp, Operand(kDoubleSize)); | 121 __ Daddu(sp, sp, Operand(kDoubleSize)); |
| 122 | 122 |
| 123 __ Move(v0, destination_reg); | 123 __ Move(v0, destination_reg); |
| 124 Label ok; | 124 Label ok; |
| 125 __ Branch(&ok, eq, v0, Operand(zero_reg)); | 125 __ Branch(&ok, eq, v0, Operand(zero_reg)); |
| (...skipping 66 matching lines...) |
| 192 false)); | 192 false)); |
| 193 RunAllTruncationTests( | 193 RunAllTruncationTests( |
| 194 RunGeneratedCodeCallWrapper, | 194 RunGeneratedCodeCallWrapper, |
| 195 MakeConvertDToIFuncTrampoline(isolate, | 195 MakeConvertDToIFuncTrampoline(isolate, |
| 196 source_registers[s], | 196 source_registers[s], |
| 197 dest_registers[d], | 197 dest_registers[d], |
| 198 true)); | 198 true)); |
| 199 } | 199 } |
| 200 } | 200 } |
| 201 } | 201 } |
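The trampoline in this diff follows a simple save/call/verify pattern: push every allocatable general register except the destination, invoke the DoubleToI stub (optionally trying the inline fast path first), then pop each saved value and assert it is unchanged. The plain C++ sketch below illustrates that same pattern outside of generated MIPS64 code; it is only an illustration under stated assumptions, and names such as RunStub, CallAndCheckClobbering, and the simulated register file are invented here, not part of V8.

// Hypothetical sketch (not V8 code): the save/call/verify pattern used by the
// generated trampoline, expressed in ordinary C++ over a simulated register
// file. RunStub stands in for the stub call made by the trampoline.
#include <array>
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

using Register = int;  // stand-in for a general-purpose register index

// The code under test; in the real trampoline this is the stub call.
int64_t RunStub(double input) { return static_cast<int64_t>(input); }

int64_t CallAndCheckClobbering(double input,
                               std::array<int64_t, 32>& regs,
                               Register destination_reg) {
  // 1. Save every allocatable register except the destination so that
  //    unexpected clobbering can be detected afterwards (the push loop).
  std::vector<std::pair<Register, int64_t>> saved;
  for (Register r = 2; r < static_cast<Register>(regs.size()); ++r) {
    if (r != destination_reg) saved.emplace_back(r, regs[r]);
  }

  // 2. Call through to the code under test.
  regs[destination_reg] = RunStub(input);

  // 3. Verify that nothing except the destination register changed
  //    (mirrors the Assert(eq, kRegisterWasClobbered, ...) loop).
  for (const auto& entry : saved) {
    assert(regs[entry.first] == entry.second && "register was clobbered");
  }
  return regs[destination_reg];
}

The real test drives this check for every (source, dest) register pair and for both the fast-path (inline_fastpath == true) and slow-path variants, which is what the nested RunAllTruncationTests calls at the bottom of the diff exercise.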