OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2237 matching lines...)
2248 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity)); | 2248 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity)); |
2249 __ ucomisd(xmm_scratch, input_reg); | 2249 __ ucomisd(xmm_scratch, input_reg); |
2250 DeoptimizeIf(equal, instr->environment()); | 2250 DeoptimizeIf(equal, instr->environment()); |
2251 __ sqrtsd(input_reg, input_reg); | 2251 __ sqrtsd(input_reg, input_reg); |
2252 } | 2252 } |
2253 | 2253 |
2254 | 2254 |
2255 void LCodeGen::DoPower(LPower* instr) { | 2255 void LCodeGen::DoPower(LPower* instr) { |
2256 LOperand* left = instr->left(); | 2256 LOperand* left = instr->left(); |
2257 LOperand* right = instr->right(); | 2257 LOperand* right = instr->right(); |
2258 DoubleRegister result_reg = ToDoubleRegister(instr->result()); | |
2258 Representation exponent_type = instr->hydrogen()->right()->representation(); | 2259 Representation exponent_type = instr->hydrogen()->right()->representation(); |
2259 if (exponent_type.IsDouble()) { | 2260 if (exponent_type.IsDouble()) { |
2260 // Pass two doubles as arguments on the stack. | 2261 // It is safe to use ebx directly since the instruction is marked |
2261 __ PrepareCallCFunction(4, eax); | 2262 // as a call. |
2263 __ PrepareCallCFunction(4, ebx); | |
2262 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2264 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
2263 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); | 2265 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); |
2264 __ CallCFunction(ExternalReference::power_double_double_function(), 4); | 2266 __ CallCFunction(ExternalReference::power_double_double_function(), 4); |
2265 } else if (exponent_type.IsInteger32()) { | 2267 } else if (exponent_type.IsInteger32()) { |
2268 // It is safe to use ebx directly since the instruction is marked | |
2269 // as a call. | |
2270 ASSERT(!ToRegister(right).is(ebx)); | |
2266 __ PrepareCallCFunction(4, ebx); | 2271 __ PrepareCallCFunction(4, ebx); |
2267 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2272 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
2268 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); | 2273 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); |
2269 __ CallCFunction(ExternalReference::power_double_int_function(), 4); | 2274 __ CallCFunction(ExternalReference::power_double_int_function(), 4); |
2270 } else { | 2275 } else { |
2271 ASSERT(exponent_type.IsTagged()); | 2276 ASSERT(exponent_type.IsTagged()); |
2272 __ PrepareCallCFunction(4, ebx); | 2277 CpuFeatures::Scope scope(SSE2); |
2273 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | |
2274 Register right_reg = ToRegister(right); | 2278 Register right_reg = ToRegister(right); |
2275 Label non_smi; | 2279 |
2276 Label done; | 2280 Label non_smi, call; |
2277 __ test(right_reg, Immediate(kSmiTagMask)); | 2281 __ test(right_reg, Immediate(kSmiTagMask)); |
2278 __ j(not_zero, &non_smi); | 2282 __ j(not_zero, &non_smi); |
2279 __ SmiUntag(right_reg); | 2283 __ SmiUntag(right_reg); |
2280 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); | 2284 __ cvtsi2sd(result_reg, Operand(right_reg)); |
2281 __ CallCFunction(ExternalReference::power_double_int_function(), 4); | 2285 __ jmp(&call); |
William Hesse, 2010/12/15 14:38:38:
Calling power_double_double rather than power_doub
Mads Ager (chromium), 2010/12/15 14:43:20:
Good point. I'll check performance. I didn't notic
2282 __ jmp(&done); | |
2283 | 2286 |
2284 __ bind(&non_smi); | 2287 __ bind(&non_smi); |
2288 // It is safe to use ebx directly since the instruction is marked | |
2289 // as a call. | |
2290 ASSERT(!right_reg.is(ebx)); | |
2285 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx); | 2291 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx); |
2286 DeoptimizeIf(not_equal, instr->environment()); | 2292 DeoptimizeIf(not_equal, instr->environment()); |
2287 __ movdbl(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset)); | 2293 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset)); |
2288 __ movdbl(Operand(esp, 1 * kDoubleSize), xmm1); | 2294 |
2295 __ bind(&call); | |
2296 __ PrepareCallCFunction(4, ebx); | |
2297 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | |
2298 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg); | |
2289 __ CallCFunction(ExternalReference::power_double_double_function(), 4); | 2299 __ CallCFunction(ExternalReference::power_double_double_function(), 4); |
2290 | |
2291 __ bind(&done); | |
2292 } | 2300 } |
2293 | 2301 |
2294 // Return value is in st(0) on ia32. | 2302 // Return value is in st(0) on ia32. |
2295 // Store it into the (fixed) result register. | 2303 // Store it into the (fixed) result register. |
2296 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2304 __ sub(Operand(esp), Immediate(kDoubleSize)); |
2297 __ fstp_d(Operand(esp, 0)); | 2305 __ fstp_d(Operand(esp, 0)); |
2298 __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0)); | 2306 __ movdbl(result_reg, Operand(esp, 0)); |
2299 __ add(Operand(esp), Immediate(kDoubleSize)); | 2307 __ add(Operand(esp), Immediate(kDoubleSize)); |
2300 } | 2308 } |
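
A note on the review thread above: the new tagged-exponent path untags a smi, widens it to a double with cvtsi2sd, and then always calls power_double_double, where the old code called power_double_int for the smi case. The sketch below is a hedged C-level rendering of that dispatch; the helper signatures and bodies are assumptions standing in for V8's real runtime functions, not the actual definitions.

```cpp
#include <cmath>

// Assumed shapes of the helpers reached through
// ExternalReference::power_double_double_function() and
// ExternalReference::power_double_int_function(); hypothetical stand-ins.
extern "C" double power_double_double(double base, double exponent) {
  return std::pow(base, exponent);
}

extern "C" double power_double_int(double base, int exponent) {
  // A plausible integer fast path: exponentiation by squaring.
  bool negative = exponent < 0;
  long long e = negative ? -static_cast<long long>(exponent)
                         : static_cast<long long>(exponent);
  double result = 1.0;
  for (double b = base; e != 0; e >>= 1, b *= b) {
    if (e & 1) result *= b;
  }
  return negative ? 1.0 / result : result;
}

// Control flow of the new tagged path: smi exponents are untagged and
// widened (cvtsi2sd); heap-number exponents are loaded from the object;
// both then share the double/double call. Non-numbers deoptimize.
double TaggedPower(double base, bool is_smi, int smi_value, double heap_value) {
  double exponent = is_smi ? static_cast<double>(smi_value) : heap_value;
  return power_double_double(base, exponent);
}
```

This also makes the reviewer's question concrete: the smi case now pays for a cvtsi2sd plus the generic pow path instead of the integer-exponent helper, which is what the follow-up promises to benchmark.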
2301 | 2309 |
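
Two ia32 calling-convention details in this function are easy to miss: PrepareCallCFunction(4, scratch) asks for four 32-bit argument slots because each double occupies two of them under cdecl (the double/int variant still reserves four, with the int32 in the third slot and the fourth unused), and the helpers return their double result in the x87 register st(0), which is why the end of DoPower spills st(0) to the stack with fstp_d and reloads it into the XMM result register with movdbl. A tiny sketch of the slot arithmetic, with kPointerSize and kDoubleSize as stand-ins for V8's constants:

```cpp
#include <cassert>

// Stand-ins for V8's ia32 constants (not the real headers).
const int kPointerSize = 4;  // one 32-bit stack argument slot
const int kDoubleSize = 8;   // sizeof(double)

// Two 8-byte doubles passed on the stack fill four 4-byte slots,
// matching the PrepareCallCFunction(4, ...) calls in DoPower.
int ArgumentSlotsForPowerCall() {
  return 2 * (kDoubleSize / kPointerSize);
}

int main() {
  assert(ArgumentSlotsForPowerCall() == 4);
  return 0;
}
```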
2302 | 2310 |
2303 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2311 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
2304 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2312 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2305 TranscendentalCacheSSE2Stub stub(TranscendentalCache::LOG); | 2313 TranscendentalCacheSSE2Stub stub(TranscendentalCache::LOG); |
2306 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2314 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2307 } | 2315 } |
2308 | 2316 |
(...skipping 922 matching lines...)
3231 ASSERT(!environment->HasBeenRegistered()); | 3239 ASSERT(!environment->HasBeenRegistered()); |
3232 RegisterEnvironmentForDeoptimization(environment); | 3240 RegisterEnvironmentForDeoptimization(environment); |
3233 ASSERT(osr_pc_offset_ == -1); | 3241 ASSERT(osr_pc_offset_ == -1); |
3234 osr_pc_offset_ = masm()->pc_offset(); | 3242 osr_pc_offset_ = masm()->pc_offset(); |
3235 } | 3243 } |
3236 | 3244 |
3237 | 3245 |
3238 #undef __ | 3246 #undef __ |
3239 | 3247 |
3240 } } // namespace v8::internal | 3248 } } // namespace v8::internal |