OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1208 matching lines...)
1219 __ mulsd(left, right); | 1219 __ mulsd(left, right); |
1220 break; | 1220 break; |
1221 case Token::DIV: | 1221 case Token::DIV: |
1222 __ divsd(left, right); | 1222 __ divsd(left, right); |
1223 break; | 1223 break; |
1224 case Token::MOD: { | 1224 case Token::MOD: { |
1225 // Pass two doubles as arguments on the stack. | 1225 // Pass two doubles as arguments on the stack. |
1226 __ PrepareCallCFunction(4, eax); | 1226 __ PrepareCallCFunction(4, eax); |
1227 __ movdbl(Operand(esp, 0 * kDoubleSize), left); | 1227 __ movdbl(Operand(esp, 0 * kDoubleSize), left); |
1228 __ movdbl(Operand(esp, 1 * kDoubleSize), right); | 1228 __ movdbl(Operand(esp, 1 * kDoubleSize), right); |
1229 __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4); | 1229 __ CallCFunction( |
| 1230 ExternalReference::double_fp_operation(Token::MOD, isolate()), |
| 1231 4); |
1230 | 1232 |
1231 // Return value is in st(0) on ia32. | 1233 // Return value is in st(0) on ia32. |
1232 // Store it into the (fixed) result register. | 1234 // Store it into the (fixed) result register. |
1233 __ sub(Operand(esp), Immediate(kDoubleSize)); | 1235 __ sub(Operand(esp), Immediate(kDoubleSize)); |
1234 __ fstp_d(Operand(esp, 0)); | 1236 __ fstp_d(Operand(esp, 0)); |
1235 __ movdbl(result, Operand(esp, 0)); | 1237 __ movdbl(result, Operand(esp, 0)); |
1236 __ add(Operand(esp), Immediate(kDoubleSize)); | 1238 __ add(Operand(esp), Immediate(kDoubleSize)); |
1237 break; | 1239 break; |
1238 } | 1240 } |
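Note: unlike the ADD/SUB/MUL/DIV cases above, Token::MOD is not a single SSE2 instruction; both operands are spilled to the stack and the work is done by a C helper reached through ExternalReference::double_fp_operation(Token::MOD, isolate()). The sketch below is a hypothetical stand-in for such a helper (its real implementation is not part of this diff; the sketch only assumes the operation reduces to the C library's fmod). Under the ia32 cdecl convention a double return value comes back in x87 st(0), which is why the generated code stores it with fstp_d and reloads it into the XMM result register.

// Hypothetical stand-in for the C helper behind
// ExternalReference::double_fp_operation(Token::MOD, isolate()).
// Assumes the operation reduces to fmod(); the real runtime function is
// not shown in this diff.
#include <cmath>
#include <cstdio>

extern "C" double fp_mod_sketch(double x, double y) {
  // Returned in x87 st(0) under the ia32 cdecl convention, hence the
  // fstp_d/movdbl round-trip in the generated code above.
  return std::fmod(x, y);
}

int main() {
  std::printf("%g\n", fp_mod_sketch(7.5, 2.0));  // prints 1.5
  return 0;
}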
1239 default: | 1241 default: |
(...skipping 101 matching lines...)
1341 } | 1343 } |
1342 | 1344 |
1343 | 1345 |
1344 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { | 1346 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { |
1345 block = chunk_->LookupDestination(block); | 1347 block = chunk_->LookupDestination(block); |
1346 int next_block = GetNextEmittedBlock(current_block_); | 1348 int next_block = GetNextEmittedBlock(current_block_); |
1347 if (block != next_block) { | 1349 if (block != next_block) { |
1348 // Perform stack overflow check if this goto needs it before jumping. | 1350 // Perform stack overflow check if this goto needs it before jumping. |
1349 if (deferred_stack_check != NULL) { | 1351 if (deferred_stack_check != NULL) { |
1350 ExternalReference stack_limit = | 1352 ExternalReference stack_limit = |
1351 ExternalReference::address_of_stack_limit(); | 1353 ExternalReference::address_of_stack_limit(isolate()); |
1352 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 1354 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
1353 __ j(above_equal, chunk_->GetAssemblyLabel(block)); | 1355 __ j(above_equal, chunk_->GetAssemblyLabel(block)); |
1354 __ jmp(deferred_stack_check->entry()); | 1356 __ jmp(deferred_stack_check->entry()); |
1355 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1357 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
1356 } else { | 1358 } else { |
1357 __ jmp(chunk_->GetAssemblyLabel(block)); | 1359 __ jmp(chunk_->GetAssemblyLabel(block)); |
1358 } | 1360 } |
1359 } | 1361 } |
1360 } | 1362 } |
1361 | 1363 |
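Note: EmitGoto first resolves the target with LookupDestination and then emits nothing at all when that target is the next block to be emitted, since execution simply falls through; a real jump is generated only otherwise, optionally preceded by the stack-limit compare when a deferred stack check is attached. A minimal sketch of that fall-through elision, using hypothetical names rather than V8's Lithium classes:

// Sketch of the jump-threading decision in EmitGoto; names are hypothetical.
#include <cstdio>

void EmitGotoSketch(int target_block, int next_emitted_block) {
  if (target_block == next_emitted_block) {
    // Fall-through case: no jmp instruction is emitted.
    std::printf("block %d: fall through, no code\n", target_block);
  } else {
    // Otherwise an explicit jmp (possibly guarded by the stack check above).
    std::printf("block %d: emit jmp\n", target_block);
  }
}

int main() {
  EmitGotoSketch(7, 7);  // fall through
  EmitGotoSketch(7, 8);  // explicit jump
  return 0;
}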
(...skipping 1271 matching lines...)
2633 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. | 2635 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. |
2634 __ sqrtsd(input_reg, input_reg); | 2636 __ sqrtsd(input_reg, input_reg); |
2635 } | 2637 } |
2636 | 2638 |
2637 | 2639 |
2638 void LCodeGen::DoPower(LPower* instr) { | 2640 void LCodeGen::DoPower(LPower* instr) { |
2639 LOperand* left = instr->InputAt(0); | 2641 LOperand* left = instr->InputAt(0); |
2640 LOperand* right = instr->InputAt(1); | 2642 LOperand* right = instr->InputAt(1); |
2641 DoubleRegister result_reg = ToDoubleRegister(instr->result()); | 2643 DoubleRegister result_reg = ToDoubleRegister(instr->result()); |
2642 Representation exponent_type = instr->hydrogen()->right()->representation(); | 2644 Representation exponent_type = instr->hydrogen()->right()->representation(); |
| 2645 |
2643 if (exponent_type.IsDouble()) { | 2646 if (exponent_type.IsDouble()) { |
2644 // It is safe to use ebx directly since the instruction is marked | 2647 // It is safe to use ebx directly since the instruction is marked |
2645 // as a call. | 2648 // as a call. |
2646 __ PrepareCallCFunction(4, ebx); | 2649 __ PrepareCallCFunction(4, ebx); |
2647 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2650 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
2648 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); | 2651 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); |
2649 __ CallCFunction(ExternalReference::power_double_double_function(), 4); | 2652 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), |
| 2653 4); |
2650 } else if (exponent_type.IsInteger32()) { | 2654 } else if (exponent_type.IsInteger32()) { |
2651 // It is safe to use ebx directly since the instruction is marked | 2655 // It is safe to use ebx directly since the instruction is marked |
2652 // as a call. | 2656 // as a call. |
2653 ASSERT(!ToRegister(right).is(ebx)); | 2657 ASSERT(!ToRegister(right).is(ebx)); |
2654 __ PrepareCallCFunction(4, ebx); | 2658 __ PrepareCallCFunction(4, ebx); |
2655 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2659 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
2656 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); | 2660 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); |
2657 __ CallCFunction(ExternalReference::power_double_int_function(), 4); | 2661 __ CallCFunction(ExternalReference::power_double_int_function(isolate()), |
| 2662 4); |
2658 } else { | 2663 } else { |
2659 ASSERT(exponent_type.IsTagged()); | 2664 ASSERT(exponent_type.IsTagged()); |
2660 CpuFeatures::Scope scope(SSE2); | 2665 CpuFeatures::Scope scope(SSE2); |
2661 Register right_reg = ToRegister(right); | 2666 Register right_reg = ToRegister(right); |
2662 | 2667 |
2663 Label non_smi, call; | 2668 Label non_smi, call; |
2664 __ test(right_reg, Immediate(kSmiTagMask)); | 2669 __ test(right_reg, Immediate(kSmiTagMask)); |
2665 __ j(not_zero, &non_smi); | 2670 __ j(not_zero, &non_smi); |
2666 __ SmiUntag(right_reg); | 2671 __ SmiUntag(right_reg); |
2667 __ cvtsi2sd(result_reg, Operand(right_reg)); | 2672 __ cvtsi2sd(result_reg, Operand(right_reg)); |
2668 __ jmp(&call); | 2673 __ jmp(&call); |
2669 | 2674 |
2670 __ bind(&non_smi); | 2675 __ bind(&non_smi); |
2671 // It is safe to use ebx directly since the instruction is marked | 2676 // It is safe to use ebx directly since the instruction is marked |
2672 // as a call. | 2677 // as a call. |
2673 ASSERT(!right_reg.is(ebx)); | 2678 ASSERT(!right_reg.is(ebx)); |
2674 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx); | 2679 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx); |
2675 DeoptimizeIf(not_equal, instr->environment()); | 2680 DeoptimizeIf(not_equal, instr->environment()); |
2676 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset)); | 2681 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset)); |
2677 | 2682 |
2678 __ bind(&call); | 2683 __ bind(&call); |
2679 __ PrepareCallCFunction(4, ebx); | 2684 __ PrepareCallCFunction(4, ebx); |
2680 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); | 2685 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); |
2681 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg); | 2686 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg); |
2682 __ CallCFunction(ExternalReference::power_double_double_function(), 4); | 2687 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), |
| 2688 4); |
2683 } | 2689 } |
2684 | 2690 |
2685 // Return value is in st(0) on ia32. | 2691 // Return value is in st(0) on ia32. |
2686 // Store it into the (fixed) result register. | 2692 // Store it into the (fixed) result register. |
2687 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2693 __ sub(Operand(esp), Immediate(kDoubleSize)); |
2688 __ fstp_d(Operand(esp, 0)); | 2694 __ fstp_d(Operand(esp, 0)); |
2689 __ movdbl(result_reg, Operand(esp, 0)); | 2695 __ movdbl(result_reg, Operand(esp, 0)); |
2690 __ add(Operand(esp), Immediate(kDoubleSize)); | 2696 __ add(Operand(esp), Immediate(kDoubleSize)); |
2691 } | 2697 } |
2692 | 2698 |
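Note: DoPower selects the C helper from the exponent's representation: a double exponent goes straight to power_double_double_function, an int32 exponent to power_double_int_function, and a tagged exponent is first untagged (smi) or unboxed (heap number, deoptimizing on anything else) and then treated as a double. All three paths end in a C call whose double result is again read back from st(0). The helpers themselves are not part of this diff; the sketch below assumes they reduce to pow() and only illustrates the two signatures being dispatched between.

// Hypothetical stand-ins for the helpers behind
// ExternalReference::power_double_double_function(isolate()) and
// ExternalReference::power_double_int_function(isolate()); assumed to
// reduce to pow() for illustration only.
#include <cmath>
#include <cstdio>

extern "C" double power_double_double_sketch(double base, double exponent) {
  return std::pow(base, exponent);
}

extern "C" double power_double_int_sketch(double base, int exponent) {
  return std::pow(base, static_cast<double>(exponent));
}

int main() {
  std::printf("%g %g\n",
              power_double_double_sketch(2.0, 0.5),  // ~1.41421
              power_double_int_sketch(3.0, 4));      // 81
  return 0;
}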
(...skipping 1278 matching lines...)
3971 true); | 3977 true); |
3972 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3978 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
3973 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 3979 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); |
3974 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); | 3980 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); |
3975 } | 3981 } |
3976 | 3982 |
3977 | 3983 |
3978 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 3984 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
3979 // Perform stack overflow check. | 3985 // Perform stack overflow check. |
3980 NearLabel done; | 3986 NearLabel done; |
3981 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); | 3987 ExternalReference stack_limit = |
| 3988 ExternalReference::address_of_stack_limit(isolate()); |
3982 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 3989 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
3983 __ j(above_equal, &done); | 3990 __ j(above_equal, &done); |
3984 | 3991 |
3985 StackCheckStub stub; | 3992 StackCheckStub stub; |
3986 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3993 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); |
3987 __ bind(&done); | 3994 __ bind(&done); |
3988 } | 3995 } |
3989 | 3996 |
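Note: DoStackCheck is the explicit form of the same guard used in EmitGoto: compare esp against the isolate's stack-limit address and call StackCheckStub only on the slow path. A minimal sketch of that fast-path/slow-path shape, with hypothetical names in place of the assembler and stub machinery:

// Fast-path/slow-path shape of the stack check; names are hypothetical.
#include <cstdint>

void StackCheckSketch(std::uintptr_t sp, std::uintptr_t stack_limit,
                      void (*call_stack_check_stub)()) {
  // Mirrors: cmp esp, [stack_limit]; j(above_equal, &done); call stub.
  if (sp >= stack_limit) {
    return;                    // enough stack left: fall through
  }
  call_stack_check_stub();     // slow path: the stub handles the overflow
}

static void DummyStub() { /* runtime stack check would run here */ }

int main() {
  StackCheckSketch(4096, 1024, &DummyStub);  // sp above limit: fast path
  return 0;
}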
3990 | 3997 |
3991 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 3998 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
(...skipping 11 matching lines...)
4003 ASSERT(osr_pc_offset_ == -1); | 4010 ASSERT(osr_pc_offset_ == -1); |
4004 osr_pc_offset_ = masm()->pc_offset(); | 4011 osr_pc_offset_ = masm()->pc_offset(); |
4005 } | 4012 } |
4006 | 4013 |
4007 | 4014 |
4008 #undef __ | 4015 #undef __ |
4009 | 4016 |
4010 } } // namespace v8::internal | 4017 } } // namespace v8::internal |
4011 | 4018 |
4012 #endif // V8_TARGET_ARCH_IA32 | 4019 #endif // V8_TARGET_ARCH_IA32 |
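Note: the common thread of every hunk shown here is that the ExternalReference factory calls (address_of_stack_limit, double_fp_operation, power_double_double_function, power_double_int_function) now receive isolate() explicitly instead of resolving against implicit global state. A minimal sketch of that pattern with hypothetical types, not V8's real declarations:

// Hypothetical illustration of threading an explicit Isolate* through a
// reference factory; not V8's actual ExternalReference API.
#include <cstdint>

struct Isolate {
  std::uintptr_t stack_limit_address;  // per-isolate data, no globals
};

struct ExternalReferenceSketch {
  std::uintptr_t address;

  // The caller passes the isolate it already has (isolate() in LCodeGen),
  // so the reference is resolved against that isolate alone rather than a
  // process-wide current isolate.
  static ExternalReferenceSketch address_of_stack_limit(Isolate* isolate) {
    ExternalReferenceSketch ref = {isolate->stack_limit_address};
    return ref;
  }
};

int main() {
  Isolate isolate = {0x1000};
  ExternalReferenceSketch ref =
      ExternalReferenceSketch::address_of_stack_limit(&isolate);
  return ref.address == 0x1000 ? 0 : 1;
}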