OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 131 matching lines...)
142 | 142 |
143 __ push(rbp); // Caller's frame pointer. | 143 __ push(rbp); // Caller's frame pointer. |
144 __ movq(rbp, rsp); | 144 __ movq(rbp, rsp); |
145 __ push(rsi); // Callee's context. | 145 __ push(rsi); // Callee's context. |
146 __ push(rdi); // Callee's JS function. | 146 __ push(rdi); // Callee's JS function. |
147 | 147 |
148 // Reserve space for the stack slots needed by the code. | 148 // Reserve space for the stack slots needed by the code. |
149 int slots = StackSlotCount(); | 149 int slots = StackSlotCount(); |
150 if (slots > 0) { | 150 if (slots > 0) { |
151 if (FLAG_debug_code) { | 151 if (FLAG_debug_code) { |
152 __ movl(rax, Immediate(slots)); | 152 __ Set(rax, slots); |
153 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); | 153 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); |
154 Label loop; | 154 Label loop; |
155 __ bind(&loop); | 155 __ bind(&loop); |
156 __ push(kScratchRegister); | 156 __ push(kScratchRegister); |
157 __ decl(rax); | 157 __ decl(rax); |
158 __ j(not_zero, &loop); | 158 __ j(not_zero, &loop); |
159 } else { | 159 } else { |
160 __ subq(rsp, Immediate(slots * kPointerSize)); | 160 __ subq(rsp, Immediate(slots * kPointerSize)); |
161 #ifdef _MSC_VER | 161 #ifdef _MSC_VER |
162 // On Windows, you may not access the stack more than one page below | 162 // On Windows, you may not access the stack more than one page below |
(...skipping 929 matching lines...)
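Note on the hunk above: in debug builds the prologue fills every reserved stack slot with kSlotsZapValue instead of just adjusting rsp, so a read from an uninitialized spill slot shows up as an obvious pattern in a crash dump. A minimal standalone sketch of that idea in plain C++ (the zap constant and the fixed slot count below are placeholders, not V8's actual values):

#include <cstdint>
#include <cstdio>

// Debug-only "zap" pattern written into freshly reserved slots so that a
// read of an uninitialized slot is immediately recognizable.  The value is
// a placeholder; V8 defines its own kSlotsZapValue.
static const uint64_t kZapValue = UINT64_C(0xbeefdeadbeefdeed);

static void ReserveSlots(uint64_t* frame, int slots, bool debug_code) {
  if (debug_code) {
    // Mirrors the push loop in the generated prologue: fill every slot.
    for (int i = 0; i < slots; ++i) frame[i] = kZapValue;
  }
  // Release builds just adjust rsp and leave the slots uninitialized,
  // which is what the subq branch of the hunk does.
}

int main() {
  uint64_t frame[4] = {0, 0, 0, 0};
  ReserveSlots(frame, 4, true);
  std::printf("%016llx\n", static_cast<unsigned long long>(frame[0]));
  return 0;
}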
1092 } | 1092 } |
1093 | 1093 |
1094 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1094 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1095 DeoptimizeIf(overflow, instr->environment()); | 1095 DeoptimizeIf(overflow, instr->environment()); |
1096 } | 1096 } |
1097 } | 1097 } |
1098 | 1098 |
1099 | 1099 |
1100 void LCodeGen::DoConstantI(LConstantI* instr) { | 1100 void LCodeGen::DoConstantI(LConstantI* instr) { |
1101 ASSERT(instr->result()->IsRegister()); | 1101 ASSERT(instr->result()->IsRegister()); |
1102 __ movl(ToRegister(instr->result()), Immediate(instr->value())); | 1102 __ Set(ToRegister(instr->result()), instr->value()); |
1103 } | 1103 } |
1104 | 1104 |
1105 | 1105 |
1106 void LCodeGen::DoConstantD(LConstantD* instr) { | 1106 void LCodeGen::DoConstantD(LConstantD* instr) { |
1107 ASSERT(instr->result()->IsDoubleRegister()); | 1107 ASSERT(instr->result()->IsDoubleRegister()); |
1108 XMMRegister res = ToDoubleRegister(instr->result()); | 1108 XMMRegister res = ToDoubleRegister(instr->result()); |
1109 double v = instr->value(); | 1109 double v = instr->value(); |
1110 uint64_t int_val = BitCast<uint64_t, double>(v); | 1110 uint64_t int_val = BitCast<uint64_t, double>(v); |
1111 // Use xor to produce +0.0 in a fast and compact way, but avoid | 1111 // Use xor to produce +0.0 in a fast and compact way, but avoid |
1112 // doing so if the constant is -0.0. | 1112 // doing so if the constant is -0.0. |
(...skipping 394 matching lines...)
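The DoConstantD hunk bit-casts the double constant to its raw 64-bit pattern so the generated code can special-case +0.0 (all bits zero, producible with a cheap xor of the register with itself) while still materializing -0.0, which differs only in the sign bit, from the full constant. A small standalone check of those bit patterns (plain C++; memcpy stands in for V8's BitCast):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Raw 64-bit pattern of a double, equivalent in spirit to
// BitCast<uint64_t, double>.
static uint64_t DoubleBits(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof(bits));
  return bits;
}

int main() {
  // +0.0 is the all-zero pattern, so "xorps xmm, xmm" can produce it.
  // -0.0 has only the sign bit set and needs the full 64-bit constant.
  std::printf("+0.0 -> %016llx\n", static_cast<unsigned long long>(DoubleBits(0.0)));
  std::printf("-0.0 -> %016llx\n", static_cast<unsigned long long>(DoubleBits(-0.0)));
  return 0;
}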
1507 // If the expression is known to be a smi, then it's | 1507 // If the expression is known to be a smi, then it's |
1508 // definitely not null. Materialize false. | 1508 // definitely not null. Materialize false. |
1509 // Consider adding other type and representation tests too. | 1509 // Consider adding other type and representation tests too. |
1510 if (instr->hydrogen()->value()->type().IsSmi()) { | 1510 if (instr->hydrogen()->value()->type().IsSmi()) { |
1511 __ LoadRoot(result, Heap::kFalseValueRootIndex); | 1511 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
1512 return; | 1512 return; |
1513 } | 1513 } |
1514 | 1514 |
1515 __ CompareRoot(reg, Heap::kNullValueRootIndex); | 1515 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
1516 if (instr->is_strict()) { | 1516 if (instr->is_strict()) { |
| 1517 ASSERT(Heap::kTrueValueRootIndex >= 0); |
1517 __ movl(result, Immediate(Heap::kTrueValueRootIndex)); | 1518 __ movl(result, Immediate(Heap::kTrueValueRootIndex)); |
1518 NearLabel load; | 1519 NearLabel load; |
1519 __ j(equal, &load); | 1520 __ j(equal, &load); |
1520 __ movl(result, Immediate(Heap::kFalseValueRootIndex)); | 1521 __ Set(result, Heap::kFalseValueRootIndex); |
1521 __ bind(&load); | 1522 __ bind(&load); |
1522 __ LoadRootIndexed(result, result, 0); | 1523 __ LoadRootIndexed(result, result, 0); |
1523 } else { | 1524 } else { |
1524 NearLabel true_value, false_value, done; | 1525 NearLabel true_value, false_value, done; |
1525 __ j(equal, &true_value); | 1526 __ j(equal, &true_value); |
1526 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); | 1527 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
1527 __ j(equal, &true_value); | 1528 __ j(equal, &true_value); |
1528 __ JumpIfSmi(reg, &false_value); | 1529 __ JumpIfSmi(reg, &false_value); |
1529 // Check for undetectable objects by looking in the bit field in | 1530 // Check for undetectable objects by looking in the bit field in |
1530 // the map. The object has already been smi checked. | 1531 // the map. The object has already been smi checked. |
(...skipping 438 matching lines...)
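In the strict-equality branch above, the code avoids two separate LoadRoot calls on diverging paths: it materializes the root-array index of the true value, conditionally overwrites it with the index of the false value, and then performs a single LoadRootIndexed to fetch the chosen object. A rough plain-C++ analogue of that select-then-index pattern (the table and indices here are invented for illustration):

#include <cstdio>

// Stand-in for the roots table that LoadRootIndexed indexes into; the
// entries and indices are illustrative only.
static const char* kRoots[] = {"true_value", "false_value"};
static const int kTrueIndex = 0;
static const int kFalseIndex = 1;

static const char* MaterializeBoolean(bool condition) {
  // Pick the index first, then do one indexed load, instead of branching
  // to two separate loads of the final values.
  int index = kTrueIndex;
  if (!condition) index = kFalseIndex;  // the conditional jump in the hunk
  return kRoots[index];                 // LoadRootIndexed(result, result, 0)
}

int main() {
  std::printf("%s\n", MaterializeBoolean(1 + 1 == 2));
  return 0;
}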
1969 { | 1970 { |
1970 PushSafepointRegistersScope scope(this); | 1971 PushSafepointRegistersScope scope(this); |
1971 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( | 1972 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( |
1972 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); | 1973 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); |
1973 InstanceofStub stub(flags); | 1974 InstanceofStub stub(flags); |
1974 | 1975 |
1975 __ push(ToRegister(instr->InputAt(0))); | 1976 __ push(ToRegister(instr->InputAt(0))); |
1976 __ Push(instr->function()); | 1977 __ Push(instr->function()); |
1977 | 1978 |
1978 Register temp = ToRegister(instr->TempAt(0)); | 1979 Register temp = ToRegister(instr->TempAt(0)); |
1979 static const int kAdditionalDelta = 13; | 1980 static const int kAdditionalDelta = 10; |
1980 int delta = | 1981 int delta = |
1981 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1982 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
1982 __ movq(temp, Immediate(delta)); | 1983 ASSERT(delta >= 0); |
1983 __ push(temp); | 1984 __ push_imm32(delta); |
1984 | 1985 |
1985 // We are pushing three values on the stack but recording a | 1986 // We are pushing three values on the stack but recording a |
1986 // safepoint with two arguments because the stub is going to | 1987 // safepoint with two arguments because the stub is going to |
1987 // remove the third argument from the stack before jumping | 1988 // remove the third argument from the stack before jumping |
1988 // to the instanceof builtin on the slow path. | 1989 // to the instanceof builtin on the slow path. |
1989 CallCodeGeneric(stub.GetCode(), | 1990 CallCodeGeneric(stub.GetCode(), |
1990 RelocInfo::CODE_TARGET, | 1991 RelocInfo::CODE_TARGET, |
1991 instr, | 1992 instr, |
1992 RECORD_SAFEPOINT_WITH_REGISTERS, | 1993 RECORD_SAFEPOINT_WITH_REGISTERS, |
1993 2); | 1994 2); |
1994 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); | 1995 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); |
| 1996 // Move the result to a register that survives the end of the |
| 1997 // PushSafepointRegistersScope. |
1995 __ movq(kScratchRegister, rax); | 1998 __ movq(kScratchRegister, rax); |
1996 } | 1999 } |
1997 __ testq(kScratchRegister, kScratchRegister); | 2000 __ testq(kScratchRegister, kScratchRegister); |
1998 Label load_false; | 2001 Label load_false; |
1999 Label done; | 2002 Label done; |
2000 __ j(not_zero, &load_false); | 2003 __ j(not_zero, &load_false); |
2001 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 2004 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
2002 __ jmp(&done); | 2005 __ jmp(&done); |
2003 __ bind(&load_false); | 2006 __ bind(&load_false); |
2004 __ LoadRoot(rax, Heap::kFalseValueRootIndex); | 2007 __ LoadRoot(rax, Heap::kFalseValueRootIndex); |
(...skipping 414 matching lines...)
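The push_imm32 of delta above tells the InstanceofStub how far behind the return address the inlined map check sits, so the stub can patch that site. The value has to be predicted before the push and the call are emitted, which is what kAdditionalDelta accounts for, and the ASSERT after the call verifies that the prediction matched the code actually generated. A toy sketch of that bookkeeping (the assembler class, byte counts, and names below are hypothetical, not V8 APIs):

#include <cassert>
#include <cstdio>
#include <vector>

// Toy code buffer standing in for the macro assembler.
struct ToyAssembler {
  std::vector<unsigned char> buffer;
  int pc_offset() const { return static_cast<int>(buffer.size()); }
  void EmitBytes(int n) { buffer.insert(buffer.end(), n, 0x90); }  // nop filler
};

int main() {
  ToyAssembler masm;
  int map_check = masm.pc_offset();  // label at the patchable site

  masm.EmitBytes(7);                 // code generated since the label

  // Bytes still to be emitted after computing delta: a 5-byte push imm32
  // and a 5-byte call in this toy encoding (an assumption for the example).
  const int kAdditionalDelta = 10;
  int delta = (masm.pc_offset() - map_check) + kAdditionalDelta;

  masm.EmitBytes(5);                 // push imm32 delta
  masm.EmitBytes(5);                 // call stub

  // Same shape as the ASSERT in the hunk: the prediction must match the
  // generated code, or the stub would patch the wrong offset.
  assert(delta == masm.pc_offset() - map_check);
  std::printf("delta = %d\n", delta);
  return 0;
}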
2419 Register result = ToRegister(instr->result()); | 2422 Register result = ToRegister(instr->result()); |
2420 | 2423 |
2421 NearLabel done; | 2424 NearLabel done; |
2422 | 2425 |
2423 // If there is no arguments adaptor frame, the argument count is fixed. | 2426 // If there is no arguments adaptor frame, the argument count is fixed. |
2424 if (instr->InputAt(0)->IsRegister()) { | 2427 if (instr->InputAt(0)->IsRegister()) { |
2425 __ cmpq(rbp, ToRegister(instr->InputAt(0))); | 2428 __ cmpq(rbp, ToRegister(instr->InputAt(0))); |
2426 } else { | 2429 } else { |
2427 __ cmpq(rbp, ToOperand(instr->InputAt(0))); | 2430 __ cmpq(rbp, ToOperand(instr->InputAt(0))); |
2428 } | 2431 } |
2429 __ movq(result, Immediate(scope()->num_parameters())); | 2432 __ movl(result, Immediate(scope()->num_parameters())); |
2430 __ j(equal, &done); | 2433 __ j(equal, &done); |
2431 | 2434 |
2432 // Arguments adaptor frame present. Get argument length from there. | 2435 // Arguments adaptor frame present. Get argument length from there. |
2433 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2436 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2434 __ movq(result, Operand(result, | 2437 __ SmiToInteger32(result, |
2435 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2438 Operand(result, |
2436 __ SmiToInteger32(result, result); | 2439 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2437 | 2440 |
2438 // Argument length is in result register. | 2441 // Argument length is in result register. |
2439 __ bind(&done); | 2442 __ bind(&done); |
2440 } | 2443 } |
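The rewritten DoArgumentsLength hunk folds the load and the smi decode into a single SmiToInteger32 with a memory operand: the adaptor frame stores its argument count as a smi, and on x64 a smi keeps its 32-bit payload in the upper half of the word. A standalone sketch of that encoding (the 32-bit shift matches x64 V8 of this vintage and is stated here as an assumption):

#include <cstdint>
#include <cstdio>

// Assumed x64 smi layout: 32-bit payload in the upper half of a 64-bit
// word, low 32 bits zero.
static const int kSmiShift = 32;

static uint64_t Integer32ToSmi(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
}

static int32_t SmiToInteger32(uint64_t smi) {
  // What the combined SmiToInteger32(result, Operand(...)) amounts to:
  // load the word and shift the payload back down in one step.
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  uint64_t length_slot = Integer32ToSmi(3);          // adaptor frame length slot
  std::printf("%d\n", SmiToInteger32(length_slot));  // prints 3
  return 0;
}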
2441 | 2444 |
2442 | 2445 |
2443 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 2446 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
2444 Register receiver = ToRegister(instr->receiver()); | 2447 Register receiver = ToRegister(instr->receiver()); |
2445 Register function = ToRegister(instr->function()); | 2448 Register function = ToRegister(instr->function()); |
2446 Register length = ToRegister(instr->length()); | 2449 Register length = ToRegister(instr->length()); |
(...skipping 961 matching lines...)
3408 // Heap number map check. | 3411 // Heap number map check. |
3409 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 3412 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
3410 Heap::kHeapNumberMapRootIndex); | 3413 Heap::kHeapNumberMapRootIndex); |
3411 | 3414 |
3412 if (instr->truncating()) { | 3415 if (instr->truncating()) { |
3413 __ j(equal, &heap_number); | 3416 __ j(equal, &heap_number); |
3414 // Check for undefined. Undefined is converted to zero for truncating | 3417 // Check for undefined. Undefined is converted to zero for truncating |
3415 // conversions. | 3418 // conversions. |
3416 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); | 3419 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); |
3417 DeoptimizeIf(not_equal, instr->environment()); | 3420 DeoptimizeIf(not_equal, instr->environment()); |
3418 __ movl(input_reg, Immediate(0)); | 3421 __ Set(input_reg, 0); |
3419 __ jmp(&done); | 3422 __ jmp(&done); |
3420 | 3423 |
3421 __ bind(&heap_number); | 3424 __ bind(&heap_number); |
3422 | 3425 |
3423 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3426 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
3424 __ cvttsd2siq(input_reg, xmm0); | 3427 __ cvttsd2siq(input_reg, xmm0); |
3425 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000)); | 3428 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000)); |
3426 __ cmpq(input_reg, kScratchRegister); | 3429 __ cmpq(input_reg, kScratchRegister); |
3427 DeoptimizeIf(equal, instr->environment()); | 3430 DeoptimizeIf(equal, instr->environment()); |
3428 } else { | 3431 } else { |
(...skipping 563 matching lines...)
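In the truncating path above, cvttsd2siq yields the "integer indefinite" value 0x8000000000000000 whenever the double is NaN or outside the int64 range, so comparing the result against that sentinel is how the code decides to deoptimize. A rough standalone illustration of the same check (the explicit range test replaces the hardware behaviour so the C++ stays well defined):

#include <cstdint>
#include <cstdio>
#include <cmath>
#include <limits>

// Sentinel produced by cvttsd2siq for NaN or out-of-range inputs.
static const uint64_t kIntegerIndefinite = UINT64_C(0x8000000000000000);

static int64_t TruncateLikeCvttsd2siq(double v) {
  // The real instruction simply yields the sentinel; a plain C++ cast of an
  // out-of-range double would be undefined, so range-check first.
  if (std::isnan(v) || v >= 9223372036854775808.0 ||
      v < -9223372036854775808.0) {
    return std::numeric_limits<int64_t>::min();  // bit pattern 0x8000000000000000
  }
  return static_cast<int64_t>(v);  // truncation toward zero
}

int main() {
  int64_t ok = TruncateLikeCvttsd2siq(42.7);
  int64_t bad = TruncateLikeCvttsd2siq(1e300);
  int overflowed = static_cast<uint64_t>(bad) == kIntegerIndefinite;
  // The generated code compares against the sentinel and deoptimizes on a hit.
  std::printf("ok=%lld overflowed=%d\n", static_cast<long long>(ok), overflowed);
  return 0;
}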
3992 RegisterEnvironmentForDeoptimization(environment); | 3995 RegisterEnvironmentForDeoptimization(environment); |
3993 ASSERT(osr_pc_offset_ == -1); | 3996 ASSERT(osr_pc_offset_ == -1); |
3994 osr_pc_offset_ = masm()->pc_offset(); | 3997 osr_pc_offset_ = masm()->pc_offset(); |
3995 } | 3998 } |
3996 | 3999 |
3997 #undef __ | 4000 #undef __ |
3998 | 4001 |
3999 } } // namespace v8::internal | 4002 } } // namespace v8::internal |
4000 | 4003 |
4001 #endif // V8_TARGET_ARCH_X64 | 4004 #endif // V8_TARGET_ARCH_X64 |