| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 2104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2115 __ movp(rax, Immediate(arg_count())); | 2115 __ movp(rax, Immediate(arg_count())); |
| 2116 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, | 2116 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, |
| 2117 FixedArray::kHeaderSize)); | 2117 FixedArray::kHeaderSize)); |
| 2118 // Verify that rcx contains an AllocationSite | 2118 // Verify that rcx contains an AllocationSite |
| 2119 Factory* factory = masm->isolate()->factory(); | 2119 Factory* factory = masm->isolate()->factory(); |
| 2120 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), | 2120 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), |
| 2121 factory->allocation_site_map()); | 2121 factory->allocation_site_map()); |
| 2122 __ j(not_equal, &miss); | 2122 __ j(not_equal, &miss); |
| 2123 | 2123 |
| 2124 __ movp(rbx, rcx); | 2124 __ movp(rbx, rcx); |
| 2125 __ movp(rdx, rdi); | |
| 2126 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2125 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
| 2127 __ TailCallStub(&stub); | 2126 __ TailCallStub(&stub); |
| 2128 | 2127 |
| 2129 __ bind(&miss); | 2128 __ bind(&miss); |
| 2130 GenerateMiss(masm); | 2129 GenerateMiss(masm); |
| 2131 | 2130 |
| 2132 // The slow case, we need this no matter what to complete a call after a miss. | 2131 // The slow case, we need this no matter what to complete a call after a miss. |
| 2133 CallFunctionNoFeedback(masm, | 2132 CallFunctionNoFeedback(masm, |
| 2134 arg_count(), | 2133 arg_count(), |
| 2135 true, | 2134 true, |
| (...skipping 2430 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4566 UNREACHABLE(); | 4565 UNREACHABLE(); |
| 4567 } | 4566 } |
| 4568 } | 4567 } |
| 4569 | 4568 |
| 4570 | 4569 |
| 4571 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 4570 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 4572 // ----------- S t a t e ------------- | 4571 // ----------- S t a t e ------------- |
| 4573 // -- rax : argc | 4572 // -- rax : argc |
| 4574 // -- rbx : AllocationSite or undefined | 4573 // -- rbx : AllocationSite or undefined |
| 4575 // -- rdi : constructor | 4574 // -- rdi : constructor |
| 4576 // -- rdx : original constructor | |
| 4577 // -- rsp[0] : return address | 4575 // -- rsp[0] : return address |
| 4578 // -- rsp[8] : last argument | 4576 // -- rsp[8] : last argument |
| 4579 // ----------------------------------- | 4577 // ----------------------------------- |
| 4580 if (FLAG_debug_code) { | 4578 if (FLAG_debug_code) { |
| 4581 // The array construct code is only set for the global and natives | 4579 // The array construct code is only set for the global and natives |
| 4582 // builtin Array functions which always have maps. | 4580 // builtin Array functions which always have maps. |
| 4583 | 4581 |
| 4584 // Initial map for the builtin Array function should be a map. | 4582 // Initial map for the builtin Array function should be a map. |
| 4585 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 4583 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
| 4586 // Will both indicate a NULL and a Smi. | 4584 // Will both indicate a NULL and a Smi. |
| 4587 STATIC_ASSERT(kSmiTag == 0); | 4585 STATIC_ASSERT(kSmiTag == 0); |
| 4588 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 4586 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
| 4589 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 4587 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); |
| 4590 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 4588 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
| 4591 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 4589 __ Check(equal, kUnexpectedInitialMapForArrayFunction); |
| 4592 | 4590 |
| 4593 // We should either have undefined in rbx or a valid AllocationSite | 4591 // We should either have undefined in rbx or a valid AllocationSite |
| 4594 __ AssertUndefinedOrAllocationSite(rbx); | 4592 __ AssertUndefinedOrAllocationSite(rbx); |
| 4595 } | 4593 } |
| 4596 | 4594 |
| 4597 Label subclassing; | |
| 4598 __ cmpp(rdi, rdx); | |
| 4599 __ j(not_equal, &subclassing); | |
| 4600 | |
| 4601 Label no_info; | 4595 Label no_info; |
| 4602 // If the feedback vector is the undefined value call an array constructor | 4596 // If the feedback vector is the undefined value call an array constructor |
| 4603 // that doesn't use AllocationSites. | 4597 // that doesn't use AllocationSites. |
| 4604 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 4598 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 4605 __ j(equal, &no_info); | 4599 __ j(equal, &no_info); |
| 4606 | 4600 |
| 4607 // Only look at the lower 16 bits of the transition info. | 4601 // Only look at the lower 16 bits of the transition info. |
| 4608 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); | 4602 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); |
| 4609 __ SmiToInteger32(rdx, rdx); | 4603 __ SmiToInteger32(rdx, rdx); |
| 4610 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 4604 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
| 4611 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); | 4605 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); |
| 4612 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 4606 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 4613 | 4607 |
| 4614 __ bind(&no_info); | 4608 __ bind(&no_info); |
| 4615 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 4609 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
| 4616 | |
| 4617 __ bind(&subclassing); | |
| 4618 __ TailCallRuntime(Runtime::kThrowArrayNotSubclassableError, 0, 1); | |
| 4619 } | 4610 } |
| 4620 | 4611 |
| 4621 | 4612 |
| 4622 void InternalArrayConstructorStub::GenerateCase( | 4613 void InternalArrayConstructorStub::GenerateCase( |
| 4623 MacroAssembler* masm, ElementsKind kind) { | 4614 MacroAssembler* masm, ElementsKind kind) { |
| 4624 Label not_zero_case, not_one_case; | 4615 Label not_zero_case, not_one_case; |
| 4625 Label normal_sequence; | 4616 Label normal_sequence; |
| 4626 | 4617 |
| 4627 __ testp(rax, rax); | 4618 __ testp(rax, rax); |
| 4628 __ j(not_zero, ¬_zero_case); | 4619 __ j(not_zero, ¬_zero_case); |
| (...skipping 475 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5104 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5095 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
| 5105 kStackSpace, nullptr, return_value_operand, NULL); | 5096 kStackSpace, nullptr, return_value_operand, NULL); |
| 5106 } | 5097 } |
| 5107 | 5098 |
| 5108 | 5099 |
| 5109 #undef __ | 5100 #undef __ |
| 5110 | 5101 |
| 5111 } } // namespace v8::internal | 5102 } } // namespace v8::internal |
| 5112 | 5103 |
| 5113 #endif // V8_TARGET_ARCH_X64 | 5104 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |