| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2212 matching lines...) | |
| 2223 ASSERT(csp.Is(__ StackPointer())); | 2223 ASSERT(csp.Is(__ StackPointer())); |
| 2224 | 2224 |
| 2225 Isolate* isolate = masm->isolate(); | 2225 Isolate* isolate = masm->isolate(); |
| 2226 | 2226 |
| 2227 const Register& argv = x21; | 2227 const Register& argv = x21; |
| 2228 const Register& argc = x22; | 2228 const Register& argc = x22; |
| 2229 const Register& target = x23; | 2229 const Register& target = x23; |
| 2230 | 2230 |
| 2231 if (do_gc) { | 2231 if (do_gc) { |
| 2232 // Call Runtime::PerformGC, passing x0 (the result parameter for | 2232 // Call Runtime::PerformGC, passing x0 (the result parameter for |
| 2233 // PerformGC). | 2233 // PerformGC) and x1 (the isolate). |
| 2234 __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 2234 __ CallCFunction( | 2235 __ CallCFunction( |
| 2235 ExternalReference::perform_gc_function(isolate), 1, 0); | 2236 ExternalReference::perform_gc_function(isolate), 2, 0); |
| 2236 } | 2237 } |
| 2237 | 2238 |
| 2238 ExternalReference scope_depth = | 2239 ExternalReference scope_depth = |
| 2239 ExternalReference::heap_always_allocate_scope_depth(isolate); | 2240 ExternalReference::heap_always_allocate_scope_depth(isolate); |
| 2240 if (always_allocate) { | 2241 if (always_allocate) { |
| 2241 __ Mov(x10, Operand(scope_depth)); | 2242 __ Mov(x10, Operand(scope_depth)); |
| 2242 __ Ldr(x11, MemOperand(x10)); | 2243 __ Ldr(x11, MemOperand(x10)); |
| 2243 __ Add(x11, x11, 1); | 2244 __ Add(x11, x11, 1); |
| 2244 __ Str(x11, MemOperand(x10)); | 2245 __ Str(x11, MemOperand(x10)); |
| 2245 } | 2246 } |
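Note on the do_gc hunk above: the stub now loads the isolate into x1 before the call and declares two arguments to CallCFunction, so the C entry point reached through ExternalReference::perform_gc_function has to take the isolate explicitly. A minimal, hypothetical sketch of a two-argument entry point with that shape (the name and parameter types are illustrative, not V8's actual declaration):

    // Illustrative only: shows the two-argument shape implied by
    // Mov(x1, isolate) + CallCFunction(..., 2, 0) above. A real
    // implementation would trigger a collection on the isolate's heap.
    extern "C" void PerformGCEntry(void* failure_result, void* isolate) {
      (void)failure_result;  // the allocation failure value passed in x0
      (void)isolate;         // the isolate passed in x1
    }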
| (...skipping 498 matching lines...) | |
| 2744 ASSERT(kind() == Code::LOAD_IC); | 2745 ASSERT(kind() == Code::LOAD_IC); |
| 2745 // ----------- S t a t e ------------- | 2746 // ----------- S t a t e ------------- |
| 2746 // -- lr : return address | 2747 // -- lr : return address |
| 2747 // -- x2 : name | 2748 // -- x2 : name |
| 2748 // -- x0 : receiver | 2749 // -- x0 : receiver |
| 2749 // -- sp[0] : receiver | 2750 // -- sp[0] : receiver |
| 2750 // ----------------------------------- | 2751 // ----------------------------------- |
| 2751 receiver = x0; | 2752 receiver = x0; |
| 2752 } | 2753 } |
| 2753 | 2754 |
| 2754 StubCompiler::GenerateLoadStringLength(masm, receiver, x10, x11, &miss, | 2755 StubCompiler::GenerateLoadStringLength(masm, receiver, x10, x11, &miss); |
| 2755 support_wrapper_); | |
| 2756 | 2756 |
| 2757 __ Bind(&miss); | 2757 __ Bind(&miss); |
| 2758 StubCompiler::TailCallBuiltin(masm, | 2758 StubCompiler::TailCallBuiltin(masm, |
| 2759 BaseLoadStoreStubCompiler::MissBuiltin(kind())); | 2759 BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
| 2760 } | 2760 } |
| 2761 | 2761 |
| 2762 | 2762 |
| 2763 void StoreArrayLengthStub::Generate(MacroAssembler* masm) { | 2763 void StoreArrayLengthStub::Generate(MacroAssembler* masm) { |
| 2764 ASM_LOCATION("StoreArrayLengthStub::Generate"); | 2764 ASM_LOCATION("StoreArrayLengthStub::Generate"); |
| 2765 // This accepts as a receiver anything JSArray::SetElementsLength accepts | 2765 // This accepts as a receiver anything JSArray::SetElementsLength accepts |
| (...skipping 3115 matching lines...) | |
| 5881 Register scratch4, | 5881 Register scratch4, |
| 5882 Label* slow) { | 5882 Label* slow) { |
| 5883 ASSERT(!AreAliased(arg, scratch1, scratch2, scratch3, scratch4)); | 5883 ASSERT(!AreAliased(arg, scratch1, scratch2, scratch3, scratch4)); |
| 5884 | 5884 |
| 5885 // First check if the argument is already a string. | 5885 // First check if the argument is already a string. |
| 5886 Label not_string, done; | 5886 Label not_string, done; |
| 5887 __ JumpIfSmi(arg, ¬_string); | 5887 __ JumpIfSmi(arg, ¬_string); |
| 5888 __ JumpIfObjectType(arg, scratch1, scratch1, FIRST_NONSTRING_TYPE, &done, lt); | 5888 __ JumpIfObjectType(arg, scratch1, scratch1, FIRST_NONSTRING_TYPE, &done, lt); |
| 5889 | 5889 |
| 5890 // Check the number to string cache. | 5890 // Check the number to string cache. |
| 5891 Label not_cached; | |
| 5892 __ Bind(¬_string); | 5891 __ Bind(¬_string); |
| 5893 // Puts the cache result into scratch1. | 5892 // Puts the cache result into scratch1. |
| 5894 NumberToStringStub::GenerateLookupNumberStringCache( | 5893 NumberToStringStub::GenerateLookupNumberStringCache( |
| 5895 masm, | 5894 masm, |
| 5896 arg, | 5895 arg, |
| 5897 scratch1, | 5896 scratch1, |
| 5898 scratch2, | 5897 scratch2, |
| 5899 scratch3, | 5898 scratch3, |
| 5900 scratch4, | 5899 scratch4, |
| 5901 ¬_cached); | 5900 slow); |
| 5902 __ Mov(arg, scratch1); | 5901 __ Mov(arg, scratch1); |
| 5903 __ B(&done); | |
| 5904 | |
| 5905 // Check if the argument is a safe string wrapper. | |
| 5906 __ Bind(¬_cached); | |
| 5907 __ JumpIfSmi(arg, slow); | |
| 5908 Register map = scratch1; | |
| 5909 __ JumpIfNotObjectType(arg, map, scratch2, JS_VALUE_TYPE, slow); | |
| 5910 __ Ldrb(scratch2, FieldMemOperand(map, Map::kBitField2Offset)); | |
| 5911 __ Tbz(scratch2, Map::kStringWrapperSafeForDefaultValueOf, slow); | |
| 5912 __ Ldr(arg, FieldMemOperand(arg, JSValue::kValueOffset)); | |
| 5913 | 5902 |
| 5914 __ Bind(&done); | 5903 __ Bind(&done); |
| 5915 } | 5904 } |
| 5916 | 5905 |
| 5917 | 5906 |
| 5918 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 5907 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
| 5919 __ Push(x0, x1); | 5908 __ Push(x0, x1); |
| 5920 } | 5909 } |
| 5921 | 5910 |
| 5922 | 5911 |
| (...skipping 673 matching lines...) | |
| 6596 __ Mov(result, 1); | 6585 __ Mov(result, 1); |
| 6597 __ Ret(); | 6586 __ Ret(); |
| 6598 | 6587 |
| 6599 __ Bind(¬_in_dictionary); | 6588 __ Bind(¬_in_dictionary); |
| 6600 __ Mov(result, 0); | 6589 __ Mov(result, 0); |
| 6601 __ Ret(); | 6590 __ Ret(); |
| 6602 } | 6591 } |
| 6603 | 6592 |
| 6604 | 6593 |
| 6605 template<class T> | 6594 template<class T> |
| 6606 static void CreateArrayDispatch(MacroAssembler* masm) { | 6595 static void CreateArrayDispatch(MacroAssembler* masm, |
| 6607 Register kind = x3; | 6596 AllocationSiteOverrideMode mode) { |
| 6608 int last_index = GetSequenceIndexFromFastElementsKind( | 6597 if (mode == DISABLE_ALLOCATION_SITES) { |
| 6609 TERMINAL_FAST_ELEMENTS_KIND); | 6598 T stub(GetInitialFastElementsKind(), |
| 6610 for (int i = 0; i <= last_index; ++i) { | 6599 CONTEXT_CHECK_REQUIRED, |
| 6611 Label next; | 6600 mode); |
| 6612 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); | 6601 __ TailCallStub(&stub); |
| 6613 // TODO(jbramley): Is this the best way to handle this? Can we make the tail | 6602 |
| 6614 // calls conditional, rather than hopping over each one? | 6603 } else if (mode == DONT_OVERRIDE) { |
| 6615 __ CompareAndBranch(kind, candidate_kind, ne, &next); | 6604 Register kind = x3; |
| 6616 T stub(candidate_kind); | 6605 int last_index = |
| 6617 __ TailCallStub(&stub); | 6606 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); |
| 6618 __ Bind(&next); | 6607 for (int i = 0; i <= last_index; ++i) { |
| 6608 Label next; |
| 6609 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); |
| 6610 // TODO(jbramley): Is this the best way to handle this? Can we make the |
| 6611 // tail calls conditional, rather than hopping over each one? |
| 6612 __ CompareAndBranch(kind, candidate_kind, ne, &next); |
| 6613 T stub(candidate_kind); |
| 6614 __ TailCallStub(&stub); |
| 6615 __ Bind(&next); |
| 6616 } |
| 6617 |
| 6618 // If we reached this point there is a problem. |
| 6619 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
| 6620 |
| 6621 } else { |
| 6622 UNREACHABLE(); |
| 6619 } | 6623 } |
| 6620 | |
| 6621 // If we reached this point there is a problem. | |
| 6622 __ Abort(kUnexpectedElementsKindInArrayConstructor); | |
| 6623 } | 6624 } |
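For readers following the new CreateArrayDispatch<T> above: with DISABLE_ALLOCATION_SITES it tail-calls a single stub for the initial elements kind, while with DONT_OVERRIDE it compares the runtime kind in x3 against each fast kind in sequence and tail-calls the matching stub. A standalone, hypothetical C++ sketch of that control flow (enum values and names are illustrative, not V8's):

    #include <cassert>
    #include <cstdio>

    enum Mode { DISABLE_ALLOCATION_SITES, DONT_OVERRIDE };

    // Stand-in for TailCallStub(&stub) on a stub specialized per kind.
    static void TailCallStubForKind(int kind) {
      std::printf("tail call stub for elements kind %d\n", kind);
    }

    static void CreateArrayDispatch(Mode mode, int runtime_kind,
                                    int initial_kind, int last_kind) {
      if (mode == DISABLE_ALLOCATION_SITES) {
        // No allocation-site feedback: one fixed stub, no kind check.
        TailCallStubForKind(initial_kind);
      } else if (mode == DONT_OVERRIDE) {
        // Compare the runtime kind against each candidate, skipping past
        // the non-matching tail calls (CompareAndBranch + Bind above).
        for (int candidate = 0; candidate <= last_kind; ++candidate) {
          if (runtime_kind == candidate) {
            TailCallStubForKind(candidate);
            return;
          }
        }
        assert(false && "unexpected elements kind in array constructor");
      }
    }

    int main() {
      CreateArrayDispatch(DONT_OVERRIDE, 2, 0, 5);  // usage example
      return 0;
    }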
| 6624 | 6625 |
| 6625 | 6626 |
| 6626 // TODO(jbramley): If this needs to be a special case, make it a proper template | 6627 // TODO(jbramley): If this needs to be a special case, make it a proper template |
| 6627 // specialization, and not a separate function. | 6628 // specialization, and not a separate function. |
| 6628 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 6629 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, |
| 6630 AllocationSiteOverrideMode mode) { |
| 6629 // x0 - argc | 6631 // x0 - argc |
| 6630 // x1 - constructor? | 6632 // x1 - constructor? |
| 6631 // x2 - type info cell | 6633 // x2 - type info cell (if mode != DISABLE_ALLOCATION_SITES) |
| 6632 // x3 - kind | 6634 // x3 - kind (if mode != DISABLE_ALLOCATION_SITES) |
| 6633 // sp[0] - last argument | 6635 // sp[0] - last argument |
| 6634 | 6636 |
| 6635 Register type_info_cell = x2; | 6637 Register type_info_cell = x2; |
| 6636 Register kind = x3; | 6638 Register kind = x3; |
| 6637 | 6639 |
| 6638 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | 6640 Label normal_sequence; |
| 6639 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 6641 if (mode == DONT_OVERRIDE) { |
| 6640 STATIC_ASSERT(FAST_ELEMENTS == 2); | 6642 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); |
| 6641 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | 6643 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
| 6642 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 6644 STATIC_ASSERT(FAST_ELEMENTS == 2); |
| 6643 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 6645 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); |
| 6646 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4); |
| 6647 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
| 6644 | 6648 |
| 6645 // Is the low bit set? If so, the array is holey. | 6649 // Is the low bit set? If so, the array is holey. |
| 6646 Label normal_sequence; | 6650 __ Tbnz(kind, 0, &normal_sequence); |
| 6647 __ Tbnz(kind, 0, &normal_sequence); | 6651 } |
| 6648 | 6652 |
| 6649 // Look at the last argument. | 6653 // Look at the last argument. |
| 6650 // TODO(jbramley): What does a 0 argument represent? | 6654 // TODO(jbramley): What does a 0 argument represent? |
| 6651 __ Peek(x10, 0); | 6655 __ Peek(x10, 0); |
| 6652 __ Cbz(x10, &normal_sequence); | 6656 __ Cbz(x10, &normal_sequence); |
| 6653 | 6657 |
| 6654 // We are going to create a holey array, but our kind is non-holey. | 6658 if (mode == DISABLE_ALLOCATION_SITES) { |
| 6655 // Fix kind and retry (only if we have an allocation site in the cell). | 6659 ElementsKind initial = GetInitialFastElementsKind(); |
| 6656 __ Orr(kind, kind, 1); | 6660 ElementsKind holey_initial = GetHoleyElementsKind(initial); |
| 6657 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, | |
| 6658 &normal_sequence); | |
| 6659 | 6661 |
| 6660 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kValueOffset)); | 6662 ArraySingleArgumentConstructorStub stub_holey(holey_initial, |
| 6661 __ Ldr(x10, FieldMemOperand(x10, 0)); | 6663 CONTEXT_CHECK_REQUIRED, |
| 6662 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &normal_sequence); | 6664 DISABLE_ALLOCATION_SITES); |
| 6665 __ TailCallStub(&stub_holey); |
| 6663 | 6666 |
| 6664 // Save the resulting elements kind in type info. | 6667 __ Bind(&normal_sequence); |
| 6665 // TODO(jbramley): Tag and store at the same time. | 6668 ArraySingleArgumentConstructorStub stub(initial, |
| 6666 __ SmiTag(x10, kind); | 6669 CONTEXT_CHECK_REQUIRED, |
| 6667 __ Ldr(x11, FieldMemOperand(type_info_cell, Cell::kValueOffset)); | 6670 DISABLE_ALLOCATION_SITES); |
| 6668 __ Str(x10, FieldMemOperand(x11, AllocationSite::kTransitionInfoOffset)); | 6671 __ TailCallStub(&stub); |
| 6672 } else if (mode == DONT_OVERRIDE) { |
| 6673 // We are going to create a holey array, but our kind is non-holey. |
| 6674 // Fix kind and retry (only if we have an allocation site in the cell). |
| 6675 __ Orr(kind, kind, 1); |
| 6669 | 6676 |
| 6670 __ Bind(&normal_sequence); | 6677 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kValueOffset)); |
| 6671 int last_index = GetSequenceIndexFromFastElementsKind( | 6678 |
| 6672 TERMINAL_FAST_ELEMENTS_KIND); | 6679 if (FLAG_debug_code) { |
| 6673 for (int i = 0; i <= last_index; ++i) { | 6680 __ Ldr(x10, FieldMemOperand(x10, 0)); |
| 6674 Label next; | 6681 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, |
| 6675 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); | 6682 &normal_sequence); |
| 6676 // TODO(jbramley): Is this the best way to handle this? Can we make the tail | 6683 __ Assert(eq, kExpectedAllocationSiteInCell); |
| 6677 // calls conditional, rather than hopping over each one? | 6684 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kValueOffset)); |
| 6678 __ CompareAndBranch(kind, candidate_kind, ne, &next); | 6685 } |
| 6679 ArraySingleArgumentConstructorStub stub(candidate_kind); | 6686 |
| 6680 __ TailCallStub(&stub); | 6687 // Save the resulting elements kind in type info. |
| 6681 __ Bind(&next); | 6688 // TODO(jbramley): Tag and store at the same time. |
| 6689 __ SmiTag(x10, kind); |
| 6690 __ Ldr(x11, FieldMemOperand(type_info_cell, Cell::kValueOffset)); |
| 6691 __ Str(x10, FieldMemOperand(x11, AllocationSite::kTransitionInfoOffset)); |
| 6692 |
| 6693 __ Bind(&normal_sequence); |
| 6694 int last_index = |
| 6695 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); |
| 6696 for (int i = 0; i <= last_index; ++i) { |
| 6697 Label next; |
| 6698 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); |
| 6699 // TODO(jbramley): Is this the best way to handle this? Can we make the |
| 6700 // tail calls conditional, rather than hopping over each one? |
| 6701 __ CompareAndBranch(kind, candidate_kind, ne, &next); |
| 6702 ArraySingleArgumentConstructorStub stub(candidate_kind); |
| 6703 __ TailCallStub(&stub); |
| 6704 __ Bind(&next); |
| 6705 } |
| 6706 |
| 6707 // If we reached this point there is a problem. |
| 6708 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
| 6709 } else { |
| 6710 UNREACHABLE(); |
| 6682 } | 6711 } |
| 6683 | |
| 6684 // If we reached this point there is a problem. | |
| 6685 __ Abort(kUnexpectedElementsKindInArrayConstructor); | |
| 6686 } | 6712 } |
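A small clarifying sketch for the Orr(kind, kind, 1) step above: the STATIC_ASSERTs pin each packed fast kind to an even value and its holey counterpart to the next odd value, so setting the low bit converts a packed kind to its holey variant. The enum below mirrors those asserts; the helper is an illustration, not V8's GetHoleyElementsKind:

    enum ElementsKind {
      FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2, FAST_HOLEY_ELEMENTS = 3,
      FAST_DOUBLE_ELEMENTS = 4, FAST_HOLEY_DOUBLE_ELEMENTS = 5
    };

    // Setting the low bit maps a packed kind to its holey counterpart,
    // which is exactly what the stub's Orr(kind, kind, 1) achieves.
    static ElementsKind ToHoley(ElementsKind kind) {
      return static_cast<ElementsKind>(kind | 1);
    }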
| 6687 | 6713 |
| 6688 | 6714 |
| 6689 template<class T> | 6715 template<class T> |
| 6690 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 6716 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
| 6717 ElementsKind initial_kind = GetInitialFastElementsKind(); |
| 6718 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); |
| 6719 |
| 6691 int to_index = GetSequenceIndexFromFastElementsKind( | 6720 int to_index = GetSequenceIndexFromFastElementsKind( |
| 6692 TERMINAL_FAST_ELEMENTS_KIND); | 6721 TERMINAL_FAST_ELEMENTS_KIND); |
| 6693 for (int i = 0; i <= to_index; ++i) { | 6722 for (int i = 0; i <= to_index; ++i) { |
| 6694 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6723 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 6695 T stub(kind); | 6724 T stub(kind); |
| 6696 stub.GetCode(isolate)->set_is_pregenerated(true); | 6725 stub.GetCode(isolate)->set_is_pregenerated(true); |
| 6697 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { | 6726 if ((AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) || |
| 6727 (!FLAG_track_allocation_sites && |
| 6728 ((kind == initial_kind) || (kind == initial_holey_kind)))) { |
| 6698 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); | 6729 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
| 6699 stub1.GetCode(isolate)->set_is_pregenerated(true); | 6730 stub1.GetCode(isolate)->set_is_pregenerated(true); |
| 6700 } | 6731 } |
| 6701 } | 6732 } |
| 6702 } | 6733 } |
| 6703 | 6734 |
| 6704 | 6735 |
| 6705 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 6736 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 6706 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 6737 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
| 6707 isolate); | 6738 isolate); |
| (...skipping 12 matching lines...) | |
| 6720 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 6751 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); |
| 6721 stubh1.GetCode(isolate)->set_is_pregenerated(true); | 6752 stubh1.GetCode(isolate)->set_is_pregenerated(true); |
| 6722 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 6753 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
| 6723 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 6754 stubh2.GetCode(isolate)->set_is_pregenerated(true); |
| 6724 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 6755 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
| 6725 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 6756 stubh3.GetCode(isolate)->set_is_pregenerated(true); |
| 6726 } | 6757 } |
| 6727 } | 6758 } |
| 6728 | 6759 |
| 6729 | 6760 |
| 6761 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
| 6762 MacroAssembler* masm, |
| 6763 AllocationSiteOverrideMode mode) { |
| 6764 Register argc = x0; |
| 6765 if (argument_count_ == ANY) { |
| 6766 Label zero_case, n_case; |
| 6767 __ Cbz(argc, &zero_case); |
| 6768 __ Cmp(argc, 1); |
| 6769 __ B(ne, &n_case); |
| 6770 |
| 6771 // One argument. |
| 6772 CreateArrayDispatchOneArgument(masm, mode); |
| 6773 |
| 6774 __ Bind(&zero_case); |
| 6775 // No arguments. |
| 6776 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); |
| 6777 |
| 6778 __ Bind(&n_case); |
| 6779 // N arguments. |
| 6780 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); |
| 6781 |
| 6782 } else if (argument_count_ == NONE) { |
| 6783 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); |
| 6784 } else if (argument_count_ == ONE) { |
| 6785 CreateArrayDispatchOneArgument(masm, mode); |
| 6786 } else if (argument_count_ == MORE_THAN_ONE) { |
| 6787 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); |
| 6788 } else { |
| 6789 UNREACHABLE(); |
| 6790 } |
| 6791 } |
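The new GenerateDispatchToArrayStub above folds the argc-based selection that Generate() previously did inline into one helper parameterized by the allocation-site override mode. A hypothetical C++ sketch of that selection, with names chosen only for illustration:

    enum ArgcMode { NONE, ONE, MORE_THAN_ONE, ANY };
    enum SiteMode { DISABLE_SITES, DONT_OVERRIDE_SITES };

    static void NoArgumentCase(SiteMode) {}
    static void OneArgumentCase(SiteMode) {}
    static void NArgumentsCase(SiteMode) {}

    static void DispatchToArrayStub(ArgcMode count, int argc, SiteMode mode) {
      if (count == ANY) {
        // Runtime branch on argc, mirroring the Cbz/Cmp/B sequence above.
        if (argc == 0)      NoArgumentCase(mode);
        else if (argc == 1) OneArgumentCase(mode);
        else                NArgumentsCase(mode);
      } else if (count == NONE) {
        NoArgumentCase(mode);
      } else if (count == ONE) {
        OneArgumentCase(mode);
      } else {  // MORE_THAN_ONE
        NArgumentsCase(mode);
      }
    }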
| 6792 |
| 6793 |
| 6730 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 6794 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 6731 // ----------- S t a t e ------------- | 6795 // ----------- S t a t e ------------- |
| 6732 // -- x0 : argc (only if argument_count_ == ANY) | 6796 // -- x0 : argc (only if argument_count_ == ANY) |
| 6733 // -- x1 : constructor | 6797 // -- x1 : constructor |
| 6734 // -- x2 : type info cell | 6798 // -- x2 : type info cell |
| 6735 // -- sp[0] : return address | 6799 // -- sp[0] : return address |
| 6736 // -- sp[4] : last argument | 6800 // -- sp[4] : last argument |
| 6737 // ----------------------------------- | 6801 // ----------------------------------- |
| 6738 Register argc = x0; | |
| 6739 Register constructor = x1; | 6802 Register constructor = x1; |
| 6740 Register type_info_cell = x2; | 6803 Register type_info_cell = x2; |
| 6741 | 6804 |
| 6742 if (FLAG_debug_code) { | 6805 if (FLAG_debug_code) { |
| 6743 // The array construct code is only set for the global and natives | 6806 // The array construct code is only set for the global and natives |
| 6744 // builtin Array functions which always have maps. | 6807 // builtin Array functions which always have maps. |
| 6745 | 6808 |
| 6746 Label unexpected_map, map_ok; | 6809 Label unexpected_map, map_ok; |
| 6747 // Initial map for the builtin Array function should be a map. | 6810 // Initial map for the builtin Array function should be a map. |
| 6748 __ Ldr(x10, FieldMemOperand(constructor, | 6811 __ Ldr(x10, FieldMemOperand(constructor, |
| 6749 JSFunction::kPrototypeOrInitialMapOffset)); | 6812 JSFunction::kPrototypeOrInitialMapOffset)); |
| 6750 // Will both indicate a NULL and a Smi. | 6813 // Will both indicate a NULL and a Smi. |
| 6751 __ JumpIfSmi(x10, &unexpected_map); | 6814 __ JumpIfSmi(x10, &unexpected_map); |
| 6752 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); | 6815 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); |
| 6753 __ Bind(&unexpected_map); | 6816 __ Bind(&unexpected_map); |
| 6754 __ Abort(kUnexpectedInitialMapForArrayFunction); | 6817 __ Abort(kUnexpectedInitialMapForArrayFunction); |
| 6755 __ Bind(&map_ok); | 6818 __ Bind(&map_ok); |
| 6756 | 6819 |
| 6757 // In type_info_cell, we expect either undefined or a valid Cell. | 6820 // In type_info_cell, we expect either undefined or a valid Cell. |
| 6758 Label okay_here; | 6821 Label okay_here; |
| 6759 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 6822 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
| 6760 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &okay_here); | 6823 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &okay_here); |
| 6761 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kMapOffset)); | 6824 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kMapOffset)); |
| 6762 __ Cmp(x10, Operand(cell_map)); | 6825 __ Cmp(x10, Operand(cell_map)); |
| 6763 __ Assert(eq, kExpectedPropertyCellInTypeInfoCell); | 6826 __ Assert(eq, kExpectedPropertyCellInTypeInfoCell); |
| 6764 __ Bind(&okay_here); | 6827 __ Bind(&okay_here); |
| 6765 } | 6828 } |
| 6766 | 6829 |
| 6767 Register kind = x3; | 6830 Register kind = x3; |
| 6768 Label no_info, switch_ready; | 6831 Label no_info; |
| 6769 // Get the elements kind and case on that. | 6832 // Get the elements kind and case on that. |
| 6770 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &no_info); | 6833 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &no_info); |
| 6771 __ Ldr(kind, FieldMemOperand(type_info_cell, PropertyCell::kValueOffset)); | 6834 __ Ldr(kind, FieldMemOperand(type_info_cell, PropertyCell::kValueOffset)); |
| 6772 | 6835 |
| 6773 // The type cell may have undefined in its value. | 6836 // If the type cell is undefined, or contains anything other than an |
| 6774 __ JumpIfRoot(kind, Heap::kUndefinedValueRootIndex, &no_info); | 6837 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
| 6775 | |
| 6776 // The type cell has either an AllocationSite or a JSFunction. | |
| 6777 __ Ldr(x10, FieldMemOperand(kind, AllocationSite::kMapOffset)); | 6838 __ Ldr(x10, FieldMemOperand(kind, AllocationSite::kMapOffset)); |
| 6778 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &no_info); | 6839 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &no_info); |
| 6779 | 6840 |
| 6780 __ Ldrsw(kind, | 6841 __ Ldrsw(kind, |
| 6781 UntagSmiFieldMemOperand(kind, | 6842 UntagSmiFieldMemOperand(kind, |
| 6782 AllocationSite::kTransitionInfoOffset)); | 6843 AllocationSite::kTransitionInfoOffset)); |
| 6783 __ B(&switch_ready); | 6844 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 6784 | 6845 |
| 6785 __ Bind(&no_info); | 6846 __ Bind(&no_info); |
| 6786 __ Mov(kind, GetInitialFastElementsKind()); | 6847 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
| 6787 __ Bind(&switch_ready); | |
| 6788 | |
| 6789 if (argument_count_ == ANY) { | |
| 6790 Label zero_case, n_case; | |
| 6791 __ Cbz(argc, &zero_case); | |
| 6792 __ Cmp(argc, 1); | |
| 6793 __ B(ne, &n_case); | |
| 6794 | |
| 6795 // One argument. | |
| 6796 CreateArrayDispatchOneArgument(masm); | |
| 6797 | |
| 6798 __ Bind(&zero_case); | |
| 6799 // No arguments. | |
| 6800 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | |
| 6801 | |
| 6802 __ Bind(&n_case); | |
| 6803 // N arguments. | |
| 6804 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | |
| 6805 | |
| 6806 } else if (argument_count_ == NONE) { | |
| 6807 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | |
| 6808 } else if (argument_count_ == ONE) { | |
| 6809 CreateArrayDispatchOneArgument(masm); | |
| 6810 } else if (argument_count_ == MORE_THAN_ONE) { | |
| 6811 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | |
| 6812 } else { | |
| 6813 UNREACHABLE(); | |
| 6814 } | |
| 6815 } | 6848 } |
| 6816 | 6849 |
| 6817 | 6850 |
| 6818 void InternalArrayConstructorStub::GenerateCase( | 6851 void InternalArrayConstructorStub::GenerateCase( |
| 6819 MacroAssembler* masm, ElementsKind kind) { | 6852 MacroAssembler* masm, ElementsKind kind) { |
| 6820 Label zero_case, n_case; | 6853 Label zero_case, n_case; |
| 6821 Register argc = x0; | 6854 Register argc = x0; |
| 6822 | 6855 |
| 6823 __ Cbz(argc, &zero_case); | 6856 __ Cbz(argc, &zero_case); |
| 6824 __ CompareAndBranch(argc, 1, ne, &n_case); | 6857 __ CompareAndBranch(argc, 1, ne, &n_case); |
| (...skipping 81 matching lines...) | |
| 6906 __ Bind(&fast_elements_case); | 6939 __ Bind(&fast_elements_case); |
| 6907 GenerateCase(masm, FAST_ELEMENTS); | 6940 GenerateCase(masm, FAST_ELEMENTS); |
| 6908 } | 6941 } |
| 6909 | 6942 |
| 6910 | 6943 |
| 6911 #undef __ | 6944 #undef __ |
| 6912 | 6945 |
| 6913 } } // namespace v8::internal | 6946 } } // namespace v8::internal |
| 6914 | 6947 |
| 6915 #endif // V8_TARGET_ARCH_A64 | 6948 #endif // V8_TARGET_ARCH_A64 |