OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1777 matching lines...)
1788 Handle<FixedArrayBase> constant_elements_values( | 1788 Handle<FixedArrayBase> constant_elements_values( |
1789 FixedArrayBase::cast(constant_elements->get(1))); | 1789 FixedArrayBase::cast(constant_elements->get(1))); |
1790 | 1790 |
1791 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | 1791 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
1792 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) { | 1792 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) { |
1793 // If the only customer of allocation sites is transitioning, then | 1793 // If the only customer of allocation sites is transitioning, then |
1794 // we can turn it off if we don't have anywhere else to transition to. | 1794 // we can turn it off if we don't have anywhere else to transition to. |
1795 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1795 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
1796 } | 1796 } |
1797 | 1797 |
1798 Heap* heap = isolate()->heap(); | 1798 if (expr->depth() > 1) { |
1799 if (has_constant_fast_elements && | |
1800 constant_elements_values->map() == heap->fixed_cow_array_map()) { | |
1801 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot | |
1802 // change, so it's possible to specialize the stub in advance. | |
1803 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1); | |
1804 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | |
1805 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); | |
1806 __ Move(rbx, Smi::FromInt(expr->literal_index())); | |
1807 __ Move(rcx, constant_elements); | |
1808 FastCloneShallowArrayStub stub( | |
1809 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, | |
1810 allocation_site_mode, | |
1811 length); | |
1812 __ CallStub(&stub); | |
1813 } else if (expr->depth() > 1 || Serializer::enabled() || | |
1814 length > FastCloneShallowArrayStub::kMaximumClonedLength) { | |
1815 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1799 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1816 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 1800 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
1817 __ Push(Smi::FromInt(expr->literal_index())); | 1801 __ Push(Smi::FromInt(expr->literal_index())); |
1818 __ Push(constant_elements); | 1802 __ Push(constant_elements); |
1819 __ Push(Smi::FromInt(flags)); | 1803 __ Push(Smi::FromInt(flags)); |
1820 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); | 1804 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); |
1821 } else { | 1805 } else { |
1822 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || | |
1823 FLAG_smi_only_arrays); | |
1824 FastCloneShallowArrayStub::Mode mode = | |
1825 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; | |
1826 | |
1827 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot | |
1828 // change, so it's possible to specialize the stub in advance. | |
1829 if (has_constant_fast_elements) { | |
1830 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; | |
1831 } | |
1832 | |
1833 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1806 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1834 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 1807 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
1835 __ Move(rbx, Smi::FromInt(expr->literal_index())); | 1808 __ Move(rbx, Smi::FromInt(expr->literal_index())); |
1836 __ Move(rcx, constant_elements); | 1809 __ Move(rcx, constant_elements); |
1837 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 1810 FastCloneShallowArrayStub stub(allocation_site_mode); |
1838 __ CallStub(&stub); | 1811 __ CallStub(&stub); |
1839 } | 1812 } |
1840 | 1813 |
1841 bool result_saved = false; // Is the result saved to the stack? | 1814 bool result_saved = false; // Is the result saved to the stack? |
1842 | 1815 |
1843 // Emit code to evaluate all the non-constant subexpressions and to store | 1816 // Emit code to evaluate all the non-constant subexpressions and to store |
1844 // them into the newly cloned array. | 1817 // them into the newly cloned array. |
1845 for (int i = 0; i < length; i++) { | 1818 for (int i = 0; i < length; i++) { |
1846 Expression* subexpr = subexprs->at(i); | 1819 Expression* subexpr = subexprs->at(i); |
1847 // If the subexpression is a literal or a simple materialized literal it | 1820 // If the subexpression is a literal or a simple materialized literal it |
(...skipping 3076 matching lines...)
4924 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), | 4897 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), |
4925 Assembler::target_address_at(call_target_address, | 4898 Assembler::target_address_at(call_target_address, |
4926 unoptimized_code)); | 4899 unoptimized_code)); |
4927 return OSR_AFTER_STACK_CHECK; | 4900 return OSR_AFTER_STACK_CHECK; |
4928 } | 4901 } |
4929 | 4902 |
4930 | 4903 |
4931 } } // namespace v8::internal | 4904 } } // namespace v8::internal |
4932 | 4905 |
4933 #endif // V8_TARGET_ARCH_X64 | 4906 #endif // V8_TARGET_ARCH_X64 |
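
Note on the array-literal hunk above: the new code collapses the old three-way choice (copy-on-write elements stub, runtime call for deep/oversized/serialized literals, generic clone stub with an explicit mode and length) into a single depth check, with FastCloneShallowArrayStub now taking only the allocation-site mode. The stand-alone C++ sketch below models just that branch with stand-in names (ClonePath and ChooseClonePath are hypothetical, not V8 API); it is illustrative only, assuming a literal with expr->depth() > 1 is the one case still routed to Runtime::kHiddenCreateArrayLiteral.

// Illustrative sketch only -- stand-in types, not V8's real API.
#include <cstdio>

enum class ClonePath { kRuntimeCreateArrayLiteral, kFastCloneShallowArrayStub };

// Mirrors the branch in the NEW column: nested array literals fall back to
// the runtime call, everything else goes through the single-mode stub.
ClonePath ChooseClonePath(int literal_depth) {
  return literal_depth > 1 ? ClonePath::kRuntimeCreateArrayLiteral
                           : ClonePath::kFastCloneShallowArrayStub;
}

int main() {
  // A flat literal like [1, x, 3] has depth 1 and uses the stub;
  // [[1], 2] has depth 2 and goes through the runtime call.
  std::printf("depth 1 -> %s\n",
              ChooseClonePath(1) == ClonePath::kFastCloneShallowArrayStub
                  ? "stub" : "runtime");
  std::printf("depth 2 -> %s\n",
              ChooseClonePath(2) == ClonePath::kFastCloneShallowArrayStub
                  ? "stub" : "runtime");
  return 0;
}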