OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4971 matching lines...) |
4982 | 4982 |
4983 // Load the literals array of the function. | 4983 // Load the literals array of the function. |
4984 __ movq(literals.reg(), | 4984 __ movq(literals.reg(), |
4985 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); | 4985 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); |
4986 | 4986 |
4987 frame_->Push(&literals); | 4987 frame_->Push(&literals); |
4988 frame_->Push(Smi::FromInt(node->literal_index())); | 4988 frame_->Push(Smi::FromInt(node->literal_index())); |
4989 frame_->Push(node->constant_elements()); | 4989 frame_->Push(node->constant_elements()); |
4990 int length = node->values()->length(); | 4990 int length = node->values()->length(); |
4991 Result clone; | 4991 Result clone; |
4992 if (node->depth() > 1) { | 4992 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) { |
| 4993 FastCloneShallowArrayStub stub( |
| 4994 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
| 4995 clone = frame_->CallStub(&stub, 3); |
| 4996 } else if (node->depth() > 1) { |
4993 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); | 4997 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); |
4994 } else if (length > FastCloneShallowArrayStub::kMaximumLength) { | 4998 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
4995 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); | 4999 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
4996 } else { | 5000 } else { |
4997 FastCloneShallowArrayStub stub(length); | 5001 FastCloneShallowArrayStub stub( |
| 5002 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
4998 clone = frame_->CallStub(&stub, 3); | 5003 clone = frame_->CallStub(&stub, 3); |
4999 } | 5004 } |
5000 frame_->Push(&clone); | 5005 frame_->Push(&clone); |
5001 | 5006 |
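The new dispatch above first checks whether the constant elements were allocated with the copy-on-write map and only then falls back on depth and length. It assumes FastCloneShallowArrayStub now takes a cloning mode in addition to a length; a minimal sketch of what the stub's declaration could look like, reconstructed from the calls in this hunk (the header location, the constructor body, and the value of kMaximumClonedLength are assumptions):

// Hypothetical declaration (e.g. in code-stubs.h); only the members
// referenced by this patch are shown.
class FastCloneShallowArrayStub : public CodeStub {
 public:
  // Largest elements array the stub clones inline; longer literals take
  // the runtime path above.  The exact value is an assumption.
  static const int kMaximumClonedLength = 8;

  enum Mode {
    CLONE_ELEMENTS,          // allocate and fill a writable elements copy
    COPY_ON_WRITE_ELEMENTS   // share the boilerplate's COW elements array
  };

  FastCloneShallowArrayStub(Mode mode, int length)
      : mode_(mode),
        // Assumed: in copy-on-write mode there is nothing to copy, so the
        // stub is specialized for a zero-length copy and shares the
        // boilerplate's elements array instead.
        length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {}

 private:
  Mode mode_;
  int length_;
};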
5002 // Generate code to set the elements in the array that are not | 5007 // Generate code to set the elements in the array that are not |
5003 // literals. | 5008 // literals. |
5004 for (int i = 0; i < length; i++) { | 5009 for (int i = 0; i < length; i++) { |
5005 Expression* value = node->values()->at(i); | 5010 Expression* value = node->values()->at(i); |
5006 | 5011 |
5007 // If value is a literal the property value is already set in the | 5012 if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) { |
5008 // boilerplate object. | 5013 continue; |
5009 if (value->AsLiteral() != NULL) continue; | 5014 } |
5010 // If value is a materialized literal the property value is already set | |
5011 // in the boilerplate object if it is simple. | |
5012 if (CompileTimeValue::IsCompileTimeValue(value)) continue; | |
5013 | 5015 |
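The two removed per-element checks are folded into a single CompileTimeValue predicate. A plausible sketch of that helper, reconstructed from the checks it replaces (its actual home, likely a shared compiler header or parser.cc, is an assumption):

// Sketch: an array literal element needs generated initialization code
// only if it is neither a literal nor a simple materialized
// (compile-time) literal, since those are already present in the
// boilerplate object.
bool CompileTimeValue::ArrayLiteralElementNeedsInitialization(
    Expression* value) {
  if (value->AsLiteral() != NULL) return false;
  if (CompileTimeValue::IsCompileTimeValue(value)) return false;
  return true;
}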
5014 // The property must be set by generated code. | 5016 // The property must be set by generated code. |
5015 Load(value); | 5017 Load(value); |
5016 | 5018 |
5017 // Get the property value off the stack. | 5019 // Get the property value off the stack. |
5018 Result prop_value = frame_->Pop(); | 5020 Result prop_value = frame_->Pop(); |
5019 prop_value.ToRegister(); | 5021 prop_value.ToRegister(); |
5020 | 5022 |
5021 // Fetch the array literal while leaving a copy on the stack and | 5023 // Fetch the array literal while leaving a copy on the stack and |
5022 // use it to get the elements array. | 5024 // use it to get the elements array. |
(...skipping 1845 matching lines...) |
6868 | 6870 |
6869 // Fetch the map and check if array is in fast case. | 6871 // Fetch the map and check if array is in fast case. |
6870 // Check that object doesn't require security checks and | 6872 // Check that object doesn't require security checks and |
6871 // has no indexed interceptor. | 6873 // has no indexed interceptor. |
6872 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); | 6874 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); |
6873 deferred->Branch(below); | 6875 deferred->Branch(below); |
6874 __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset), | 6876 __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset), |
6875 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); | 6877 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); |
6876 deferred->Branch(not_zero); | 6878 deferred->Branch(not_zero); |
6877 | 6879 |
6878 // Check the object's elements are in fast case. | 6880 // Check the object's elements are in fast case and writable. |
6879 __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); | 6881 __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); |
6880 __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), | 6882 __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), |
6881 Heap::kFixedArrayMapRootIndex); | 6883 Heap::kFixedArrayMapRootIndex); |
6882 deferred->Branch(not_equal); | 6884 deferred->Branch(not_equal); |
6883 | 6885 |
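Note: the comparison against Heap::kFixedArrayMapRootIndex is what the updated comment means by "and writable". Copy-on-write element arrays carry the distinct fixed_cow_array_map (the same map compared against in the literal hunk above), so they fail this check and the element swap is routed to the deferred slow path rather than mutating shared storage in place.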
6884 // Check that both indices are smis. | 6886 // Check that both indices are smis. |
6885 Condition both_smi = __ CheckBothSmi(index1.reg(), index2.reg()); | 6887 Condition both_smi = __ CheckBothSmi(index1.reg(), index2.reg()); |
6886 deferred->Branch(NegateCondition(both_smi)); | 6888 deferred->Branch(NegateCondition(both_smi)); |
6887 | 6889 |
6888 // Bring addresses into index1 and index2. | 6890 // Bring addresses into index1 and index2. |
(...skipping 1523 matching lines...) |
8412 // with the expected receiver map, which is not in the root array. | 8414 // with the expected receiver map, which is not in the root array. |
8413 masm_->movq(kScratchRegister, Factory::null_value(), | 8415 masm_->movq(kScratchRegister, Factory::null_value(), |
8414 RelocInfo::EMBEDDED_OBJECT); | 8416 RelocInfo::EMBEDDED_OBJECT); |
8415 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), | 8417 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
8416 kScratchRegister); | 8418 kScratchRegister); |
8417 deferred->Branch(not_equal); | 8419 deferred->Branch(not_equal); |
8418 | 8420 |
8419 // Check that the key is a non-negative smi. | 8421 // Check that the key is a non-negative smi. |
8420 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); | 8422 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); |
8421 | 8423 |
8422 // Get the elements array from the receiver and check that it | 8424 // Get the elements array from the receiver. |
8423 // is not a dictionary. | |
8424 __ movq(elements.reg(), | 8425 __ movq(elements.reg(), |
8425 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 8426 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
8426 if (FLAG_debug_code) { | 8427 __ AssertFastElements(elements.reg()); |
8427 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), | |
8428 Factory::fixed_array_map()); | |
8429 __ Assert(equal, "JSObject with fast elements map has slow elements"); | |
8430 } | |
8431 | 8428 |
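The open-coded FLAG_debug_code check is replaced by a MacroAssembler helper; since copy-on-write arrays are now a valid fast-elements backing store, the assert has to accept either map. A rough sketch of what AssertFastElements plausibly expands to (the exact implementation in the macro assembler is an assumption):

void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    // Accept an ordinary (writable) fixed array...
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok);
    // ...or a copy-on-write fixed array.
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}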
8432 // Check that key is within bounds. | 8429 // Check that key is within bounds. |
8433 __ SmiCompare(key.reg(), | 8430 __ SmiCompare(key.reg(), |
8434 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); | 8431 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
8435 deferred->Branch(above_equal); | 8432 deferred->Branch(above_equal); |
8436 | 8433 |
8437 // Load and check that the result is not the hole. We could | 8434 // Load and check that the result is not the hole. We could |
8438 // reuse the index or elements register for the value. | 8435 // reuse the index or elements register for the value. |
8439 // | 8436 // |
8440 // TODO(206): Consider whether it makes sense to try some | 8437 // TODO(206): Consider whether it makes sense to try some |
(...skipping 393 matching lines...) |
8834 // boilerplate. | 8831 // boilerplate. |
8835 Label slow_case; | 8832 Label slow_case; |
8836 __ movq(rcx, Operand(rsp, 3 * kPointerSize)); | 8833 __ movq(rcx, Operand(rsp, 3 * kPointerSize)); |
8837 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 8834 __ movq(rax, Operand(rsp, 2 * kPointerSize)); |
8838 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 8835 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); |
8839 __ movq(rcx, | 8836 __ movq(rcx, |
8840 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); | 8837 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); |
8841 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 8838 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); |
8842 __ j(equal, &slow_case); | 8839 __ j(equal, &slow_case); |
8843 | 8840 |
| 8841 if (FLAG_debug_code) { |
| 8842 const char* message; |
| 8843 Heap::RootListIndex expected_map_index; |
| 8844 if (mode_ == CLONE_ELEMENTS) { |
| 8845 message = "Expected (writable) fixed array"; |
| 8846 expected_map_index = Heap::kFixedArrayMapRootIndex; |
| 8847 } else { |
| 8848 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); |
| 8849 message = "Expected copy-on-write fixed array"; |
| 8850 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
| 8851 } |
| 8852 __ push(rcx); |
| 8853 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
| 8854 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 8855 expected_map_index); |
| 8856 __ Assert(equal, message); |
| 8857 __ pop(rcx); |
| 8858 } |
| 8859 |
8844 // Allocate both the JS array and the elements array in one big | 8860 // Allocate both the JS array and the elements array in one big |
8845 // allocation. This avoids multiple limit checks. | 8861 // allocation. This avoids multiple limit checks. |
8846 __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT); | 8862 __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT); |
8847 | 8863 |
8848 // Copy the JS array part. | 8864 // Copy the JS array part. |
8849 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 8865 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
8850 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { | 8866 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { |
8851 __ movq(rbx, FieldOperand(rcx, i)); | 8867 __ movq(rbx, FieldOperand(rcx, i)); |
8852 __ movq(FieldOperand(rax, i), rbx); | 8868 __ movq(FieldOperand(rax, i), rbx); |
8853 } | 8869 } |
8854 } | 8870 } |
8855 | 8871 |
8856 if (length_ > 0) { | 8872 if (length_ > 0) { |
8857 // Get hold of the elements array of the boilerplate and setup the | 8873 // Get hold of the elements array of the boilerplate and setup the |
8858 // elements pointer in the resulting object. | 8874 // elements pointer in the resulting object. |
8859 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 8875 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
8860 __ lea(rdx, Operand(rax, JSArray::kSize)); | 8876 __ lea(rdx, Operand(rax, JSArray::kSize)); |
8861 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); | 8877 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); |
8862 | 8878 |
8863 // Copy the elements array. | 8879 // Copy the elements array. |
8864 for (int i = 0; i < elements_size; i += kPointerSize) { | 8880 for (int i = 0; i < elements_size; i += kPointerSize) { |
8865 __ movq(rbx, FieldOperand(rcx, i)); | 8881 __ movq(rbx, FieldOperand(rcx, i)); |
8866 __ movq(FieldOperand(rdx, i), rbx); | 8882 __ movq(FieldOperand(rdx, i), rbx); |
8867 } | 8883 } |
8868 } | 8884 } |
8869 | 8885 |
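In copy-on-write mode the stub presumably runs with length_ == 0 (see the constructor sketch above), so the loop over the JSArray header copies the elements pointer verbatim and the clone shares the boilerplate's copy-on-write backing store; the elements-copy block above is skipped entirely.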
| 8886 if (mode_ == COPY_ON_WRITE_ELEMENTS) { |
| 8887 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1); |
| 8888 } |
| 8889 |
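The new cow_arrays_created_stub counter presumably needs a matching entry in the stats-counter list (v8-counters.h); something along these lines, where the exact list and caption are assumptions:

  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub)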
8870 // Return and remove the on-stack parameters. | 8890 // Return and remove the on-stack parameters. |
8871 __ ret(3 * kPointerSize); | 8891 __ ret(3 * kPointerSize); |
8872 | 8892 |
8873 __ bind(&slow_case); | 8893 __ bind(&slow_case); |
8874 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 8894 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
8875 } | 8895 } |
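For reference, both paths consume the same three stack arguments pushed by the code generator earlier in this patch (the literals array, the literal index, and the constant elements), which is why the fast path returns with ret(3 * kPointerSize) and the slow case tail-calls Runtime::kCreateArrayLiteralShallow with 3 arguments.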
8876 | 8896 |
8877 | 8897 |
8878 void ToBooleanStub::Generate(MacroAssembler* masm) { | 8898 void ToBooleanStub::Generate(MacroAssembler* masm) { |
8879 Label false_result, true_result, not_string; | 8899 Label false_result, true_result, not_string; |
(...skipping 3864 matching lines...) |
12744 #undef __ | 12764 #undef __ |
12745 | 12765 |
12746 void RecordWriteStub::Generate(MacroAssembler* masm) { | 12766 void RecordWriteStub::Generate(MacroAssembler* masm) { |
12747 masm->RecordWriteHelper(object_, addr_, scratch_); | 12767 masm->RecordWriteHelper(object_, addr_, scratch_); |
12748 masm->ret(0); | 12768 masm->ret(0); |
12749 } | 12769 } |
12750 | 12770 |
12751 } } // namespace v8::internal | 12771 } } // namespace v8::internal |
12752 | 12772 |
12753 #endif // V8_TARGET_ARCH_X64 | 12773 #endif // V8_TARGET_ARCH_X64 |