OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3379 matching lines...)
3390 | 3390 |
3391 Register tos = frame_->GetTOSRegister(); | 3391 Register tos = frame_->GetTOSRegister(); |
3392 // Load the function of this activation. | 3392 // Load the function of this activation. |
3393 __ ldr(tos, frame_->Function()); | 3393 __ ldr(tos, frame_->Function()); |
3394 // Load the literals array of the function. | 3394 // Load the literals array of the function. |
3395 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset)); | 3395 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset)); |
3396 frame_->EmitPush(tos); | 3396 frame_->EmitPush(tos); |
3397 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index()))); | 3397 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index()))); |
3398 frame_->EmitPush(Operand(node->constant_elements())); | 3398 frame_->EmitPush(Operand(node->constant_elements())); |
3399 int length = node->values()->length(); | 3399 int length = node->values()->length(); |
3400 if (node->depth() > 1) { | 3400 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) { |
| 3401 FastCloneShallowArrayStub stub( |
| 3402 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
| 3403 frame_->CallStub(&stub, 3); |
| 3404 } else if (node->depth() > 1) { |
3401 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); | 3405 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); |
3402 } else if (length > FastCloneShallowArrayStub::kMaximumLength) { | 3406 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
3403 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); | 3407 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
3404 } else { | 3408 } else { |
3405 FastCloneShallowArrayStub stub(length); | 3409 FastCloneShallowArrayStub stub( |
| 3410 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
3406 frame_->CallStub(&stub, 3); | 3411 frame_->CallStub(&stub, 3); |
3407 } | 3412 } |
3408 frame_->EmitPush(r0); // save the result | 3413 frame_->EmitPush(r0); // save the result |
3409 // r0: created object literal | 3414 // r0: created object literal |
3410 | 3415 |
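For reference, the new else-if chain above picks one of four cloning strategies for the array literal. A minimal sketch of that selection in plain C++, using hypothetical names and an assumed value for kMaximumClonedLength (the real constant is declared alongside the stub, not in this hunk):

// Hypothetical helper (not part of the patch) restating the selection above.
enum class CloneStrategy { kCopyOnWriteStub, kDeepRuntime, kShallowRuntime, kCloneStub };

constexpr int kMaximumClonedLength = 8;  // assumed value; the real constant lives with the stub

CloneStrategy ChooseCloneStrategy(bool elements_are_cow, int depth, int length) {
  if (elements_are_cow) return CloneStrategy::kCopyOnWriteStub;   // share elements, copy on first write
  if (depth > 1) return CloneStrategy::kDeepRuntime;              // nested literals need the full runtime call
  if (length > kMaximumClonedLength) return CloneStrategy::kShallowRuntime;
  return CloneStrategy::kCloneStub;                               // small, flat literal: inline clone stub
}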
3411 // Generate code to set the elements in the array that are not | 3416 // Generate code to set the elements in the array that are not |
3412 // literals. | 3417 // literals. |
3413 for (int i = 0; i < node->values()->length(); i++) { | 3418 for (int i = 0; i < node->values()->length(); i++) { |
3414 Expression* value = node->values()->at(i); | 3419 Expression* value = node->values()->at(i); |
3415 | 3420 |
(...skipping 1938 matching lines...)
5354 | 5359 |
5355 // Fetch the map and check if array is in fast case. | 5360 // Fetch the map and check if array is in fast case. |
5356 // Check that object doesn't require security checks and | 5361 // Check that object doesn't require security checks and |
5357 // has no indexed interceptor. | 5362 // has no indexed interceptor. |
5358 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE); | 5363 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE); |
5359 deferred->Branch(lt); | 5364 deferred->Branch(lt); |
5360 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset)); | 5365 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset)); |
5361 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | 5366 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); |
5362 deferred->Branch(nz); | 5367 deferred->Branch(nz); |
5363 | 5368 |
5364 // Check the object's elements are in fast case. | 5369 // Check the object's elements are in fast case and writable. |
5365 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset)); | 5370 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset)); |
5366 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset)); | 5371 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset)); |
5367 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | 5372 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
5368 __ cmp(tmp2, ip); | 5373 __ cmp(tmp2, ip); |
5369 deferred->Branch(ne); | 5374 deferred->Branch(ne); |
5370 | 5375 |
5371 // Smi-tagging is equivalent to multiplying by 2. | 5376 // Smi-tagging is equivalent to multiplying by 2. |
5372 STATIC_ASSERT(kSmiTag == 0); | 5377 STATIC_ASSERT(kSmiTag == 0); |
5373 STATIC_ASSERT(kSmiTagSize == 1); | 5378 STATIC_ASSERT(kSmiTagSize == 1); |
5374 | 5379 |
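As the two STATIC_ASSERTs above pin down, smi-tagging on this 32-bit target is a left shift by one, so a tagged index already equals index * 2 and can be reused directly when scaling offsets. A minimal standalone sketch under those same assumptions:

#include <cassert>
#include <cstdint>

// Assumed 32-bit smi encoding matching the asserts above: tag value 0 in the low bit.
constexpr int kSmiTag = 0;
constexpr int kSmiTagSize = 1;

constexpr int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
constexpr int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }

int main() {
  static_assert(kSmiTag == 0 && kSmiTagSize == 1, "encoding assumed by the generated code");
  assert(SmiTag(21) == 42);            // tagging is multiplication by 2
  assert(SmiUntag(SmiTag(21)) == 21);  // untagging is an arithmetic shift right
  return 0;
}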
(...skipping 1271 matching lines...)
6646 } | 6651 } |
6647 | 6652 |
6648 #ifdef DEBUG | 6653 #ifdef DEBUG |
6649 Label check_inlined_codesize; | 6654 Label check_inlined_codesize; |
6650 masm_->bind(&check_inlined_codesize); | 6655 masm_->bind(&check_inlined_codesize); |
6651 #endif | 6656 #endif |
6652 __ mov(scratch2, Operand(Factory::null_value())); | 6657 __ mov(scratch2, Operand(Factory::null_value())); |
6653 __ cmp(scratch1, scratch2); | 6658 __ cmp(scratch1, scratch2); |
6654 deferred->Branch(ne); | 6659 deferred->Branch(ne); |
6655 | 6660 |
6656 // Get the elements array from the receiver and check that it | 6661 // Get the elements array from the receiver. |
6657 // is not a dictionary. | |
6658 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 6662 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
6659 if (FLAG_debug_code) { | 6663 __ AssertFastElements(scratch1); |
6660 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset)); | |
6661 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | |
6662 __ cmp(scratch2, ip); | |
6663 __ Assert(eq, "JSObject with fast elements map has slow elements"); | |
6664 } | |
6665 | 6664 |
6666 // Check that key is within bounds. Use unsigned comparison to handle | 6665 // Check that key is within bounds. Use unsigned comparison to handle |
6667 // negative keys. | 6666 // negative keys. |
6668 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); | 6667 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); |
6669 __ cmp(scratch2, key); | 6668 __ cmp(scratch2, key); |
6670 deferred->Branch(ls); // Unsigned less equal. | 6669 deferred->Branch(ls); // Unsigned less equal. |
6671 | 6670 |
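The ls (unsigned lower-or-same) branch above relies on a negative smi key reinterpreting as a very large unsigned value, so a single unsigned compare rejects both out-of-range and negative keys. A minimal sketch of the same trick, assuming both key and length are smi-tagged:

#include <cassert>
#include <cstdint>

// Both key and length are smi-tagged (value * 2); one unsigned compare rejects
// out-of-range and negative keys alike, mirroring the cmp/ls pair above.
bool KeyInBounds(int32_t tagged_key, int32_t tagged_length) {
  return static_cast<uint32_t>(tagged_key) < static_cast<uint32_t>(tagged_length);
}

int main() {
  assert(KeyInBounds(2 * 3, 2 * 10));    // key 3, length 10: in bounds
  assert(!KeyInBounds(2 * 10, 2 * 10));  // key == length: out of bounds
  assert(!KeyInBounds(2 * -1, 2 * 10));  // negative key wraps to a huge unsigned value
  return 0;
}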
6672 // Load and check that the result is not the hole (key is a smi). | 6671 // Load and check that the result is not the hole (key is a smi). |
6673 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); | 6672 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); |
6674 __ add(scratch1, | 6673 __ add(scratch1, |
(...skipping 389 matching lines...)
7064 // boilerplate. | 7063 // boilerplate. |
7065 Label slow_case; | 7064 Label slow_case; |
7066 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); | 7065 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); |
7067 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); | 7066 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); |
7068 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 7067 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
7069 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 7068 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
7070 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 7069 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
7071 __ cmp(r3, ip); | 7070 __ cmp(r3, ip); |
7072 __ b(eq, &slow_case); | 7071 __ b(eq, &slow_case); |
7073 | 7072 |
| 7073 if (FLAG_debug_code) { |
| 7074 const char* message; |
| 7075 Heap::RootListIndex expected_map_index; |
| 7076 if (mode_ == CLONE_ELEMENTS) { |
| 7077 message = "Expected (writable) fixed array"; |
| 7078 expected_map_index = Heap::kFixedArrayMapRootIndex; |
| 7079 } else { |
| 7080 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); |
| 7081 message = "Expected copy-on-write fixed array"; |
| 7082 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
| 7083 } |
| 7084 __ push(r3); |
| 7085 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 7086 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 7087 __ LoadRoot(ip, expected_map_index); |
| 7088 __ cmp(r3, ip); |
| 7089 __ Assert(eq, message); |
| 7090 __ pop(r3); |
| 7091 } |
| 7092 |
7074 // Allocate both the JS array and the elements array in one big | 7093 // Allocate both the JS array and the elements array in one big |
7075 // allocation. This avoids multiple limit checks. | 7094 // allocation. This avoids multiple limit checks. |
7076 __ AllocateInNewSpace(size, | 7095 __ AllocateInNewSpace(size, |
7077 r0, | 7096 r0, |
7078 r1, | 7097 r1, |
7079 r2, | 7098 r2, |
7080 &slow_case, | 7099 &slow_case, |
7081 TAG_OBJECT); | 7100 TAG_OBJECT); |
7082 | 7101 |
7083 // Copy the JS array part. | 7102 // Copy the JS array part. |
7084 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 7103 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
7085 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { | 7104 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { |
7086 __ ldr(r1, FieldMemOperand(r3, i)); | 7105 __ ldr(r1, FieldMemOperand(r3, i)); |
7087 __ str(r1, FieldMemOperand(r0, i)); | 7106 __ str(r1, FieldMemOperand(r0, i)); |
7088 } | 7107 } |
7089 } | 7108 } |
7090 | 7109 |
7091 if (length_ > 0) { | 7110 if (length_ > 0) { |
7092 // Get hold of the elements array of the boilerplate and setup the | 7111 // Get hold of the elements array of the boilerplate and setup the |
7093 // elements pointer in the resulting object. | 7112 // elements pointer in the resulting object. |
7094 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | 7113 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
7095 __ add(r2, r0, Operand(JSArray::kSize)); | 7114 __ add(r2, r0, Operand(JSArray::kSize)); |
7096 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); | 7115 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); |
7097 | 7116 |
7098 // Copy the elements array. | 7117 // Copy the elements array. |
7099 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); | 7118 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); |
7100 } | 7119 } |
7101 | 7120 |
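For orientation, the fast path above makes one new-space allocation holding the JSArray header followed (when length_ > 0) by its FixedArray elements, which is why the elements pointer stored back into the clone is simply r0 + JSArray::kSize. A rough size computation with illustrative 32-bit constants (the real values come from V8's object layout, not this hunk):

#include <cstddef>

// Illustrative 32-bit constants; the real ones live in V8's objects.h.
constexpr std::size_t kPointerSize = 4;
constexpr std::size_t kJSArraySize = 4 * kPointerSize;           // map, properties, elements, length
constexpr std::size_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map, length

constexpr std::size_t ElementsSize(int length) {
  return length > 0 ? kFixedArrayHeaderSize + length * kPointerSize : 0;
}

// Total bytes handed to AllocateInNewSpace: array header plus the optional elements copy.
constexpr std::size_t AllocationSize(int length) {
  return kJSArraySize + ElementsSize(length);
}

static_assert(AllocationSize(0) == 16, "empty clone: just the JSArray header");
static_assert(AllocationSize(3) == 16 + 8 + 3 * 4, "header + FixedArray header + 3 slots");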
| 7121 if (mode_ == COPY_ON_WRITE_ELEMENTS) { |
| 7122 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r2, r3); |
| 7123 } |
| 7124 |
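The counter above only ticks in COPY_ON_WRITE_ELEMENTS mode, where the clone is expected to keep sharing the boilerplate's elements until a write forces a real copy. A toy model of that copy-on-write idea (not V8 code):

#include <cassert>
#include <cstddef>
#include <memory>
#include <vector>

// Toy model of the copy-on-write behaviour the counter tracks:
// clones alias one backing store until the first write.
struct CowArray {
  std::shared_ptr<std::vector<int>> elements;

  int Get(std::size_t i) const { return (*elements)[i]; }

  void Set(std::size_t i, int value) {
    if (elements.use_count() > 1) {  // still shared: copy before writing
      elements = std::make_shared<std::vector<int>>(*elements);
    }
    (*elements)[i] = value;
  }
};

int main() {
  CowArray a{std::make_shared<std::vector<int>>(std::vector<int>{1, 2, 3})};
  CowArray b = a;                 // "clone": shares the backing store
  assert(a.elements == b.elements);
  b.Set(0, 42);                   // first write triggers the real copy
  assert(a.elements != b.elements);
  assert(a.Get(0) == 1 && b.Get(0) == 42);
  return 0;
}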
7102 // Return and remove the on-stack parameters. | 7125 // Return and remove the on-stack parameters. |
7103 __ add(sp, sp, Operand(3 * kPointerSize)); | 7126 __ add(sp, sp, Operand(3 * kPointerSize)); |
7104 __ Ret(); | 7127 __ Ret(); |
7105 | 7128 |
7106 __ bind(&slow_case); | 7129 __ bind(&slow_case); |
7107 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 7130 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
7108 } | 7131 } |
7109 | 7132 |
7110 | 7133 |
7111 // Takes a Smi and converts to an IEEE 64 bit floating point value in two | 7134 // Takes a Smi and converts to an IEEE 64 bit floating point value in two |
(...skipping 4454 matching lines...)
11566 __ bind(&string_add_runtime); | 11589 __ bind(&string_add_runtime); |
11567 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 11590 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
11568 } | 11591 } |
11569 | 11592 |
11570 | 11593 |
11571 #undef __ | 11594 #undef __ |
11572 | 11595 |
11573 } } // namespace v8::internal | 11596 } } // namespace v8::internal |
11574 | 11597 |
11575 #endif // V8_TARGET_ARCH_ARM | 11598 #endif // V8_TARGET_ARCH_ARM |