OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5634 matching lines...) |
5645 FastCloneShallowArrayStub::Mode mode = | 5645 FastCloneShallowArrayStub::Mode mode = |
5646 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 5646 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
5647 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 5647 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
5648 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 5648 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
5649 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 5649 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
5650 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5650 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5651 } | 5651 } |
5652 } | 5652 } |
5653 | 5653 |
5654 | 5654 |
5655 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | |
5656 Register result, | |
5657 Register source, | |
5658 int* offset, | |
5659 AllocationSiteMode mode) { | |
5660 ASSERT(!source.is(ecx)); | |
5661 ASSERT(!result.is(ecx)); | |
5662 | |
5663 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && | |
5664 object->map()->CanTrackAllocationSite(); | |
5665 | |
5666 if (FLAG_debug_code) { | |
5667 __ LoadHeapObject(ecx, object); | |
5668 __ cmp(source, ecx); | |
5669 __ Assert(equal, "Unexpected object literal boilerplate"); | |
5670 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); | |
5671 __ cmp(ecx, Handle<Map>(object->map())); | |
5672 __ Assert(equal, "Unexpected boilerplate map"); | |
5673 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
5674 __ and_(ecx, Map::kElementsKindMask); | |
5675 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); | |
5676 __ Assert(equal, "Unexpected boilerplate elements kind"); | |
5677 } | |
5678 | |
5679 // Only elements backing stores for non-COW arrays need to be copied. | |
5680 Handle<FixedArrayBase> elements(object->elements()); | |
5681 bool has_elements = elements->length() > 0 && | |
5682 elements->map() != isolate()->heap()->fixed_cow_array_map(); | |
5683 | |
5684 // Increase the offset so that subsequent objects end up right after | |
5685 // this object and its backing store. | |
5686 int object_offset = *offset; | |
5687 int object_size = object->map()->instance_size(); | |
5688 int elements_size = has_elements ? elements->Size() : 0; | |
5689 int elements_offset = *offset + object_size; | |
5690 if (create_allocation_site_info) { | |
5691 elements_offset += AllocationSiteInfo::kSize; | |
5692 *offset += AllocationSiteInfo::kSize; | |
5693 } | |
5694 | |
5695 *offset += object_size + elements_size; | |
5696 | |
5697 // Copy object header. | |
5698 ASSERT(object->properties()->length() == 0); | |
5699 int inobject_properties = object->map()->inobject_properties(); | |
5700 int header_size = object_size - inobject_properties * kPointerSize; | |
5701 for (int i = 0; i < header_size; i += kPointerSize) { | |
5702 if (has_elements && i == JSObject::kElementsOffset) { | |
5703 __ lea(ecx, Operand(result, elements_offset)); | |
5704 } else { | |
5705 __ mov(ecx, FieldOperand(source, i)); | |
5706 } | |
5707 __ mov(FieldOperand(result, object_offset + i), ecx); | |
5708 } | |
5709 | |
5710 // Copy in-object properties. | |
5711 for (int i = 0; i < inobject_properties; i++) { | |
5712 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); | |
5713 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i), | |
5714 isolate()); | |
5715 if (value->IsJSObject()) { | |
5716 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
5717 __ lea(ecx, Operand(result, *offset)); | |
5718 __ mov(FieldOperand(result, total_offset), ecx); | |
5719 __ LoadHeapObject(source, value_object); | |
5720 EmitDeepCopy(value_object, result, source, offset, | |
5721 DONT_TRACK_ALLOCATION_SITE); | |
5722 } else if (value->IsHeapObject()) { | |
5723 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
5724 __ mov(FieldOperand(result, total_offset), ecx); | |
5725 } else { | |
5726 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
5727 } | |
5728 } | |
5729 | |
5730 // Build Allocation Site Info if desired | |
5731 if (create_allocation_site_info) { | |
5732 __ mov(FieldOperand(result, object_size), | |
5733 Immediate(Handle<Map>(isolate()->heap()-> | |
5734 allocation_site_info_map()))); | |
5735 __ mov(FieldOperand(result, object_size + kPointerSize), source); | |
5736 } | |
5737 | |
5738 if (has_elements) { | |
5739 // Copy elements backing store header. | |
5740 __ LoadHeapObject(source, elements); | |
5741 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { | |
5742 __ mov(ecx, FieldOperand(source, i)); | |
5743 __ mov(FieldOperand(result, elements_offset + i), ecx); | |
5744 } | |
5745 | |
5746 // Copy elements backing store content. | |
5747 int elements_length = elements->length(); | |
5748 if (elements->IsFixedDoubleArray()) { | |
5749 Handle<FixedDoubleArray> double_array = | |
5750 Handle<FixedDoubleArray>::cast(elements); | |
5751 for (int i = 0; i < elements_length; i++) { | |
5752 int64_t value = double_array->get_representation(i); | |
5753 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); | |
5754 int32_t value_high = static_cast<int32_t>(value >> 32); | |
5755 int total_offset = | |
5756 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); | |
5757 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); | |
5758 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); | |
5759 } | |
5760 } else if (elements->IsFixedArray()) { | |
5761 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); | |
5762 for (int i = 0; i < elements_length; i++) { | |
5763 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); | |
5764 Handle<Object> value(fast_elements->get(i), isolate()); | |
5765 if (value->IsJSObject()) { | |
5766 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
5767 __ lea(ecx, Operand(result, *offset)); | |
5768 __ mov(FieldOperand(result, total_offset), ecx); | |
5769 __ LoadHeapObject(source, value_object); | |
5770 EmitDeepCopy(value_object, result, source, offset, | |
5771 DONT_TRACK_ALLOCATION_SITE); | |
5772 } else if (value->IsHeapObject()) { | |
5773 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
5774 __ mov(FieldOperand(result, total_offset), ecx); | |
5775 } else { | |
5776 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
5777 } | |
5778 } | |
5779 } else { | |
5780 UNREACHABLE(); | |
5781 } | |
5782 } | |
5783 } | |
5784 | |
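The offset bookkeeping is the subtle part of EmitDeepCopy above: the object header, its in-object properties, an optional AllocationSiteInfo slot, and the elements backing store are all carved out of one flat allocation, and *offset is advanced so that nested objects land immediately after the current object and its backing store. The following standalone sketch mirrors that arithmetic only for illustration; the sizes passed in main are made-up placeholders, not real V8 constants.

#include <cassert>

struct LayoutResult {
  int object_offset;    // where the object header and in-object properties go
  int elements_offset;  // where the elements backing store goes
  int end_offset;       // running offset handed to the next nested object
};

LayoutResult PlanDeepCopy(int current_offset,
                          int object_size,
                          int elements_size,
                          bool track_allocation_site,
                          int allocation_site_info_size) {
  LayoutResult r;
  r.object_offset = current_offset;
  r.elements_offset = current_offset + object_size;
  int offset = current_offset;
  if (track_allocation_site) {
    // The AllocationSiteInfo slot sits between the object and its elements,
    // so the elements move down and the running offset grows by its size.
    r.elements_offset += allocation_site_info_size;
    offset += allocation_site_info_size;
  }
  // Subsequent (nested) objects start right after this object and its
  // backing store.
  r.end_offset = offset + object_size + elements_size;
  return r;
}

int main() {
  // Example: a 5-word object with a 3-word backing store, tracking sites,
  // on a 4-byte-pointer target (placeholder sizes).
  LayoutResult r = PlanDeepCopy(0, 5 * 4, 3 * 4, true, 2 * 4);
  assert(r.object_offset == 0);
  assert(r.elements_offset == 5 * 4 + 2 * 4);
  assert(r.end_offset == 2 * 4 + 5 * 4 + 3 * 4);
  return 0;
}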
5785 | |
5786 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { | |
5787 ASSERT(ToRegister(instr->context()).is(esi)); | |
5788 int size = instr->hydrogen()->total_size(); | |
5789 ElementsKind boilerplate_elements_kind = | |
5790 instr->hydrogen()->boilerplate()->GetElementsKind(); | |
5791 | |
5792 // Deopt if the literal boilerplate ElementsKind is of a type different than | |
5793 // the expected one. The check isn't necessary if the boilerplate has already | |
5794 // been converted to TERMINAL_FAST_ELEMENTS_KIND. | |
5795 if (CanTransitionToMoreGeneralFastElementsKind( | |
5796 boilerplate_elements_kind, true)) { | |
5797 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
5798 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); | |
5799 // Load the map's "bit field 2". We only need the first byte, | |
5800 // but the following masking takes care of that anyway. | |
5801 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
5802 // Retrieve elements_kind from bit field 2. | |
5803 __ and_(ecx, Map::kElementsKindMask); | |
5804 __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift); | |
5805 DeoptimizeIf(not_equal, instr->environment()); | |
5806 } | |
5807 | |
5808 // Allocate all objects that are part of the literal in one big | |
5809 // allocation. This avoids multiple limit checks. | |
5810 Label allocated, runtime_allocate; | |
5811 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | |
5812 __ jmp(&allocated); | |
5813 | |
5814 __ bind(&runtime_allocate); | |
5815 __ push(Immediate(Smi::FromInt(size))); | |
5816 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | |
5817 | |
5818 __ bind(&allocated); | |
5819 int offset = 0; | |
5820 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
5821 EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset, | |
5822 instr->hydrogen()->allocation_site_mode()); | |
5823 ASSERT_EQ(size, offset); | |
5824 } | |
5825 | |
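Both the debug check in EmitDeepCopy and the deopt guard in DoFastLiteral above read the map's bit field 2, mask out the elements-kind bits, and compare against the expected kind shifted into place, so the loaded value never has to be shifted down. The sketch below illustrates that pattern; the shift and mask values are placeholder assumptions, not the actual constants from V8's Map class.

#include <cassert>
#include <cstdint>

// Placeholder encoding for illustration only.
constexpr uint32_t kElementsKindShift = 3;
constexpr uint32_t kElementsKindMask = 0x1F << kElementsKindShift;

// Mirrors the generated code: mask bit field 2 and compare it against the
// expected kind shifted into position.
bool HasExpectedElementsKind(uint32_t bit_field2, uint32_t expected_kind) {
  return (bit_field2 & kElementsKindMask) ==
         (expected_kind << kElementsKindShift);
}

int main() {
  uint32_t expected_kind = 2;  // e.g. some fast elements kind
  // Unrelated low bits are set to show that the mask discards them.
  uint32_t bit_field2 = (expected_kind << kElementsKindShift) | 0x5;
  assert(HasExpectedElementsKind(bit_field2, expected_kind));
  assert(!HasExpectedElementsKind(bit_field2, expected_kind + 1));
  return 0;
}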
5826 | |
5827 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 5655 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
5828 ASSERT(ToRegister(instr->context()).is(esi)); | 5656 ASSERT(ToRegister(instr->context()).is(esi)); |
5829 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 5657 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
5830 Handle<FixedArray> constant_properties = | 5658 Handle<FixedArray> constant_properties = |
5831 instr->hydrogen()->constant_properties(); | 5659 instr->hydrogen()->constant_properties(); |
5832 | 5660 |
5833 int flags = instr->hydrogen()->fast_elements() | 5661 int flags = instr->hydrogen()->fast_elements() |
5834 ? ObjectLiteral::kFastElements | 5662 ? ObjectLiteral::kFastElements |
5835 : ObjectLiteral::kNoFlags; | 5663 : ObjectLiteral::kNoFlags; |
5836 flags |= instr->hydrogen()->has_function() | 5664 flags |= instr->hydrogen()->has_function() |
(...skipping 462 matching lines...) |
6299 FixedArray::kHeaderSize - kPointerSize)); | 6127 FixedArray::kHeaderSize - kPointerSize)); |
6300 __ bind(&done); | 6128 __ bind(&done); |
6301 } | 6129 } |
6302 | 6130 |
6303 | 6131 |
6304 #undef __ | 6132 #undef __ |
6305 | 6133 |
6306 } } // namespace v8::internal | 6134 } } // namespace v8::internal |
6307 | 6135 |
6308 #endif // V8_TARGET_ARCH_IA32 | 6136 #endif // V8_TARGET_ARCH_IA32 |