| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 5628 matching lines...) |
| 5639 FastCloneShallowArrayStub::Mode mode = | 5639 FastCloneShallowArrayStub::Mode mode = |
| 5640 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 5640 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
| 5641 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 5641 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 5642 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 5642 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 5643 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 5643 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
| 5644 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5644 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5645 } | 5645 } |
| 5646 } | 5646 } |
| 5647 | 5647 |
| 5648 | 5648 |
| 5649 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | |
| 5650 Register result, | |
| 5651 Register source, | |
| 5652 int* offset, | |
| 5653 AllocationSiteMode mode) { | |
| 5654 ASSERT(!source.is(ecx)); | |
| 5655 ASSERT(!result.is(ecx)); | |
| 5656 | |
| 5657 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && | |
| 5658 object->map()->CanTrackAllocationSite(); | |
| 5659 | |
| 5660 if (FLAG_debug_code) { | |
| 5661 __ LoadHeapObject(ecx, object); | |
| 5662 __ cmp(source, ecx); | |
| 5663 __ Assert(equal, "Unexpected object literal boilerplate"); | |
| 5664 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); | |
| 5665 __ cmp(ecx, Handle<Map>(object->map())); | |
| 5666 __ Assert(equal, "Unexpected boilerplate map"); | |
| 5667 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
| 5668 __ and_(ecx, Map::kElementsKindMask); | |
| 5669 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); | |
| 5670 __ Assert(equal, "Unexpected boilerplate elements kind"); | |
| 5671 } | |
| 5672 | |
| 5673 // Only elements backing stores for non-COW arrays need to be copied. | |
| 5674 Handle<FixedArrayBase> elements(object->elements()); | |
| 5675 bool has_elements = elements->length() > 0 && | |
| 5676 elements->map() != isolate()->heap()->fixed_cow_array_map(); | |
| 5677 | |
| 5678 // Increase the offset so that subsequent objects end up right after | |
| 5679 // this object and its backing store. | |
| 5680 int object_offset = *offset; | |
| 5681 int object_size = object->map()->instance_size(); | |
| 5682 int elements_size = has_elements ? elements->Size() : 0; | |
| 5683 int elements_offset = *offset + object_size; | |
| 5684 if (create_allocation_site_info) { | |
| 5685 elements_offset += AllocationSiteInfo::kSize; | |
| 5686 *offset += AllocationSiteInfo::kSize; | |
| 5687 } | |
| 5688 | |
| 5689 *offset += object_size + elements_size; | |
| 5690 | |
| 5691 // Copy object header. | |
| 5692 ASSERT(object->properties()->length() == 0); | |
| 5693 int inobject_properties = object->map()->inobject_properties(); | |
| 5694 int header_size = object_size - inobject_properties * kPointerSize; | |
| 5695 for (int i = 0; i < header_size; i += kPointerSize) { | |
| 5696 if (has_elements && i == JSObject::kElementsOffset) { | |
| 5697 __ lea(ecx, Operand(result, elements_offset)); | |
| 5698 } else { | |
| 5699 __ mov(ecx, FieldOperand(source, i)); | |
| 5700 } | |
| 5701 __ mov(FieldOperand(result, object_offset + i), ecx); | |
| 5702 } | |
| 5703 | |
| 5704 // Copy in-object properties. | |
| 5705 for (int i = 0; i < inobject_properties; i++) { | |
| 5706 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); | |
| 5707 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i), | |
| 5708 isolate()); | |
| 5709 if (value->IsJSObject()) { | |
| 5710 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
| 5711 __ lea(ecx, Operand(result, *offset)); | |
| 5712 __ mov(FieldOperand(result, total_offset), ecx); | |
| 5713 __ LoadHeapObject(source, value_object); | |
| 5714 EmitDeepCopy(value_object, result, source, offset, | |
| 5715 DONT_TRACK_ALLOCATION_SITE); | |
| 5716 } else if (value->IsHeapObject()) { | |
| 5717 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
| 5718 __ mov(FieldOperand(result, total_offset), ecx); | |
| 5719 } else { | |
| 5720 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
| 5721 } | |
| 5722 } | |
| 5723 | |
| 5724 // Build an AllocationSiteInfo if desired. | |
| 5725 if (create_allocation_site_info) { | |
| 5726 __ mov(FieldOperand(result, object_size), | |
| 5727 Immediate(Handle<Map>(isolate()->heap()-> | |
| 5728 allocation_site_info_map()))); | |
| 5729 __ mov(FieldOperand(result, object_size + kPointerSize), source); | |
| 5730 } | |
| 5731 | |
| 5732 if (has_elements) { | |
| 5733 // Copy elements backing store header. | |
| 5734 __ LoadHeapObject(source, elements); | |
| 5735 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { | |
| 5736 __ mov(ecx, FieldOperand(source, i)); | |
| 5737 __ mov(FieldOperand(result, elements_offset + i), ecx); | |
| 5738 } | |
| 5739 | |
| 5740 // Copy elements backing store content. | |
| 5741 int elements_length = elements->length(); | |
| 5742 if (elements->IsFixedDoubleArray()) { | |
| 5743 Handle<FixedDoubleArray> double_array = | |
| 5744 Handle<FixedDoubleArray>::cast(elements); | |
| 5745 for (int i = 0; i < elements_length; i++) { | |
| 5746 int64_t value = double_array->get_representation(i); | |
| 5747 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); | |
| 5748 int32_t value_high = static_cast<int32_t>(value >> 32); | |
| 5749 int total_offset = | |
| 5750 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); | |
| 5751 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); | |
| 5752 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); | |
| 5753 } | |
| 5754 } else if (elements->IsFixedArray()) { | |
| 5755 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); | |
| 5756 for (int i = 0; i < elements_length; i++) { | |
| 5757 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); | |
| 5758 Handle<Object> value(fast_elements->get(i), isolate()); | |
| 5759 if (value->IsJSObject()) { | |
| 5760 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
| 5761 __ lea(ecx, Operand(result, *offset)); | |
| 5762 __ mov(FieldOperand(result, total_offset), ecx); | |
| 5763 __ LoadHeapObject(source, value_object); | |
| 5764 EmitDeepCopy(value_object, result, source, offset, | |
| 5765 DONT_TRACK_ALLOCATION_SITE); | |
| 5766 } else if (value->IsHeapObject()) { | |
| 5767 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
| 5768 __ mov(FieldOperand(result, total_offset), ecx); | |
| 5769 } else { | |
| 5770 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
| 5771 } | |
| 5772 } | |
| 5773 } else { | |
| 5774 UNREACHABLE(); | |
| 5775 } | |
| 5776 } | |
| 5777 } | |
| 5778 | |
| 5779 | |
| 5780 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { | |
| 5781 ASSERT(ToRegister(instr->context()).is(esi)); | |
| 5782 int size = instr->hydrogen()->total_size(); | |
| 5783 ElementsKind boilerplate_elements_kind = | |
| 5784 instr->hydrogen()->boilerplate()->GetElementsKind(); | |
| 5785 | |
| 5786 // Deopt if the literal boilerplate ElementsKind is of a type different than | |
| 5787 // the expected one. The check isn't necessary if the boilerplate has already | |
| 5788 // been converted to TERMINAL_FAST_ELEMENTS_KIND. | |
| 5789 if (CanTransitionToMoreGeneralFastElementsKind( | |
| 5790 boilerplate_elements_kind, true)) { | |
| 5791 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
| 5792 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); | |
| 5793 // Load the map's "bit field 2". We only need the first byte, | |
| 5794 // but the following masking takes care of that anyway. | |
| 5795 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
| 5796 // Retrieve elements_kind from bit field 2. | |
| 5797 __ and_(ecx, Map::kElementsKindMask); | |
| 5798 __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift); | |
| 5799 DeoptimizeIf(not_equal, instr->environment()); | |
| 5800 } | |
| 5801 | |
| 5802 // Allocate all objects that are part of the literal in one big | |
| 5803 // allocation. This avoids multiple limit checks. | |
| 5804 Label allocated, runtime_allocate; | |
| 5805 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | |
| 5806 __ jmp(&allocated); | |
| 5807 | |
| 5808 __ bind(&runtime_allocate); | |
| 5809 __ push(Immediate(Smi::FromInt(size))); | |
| 5810 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | |
| 5811 | |
| 5812 __ bind(&allocated); | |
| 5813 int offset = 0; | |
| 5814 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
| 5815 EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset, | |
| 5816 instr->hydrogen()->allocation_site_mode()); | |
| 5817 ASSERT_EQ(size, offset); | |
| 5818 } | |
| 5819 | |
| 5820 | |
| 5821 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 5649 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 5822 ASSERT(ToRegister(instr->context()).is(esi)); | 5650 ASSERT(ToRegister(instr->context()).is(esi)); |
| 5823 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 5651 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
| 5824 Handle<FixedArray> constant_properties = | 5652 Handle<FixedArray> constant_properties = |
| 5825 instr->hydrogen()->constant_properties(); | 5653 instr->hydrogen()->constant_properties(); |
| 5826 | 5654 |
| 5827 int flags = instr->hydrogen()->fast_elements() | 5655 int flags = instr->hydrogen()->fast_elements() |
| 5828 ? ObjectLiteral::kFastElements | 5656 ? ObjectLiteral::kFastElements |
| 5829 : ObjectLiteral::kNoFlags; | 5657 : ObjectLiteral::kNoFlags; |
| 5830 flags |= instr->hydrogen()->has_function() | 5658 flags |= instr->hydrogen()->has_function() |
| (...skipping 462 matching lines...) |
| 6293 FixedArray::kHeaderSize - kPointerSize)); | 6121 FixedArray::kHeaderSize - kPointerSize)); |
| 6294 __ bind(&done); | 6122 __ bind(&done); |
| 6295 } | 6123 } |
| 6296 | 6124 |
| 6297 | 6125 |
| 6298 #undef __ | 6126 #undef __ |
| 6299 | 6127 |
| 6300 } } // namespace v8::internal | 6128 } } // namespace v8::internal |
| 6301 | 6129 |
| 6302 #endif // V8_TARGET_ARCH_IA32 | 6130 #endif // V8_TARGET_ARCH_IA32 |
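A minimal standalone sketch of the offset bookkeeping done by the removed EmitDeepCopy, which lays the boilerplate object, an optional AllocationSiteInfo, and the elements backing store out back-to-back in one pre-sized allocation while advancing a single running offset. The struct, function name, and sizes below are hypothetical stand-ins, not V8's:

    #include <cstdio>

    struct Boilerplate {
      int instance_size;           // object header + in-object properties
      int elements_size;           // 0 when the elements store is empty or COW
      bool track_allocation_site;  // reserve room for an AllocationSiteInfo
    };

    // Returns the offset at which the next object will be emitted. The object
    // starts at `start`; the optional AllocationSiteInfo and the elements
    // store follow it immediately, mirroring the *offset updates above.
    int LayOut(const Boilerplate& b, int start, int allocation_site_info_size) {
      int offset = start;
      int elements_offset = start + b.instance_size;
      if (b.track_allocation_site) {
        elements_offset += allocation_site_info_size;
        offset += allocation_site_info_size;
      }
      offset += b.instance_size + b.elements_size;
      if (b.elements_size > 0) {
        std::printf("object@%d elements@%d next@%d\n",
                    start, elements_offset, offset);
      } else {
        std::printf("object@%d next@%d\n", start, offset);
      }
      return offset;
    }

    int main() {
      // Hypothetical sizes: a 24-byte array with a 32-byte elements store and
      // a tracked allocation site, then a nested 16-byte object w/o elements.
      int next = LayOut({24, 32, true}, 0, 8);  // object@0 elements@32 next@64
      LayOut({16, 0, false}, next, 8);          // object@64 next@80
    }

Each nested JSObject found in the in-object properties or the elements store is laid out the same way at the current running offset, which is why the removed EmitDeepCopy passes `offset` by pointer into its recursive calls.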
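The removed DoFastLiteral guarded against a boilerplate whose ElementsKind had changed by masking the kind bits out of the map's bit field 2 and comparing them against the expected kind shifted into place. The sketch below shows the same mask-and-compare in standalone C++, with hypothetical bit positions and function names rather than the real Map constants:

    #include <cassert>
    #include <cstdint>

    // Hypothetical layout: the elements kind occupies bits 5..7 of bit field 2.
    constexpr int kElementsKindShift = 5;
    constexpr uint8_t kElementsKindMask = 0x7 << kElementsKindShift;

    bool ElementsKindMatches(uint8_t bit_field2, int expected_kind) {
      // Mirrors `and_(ecx, Map::kElementsKindMask)` followed by
      // `cmp(ecx, expected_kind << Map::kElementsKindShift)`: the loaded bits
      // are masked but never shifted down, so the expected kind is shifted up
      // for the comparison instead.
      return (bit_field2 & kElementsKindMask) ==
             (expected_kind << kElementsKindShift);
    }

    int main() {
      uint8_t bit_field2 = (3 << kElementsKindShift) | 0x1f;  // kind 3 + noise
      assert(ElementsKindMatches(bit_field2, 3));
      assert(!ElementsKindMatches(bit_field2, 4));
    }

Comparing against the up-shifted expected kind avoids shifting the loaded bits down, which is the same trick used by the `and_`/`cmp` pair in the debug check of the removed EmitDeepCopy.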