OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4885 matching lines...)
4896 __ Push(Smi::FromInt(instance_size)); | 4896 __ Push(Smi::FromInt(instance_size)); |
4897 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); | 4897 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); |
4898 __ StoreToSafepointRegisterSlot(result, rax); | 4898 __ StoreToSafepointRegisterSlot(result, rax); |
4899 } | 4899 } |
4900 | 4900 |
4901 | 4901 |
4902 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { | 4902 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { |
4903 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 4903 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
4904 ElementsKind boilerplate_elements_kind = | 4904 ElementsKind boilerplate_elements_kind = |
4905 instr->hydrogen()->boilerplate_elements_kind(); | 4905 instr->hydrogen()->boilerplate_elements_kind(); |
| 4906 AllocationSiteMode allocation_site_mode = |
| 4907 instr->hydrogen()->allocation_site_mode(); |
4906 | 4908 |
4907 // Deopt if the array literal boilerplate ElementsKind is of a type different | 4909 // Deopt if the array literal boilerplate ElementsKind is of a type different |
4908 // than the expected one. The check isn't necessary if the boilerplate has | 4910 // than the expected one. The check isn't necessary if the boilerplate has |
4909 // already been converted to TERMINAL_FAST_ELEMENTS_KIND. | 4911 // already been converted to TERMINAL_FAST_ELEMENTS_KIND. |
4910 if (CanTransitionToMoreGeneralFastElementsKind( | 4912 if (CanTransitionToMoreGeneralFastElementsKind( |
4911 boilerplate_elements_kind, true)) { | 4913 boilerplate_elements_kind, true)) { |
4912 __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object()); | 4914 __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object()); |
4913 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 4915 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
4914 // Load the map's "bit field 2". | 4916 // Load the map's "bit field 2". |
4915 __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset)); | 4917 __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset)); |
(...skipping 10 matching lines...)
4926 // Boilerplate already exists, constant elements are never accessed. | 4928 // Boilerplate already exists, constant elements are never accessed. |
4927 // Pass an empty fixed array. | 4929 // Pass an empty fixed array. |
4928 __ Push(isolate()->factory()->empty_fixed_array()); | 4930 __ Push(isolate()->factory()->empty_fixed_array()); |
4929 | 4931 |
4930 // Pick the right runtime function or stub to call. | 4932 // Pick the right runtime function or stub to call. |
4931 int length = instr->hydrogen()->length(); | 4933 int length = instr->hydrogen()->length(); |
4932 if (instr->hydrogen()->IsCopyOnWrite()) { | 4934 if (instr->hydrogen()->IsCopyOnWrite()) { |
4933 ASSERT(instr->hydrogen()->depth() == 1); | 4935 ASSERT(instr->hydrogen()->depth() == 1); |
4934 FastCloneShallowArrayStub::Mode mode = | 4936 FastCloneShallowArrayStub::Mode mode = |
4935 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 4937 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
4936 FastCloneShallowArrayStub stub(mode, length); | 4938 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); |
4937 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4939 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
4938 } else if (instr->hydrogen()->depth() > 1) { | 4940 } else if (instr->hydrogen()->depth() > 1) { |
4939 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 4941 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); |
4940 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 4942 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
4941 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 4943 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); |
4942 } else { | 4944 } else { |
4943 FastCloneShallowArrayStub::Mode mode = | 4945 FastCloneShallowArrayStub::Mode mode = |
4944 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 4946 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
4945 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 4947 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
4946 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 4948 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
4947 FastCloneShallowArrayStub stub(mode, length); | 4949 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
4948 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4950 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
4949 } | 4951 } |
4950 } | 4952 } |
4951 | 4953 |
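Note: the stub path above is where the new allocation-site argument enters this file. Condensed, the dispatch in DoArrayLiteral now reads as follows (a sketch using only names that appear in the patch; clone_mode stands in for the CLONE_DOUBLE_ELEMENTS / CLONE_ELEMENTS ternary):

    if (instr->hydrogen()->IsCopyOnWrite()) {
      // COW literals never track allocation sites.
      FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
                                     DONT_TRACK_ALLOCATION_SITE, length);
    } else if (instr->hydrogen()->depth() > 1) {
      CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);         // nested literal
    } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
      CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);  // too long for the stub
    } else {
      // Shallow clone: pass the hydrogen instruction's allocation_site_mode through.
      FastCloneShallowArrayStub stub(clone_mode, allocation_site_mode, length);
    }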
4952 | 4954 |
4953 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | 4955 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, |
4954 Register result, | 4956 Register result, |
4955 Register source, | 4957 Register source, |
4956 int* offset) { | 4958 int* offset, |
| 4959 AllocationSiteMode mode) { |
4957 ASSERT(!source.is(rcx)); | 4960 ASSERT(!source.is(rcx)); |
4958 ASSERT(!result.is(rcx)); | 4961 ASSERT(!result.is(rcx)); |
4959 | 4962 |
| 4963 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && |
| 4964 object->map()->CanTrackAllocationSite(); |
| 4965 |
4960 // Only elements backing stores for non-COW arrays need to be copied. | 4966 // Only elements backing stores for non-COW arrays need to be copied. |
4961 Handle<FixedArrayBase> elements(object->elements()); | 4967 Handle<FixedArrayBase> elements(object->elements()); |
4962 bool has_elements = elements->length() > 0 && | 4968 bool has_elements = elements->length() > 0 && |
4963 elements->map() != isolate()->heap()->fixed_cow_array_map(); | 4969 elements->map() != isolate()->heap()->fixed_cow_array_map(); |
4964 | 4970 |
4965 // Increase the offset so that subsequent objects end up right after | 4971 // Increase the offset so that subsequent objects end up right after |
4966 // this object and its backing store. | 4972 // this object and its backing store. |
4967 int object_offset = *offset; | 4973 int object_offset = *offset; |
4968 int object_size = object->map()->instance_size(); | 4974 int object_size = object->map()->instance_size(); |
| 4975 int elements_size = has_elements ? elements->Size() : 0; |
4969 int elements_offset = *offset + object_size; | 4976 int elements_offset = *offset + object_size; |
4970 int elements_size = has_elements ? elements->Size() : 0; | 4977 if (create_allocation_site_info) { |
| 4978 elements_offset += AllocationSiteInfo::kSize; |
| 4979 *offset += AllocationSiteInfo::kSize; |
| 4980 } |
| 4981 |
4971 *offset += object_size + elements_size; | 4982 *offset += object_size + elements_size; |
4972 | 4983 |
4973 // Copy object header. | 4984 // Copy object header. |
4974 ASSERT(object->properties()->length() == 0); | 4985 ASSERT(object->properties()->length() == 0); |
4975 int inobject_properties = object->map()->inobject_properties(); | 4986 int inobject_properties = object->map()->inobject_properties(); |
4976 int header_size = object_size - inobject_properties * kPointerSize; | 4987 int header_size = object_size - inobject_properties * kPointerSize; |
4977 for (int i = 0; i < header_size; i += kPointerSize) { | 4988 for (int i = 0; i < header_size; i += kPointerSize) { |
4978 if (has_elements && i == JSObject::kElementsOffset) { | 4989 if (has_elements && i == JSObject::kElementsOffset) { |
4979 __ lea(rcx, Operand(result, elements_offset)); | 4990 __ lea(rcx, Operand(result, elements_offset)); |
4980 } else { | 4991 } else { |
4981 __ movq(rcx, FieldOperand(source, i)); | 4992 __ movq(rcx, FieldOperand(source, i)); |
4982 } | 4993 } |
4983 __ movq(FieldOperand(result, object_offset + i), rcx); | 4994 __ movq(FieldOperand(result, object_offset + i), rcx); |
4984 } | 4995 } |
4985 | 4996 |
4986 // Copy in-object properties. | 4997 // Copy in-object properties. |
4987 for (int i = 0; i < inobject_properties; i++) { | 4998 for (int i = 0; i < inobject_properties; i++) { |
4988 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); | 4999 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); |
4989 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); | 5000 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); |
4990 if (value->IsJSObject()) { | 5001 if (value->IsJSObject()) { |
4991 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | 5002 Handle<JSObject> value_object = Handle<JSObject>::cast(value); |
4992 __ lea(rcx, Operand(result, *offset)); | 5003 __ lea(rcx, Operand(result, *offset)); |
4993 __ movq(FieldOperand(result, total_offset), rcx); | 5004 __ movq(FieldOperand(result, total_offset), rcx); |
4994 __ LoadHeapObject(source, value_object); | 5005 __ LoadHeapObject(source, value_object); |
4995 EmitDeepCopy(value_object, result, source, offset); | 5006 EmitDeepCopy(value_object, result, source, offset, |
| 5007 DONT_TRACK_ALLOCATION_SITE); |
4996 } else if (value->IsHeapObject()) { | 5008 } else if (value->IsHeapObject()) { |
4997 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); | 5009 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); |
4998 __ movq(FieldOperand(result, total_offset), rcx); | 5010 __ movq(FieldOperand(result, total_offset), rcx); |
4999 } else { | 5011 } else { |
5000 __ movq(rcx, value, RelocInfo::NONE64); | 5012 __ movq(rcx, value, RelocInfo::NONE64); |
5001 __ movq(FieldOperand(result, total_offset), rcx); | 5013 __ movq(FieldOperand(result, total_offset), rcx); |
5002 } | 5014 } |
5003 } | 5015 } |
5004 | 5016 |
| 5017 // Build Allocation Site Info if desired |
| 5018 if (create_allocation_site_info) { |
| 5019 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); |
| 5020 __ movq(FieldOperand(result, object_size), kScratchRegister); |
| 5021 __ movq(FieldOperand(result, object_size + kPointerSize), source); |
| 5022 } |
| 5023 |
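Note: the offset bookkeeping earlier in this function plus the two stores above produce the following layout when mode == TRACK_ALLOCATION_SITE (a sketch derived from this hunk; tracking is only requested for the top-level copy, since the recursive calls pass DONT_TRACK_ALLOCATION_SITE, so object_offset is 0 there and the stores can index with object_size directly):

    // result + 0                           : copied object header and in-object properties
    // result + object_size                 : AllocationSiteInfo map (loaded from the root list)
    // result + object_size + kPointerSize  : word pointing back at the boilerplate (the source register)
    // result + elements_offset             : copied elements backing store, with
    //     elements_offset = object_size + AllocationSiteInfo::kSize when tracking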
5005 if (has_elements) { | 5024 if (has_elements) { |
5006 // Copy elements backing store header. | 5025 // Copy elements backing store header. |
5007 __ LoadHeapObject(source, elements); | 5026 __ LoadHeapObject(source, elements); |
5008 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { | 5027 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { |
5009 __ movq(rcx, FieldOperand(source, i)); | 5028 __ movq(rcx, FieldOperand(source, i)); |
5010 __ movq(FieldOperand(result, elements_offset + i), rcx); | 5029 __ movq(FieldOperand(result, elements_offset + i), rcx); |
5011 } | 5030 } |
5012 | 5031 |
5013 // Copy elements backing store content. | 5032 // Copy elements backing store content. |
5014 int elements_length = elements->length(); | 5033 int elements_length = elements->length(); |
(...skipping 10 matching lines...)
5025 } else if (elements->IsFixedArray()) { | 5044 } else if (elements->IsFixedArray()) { |
5026 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); | 5045 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); |
5027 for (int i = 0; i < elements_length; i++) { | 5046 for (int i = 0; i < elements_length; i++) { |
5028 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); | 5047 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); |
5029 Handle<Object> value(fast_elements->get(i)); | 5048 Handle<Object> value(fast_elements->get(i)); |
5030 if (value->IsJSObject()) { | 5049 if (value->IsJSObject()) { |
5031 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | 5050 Handle<JSObject> value_object = Handle<JSObject>::cast(value); |
5032 __ lea(rcx, Operand(result, *offset)); | 5051 __ lea(rcx, Operand(result, *offset)); |
5033 __ movq(FieldOperand(result, total_offset), rcx); | 5052 __ movq(FieldOperand(result, total_offset), rcx); |
5034 __ LoadHeapObject(source, value_object); | 5053 __ LoadHeapObject(source, value_object); |
5035 EmitDeepCopy(value_object, result, source, offset); | 5054 EmitDeepCopy(value_object, result, source, offset, |
| 5055 DONT_TRACK_ALLOCATION_SITE); |
5036 } else if (value->IsHeapObject()) { | 5056 } else if (value->IsHeapObject()) { |
5037 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); | 5057 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); |
5038 __ movq(FieldOperand(result, total_offset), rcx); | 5058 __ movq(FieldOperand(result, total_offset), rcx); |
5039 } else { | 5059 } else { |
5040 __ movq(rcx, value, RelocInfo::NONE64); | 5060 __ movq(rcx, value, RelocInfo::NONE64); |
5041 __ movq(FieldOperand(result, total_offset), rcx); | 5061 __ movq(FieldOperand(result, total_offset), rcx); |
5042 } | 5062 } |
5043 } | 5063 } |
5044 } else { | 5064 } else { |
5045 UNREACHABLE(); | 5065 UNREACHABLE(); |
(...skipping 29 matching lines...)
5075 __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); | 5095 __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); |
5076 __ jmp(&allocated); | 5096 __ jmp(&allocated); |
5077 | 5097 |
5078 __ bind(&runtime_allocate); | 5098 __ bind(&runtime_allocate); |
5079 __ Push(Smi::FromInt(size)); | 5099 __ Push(Smi::FromInt(size)); |
5080 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5100 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
5081 | 5101 |
5082 __ bind(&allocated); | 5102 __ bind(&allocated); |
5083 int offset = 0; | 5103 int offset = 0; |
5084 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate()); | 5104 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate()); |
5085 EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset); | 5105 EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset, |
| 5106 instr->hydrogen()->allocation_site_mode()); |
5086 ASSERT_EQ(size, offset); | 5107 ASSERT_EQ(size, offset); |
5087 } | 5108 } |
5088 | 5109 |
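Note: because EmitDeepCopy now advances *offset by AllocationSiteInfo::kSize when tracking is enabled, the ASSERT_EQ(size, offset) above only balances if the size pushed for the allocation already includes that extra space; the hydrogen-side accounting is outside this hunk, but the invariant the assert checks is roughly:

    // size == object_size + elements_size + sizes of any nested copies
    //         + (tracking an allocation site ? AllocationSiteInfo::kSize : 0)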
5089 | 5110 |
5090 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 5111 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
5091 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 5112 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
5092 Handle<FixedArray> constant_properties = | 5113 Handle<FixedArray> constant_properties = |
5093 instr->hydrogen()->constant_properties(); | 5114 instr->hydrogen()->constant_properties(); |
5094 | 5115 |
5095 // Set up the parameters to the stub/runtime call. | 5116 // Set up the parameters to the stub/runtime call. |
(...skipping 457 matching lines...)
5553 FixedArray::kHeaderSize - kPointerSize)); | 5574 FixedArray::kHeaderSize - kPointerSize)); |
5554 __ bind(&done); | 5575 __ bind(&done); |
5555 } | 5576 } |
5556 | 5577 |
5557 | 5578 |
5558 #undef __ | 5579 #undef __ |
5559 | 5580 |
5560 } } // namespace v8::internal | 5581 } } // namespace v8::internal |
5561 | 5582 |
5562 #endif // V8_TARGET_ARCH_X64 | 5583 #endif // V8_TARGET_ARCH_X64 |