OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6078 matching lines...) |
6089 FastCloneShallowArrayStub::Mode mode = | 6089 FastCloneShallowArrayStub::Mode mode = |
6090 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 6090 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
6091 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 6091 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
6092 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 6092 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
6093 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 6093 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
6094 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 6094 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
6095 } | 6095 } |
6096 } | 6096 } |
6097 | 6097 |
6098 | 6098 |
6099 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | |
6100 Register result, | |
6101 Register source, | |
6102 int* offset, | |
6103 AllocationSiteMode mode) { | |
6104 ASSERT(!source.is(ecx)); | |
6105 ASSERT(!result.is(ecx)); | |
6106 | |
6107 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && | |
6108 object->map()->CanTrackAllocationSite(); | |
6109 | |
6110 if (FLAG_debug_code) { | |
6111 __ LoadHeapObject(ecx, object); | |
6112 __ cmp(source, ecx); | |
6113 __ Assert(equal, "Unexpected object literal boilerplate"); | |
6114 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); | |
6115 __ cmp(ecx, Handle<Map>(object->map())); | |
6116 __ Assert(equal, "Unexpected boilerplate map"); | |
6117 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
6118 __ and_(ecx, Map::kElementsKindMask); | |
6119 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); | |
6120 __ Assert(equal, "Unexpected boilerplate elements kind"); | |
6121 } | |
6122 | |
6123 // Only elements backing stores for non-COW arrays need to be copied. | |
6124 Handle<FixedArrayBase> elements(object->elements()); | |
6125 bool has_elements = elements->length() > 0 && | |
6126 elements->map() != isolate()->heap()->fixed_cow_array_map(); | |
6127 | |
6128 // Increase the offset so that subsequent objects end up right after | |
6129 // this object and its backing store. | |
6130 int object_offset = *offset; | |
6131 int object_size = object->map()->instance_size(); | |
6132 int elements_size = has_elements ? elements->Size() : 0; | |
6133 int elements_offset = *offset + object_size; | |
6134 if (create_allocation_site_info) { | |
6135 elements_offset += AllocationSiteInfo::kSize; | |
6136 *offset += AllocationSiteInfo::kSize; | |
6137 } | |
6138 | |
6139 *offset += object_size + elements_size; | |
6140 | |
6141 // Copy object header. | |
6142 ASSERT(object->properties()->length() == 0); | |
6143 int inobject_properties = object->map()->inobject_properties(); | |
6144 int header_size = object_size - inobject_properties * kPointerSize; | |
6145 for (int i = 0; i < header_size; i += kPointerSize) { | |
6146 if (has_elements && i == JSObject::kElementsOffset) { | |
6147 __ lea(ecx, Operand(result, elements_offset)); | |
6148 } else { | |
6149 __ mov(ecx, FieldOperand(source, i)); | |
6150 } | |
6151 __ mov(FieldOperand(result, object_offset + i), ecx); | |
6152 } | |
6153 | |
6154 // Copy in-object properties. | |
6155 for (int i = 0; i < inobject_properties; i++) { | |
6156 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); | |
6157 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i), | |
6158 isolate()); | |
6159 if (value->IsJSObject()) { | |
6160 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
6161 __ lea(ecx, Operand(result, *offset)); | |
6162 __ mov(FieldOperand(result, total_offset), ecx); | |
6163 __ LoadHeapObject(source, value_object); | |
6164 EmitDeepCopy(value_object, result, source, offset, | |
6165 DONT_TRACK_ALLOCATION_SITE); | |
6166 } else if (value->IsHeapObject()) { | |
6167 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
6168 __ mov(FieldOperand(result, total_offset), ecx); | |
6169 } else { | |
6170 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
6171 } | |
6172 } | |
6173 | |
6174 // Build Allocation Site Info if desired | |
6175 if (create_allocation_site_info) { | |
6176 __ mov(FieldOperand(result, object_size), | |
6177 Immediate(Handle<Map>(isolate()->heap()-> | |
6178 allocation_site_info_map()))); | |
6179 __ mov(FieldOperand(result, object_size + kPointerSize), source); | |
6180 } | |
6181 | |
6182 if (has_elements) { | |
6183 // Copy elements backing store header. | |
6184 __ LoadHeapObject(source, elements); | |
6185 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { | |
6186 __ mov(ecx, FieldOperand(source, i)); | |
6187 __ mov(FieldOperand(result, elements_offset + i), ecx); | |
6188 } | |
6189 | |
6190 // Copy elements backing store content. | |
6191 int elements_length = elements->length(); | |
6192 if (elements->IsFixedDoubleArray()) { | |
6193 Handle<FixedDoubleArray> double_array = | |
6194 Handle<FixedDoubleArray>::cast(elements); | |
6195 for (int i = 0; i < elements_length; i++) { | |
6196 int64_t value = double_array->get_representation(i); | |
6197 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); | |
6198 int32_t value_high = static_cast<int32_t>(value >> 32); | |
6199 int total_offset = | |
6200 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); | |
6201 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); | |
6202 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); | |
6203 } | |
6204 } else if (elements->IsFixedArray()) { | |
6205 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); | |
6206 for (int i = 0; i < elements_length; i++) { | |
6207 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); | |
6208 Handle<Object> value(fast_elements->get(i), isolate()); | |
6209 if (value->IsJSObject()) { | |
6210 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | |
6211 __ lea(ecx, Operand(result, *offset)); | |
6212 __ mov(FieldOperand(result, total_offset), ecx); | |
6213 __ LoadHeapObject(source, value_object); | |
6214 EmitDeepCopy(value_object, result, source, offset, | |
6215 DONT_TRACK_ALLOCATION_SITE); | |
6216 } else if (value->IsHeapObject()) { | |
6217 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | |
6218 __ mov(FieldOperand(result, total_offset), ecx); | |
6219 } else { | |
6220 __ mov(FieldOperand(result, total_offset), Immediate(value)); | |
6221 } | |
6222 } | |
6223 } else { | |
6224 UNREACHABLE(); | |
6225 } | |
6226 } | |
6227 } | |
6228 | |
6229 | |
6230 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { | |
6231 ASSERT(ToRegister(instr->context()).is(esi)); | |
6232 int size = instr->hydrogen()->total_size(); | |
6233 ElementsKind boilerplate_elements_kind = | |
6234 instr->hydrogen()->boilerplate()->GetElementsKind(); | |
6235 | |
6236 // Deopt if the literal boilerplate ElementsKind is of a type different than | |
6237 // the expected one. The check isn't necessary if the boilerplate has | 
6238 // already been converted to TERMINAL_FAST_ELEMENTS_KIND. | 
6239 if (CanTransitionToMoreGeneralFastElementsKind( | |
6240 boilerplate_elements_kind, true)) { | |
6241 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
6242 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); | |
6243 // Load the map's "bit field 2". We only need the first byte, | |
6244 // but the following masking takes care of that anyway. | |
6245 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
6246 // Retrieve elements_kind from bit field 2. | |
6247 __ and_(ecx, Map::kElementsKindMask); | |
6248 __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift); | |
6249 DeoptimizeIf(not_equal, instr->environment()); | |
6250 } | |
6251 | |
6252 // Allocate all objects that are part of the literal in one big | |
6253 // allocation. This avoids multiple limit checks. | |
6254 Label allocated, runtime_allocate; | |
6255 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | |
6256 __ jmp(&allocated); | |
6257 | |
6258 __ bind(&runtime_allocate); | |
6259 __ push(Immediate(Smi::FromInt(size))); | |
6260 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | |
6261 | |
6262 __ bind(&allocated); | |
6263 int offset = 0; | |
6264 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
6265 EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset, | |
6266 instr->hydrogen()->allocation_site_mode()); | |
6267 ASSERT_EQ(size, offset); | |
6268 } | |
6269 | |
6270 | |
6271 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 6099 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
6272 ASSERT(ToRegister(instr->context()).is(esi)); | 6100 ASSERT(ToRegister(instr->context()).is(esi)); |
6273 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 6101 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
6274 Handle<FixedArray> constant_properties = | 6102 Handle<FixedArray> constant_properties = |
6275 instr->hydrogen()->constant_properties(); | 6103 instr->hydrogen()->constant_properties(); |
6276 | 6104 |
6277 int flags = instr->hydrogen()->fast_elements() | 6105 int flags = instr->hydrogen()->fast_elements() |
6278 ? ObjectLiteral::kFastElements | 6106 ? ObjectLiteral::kFastElements |
6279 : ObjectLiteral::kNoFlags; | 6107 : ObjectLiteral::kNoFlags; |
6280 flags |= instr->hydrogen()->has_function() | 6108 flags |= instr->hydrogen()->has_function() |
(...skipping 462 matching lines...) |
6743 FixedArray::kHeaderSize - kPointerSize)); | 6571 FixedArray::kHeaderSize - kPointerSize)); |
6744 __ bind(&done); | 6572 __ bind(&done); |
6745 } | 6573 } |
6746 | 6574 |
6747 | 6575 |
6748 #undef __ | 6576 #undef __ |
6749 | 6577 |
6750 } } // namespace v8::internal | 6578 } } // namespace v8::internal |
6751 | 6579 |
6752 #endif // V8_TARGET_ARCH_IA32 | 6580 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |