OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
220 | 220 |
221 | 221 |
222 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 222 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
223 // Stack layout on entry: | 223 // Stack layout on entry: |
224 // | 224 // |
225 // [rsp + kPointerSize]: constant elements. | 225 // [rsp + kPointerSize]: constant elements. |
226 // [rsp + (2 * kPointerSize)]: literal index. | 226 // [rsp + (2 * kPointerSize)]: literal index. |
227 // [rsp + (3 * kPointerSize)]: literals array. | 227 // [rsp + (3 * kPointerSize)]: literals array. |
228 | 228 |
229 // All sizes here are multiples of kPointerSize. | 229 // All sizes here are multiples of kPointerSize. |
230 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; | 230 int elements_size = 0; |
| 231 if (length_ > 0) { |
| 232 elements_size = mode_ == CLONE_DOUBLE_ELEMENTS |
| 233 ? FixedDoubleArray::SizeFor(length_) |
| 234 : FixedArray::SizeFor(length_); |
| 235 } |
231 int size = JSArray::kSize + elements_size; | 236 int size = JSArray::kSize + elements_size; |
232 | 237 |
233 // Load boilerplate object into rcx and check if we need to create a | 238 // Load boilerplate object into rcx and check if we need to create a |
234 // boilerplate. | 239 // boilerplate. |
235 Label slow_case; | 240 Label slow_case; |
236 __ movq(rcx, Operand(rsp, 3 * kPointerSize)); | 241 __ movq(rcx, Operand(rsp, 3 * kPointerSize)); |
237 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 242 __ movq(rax, Operand(rsp, 2 * kPointerSize)); |
238 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 243 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); |
239 __ movq(rcx, | 244 __ movq(rcx, |
240 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); | 245 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); |
241 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 246 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); |
242 __ j(equal, &slow_case); | 247 __ j(equal, &slow_case); |
243 | 248 |
244 if (FLAG_debug_code) { | 249 if (FLAG_debug_code) { |
245 const char* message; | 250 const char* message; |
246 Heap::RootListIndex expected_map_index; | 251 Heap::RootListIndex expected_map_index; |
247 if (mode_ == CLONE_ELEMENTS) { | 252 if (mode_ == CLONE_ELEMENTS) { |
248 message = "Expected (writable) fixed array"; | 253 message = "Expected (writable) fixed array"; |
249 expected_map_index = Heap::kFixedArrayMapRootIndex; | 254 expected_map_index = Heap::kFixedArrayMapRootIndex; |
| 255 } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { |
| 256 message = "Expected (writable) fixed double array"; |
| 257 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; |
250 } else { | 258 } else { |
251 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); | 259 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); |
252 message = "Expected copy-on-write fixed array"; | 260 message = "Expected copy-on-write fixed array"; |
253 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 261 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
254 } | 262 } |
255 __ push(rcx); | 263 __ push(rcx); |
256 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 264 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
257 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 265 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
258 expected_map_index); | 266 expected_map_index); |
259 __ Assert(equal, message); | 267 __ Assert(equal, message); |
(...skipping 13 matching lines...) Expand all Loading... |
273 } | 281 } |
274 | 282 |
275 if (length_ > 0) { | 283 if (length_ > 0) { |
276 // Get hold of the elements array of the boilerplate and setup the | 284 // Get hold of the elements array of the boilerplate and setup the |
277 // elements pointer in the resulting object. | 285 // elements pointer in the resulting object. |
278 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 286 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
279 __ lea(rdx, Operand(rax, JSArray::kSize)); | 287 __ lea(rdx, Operand(rax, JSArray::kSize)); |
280 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); | 288 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); |
281 | 289 |
282 // Copy the elements array. | 290 // Copy the elements array. |
283 for (int i = 0; i < elements_size; i += kPointerSize) { | 291 // FixedArray: copy pointer-wise; FixedDoubleArray: copy header, then doubles. |
284 __ movq(rbx, FieldOperand(rcx, i)); | 292 if (mode_ == CLONE_ELEMENTS) { |
285 __ movq(FieldOperand(rdx, i), rbx); | 293 for (int i = 0; i < elements_size; i += kPointerSize) { |
| 294 __ movq(rbx, FieldOperand(rcx, i)); |
| 295 __ movq(FieldOperand(rdx, i), rbx); |
| 296 } |
| 297 } else { |
| 298 ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS); |
| 299 int i; |
| 300 for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { |
| 301 __ movq(rbx, FieldOperand(rcx, i)); |
| 302 __ movq(FieldOperand(rdx, i), rbx); |
| 303 } |
| 304 while (i < elements_size) { |
| 305 __ movsd(xmm0, FieldOperand(rcx, i)); |
| 306 __ movsd(FieldOperand(rdx, i), xmm0); |
| 307 i += kDoubleSize; |
| 308 } |
| 309 ASSERT(i == elements_size); |
286 } | 310 } |
287 } | 311 } |
288 | 312 |
289 // Return and remove the on-stack parameters. | 313 // Return and remove the on-stack parameters. |
290 __ ret(3 * kPointerSize); | 314 __ ret(3 * kPointerSize); |
291 | 315 |
292 __ bind(&slow_case); | 316 __ bind(&slow_case); |
293 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 317 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
294 } | 318 } |
295 | 319 |
(...skipping 5796 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6092 OMIT_SMI_CHECK); | 6116 OMIT_SMI_CHECK); |
6093 __ pop(rax); | 6117 __ pop(rax); |
6094 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 6118 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
6095 } | 6119 } |
6096 | 6120 |
6097 #undef __ | 6121 #undef __ |
6098 | 6122 |
6099 } } // namespace v8::internal | 6123 } } // namespace v8::internal |
6100 | 6124 |
6101 #endif // V8_TARGET_ARCH_X64 | 6125 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |