OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/macro-assembler.h" | 10 #include "src/macro-assembler.h" |
(...skipping 268 matching lines...)
279 kDontSaveFPRegs, | 279 kDontSaveFPRegs, |
280 EMIT_REMEMBERED_SET, | 280 EMIT_REMEMBERED_SET, |
281 OMIT_SMI_CHECK); | 281 OMIT_SMI_CHECK); |
282 | 282 |
283 // Convert smis to doubles and holes to hole NaNs. The Array's length | 283 // Convert smis to doubles and holes to hole NaNs. The Array's length |
284 // remains unchanged. | 284 // remains unchanged. |
285 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); | 285 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); |
286 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); | 286 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); |
287 | 287 |
288 Label loop, entry, convert_hole; | 288 Label loop, entry, convert_hole; |
289 __ movq(r15, bit_cast<int64_t, uint64_t>(kHoleNanInt64)); | 289 __ movq(r15, bit_cast<int64_t>(FixedDoubleArray::hole_nan_as_double())); |
290 // r15: the-hole NaN | 290 // r15: the-hole NaN |
291 __ jmp(&entry); | 291 __ jmp(&entry); |
292 | 292 |
293 // Allocate new backing store. | 293 // Allocate new backing store. |
294 __ bind(&new_backing_store); | 294 __ bind(&new_backing_store); |
295 __ leap(rdi, Operand(r9, times_8, FixedArray::kHeaderSize)); | 295 __ leap(rdi, Operand(r9, times_8, FixedArray::kHeaderSize)); |
296 __ Allocate(rdi, r14, r11, r15, fail, TAG_OBJECT); | 296 __ Allocate(rdi, r14, r11, r15, fail, TAG_OBJECT); |
297 // Set backing store's map | 297 // Set backing store's map |
298 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | 298 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); |
299 __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi); | 299 __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi); |
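
The only substantive change in the hunk above is where the hole NaN immediate loaded into r15 comes from. A minimal standalone sketch of the reinterpretation bit_cast performs here, with illustrative names rather than V8's actual helpers, and assuming hole_nan_as_double() simply returns the double whose bit pattern is kHoleNanInt64:

#include <cstdint>
#include <cstring>

// Sketch of bit_cast<Dest>(source): copy the raw bits, no value conversion.
template <typename Dest, typename Source>
Dest BitCastSketch(const Source& source) {
  static_assert(sizeof(Dest) == sizeof(Source), "sizes must match");
  Dest dest;
  std::memcpy(&dest, &source, sizeof(dest));
  return dest;
}

// The immediate that ends up in r15. Whether the constant is supplied as a
// double (hole_nan_as_double()) or as the raw integer (kHoleNanInt64), the
// emitted 64-bit immediate is the same bit pattern.
int64_t HoleNanImmediate(double hole_nan_as_double) {
  return BitCastSketch<int64_t>(hole_nan_as_double);
}

Assuming the two constants encode the same NaN payload, the new form only changes which name the C++ side spells out; the generated code should be unchanged.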
(...skipping 86 matching lines...)
386 // r9 : number of elements | 386 // r9 : number of elements |
387 __ leap(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); | 387 __ leap(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); |
388 __ Allocate(rdi, r11, r14, r15, &gc_required, TAG_OBJECT); | 388 __ Allocate(rdi, r11, r14, r15, &gc_required, TAG_OBJECT); |
389 // r11: destination FixedArray | 389 // r11: destination FixedArray |
390 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); | 390 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); |
391 __ movp(FieldOperand(r11, HeapObject::kMapOffset), rdi); | 391 __ movp(FieldOperand(r11, HeapObject::kMapOffset), rdi); |
392 __ Integer32ToSmi(r14, r9); | 392 __ Integer32ToSmi(r14, r9); |
393 __ movp(FieldOperand(r11, FixedArray::kLengthOffset), r14); | 393 __ movp(FieldOperand(r11, FixedArray::kLengthOffset), r14); |
394 | 394 |
395 // Prepare for conversion loop. | 395 // Prepare for conversion loop. |
396 __ movq(rsi, bit_cast<int64_t, uint64_t>(kHoleNanInt64)); | 396 __ movq(rsi, bit_cast<int64_t>(FixedDoubleArray::hole_nan_as_double())); |
397 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); | 397 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); |
398 // rsi: the-hole NaN | 398 // rsi: the-hole NaN |
399 // rdi: pointer to the-hole | 399 // rdi: pointer to the-hole |
400 | 400 |
401 // Allocating heap numbers in the loop below can fail and cause a jump to | 401 // Allocating heap numbers in the loop below can fail and cause a jump to |
402 // gc_required. We can't leave a partly initialized FixedArray behind, | 402 // gc_required. We can't leave a partly initialized FixedArray behind, |
403 // so pessimistically fill it with holes now. | 403 // so pessimistically fill it with holes now. |
404 Label initialization_loop, initialization_loop_entry; | 404 Label initialization_loop, initialization_loop_entry; |
405 __ jmp(&initialization_loop_entry, Label::kNear); | 405 __ jmp(&initialization_loop_entry, Label::kNear); |
406 __ bind(&initialization_loop); | 406 __ bind(&initialization_loop); |
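
The comment in this hunk about pessimistically filling the new FixedArray with holes is the invariant the initialization loop exists for: allocating heap numbers later can jump to gc_required, so every slot must already hold a valid value before the first fallible allocation. A minimal sketch of that pattern in plain C++, with stand-ins for the generated code and V8's heap types (none of these names are V8 API):

#include <cstddef>
#include <vector>

struct Value {};        // stand-in for a tagged heap value
static Value the_hole;  // stand-in for the hole sentinel value

// Stand-in for allocating a HeapNumber; allocation may fail.
static bool AllocateHeapNumber(double /*value*/, Value** out) {
  static Value pool[16];
  static size_t used = 0;
  if (used == sizeof(pool) / sizeof(pool[0])) return false;  // "gc_required"
  *out = &pool[used++];
  return true;
}

// Fill the destination with holes up front, so that if an allocation fails
// partway through, the array is still fully initialized and safe to walk.
static bool ConvertDoubleToObjectElements(const std::vector<double>& src,
                                          std::vector<Value*>* dst) {
  dst->assign(src.size(), &the_hole);  // pessimistic initialization
  for (size_t i = 0; i < src.size(); ++i) {
    Value* boxed;
    if (!AllocateHeapNumber(src[i], &boxed)) return false;  // dst still valid
    (*dst)[i] = boxed;
  }
  return true;
}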
(...skipping 313 matching lines...)
720 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. | 720 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. |
721 return Operand(base_reg_, argument_count_reg_, times_pointer_size, | 721 return Operand(base_reg_, argument_count_reg_, times_pointer_size, |
722 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); | 722 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); |
723 } | 723 } |
724 } | 724 } |
725 | 725 |
726 | 726 |
727 } } // namespace v8::internal | 727 } } // namespace v8::internal |
728 | 728 |
729 #endif // V8_TARGET_ARCH_X64 | 729 #endif // V8_TARGET_ARCH_X64 |
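
For reference, the scaled Operand returned at the end of the last hunk resolves to straightforward address arithmetic. A rough sketch of the same computation as plain integer math, illustrative only, assuming 8-byte pointers on x64 and that receiver is 1 when the arguments on the stack include the receiver:

#include <cstdint>

const int kPointerSizeSketch = 8;  // x64 pointer size

// The Operand encodes
//   base_reg_ + argument_count_reg_ * 8 + displacement_to_last_argument
//             + (receiver - 1 - index) * 8,
// i.e. the stack slot holding argument[index], counted back from the last
// pushed argument.
uint64_t ArgumentSlotAddress(uint64_t base, int argument_count,
                             int displacement_to_last_argument,
                             int receiver, int index) {
  int64_t offset = static_cast<int64_t>(argument_count) * kPointerSizeSketch +
                   displacement_to_last_argument +
                   (receiver - 1 - index) * kPointerSizeSketch;
  return base + static_cast<uint64_t>(offset);
}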