| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 298 matching lines...) |
| 309 // Need to collect. Call into runtime system. | 309 // Need to collect. Call into runtime system. |
| 310 __ bind(&gc); | 310 __ bind(&gc); |
| 311 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 311 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 312 } | 312 } |
| 313 | 313 |
| 314 | 314 |
| 315 static void GenerateFastCloneShallowArrayCommon( | 315 static void GenerateFastCloneShallowArrayCommon( |
| 316 MacroAssembler* masm, | 316 MacroAssembler* masm, |
| 317 int length, | 317 int length, |
| 318 FastCloneShallowArrayStub::Mode mode, | 318 FastCloneShallowArrayStub::Mode mode, |
| 319 AllocationSiteInfoMode allocation_site_info_mode, | 319 AllocationSiteMode allocation_site_mode, |
| 320 Label* fail) { | 320 Label* fail) { |
| 321 // Registers on entry: | 321 // Registers on entry: |
| 322 // | 322 // |
| 323 // rcx: boilerplate literal array. | 323 // rcx: boilerplate literal array. |
| 324 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 324 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
| 325 | 325 |
| 326 // All sizes here are multiples of kPointerSize. | 326 // All sizes here are multiples of kPointerSize. |
| 327 int elements_size = 0; | 327 int elements_size = 0; |
| 328 if (length > 0) { | 328 if (length > 0) { |
| 329 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 329 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 330 ? FixedDoubleArray::SizeFor(length) | 330 ? FixedDoubleArray::SizeFor(length) |
| 331 : FixedArray::SizeFor(length); | 331 : FixedArray::SizeFor(length); |
| 332 } | 332 } |
| 333 int size = JSArray::kSize; | 333 int size = JSArray::kSize; |
| 334 int allocation_info_start = size; | 334 int allocation_info_start = size; |
| 335 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 335 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { |
| 336 size += AllocationSiteInfo::kSize; | 336 size += AllocationSiteInfo::kSize; |
| 337 } | 337 } |
| 338 size += elements_size; | 338 size += elements_size; |
| 339 | 339 |
| 340 // Allocate both the JS array and the elements array in one big | 340 // Allocate both the JS array and the elements array in one big |
| 341 // allocation. This avoids multiple limit checks. | 341 // allocation. This avoids multiple limit checks. |
| 342 AllocationFlags flags = TAG_OBJECT; | 342 AllocationFlags flags = TAG_OBJECT; |
| 343 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | 343 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { |
| 344 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | 344 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); |
| 345 } | 345 } |
| 346 __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags); | 346 __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags); |
| 347 | 347 |
| 348 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 348 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { |
| 349 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); | 349 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); |
| 350 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); | 350 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); |
| 351 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); | 351 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); |
| 352 } | 352 } |
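Note on the stores above (illustrative, not part of the CL): with TRACK_ALLOCATION_SITE the single new-space block carries three regions back to back, and the two movq instructions fill in the middle one with a map word and the boilerplate pointer. A minimal sketch of the offsets involved, assuming AllocationSiteInfo is a two-word {map, payload} struct:

    // [ JSArray header | AllocationSiteInfo {map, boilerplate} | elements ]
    int allocation_info_start = JSArray::kSize;                // map word
    int payload_start = allocation_info_start + kPointerSize;  // boilerplate
    int elements_start = JSArray::kSize + AllocationSiteInfo::kSize;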
| 353 | 353 |
| 354 // Copy the JS array part. | 354 // Copy the JS array part. |
| 355 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 355 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 356 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 356 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
| 357 __ movq(rbx, FieldOperand(rcx, i)); | 357 __ movq(rbx, FieldOperand(rcx, i)); |
| 358 __ movq(FieldOperand(rax, i), rbx); | 358 __ movq(FieldOperand(rax, i), rbx); |
| 359 } | 359 } |
| 360 } | 360 } |
| 361 | 361 |
| 362 if (length > 0) { | 362 if (length > 0) { |
| 363 // Get hold of the elements array of the boilerplate and setup the | 363 // Get hold of the elements array of the boilerplate and setup the |
| 364 // elements pointer in the resulting object. | 364 // elements pointer in the resulting object. |
| 365 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 365 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
| 366 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 366 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { |
| 367 __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize)); | 367 __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize)); |
| 368 } else { | 368 } else { |
| 369 __ lea(rdx, Operand(rax, JSArray::kSize)); | 369 __ lea(rdx, Operand(rax, JSArray::kSize)); |
| 370 } | 370 } |
| 371 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); | 371 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); |
| 372 | 372 |
| 373 // Copy the elements array. | 373 // Copy the elements array. |
| 374 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { | 374 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { |
| 375 for (int i = 0; i < elements_size; i += kPointerSize) { | 375 for (int i = 0; i < elements_size; i += kPointerSize) { |
| 376 __ movq(rbx, FieldOperand(rcx, i)); | 376 __ movq(rbx, FieldOperand(rcx, i)); |
| (...skipping 30 matching lines...) |
| 407 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 407 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); |
| 408 __ movq(rcx, | 408 __ movq(rcx, |
| 409 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); | 409 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); |
| 410 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 410 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); |
| 411 Label slow_case; | 411 Label slow_case; |
| 412 __ j(equal, &slow_case); | 412 __ j(equal, &slow_case); |
| 413 | 413 |
| 414 FastCloneShallowArrayStub::Mode mode = mode_; | 414 FastCloneShallowArrayStub::Mode mode = mode_; |
| 415 // rcx is boilerplate object. | 415 // rcx is boilerplate object. |
| 416 Factory* factory = masm->isolate()->factory(); | 416 Factory* factory = masm->isolate()->factory(); |
| 417 AllocationSiteInfoMode allocation_site_info_mode = | |
| 418 DONT_TRACK_ALLOCATION_SITE_INFO; | |
| 419 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) { | |
| 420 mode = CLONE_ANY_ELEMENTS; | |
| 421 allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO; | |
| 422 } | |
| 423 | |
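The deleted block above is the core of this change: the tracking flag used to be folded into the stub's Mode enum as CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO and decoded here; the new code reads the stub member allocation_site_mode_ directly. A sketch of the renamed declarations this file now depends on (assumed shapes; they live outside this file):

    enum AllocationSiteMode {
      DONT_TRACK_ALLOCATION_SITE,
      TRACK_ALLOCATION_SITE
    };
    // The stub takes the mode at construction instead of encoding it in Mode:
    FastCloneShallowArrayStub(Mode mode,
                              AllocationSiteMode allocation_site_mode,
                              int length);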
| 424 if (mode == CLONE_ANY_ELEMENTS) { | 417 if (mode == CLONE_ANY_ELEMENTS) { |
| 425 Label double_elements, check_fast_elements; | 418 Label double_elements, check_fast_elements; |
| 426 __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset)); | 419 __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset)); |
| 427 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 420 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 428 factory->fixed_cow_array_map()); | 421 factory->fixed_cow_array_map()); |
| 429 __ j(not_equal, &check_fast_elements); | 422 __ j(not_equal, &check_fast_elements); |
| 430 GenerateFastCloneShallowArrayCommon(masm, 0, | 423 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS, |
| 431 COPY_ON_WRITE_ELEMENTS, | 424 allocation_site_mode_, |
| 432 allocation_site_info_mode, | |
| 433 &slow_case); | 425 &slow_case); |
| 434 __ ret(3 * kPointerSize); | 426 __ ret(3 * kPointerSize); |
| 435 | 427 |
| 436 __ bind(&check_fast_elements); | 428 __ bind(&check_fast_elements); |
| 437 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 429 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 438 factory->fixed_array_map()); | 430 factory->fixed_array_map()); |
| 439 __ j(not_equal, &double_elements); | 431 __ j(not_equal, &double_elements); |
| 440 GenerateFastCloneShallowArrayCommon(masm, length_, | 432 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS, |
| 441 CLONE_ELEMENTS, | 433 allocation_site_mode_, |
| 442 allocation_site_info_mode, | |
| 443 &slow_case); | 434 &slow_case); |
| 444 __ ret(3 * kPointerSize); | 435 __ ret(3 * kPointerSize); |
| 445 | 436 |
| 446 __ bind(&double_elements); | 437 __ bind(&double_elements); |
| 447 mode = CLONE_DOUBLE_ELEMENTS; | 438 mode = CLONE_DOUBLE_ELEMENTS; |
| 448 // Fall through to generate the code to handle double elements. | 439 // Fall through to generate the code to handle double elements. |
| 449 } | 440 } |
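For CLONE_ANY_ELEMENTS the clone strategy is chosen at runtime from the boilerplate's elements map; the branches above amount to the following decision (illustrative pseudo-C++ only, not code from the CL):

    if (elements_map == fixed_cow_array_map) {
      // Copy-on-write backing store: share it, clone only the JSArray header.
    } else if (elements_map == fixed_array_map) {
      // Fast tagged elements: word-by-word copy (CLONE_ELEMENTS).
    } else {
      // Anything else is treated as doubles: double-aligned allocation and
      // copy (CLONE_DOUBLE_ELEMENTS, the fall-through that follows).
    }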
| 450 | 441 |
| 451 if (FLAG_debug_code) { | 442 if (FLAG_debug_code) { |
| 452 const char* message; | 443 const char* message; |
| (...skipping 11 matching lines...) |
| 464 } | 455 } |
| 465 __ push(rcx); | 456 __ push(rcx); |
| 466 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 457 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); |
| 467 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 458 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 468 expected_map_index); | 459 expected_map_index); |
| 469 __ Assert(equal, message); | 460 __ Assert(equal, message); |
| 470 __ pop(rcx); | 461 __ pop(rcx); |
| 471 } | 462 } |
| 472 | 463 |
| 473 GenerateFastCloneShallowArrayCommon(masm, length_, mode, | 464 GenerateFastCloneShallowArrayCommon(masm, length_, mode, |
| 474 allocation_site_info_mode, &slow_case); | 465 allocation_site_mode_, |
| | 466 &slow_case); |
| 475 __ ret(3 * kPointerSize); | 467 __ ret(3 * kPointerSize); |
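Every success path ends in ret(3 * kPointerSize) because the stub is entered with three tagged arguments on the stack; judging from the kCreateArrayLiteralShallow fallback, these are presumably as follows (the stack-layout comment itself sits in the lines elided above):

    // rsp[1 * kPointerSize]: constant elements    (assumed)
    // rsp[2 * kPointerSize]: literal index, a Smi (assumed)
    // rsp[3 * kPointerSize]: literals array       (assumed)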
| 476 | 468 |
| 477 __ bind(&slow_case); | 469 __ bind(&slow_case); |
| 478 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 470 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
| 479 } | 471 } |
| 480 | 472 |
| 481 | 473 |
| 482 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 474 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
| 483 // Stack layout on entry: | 475 // Stack layout on entry: |
| 484 // | 476 // |
| (...skipping 6062 matching lines...) |
| 6547 #endif | 6539 #endif |
| 6548 | 6540 |
| 6549 __ Ret(); | 6541 __ Ret(); |
| 6550 } | 6542 } |
| 6551 | 6543 |
| 6552 #undef __ | 6544 #undef __ |
| 6553 | 6545 |
| 6554 } } // namespace v8::internal | 6546 } } // namespace v8::internal |
| 6555 | 6547 |
| 6556 #endif // V8_TARGET_ARCH_X64 | 6548 #endif // V8_TARGET_ARCH_X64 |