Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 326 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 337 // Need to collect. Call into runtime system. | 337 // Need to collect. Call into runtime system. |
| 338 __ bind(&gc); | 338 __ bind(&gc); |
| 339 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 339 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 340 } | 340 } |
| 341 | 341 |
| 342 | 342 |
| 343 static void GenerateFastCloneShallowArrayCommon( | 343 static void GenerateFastCloneShallowArrayCommon( |
| 344 MacroAssembler* masm, | 344 MacroAssembler* masm, |
| 345 int length, | 345 int length, |
| 346 FastCloneShallowArrayStub::Mode mode, | 346 FastCloneShallowArrayStub::Mode mode, |
| 347 AllocationSiteInfoMode allocation_site_info_mode, | |
| 348 Label* fail) { | 347 Label* fail) { |
| 349 // Registers on entry: | 348 // Registers on entry: |
| 350 // | 349 // |
| 351 // r3: boilerplate literal array. | 350 // r3: boilerplate literal array. |
| 352 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 351 ASSERT(!FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)); |
| 352 bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode); | |
| 353 | 353 |
| 354 // All sizes here are multiples of kPointerSize. | 354 // All sizes here are multiples of kPointerSize. |
| 355 int elements_size = 0; | 355 int elements_size = 0; |
| 356 if (length > 0) { | 356 if (length > 0) { |
| 357 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 357 elements_size = FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode) |
| 358 ? FixedDoubleArray::SizeFor(length) | 358 ? FixedDoubleArray::SizeFor(length) |
| 359 : FixedArray::SizeFor(length); | 359 : FixedArray::SizeFor(length); |
| 360 } | 360 } |
| 361 | |
| 361 int size = JSArray::kSize; | 362 int size = JSArray::kSize; |
| 362 int allocation_info_start = size; | 363 int allocation_info_start = size; |
| 363 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 364 size += tracking_on ? AllocationSiteInfo::kSize + elements_size |
|
danno
2013/01/11 16:14:40
nit: "? AllocationSiteInfo::kSize + elements_size"
| |
| 364 size += AllocationSiteInfo::kSize; | 365 : elements_size; |
| 365 } | |
| 366 size += elements_size; | |
| 367 | 366 |
| 368 // Allocate both the JS array and the elements array in one big | 367 // Allocate both the JS array and the elements array in one big |
| 369 // allocation. This avoids multiple limit checks. | 368 // allocation. This avoids multiple limit checks. |
| 370 AllocationFlags flags = TAG_OBJECT; | 369 AllocationFlags flags = TAG_OBJECT; |
| 371 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | 370 if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) { |
| 372 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | 371 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); |
| 373 } | 372 } |
| 374 __ AllocateInNewSpace(size, r0, r1, r2, fail, flags); | 373 __ AllocateInNewSpace(size, r0, r1, r2, fail, flags); |
| 375 | 374 |
| 376 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 375 if (tracking_on) { |
| 377 __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()-> | 376 __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()-> |
| 378 allocation_site_info_map()))); | 377 allocation_site_info_map()))); |
| 379 __ str(r2, FieldMemOperand(r0, allocation_info_start)); | 378 __ str(r2, FieldMemOperand(r0, allocation_info_start)); |
| 380 __ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize)); | 379 __ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize)); |
| 381 } | 380 } |
| 382 | 381 |
| 383 // Copy the JS array part. | 382 // Copy the JS array part. |
| 384 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 383 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 385 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 384 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
| 386 __ ldr(r1, FieldMemOperand(r3, i)); | 385 __ ldr(r1, FieldMemOperand(r3, i)); |
| 387 __ str(r1, FieldMemOperand(r0, i)); | 386 __ str(r1, FieldMemOperand(r0, i)); |
| 388 } | 387 } |
| 389 } | 388 } |
| 390 | 389 |
| 391 if (length > 0) { | 390 if (length > 0) { |
| 392 // Get hold of the elements array of the boilerplate and setup the | 391 // Get hold of the elements array of the boilerplate and setup the |
| 393 // elements pointer in the resulting object. | 392 // elements pointer in the resulting object. |
| 394 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | 393 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 395 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { | 394 if (tracking_on) { |
| 396 __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize)); | 395 __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize)); |
| 397 } else { | 396 } else { |
| 398 __ add(r2, r0, Operand(JSArray::kSize)); | 397 __ add(r2, r0, Operand(JSArray::kSize)); |
| 399 } | 398 } |
| 400 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); | 399 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); |
| 401 | 400 |
| 402 // Copy the elements array. | 401 // Copy the elements array. |
| 403 ASSERT((elements_size % kPointerSize) == 0); | 402 ASSERT((elements_size % kPointerSize) == 0); |
| 404 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); | 403 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); |
| 405 } | 404 } |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 416 // boilerplate. | 415 // boilerplate. |
| 417 Label slow_case; | 416 Label slow_case; |
| 418 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); | 417 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); |
| 419 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); | 418 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); |
| 420 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 419 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 421 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 420 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 422 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 421 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
| 423 __ b(eq, &slow_case); | 422 __ b(eq, &slow_case); |
| 424 | 423 |
| 425 FastCloneShallowArrayStub::Mode mode = mode_; | 424 FastCloneShallowArrayStub::Mode mode = mode_; |
| 426 AllocationSiteInfoMode allocation_site_info_mode = | 425 if (FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)) { |
| 427 DONT_TRACK_ALLOCATION_SITE_INFO; | |
| 428 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) { | |
| 429 mode = CLONE_ANY_ELEMENTS; | |
| 430 allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO; | |
| 431 } | |
| 432 | |
| 433 if (mode == CLONE_ANY_ELEMENTS) { | |
| 434 Label double_elements, check_fast_elements; | 426 Label double_elements, check_fast_elements; |
| 435 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); | 427 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 436 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 428 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 437 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); | 429 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); |
| 438 __ b(ne, &check_fast_elements); | 430 __ b(ne, &check_fast_elements); |
| 439 GenerateFastCloneShallowArrayCommon(masm, 0, | 431 bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode); |
| 440 COPY_ON_WRITE_ELEMENTS, | 432 mode = tracking_on ? COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO |
|
danno
2013/01/11 16:14:40
nit: ? on next line
| |
| 441 allocation_site_info_mode, | 433 : COPY_ON_WRITE_ELEMENTS; |
| 442 &slow_case); | 434 GenerateFastCloneShallowArrayCommon(masm, 0, mode, &slow_case); |
| 443 // Return and remove the on-stack parameters. | 435 // Return and remove the on-stack parameters. |
| 444 __ add(sp, sp, Operand(3 * kPointerSize)); | 436 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 445 __ Ret(); | 437 __ Ret(); |
| 446 | 438 |
| 447 __ bind(&check_fast_elements); | 439 __ bind(&check_fast_elements); |
| 448 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); | 440 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); |
| 449 __ b(ne, &double_elements); | 441 __ b(ne, &double_elements); |
| 450 GenerateFastCloneShallowArrayCommon(masm, length_, | 442 mode = tracking_on ? CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO |
|
danno
2013/01/11 16:14:40
nit: ? on next line
| |
| 451 CLONE_ELEMENTS, | 443 : CLONE_ELEMENTS; |
| 452 allocation_site_info_mode, | 444 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); |
| 453 &slow_case); | |
| 454 // Return and remove the on-stack parameters. | 445 // Return and remove the on-stack parameters. |
| 455 __ add(sp, sp, Operand(3 * kPointerSize)); | 446 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 456 __ Ret(); | 447 __ Ret(); |
| 457 | 448 |
| 458 __ bind(&double_elements); | 449 __ bind(&double_elements); |
| 459 mode = CLONE_DOUBLE_ELEMENTS; | 450 mode = tracking_on ? CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO |
|
danno
2013/01/11 16:14:40
nit: ? on next line
| |
| 451 : CLONE_DOUBLE_ELEMENTS; | |
| 460 // Fall through to generate the code to handle double elements. | 452 // Fall through to generate the code to handle double elements. |
| 461 } | 453 } |
| 462 | 454 |
| 463 if (FLAG_debug_code) { | 455 if (FLAG_debug_code) { |
| 464 const char* message; | 456 const char* message; |
| 465 Heap::RootListIndex expected_map_index; | 457 Heap::RootListIndex expected_map_index; |
| 466 if (mode == CLONE_ELEMENTS) { | 458 if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) { |
| 467 message = "Expected (writable) fixed array"; | 459 message = "Expected (writable) fixed array"; |
| 468 expected_map_index = Heap::kFixedArrayMapRootIndex; | 460 expected_map_index = Heap::kFixedArrayMapRootIndex; |
| 469 } else if (mode == CLONE_DOUBLE_ELEMENTS) { | 461 } else if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) { |
| 470 message = "Expected (writable) fixed double array"; | 462 message = "Expected (writable) fixed double array"; |
| 471 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; | 463 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; |
| 472 } else { | 464 } else { |
| 473 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); | 465 ASSERT(FastCloneShallowArrayStub::IsCopyOnWriteElementsMode(mode)); |
| 474 message = "Expected copy-on-write fixed array"; | 466 message = "Expected copy-on-write fixed array"; |
| 475 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 467 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
| 476 } | 468 } |
| 477 __ push(r3); | 469 __ push(r3); |
| 478 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | 470 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 479 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 471 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 480 __ CompareRoot(r3, expected_map_index); | 472 __ CompareRoot(r3, expected_map_index); |
| 481 __ Assert(eq, message); | 473 __ Assert(eq, message); |
| 482 __ pop(r3); | 474 __ pop(r3); |
| 483 } | 475 } |
| 484 | 476 |
| 485 GenerateFastCloneShallowArrayCommon(masm, length_, mode, | 477 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); |
| 486 allocation_site_info_mode, &slow_case); | |
| 487 | 478 |
| 488 // Return and remove the on-stack parameters. | 479 // Return and remove the on-stack parameters. |
| 489 __ add(sp, sp, Operand(3 * kPointerSize)); | 480 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 490 __ Ret(); | 481 __ Ret(); |
| 491 | 482 |
| 492 __ bind(&slow_case); | 483 __ bind(&slow_case); |
| 493 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 484 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
| 494 } | 485 } |
| 495 | 486 |
| 496 | 487 |
| (...skipping 7242 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 7739 | 7730 |
| 7740 __ Pop(lr, r5, r1); | 7731 __ Pop(lr, r5, r1); |
| 7741 __ Ret(); | 7732 __ Ret(); |
| 7742 } | 7733 } |
| 7743 | 7734 |
| 7744 #undef __ | 7735 #undef __ |
| 7745 | 7736 |
| 7746 } } // namespace v8::internal | 7737 } } // namespace v8::internal |
| 7747 | 7738 |
| 7748 #endif // V8_TARGET_ARCH_ARM | 7739 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |