# Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 392 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 403 | 403 |
| 404 // Copy the elements array. | 404 // Copy the elements array. |
| 405 ASSERT((elements_size % kPointerSize) == 0); | 405 ASSERT((elements_size % kPointerSize) == 0); |
| 406 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); | 406 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); |
| 407 } | 407 } |
| 408 } | 408 } |
| 409 | 409 |
| 410 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 410 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
| 411 // Stack layout on entry: | 411 // Stack layout on entry: |
| 412 // | 412 // |
| 413 // [sp]: constant elements. | 413 // [sp]: flags (ignored) |
|
> **danno** — 2013/02/08 13:44:38:
> I don't think you need to pass flags explicitly. I… *[comment truncated in capture]*
>
> **mvstanton** — 2013/02/11 11:11:24:
> Good idea, thanks.
| |
| 414 // [sp + kPointerSize]: literal index. | 414 // [sp + kPointerSize]: constant elements. |
| 415 // [sp + (2 * kPointerSize)]: literals array. | 415 // [sp + (2 * kPointerSize)]: literal index. |
| 416 // [sp + (3 * kPointerSize)]: literals array. | |
| 416 | 417 |
| 417 // Load boilerplate object into r3 and check if we need to create a | 418 // Load boilerplate object into r3 and check if we need to create a |
| 418 // boilerplate. | 419 // boilerplate. |
| 419 Label slow_case; | 420 Label slow_case; |
| 420 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); | 421 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); |
| 421 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); | 422 __ ldr(r0, MemOperand(sp, 2 * kPointerSize)); |
| 422 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 423 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 423 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 424 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 424 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 425 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
| 425 __ b(eq, &slow_case); | 426 __ b(eq, &slow_case); |
| 426 | 427 |
| 427 FastCloneShallowArrayStub::Mode mode = mode_; | 428 FastCloneShallowArrayStub::Mode mode = mode_; |
| 428 if (mode == CLONE_ANY_ELEMENTS) { | 429 if (mode == CLONE_ANY_ELEMENTS) { |
| 429 Label double_elements, check_fast_elements; | 430 Label double_elements, check_fast_elements; |
| 430 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); | 431 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 431 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 432 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 432 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); | 433 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); |
| 433 __ b(ne, &check_fast_elements); | 434 __ b(ne, &check_fast_elements); |
| 434 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS, | 435 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS, |
| 435 allocation_site_mode_, | 436 allocation_site_mode_, |
| 436 &slow_case); | 437 &slow_case); |
| 437 // Return and remove the on-stack parameters. | 438 // Return and remove the on-stack parameters. |
| 438 __ add(sp, sp, Operand(3 * kPointerSize)); | 439 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 439 __ Ret(); | 440 __ Ret(); |
| 440 | 441 |
| 441 __ bind(&check_fast_elements); | 442 __ bind(&check_fast_elements); |
| 442 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); | 443 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); |
| 443 __ b(ne, &double_elements); | 444 __ b(ne, &double_elements); |
| 444 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS, | 445 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS, |
| 445 allocation_site_mode_, | 446 allocation_site_mode_, |
| 446 &slow_case); | 447 &slow_case); |
| 447 // Return and remove the on-stack parameters. | 448 // Return and remove the on-stack parameters. |
| 448 __ add(sp, sp, Operand(3 * kPointerSize)); | 449 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 449 __ Ret(); | 450 __ Ret(); |
| 450 | 451 |
| 451 __ bind(&double_elements); | 452 __ bind(&double_elements); |
| 452 mode = CLONE_DOUBLE_ELEMENTS; | 453 mode = CLONE_DOUBLE_ELEMENTS; |
| 453 // Fall through to generate the code to handle double elements. | 454 // Fall through to generate the code to handle double elements. |
| 454 } | 455 } |
| 455 | 456 |
| 456 if (FLAG_debug_code) { | 457 if (FLAG_debug_code) { |
| 457 const char* message; | 458 const char* message; |
| 458 Heap::RootListIndex expected_map_index; | 459 Heap::RootListIndex expected_map_index; |
| (...skipping 14 matching lines...) Expand all Loading... | |
| 473 __ CompareRoot(r3, expected_map_index); | 474 __ CompareRoot(r3, expected_map_index); |
| 474 __ Assert(eq, message); | 475 __ Assert(eq, message); |
| 475 __ pop(r3); | 476 __ pop(r3); |
| 476 } | 477 } |
| 477 | 478 |
| 478 GenerateFastCloneShallowArrayCommon(masm, length_, mode, | 479 GenerateFastCloneShallowArrayCommon(masm, length_, mode, |
| 479 allocation_site_mode_, | 480 allocation_site_mode_, |
| 480 &slow_case); | 481 &slow_case); |
| 481 | 482 |
| 482 // Return and remove the on-stack parameters. | 483 // Return and remove the on-stack parameters. |
| 483 __ add(sp, sp, Operand(3 * kPointerSize)); | 484 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 484 __ Ret(); | 485 __ Ret(); |
| 485 | 486 |
| 486 __ bind(&slow_case); | 487 __ bind(&slow_case); |
|
> **danno** — 2013/02/08 13:44:38:
> In the parameterized stub, you'd have to push the… *[comment truncated in capture]*
>
> **mvstanton** — 2013/02/11 11:11:24:
> Done.
| |
| 487 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 488 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 4, 1); |
| 488 } | 489 } |
| 489 | 490 |
| 490 | 491 |
| 491 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 492 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
| 492 // Stack layout on entry: | 493 // Stack layout on entry: |
| 493 // | 494 // |
| 494 // [sp]: object literal flags. | 495 // [sp]: object literal flags. |
| 495 // [sp + kPointerSize]: constant properties. | 496 // [sp + kPointerSize]: constant properties. |
| 496 // [sp + (2 * kPointerSize)]: literal index. | 497 // [sp + (2 * kPointerSize)]: literal index. |
| 497 // [sp + (3 * kPointerSize)]: literals array. | 498 // [sp + (3 * kPointerSize)]: literals array. |
| (...skipping 7431 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 7929 | 7930 |
| 7930 __ Pop(lr, r5, r1); | 7931 __ Pop(lr, r5, r1); |
| 7931 __ Ret(); | 7932 __ Ret(); |
| 7932 } | 7933 } |
| 7933 | 7934 |
| 7934 #undef __ | 7935 #undef __ |
| 7935 | 7936 |
| 7936 } } // namespace v8::internal | 7937 } } // namespace v8::internal |
| 7937 | 7938 |
| 7938 #endif // V8_TARGET_ARCH_ARM | 7939 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |