Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 1348773002: [turbofan] Call ArgumentsAccessStub to materialize arguments. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebased. Created 5 years, 2 months ago
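Note: this patch switches the three ArgumentsAccessStub generators (GenerateNewSloppySlow, GenerateNewSloppyFast and GenerateNewStrict) from reading their inputs off the stack (sp[0]: number of parameters, sp[4]: receiver displacement, sp[8]: function) to receiving them in registers described by ArgumentsAccessNewDescriptor: r1 = function, r2 = number of parameters (tagged), r3 = parameters pointer, as asserted by the new DCHECKs in the diff below. For orientation only, here is a hypothetical sketch of how such an ARM register assignment is typically declared; the real definition lives in src/arm/interface-descriptors-arm.cc in this CL and is not quoted here, and the InitializePlatformSpecific signature below follows V8's general CallInterfaceDescriptor pattern of the time rather than this patch's exact code.

  // Hypothetical sketch only -- not the patch's actual code. Assumes the
  // V8 interface-descriptor headers of this era are available.
  // Register choices mirror the DCHECKs in code-stubs-arm.cc below:
  //   r1 = function, r2 = parameter count (tagged Smi), r3 = parameters pointer.
  void ArgumentsAccessNewDescriptor::InitializePlatformSpecific(
      CallInterfaceDescriptorData* data) {
    Register registers[] = {r1, r2, r3};
    data->InitializePlatformSpecific(arraysize(registers), registers);
  }

With the count and parameters pointer now arriving in r2/r3, the stubs no longer pop three words off the stack on return, which is why the __ add(sp, sp, Operand(3 * kPointerSize)) instructions disappear and the runtime fallbacks instead push the function, parameters pointer and count before tail-calling.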
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_ARM
 
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 1475 matching lines...)
@@ -1486,364 +1486,358 @@
 
   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ push(r1);
   __ TailCallRuntime(Runtime::kArguments, 1, 1);
 }
 
 
 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
-  // sp[0] : number of parameters
-  // sp[4] : receiver displacement
-  // sp[8] : function
+  // r1 : function
+  // r2 : number of parameters (tagged)
+  // r3 : parameters pointer
+
+  DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
 
   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
-  __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
-  __ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
+  __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ b(ne, &runtime);
 
   // Patch the arguments.length and the parameters pointer in the current frame.
-  __ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ str(r2, MemOperand(sp, 0 * kPointerSize));
-  __ add(r3, r3, Operand(r2, LSL, 1));
-  __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ str(r3, MemOperand(sp, 1 * kPointerSize));
+  __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ add(r4, r4, Operand(r2, LSL, 1));
+  __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
 
   __ bind(&runtime);
+  __ Push(r1, r3, r2);
   __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
 }
 
 
 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
-  // Stack layout:
-  // sp[0] : number of parameters (tagged)
-  // sp[4] : address of receiver argument
-  // sp[8] : function
+  // r1 : function
+  // r2 : number of parameters (tagged)
+  // r3 : parameters pointer
   // Registers used over whole function:
-  // r6 : allocated object (tagged)
-  // r9 : mapped parameter count (tagged)
+  // r5 : arguments count (tagged)
+  // r6 : mapped parameter count (tagged)
 
-  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
-  // r1 = parameter count (tagged)
+  DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
 
   // Check if the calling frame is an arguments adaptor frame.
-  Label runtime;
-  Label adaptor_frame, try_allocate;
-  __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
-  __ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  Label adaptor_frame, try_allocate, runtime;
+  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
+  __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ b(eq, &adaptor_frame);
 
   // No adaptor, parameter count = argument count.
-  __ mov(r2, r1);
+  __ mov(r5, r2);
+  __ mov(r6, r2);
   __ b(&try_allocate);
 
   // We have an adaptor frame. Patch the parameters pointer.
   __ bind(&adaptor_frame);
-  __ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ add(r3, r3, Operand(r2, LSL, 1));
-  __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ str(r3, MemOperand(sp, 1 * kPointerSize));
+  __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ add(r4, r4, Operand(r5, LSL, 1));
+  __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
 
-  // r1 = parameter count (tagged)
-  // r2 = argument count (tagged)
-  // Compute the mapped parameter count = min(r1, r2) in r1.
-  __ cmp(r1, Operand(r2));
-  __ mov(r1, Operand(r2), LeaveCC, gt);
+  // r5 = argument count (tagged)
+  // r6 = parameter count (tagged)
+  // Compute the mapped parameter count = min(r6, r5) in r6.
+  __ mov(r6, r2);
+  __ cmp(r6, Operand(r5));
+  __ mov(r6, Operand(r5), LeaveCC, gt);
 
   __ bind(&try_allocate);
 
   // Compute the sizes of backing store, parameter map, and arguments object.
   // 1. Parameter map, has 2 extra words containing context and backing store.
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   // If there are no mapped parameters, we do not need the parameter_map.
-  __ cmp(r1, Operand(Smi::FromInt(0)));
+  __ cmp(r6, Operand(Smi::FromInt(0)));
   __ mov(r9, Operand::Zero(), LeaveCC, eq);
-  __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
+  __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
   __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
 
   // 2. Backing store.
-  __ add(r9, r9, Operand(r2, LSL, 1));
+  __ add(r9, r9, Operand(r5, LSL, 1));
   __ add(r9, r9, Operand(FixedArray::kHeaderSize));
 
   // 3. Arguments object.
   __ add(r9, r9, Operand(Heap::kSloppyArgumentsObjectSize));
 
   // Do the allocation of all three objects in one go.
-  __ Allocate(r9, r0, r3, r4, &runtime, TAG_OBJECT);
+  __ Allocate(r9, r0, r4, r9, &runtime, TAG_OBJECT);
 
   // r0 = address of new object(s) (tagged)
   // r2 = argument count (smi-tagged)
   // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
   const int kAliasedOffset =
      Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
 
   __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
-  __ cmp(r1, Operand::Zero());
+  __ cmp(r6, Operand::Zero());
   __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
   __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
 
   // r0 = address of new object (tagged)
-  // r1 = mapped parameter count (tagged)
   // r2 = argument count (smi-tagged)
   // r4 = address of arguments map (tagged)
+  // r6 = mapped parameter count (tagged)
   __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
-  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
-  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
-  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
+  __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
+  __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+  __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
 
   // Set up the callee in-object property.
   STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
-  __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
-  __ AssertNotSmi(r3);
+  __ AssertNotSmi(r1);
   const int kCalleeOffset = JSObject::kHeaderSize +
       Heap::kArgumentsCalleeIndex * kPointerSize;
-  __ str(r3, FieldMemOperand(r0, kCalleeOffset));
+  __ str(r1, FieldMemOperand(r0, kCalleeOffset));
 
   // Use the length (smi tagged) and set that as an in-object property too.
-  __ AssertSmi(r2);
+  __ AssertSmi(r5);
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   const int kLengthOffset = JSObject::kHeaderSize +
       Heap::kArgumentsLengthIndex * kPointerSize;
-  __ str(r2, FieldMemOperand(r0, kLengthOffset));
+  __ str(r5, FieldMemOperand(r0, kLengthOffset));
 
   // Set up the elements pointer in the allocated arguments object.
   // If we allocated a parameter map, r4 will point there, otherwise
   // it will point to the backing store.
   __ add(r4, r0, Operand(Heap::kSloppyArgumentsObjectSize));
   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
 
   // r0 = address of new object (tagged)
-  // r1 = mapped parameter count (tagged)
   // r2 = argument count (tagged)
   // r4 = address of parameter map or backing store (tagged)
+  // r6 = mapped parameter count (tagged)
   // Initialize parameter map. If there are no mapped arguments, we're done.
   Label skip_parameter_map;
-  __ cmp(r1, Operand(Smi::FromInt(0)));
-  // Move backing store address to r3, because it is
+  __ cmp(r6, Operand(Smi::FromInt(0)));
+  // Move backing store address to r1, because it is
   // expected there when filling in the unmapped arguments.
-  __ mov(r3, r4, LeaveCC, eq);
+  __ mov(r1, r4, LeaveCC, eq);
   __ b(eq, &skip_parameter_map);
 
-  __ LoadRoot(r6, Heap::kSloppyArgumentsElementsMapRootIndex);
-  __ str(r6, FieldMemOperand(r4, FixedArray::kMapOffset));
-  __ add(r6, r1, Operand(Smi::FromInt(2)));
-  __ str(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
+  __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
+  __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
+  __ add(r5, r6, Operand(Smi::FromInt(2)));
+  __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
   __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
-  __ add(r6, r4, Operand(r1, LSL, 1));
-  __ add(r6, r6, Operand(kParameterMapHeaderSize));
-  __ str(r6, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
+  __ add(r5, r4, Operand(r6, LSL, 1));
+  __ add(r5, r5, Operand(kParameterMapHeaderSize));
+  __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
 
   // Copy the parameter slots and the holes in the arguments.
   // We need to fill in mapped_parameter_count slots. They index the context,
   // where parameters are stored in reverse order, at
   // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
   // The mapped parameter thus need to get indices
   // MIN_CONTEXT_SLOTS+parameter_count-1 ..
   // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   // We loop from right to left.
   Label parameters_loop, parameters_test;
-  __ mov(r6, r1);
-  __ ldr(r9, MemOperand(sp, 0 * kPointerSize));
-  __ add(r9, r9, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
-  __ sub(r9, r9, Operand(r1));
-  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
-  __ add(r3, r4, Operand(r6, LSL, 1));
-  __ add(r3, r3, Operand(kParameterMapHeaderSize));
+  __ mov(r5, r6);
+  __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
+  __ sub(r9, r9, Operand(r6));
+  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ add(r1, r4, Operand(r5, LSL, 1));
+  __ add(r1, r1, Operand(kParameterMapHeaderSize));
 
-  // r6 = loop variable (tagged)
-  // r1 = mapping index (tagged)
-  // r3 = address of backing store (tagged)
+  // r1 = address of backing store (tagged)
   // r4 = address of parameter map (tagged), which is also the address of new
   // object + Heap::kSloppyArgumentsObjectSize (tagged)
   // r0 = temporary scratch (a.o., for address calculation)
-  // r5 = the hole value
+  // r5 = loop variable (tagged)
+  // ip = the hole value
   __ jmp(&parameters_test);
 
   __ bind(&parameters_loop);
-  __ sub(r6, r6, Operand(Smi::FromInt(1)));
-  __ mov(r0, Operand(r6, LSL, 1));
+  __ sub(r5, r5, Operand(Smi::FromInt(1)));
+  __ mov(r0, Operand(r5, LSL, 1));
   __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
   __ str(r9, MemOperand(r4, r0));
   __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
-  __ str(r5, MemOperand(r3, r0));
+  __ str(ip, MemOperand(r1, r0));
   __ add(r9, r9, Operand(Smi::FromInt(1)));
   __ bind(&parameters_test);
-  __ cmp(r6, Operand(Smi::FromInt(0)));
+  __ cmp(r5, Operand(Smi::FromInt(0)));
   __ b(ne, &parameters_loop);
 
-  // Restore r0 = new object (tagged)
+  // Restore r0 = new object (tagged) and r5 = argument count (tagged).
   __ sub(r0, r4, Operand(Heap::kSloppyArgumentsObjectSize));
+  __ ldr(r5, FieldMemOperand(r0, kLengthOffset));
 
   __ bind(&skip_parameter_map);
   // r0 = address of new object (tagged)
-  // r2 = argument count (tagged)
-  // r3 = address of backing store (tagged)
-  // r5 = scratch
+  // r1 = address of backing store (tagged)
+  // r5 = argument count (tagged)
+  // r6 = mapped parameter count (tagged)
+  // r9 = scratch
   // Copy arguments header and remaining slots (if there are any).
-  __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
-  __ str(r5, FieldMemOperand(r3, FixedArray::kMapOffset));
-  __ str(r2, FieldMemOperand(r3, FixedArray::kLengthOffset));
+  __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
+  __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
+  __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
 
   Label arguments_loop, arguments_test;
-  __ mov(r9, r1);
-  __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
-  __ sub(r4, r4, Operand(r9, LSL, 1));
+  __ sub(r3, r3, Operand(r6, LSL, 1));
   __ jmp(&arguments_test);
 
   __ bind(&arguments_loop);
-  __ sub(r4, r4, Operand(kPointerSize));
-  __ ldr(r6, MemOperand(r4, 0));
-  __ add(r5, r3, Operand(r9, LSL, 1));
-  __ str(r6, FieldMemOperand(r5, FixedArray::kHeaderSize));
-  __ add(r9, r9, Operand(Smi::FromInt(1)));
+  __ sub(r3, r3, Operand(kPointerSize));
+  __ ldr(r4, MemOperand(r3, 0));
+  __ add(r9, r1, Operand(r6, LSL, 1));
+  __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
+  __ add(r6, r6, Operand(Smi::FromInt(1)));
 
   __ bind(&arguments_test);
-  __ cmp(r9, Operand(r2));
+  __ cmp(r6, Operand(r5));
   __ b(lt, &arguments_loop);
 
-  // Return and remove the on-stack parameters.
-  __ add(sp, sp, Operand(3 * kPointerSize));
+  // Return.
   __ Ret();
 
   // Do the runtime call to allocate the arguments object.
   // r0 = address of new object (tagged)
-  // r2 = argument count (tagged)
+  // r5 = argument count (tagged)
   __ bind(&runtime);
-  __ str(r2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
+  __ Push(r1, r3, r5);
   __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
 }
 
 
 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
   // Return address is in lr.
   Label slow;
 
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register key = LoadDescriptor::NameRegister();
 
   // Check that the key is an array index, that is Uint32.
   __ NonNegativeSmiTst(key);
   __ b(ne, &slow);
 
   // Everything is fine, call runtime.
   __ Push(receiver, key); // Receiver, key.
 
   // Perform tail call to the entry.
   __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
 
   __ bind(&slow);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
 }
 
 
 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
-  // sp[0] : number of parameters
-  // sp[4] : receiver displacement
-  // sp[8] : function
+  // r1 : function
+  // r2 : number of parameters (tagged)
+  // r3 : parameters pointer
+
+  DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
+
   // Check if the calling frame is an arguments adaptor frame.
-  Label adaptor_frame, try_allocate, runtime;
-  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
-  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-  __ b(eq, &adaptor_frame);
-
-  // Get the length from the frame.
-  __ ldr(r1, MemOperand(sp, 0));
-  __ b(&try_allocate);
+  Label try_allocate, runtime;
+  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
+  __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ b(ne, &try_allocate);
 
   // Patch the arguments.length and the parameters pointer.
-  __ bind(&adaptor_frame);
-  __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ str(r1, MemOperand(sp, 0));
-  __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1));
-  __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ str(r3, MemOperand(sp, 1 * kPointerSize));
+  __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ add(r4, r4, Operand::PointerOffsetFromSmiKey(r2));
+  __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
 
   // Try the new space allocation. Start out with computing the size
   // of the arguments object and the elements array in words.
   Label add_arguments_object;
   __ bind(&try_allocate);
-  __ SmiUntag(r1, SetCC);
+  __ SmiUntag(r9, r2, SetCC);
   __ b(eq, &add_arguments_object);
-  __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
+  __ add(r9, r9, Operand(FixedArray::kHeaderSize / kPointerSize));
   __ bind(&add_arguments_object);
-  __ add(r1, r1, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
+  __ add(r9, r9, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
 
   // Do the allocation of both objects in one go.
-  __ Allocate(r1, r0, r2, r3, &runtime,
+  __ Allocate(r9, r0, r4, r5, &runtime,
               static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
 
   // Get the arguments boilerplate from the current native context.
   __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
   __ ldr(r4, MemOperand(
       r4, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX)));
 
   __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
-  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
-  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
-  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
+  __ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
+  __ str(r5, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+  __ str(r5, FieldMemOperand(r0, JSObject::kElementsOffset));
 
   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
-  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
-  __ AssertSmi(r1);
-  __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize +
+  __ AssertSmi(r2);
+  __ str(r2,
+         FieldMemOperand(r0, JSObject::kHeaderSize +
                              Heap::kArgumentsLengthIndex * kPointerSize));
 
   // If there are no actual arguments, we're done.
   Label done;
-  __ cmp(r1, Operand::Zero());
+  __ cmp(r2, Operand::Zero());
   __ b(eq, &done);
 
-  // Get the parameters pointer from the stack.
-  __ ldr(r2, MemOperand(sp, 1 * kPointerSize));
-
   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
   __ add(r4, r0, Operand(Heap::kStrictArgumentsObjectSize));
   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
-  __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
-  __ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
-  __ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
-  __ SmiUntag(r1);
+  __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
+  __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
+  __ str(r2, FieldMemOperand(r4, FixedArray::kLengthOffset));
+  __ SmiUntag(r2);
 
   // Copy the fixed array slots.
   Label loop;
   // Set up r4 to point to the first array slot.
   __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ bind(&loop);
-  // Pre-decrement r2 with kPointerSize on each iteration.
+  // Pre-decrement r3 with kPointerSize on each iteration.
   // Pre-decrement in order to skip receiver.
-  __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
+  __ ldr(r5, MemOperand(r3, kPointerSize, NegPreIndex));
   // Post-increment r4 with kPointerSize on each iteration.
-  __ str(r3, MemOperand(r4, kPointerSize, PostIndex));
-  __ sub(r1, r1, Operand(1));
-  __ cmp(r1, Operand::Zero());
+  __ str(r5, MemOperand(r4, kPointerSize, PostIndex));
+  __ sub(r2, r2, Operand(1));
+  __ cmp(r2, Operand::Zero());
   __ b(ne, &loop);
 
-  // Return and remove the on-stack parameters.
+  // Return.
   __ bind(&done);
-  __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
+  __ Push(r1, r3, r2);
   __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
 }
 
 
 void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Just jump directly to runtime if native RegExp is not selected at compile
   // time or if regexp entry in generated code is turned off runtime switch or
   // at compilation.
 #ifdef V8_INTERPRETED_REGEXP
   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
(...skipping 3698 matching lines...)
@@ -5548,10 +5542,10 @@
       MemOperand(fp, 6 * kPointerSize), NULL);
 }
 
 
 #undef __
 
 } // namespace internal
 } // namespace v8
 
 #endif // V8_TARGET_ARCH_ARM