OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 229 matching lines...)
240 // Load the offset into r3. | 240 // Load the offset into r3. |
241 int slot_offset = | 241 int slot_offset = |
242 FixedArray::kHeaderSize + slot->index() * kPointerSize; | 242 FixedArray::kHeaderSize + slot->index() * kPointerSize; |
243 __ mov(r3, Operand(slot_offset)); | 243 __ mov(r3, Operand(slot_offset)); |
244 __ RecordWrite(r2, r3, r1); | 244 __ RecordWrite(r2, r3, r1); |
245 } | 245 } |
246 } | 246 } |
247 } | 247 } |
248 | 248 |
249 // Store the arguments object. This must happen after context | 249 // Store the arguments object. This must happen after context |
250 // initialization because the arguments object may be stored in the | 250 // initialization because the arguments object may be stored in |
251 // context. | 251 // the context. |
252 if (scope()->arguments() != NULL) { | 252 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { |
253 Comment cmnt(masm_, "[ allocate arguments object"); | 253 StoreArgumentsObject(true); |
254 ASSERT(scope()->arguments_shadow() != NULL); | |
255 Variable* arguments = scope()->arguments()->var(); | |
256 Variable* shadow = scope()->arguments_shadow()->var(); | |
257 ASSERT(arguments != NULL && arguments->slot() != NULL); | |
258 ASSERT(shadow != NULL && shadow->slot() != NULL); | |
259 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); | |
260 __ ldr(r2, frame_->Function()); | |
261 // The receiver is below the arguments, the return address, and the | |
262 // frame pointer on the stack. | |
263 const int kReceiverDisplacement = 2 + scope()->num_parameters(); | |
264 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); | |
265 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); | |
266 frame_->Adjust(3); | |
267 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); | |
268 frame_->CallStub(&stub, 3); | |
269 frame_->EmitPush(r0); | |
270 StoreToSlot(arguments->slot(), NOT_CONST_INIT); | |
271 StoreToSlot(shadow->slot(), NOT_CONST_INIT); | |
272 frame_->Drop(); // Value is no longer needed. | |
273 } | 254 } |
274 | 255 |
275 // Initialize ThisFunction reference if present. | 256 // Initialize ThisFunction reference if present. |
276 if (scope()->is_function_scope() && scope()->function() != NULL) { | 257 if (scope()->is_function_scope() && scope()->function() != NULL) { |
277 __ mov(ip, Operand(Factory::the_hole_value())); | 258 __ mov(ip, Operand(Factory::the_hole_value())); |
278 frame_->EmitPush(ip); | 259 frame_->EmitPush(ip); |
279 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); | 260 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); |
280 } | 261 } |
281 } else { | 262 } else { |
282 // When used as the secondary compiler for splitting, r1, cp, | 263 // When used as the secondary compiler for splitting, r1, cp, |
(...skipping 314 matching lines...)
597 | 578 |
598 void CodeGenerator::LoadGlobalReceiver(Register scratch) { | 579 void CodeGenerator::LoadGlobalReceiver(Register scratch) { |
599 VirtualFrame::SpilledScope spilled_scope(frame_); | 580 VirtualFrame::SpilledScope spilled_scope(frame_); |
600 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); | 581 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); |
601 __ ldr(scratch, | 582 __ ldr(scratch, |
602 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); | 583 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); |
603 frame_->EmitPush(scratch); | 584 frame_->EmitPush(scratch); |
604 } | 585 } |
605 | 586 |
606 | 587 |
| 588 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() { |
| 589 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION; |
| 590 ASSERT(scope()->arguments_shadow() != NULL); |
| 591 // We don't want to do lazy arguments allocation for functions that |
| 592 // have heap-allocated contexts, because it interferes with the |
| 593 // uninitialized const tracking in the context objects. |
| 594 return (scope()->num_heap_slots() > 0) |
| 595 ? EAGER_ARGUMENTS_ALLOCATION |
| 596 : LAZY_ARGUMENTS_ALLOCATION; |
| 597 } |
| 598 |
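For orientation, a hypothetical JavaScript-level illustration of when each allocation mode would apply under the rule above (written as TypeScript; the names add, eager, and lazy are invented for this sketch and are not part of the patch): no use of 'arguments' means no allocation, a heap-allocated context forces eager allocation, and otherwise allocation can be deferred until first use.

    // NO_ARGUMENTS_ALLOCATION: the function never touches 'arguments'.
    function add(a: number, b: number): number {
      return a + b;
    }

    // EAGER_ARGUMENTS_ALLOCATION (assumed): the inner closure captures 'x',
    // which typically forces a heap-allocated context, so the arguments
    // object is allocated on function entry.
    function eager(x: number): number {
      const inner = () => x;
      return arguments.length + inner();
    }

    // LAZY_ARGUMENTS_ALLOCATION: 'arguments' is used, but nothing forces a
    // heap-allocated context, so allocation can wait until it is read.
    function lazy(): number {
      return arguments.length;
    }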
| 599 |
| 600 void CodeGenerator::StoreArgumentsObject(bool initial) { |
| 601 VirtualFrame::SpilledScope spilled_scope(frame_); |
| 602 |
| 603 ArgumentsAllocationMode mode = ArgumentsMode(); |
| 604 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); |
| 605 |
| 606 Comment cmnt(masm_, "[ store arguments object"); |
| 607 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { |
| 608 // When using lazy arguments allocation, we store the hole value |
| 609 // as a sentinel indicating that the arguments object hasn't been |
| 610 // allocated yet. |
| 611 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 612 frame_->EmitPush(ip); |
| 613 } else { |
| 614 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); |
| 615 __ ldr(r2, frame_->Function()); |
| 616 // The receiver is below the arguments, the return address, and the |
| 617 // frame pointer on the stack. |
| 618 const int kReceiverDisplacement = 2 + scope()->num_parameters(); |
| 619 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); |
| 620 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); |
| 621 frame_->Adjust(3); |
| 622 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); |
| 623 frame_->CallStub(&stub, 3); |
| 624 frame_->EmitPush(r0); |
| 625 } |
| 626 |
| 627 Variable* arguments = scope()->arguments()->var(); |
| 628 Variable* shadow = scope()->arguments_shadow()->var(); |
| 629 ASSERT(arguments != NULL && arguments->slot() != NULL); |
| 630 ASSERT(shadow != NULL && shadow->slot() != NULL); |
| 631 JumpTarget done; |
| 632 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { |
| 633 // We have to skip storing into the arguments slot if it has |
| 634 // already been written to. This can happen if a function |
| 635 // has a local variable named 'arguments'. |
| 636 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); |
| 637 frame_->EmitPop(r0); |
| 638 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 639 __ cmp(r0, ip); |
| 640 done.Branch(ne); |
| 641 } |
| 642 StoreToSlot(arguments->slot(), NOT_CONST_INIT); |
| 643 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); |
| 644 StoreToSlot(shadow->slot(), NOT_CONST_INIT); |
| 645 } |
| 646 |
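As a reading aid, a minimal TypeScript model of the lazy-allocation protocol that the generated code above implements (THE_HOLE, Slot, and storeArgumentsObject are invented names for this sketch, not V8 API, and the store into the shadow slot is omitted): the initial store only records a sentinel, and a later store materializes the arguments object only if the sentinel is still in place, so a value written to the slot in the meantime is preserved.

    // Hypothetical model of the lazy path of StoreArgumentsObject.
    const THE_HOLE = Symbol("the-hole");  // stands in for the hole value

    interface Slot { value: unknown; }

    function storeArgumentsObject(slot: Slot,
                                  initial: boolean,
                                  allocate: () => object): void {
      if (initial) {
        // On function entry, just record the sentinel; nothing is allocated.
        slot.value = THE_HOLE;
        return;
      }
      // Later store: skip if the slot has already been written to, for
      // example by a local variable named 'arguments'.
      if (slot.value !== THE_HOLE) return;
      slot.value = allocate();  // first real use: build the arguments object
    }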
| 647 |
607 void CodeGenerator::LoadTypeofExpression(Expression* expr) { | 648 void CodeGenerator::LoadTypeofExpression(Expression* expr) { |
608 // Special handling of identifiers as subexpressions of typeof. | 649 // Special handling of identifiers as subexpressions of typeof. |
609 VirtualFrame::SpilledScope spilled_scope(frame_); | 650 VirtualFrame::SpilledScope spilled_scope(frame_); |
610 Variable* variable = expr->AsVariableProxy()->AsVariable(); | 651 Variable* variable = expr->AsVariableProxy()->AsVariable(); |
611 if (variable != NULL && !variable->is_this() && variable->is_global()) { | 652 if (variable != NULL && !variable->is_this() && variable->is_global()) { |
612 // For a global variable we build the property reference | 653 // For a global variable we build the property reference |
613 // <global>.<variable> and perform a (regular non-contextual) property | 654 // <global>.<variable> and perform a (regular non-contextual) property |
614 // load to make sure we do not get reference errors. | 655 // load to make sure we do not get reference errors. |
615 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX); | 656 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX); |
616 Literal key(variable->name()); | 657 Literal key(variable->name()); |
617 Property property(&global, &key, RelocInfo::kNoPosition); | 658 Property property(&global, &key, RelocInfo::kNoPosition); |
618 Reference ref(this, &property); | 659 Reference ref(this, &property); |
619 ref.GetValue(); | 660 ref.GetValue(); |
620 } else if (variable != NULL && variable->slot() != NULL) { | 661 } else if (variable != NULL && variable->slot() != NULL) { |
621 // For a variable that rewrites to a slot, we signal it is the immediate | 662 // For a variable that rewrites to a slot, we signal it is the immediate |
622 // subexpression of a typeof. | 663 // subexpression of a typeof. |
623 LoadFromSlot(variable->slot(), INSIDE_TYPEOF); | 664 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF); |
624 frame_->SpillAll(); | 665 frame_->SpillAll(); |
625 } else { | 666 } else { |
626 // Anything else can be handled normally. | 667 // Anything else can be handled normally. |
627 LoadAndSpill(expr); | 668 LoadAndSpill(expr); |
628 } | 669 } |
629 } | 670 } |
630 | 671 |
631 | 672 |
632 Reference::Reference(CodeGenerator* cgen, | 673 Reference::Reference(CodeGenerator* cgen, |
633 Expression* expression, | 674 Expression* expression, |
(...skipping 823 matching lines...)
1457 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; | 1498 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
1458 CallFunctionStub call_function(arg_count, in_loop, flags); | 1499 CallFunctionStub call_function(arg_count, in_loop, flags); |
1459 frame_->CallStub(&call_function, arg_count + 1); | 1500 frame_->CallStub(&call_function, arg_count + 1); |
1460 | 1501 |
1461 // Restore context and pop function from the stack. | 1502 // Restore context and pop function from the stack. |
1462 __ ldr(cp, frame_->Context()); | 1503 __ ldr(cp, frame_->Context()); |
1463 frame_->Drop(); // discard the TOS | 1504 frame_->Drop(); // discard the TOS |
1464 } | 1505 } |
1465 | 1506 |
1466 | 1507 |
| 1508 void CodeGenerator::CallApplyLazy(Expression* applicand, |
| 1509 Expression* receiver, |
| 1510 VariableProxy* arguments, |
| 1511 int position) { |
| 1512 // An optimized implementation of expressions of the form |
| 1513 // x.apply(y, arguments). |
| 1514 // If the arguments object of the scope has not been allocated, |
| 1515 // and x.apply is Function.prototype.apply, this optimization |
| 1516 // just copies y and the arguments of the current function on the |
| 1517 // stack, as receiver and arguments, and calls x. |
| 1518 // In the implementation comments, we call x the applicand |
| 1519 // and y the receiver. |
| 1520 VirtualFrame::SpilledScope spilled_scope(frame_); |
| 1521 |
| 1522 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); |
| 1523 ASSERT(arguments->IsArguments()); |
| 1524 |
| 1525 // Load applicand.apply onto the stack. This will usually |
| 1526 // give us a megamorphic load site. Not super, but it works. |
| 1527 LoadAndSpill(applicand); |
| 1528 Handle<String> name = Factory::LookupAsciiSymbol("apply"); |
| 1529 __ mov(r2, Operand(name)); |
| 1530 frame_->CallLoadIC(RelocInfo::CODE_TARGET); |
| 1531 frame_->EmitPush(r0); |
| 1532 |
| 1533 // Load the receiver and the existing arguments object onto the |
| 1534 // expression stack. Avoid allocating the arguments object here. |
| 1535 LoadAndSpill(receiver); |
| 1536 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); |
| 1537 |
| 1538 // Emit the source position information after having loaded the |
| 1539 // receiver and the arguments. |
| 1540 CodeForSourcePosition(position); |
| 1541 // Contents of the stack at this point: |
| 1542 // sp[0]: arguments object of the current function or the hole. |
| 1543 // sp[1]: receiver |
| 1544 // sp[2]: applicand.apply |
| 1545 // sp[3]: applicand. |
| 1546 |
| 1547 // Check if the arguments object has been lazily allocated |
| 1548 // already. If so, just use that instead of copying the arguments |
| 1549 // from the stack. This also deals with cases where a local variable |
| 1550 // named 'arguments' has been introduced. |
| 1551 __ ldr(r0, MemOperand(sp, 0)); |
| 1552 |
| 1553 Label slow, done; |
| 1554 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 1555 __ cmp(ip, r0); |
| 1556 __ b(ne, &slow); |
| 1557 |
| 1558 Label build_args; |
| 1559 // Get rid of the arguments object probe. |
| 1560 frame_->Drop(); |
| 1561 // Stack now has 3 elements on it. |
| 1562 // Contents of stack at this point: |
| 1563 // sp[0]: receiver |
| 1564 // sp[1]: applicand.apply |
| 1565 // sp[2]: applicand. |
| 1566 |
| 1567 // Check that the receiver really is a JavaScript object. |
| 1568 __ ldr(r0, MemOperand(sp, 0)); |
| 1569 __ BranchOnSmi(r0, &build_args); |
| 1570 // We allow all JSObjects including JSFunctions. As long as |
| 1571 // JS_FUNCTION_TYPE is the last instance type and it is right |
| 1572 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper |
| 1573 // bound. |
| 1574 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 1575 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
| 1576 __ CompareObjectType(r0, r1, r2, FIRST_JS_OBJECT_TYPE); |
| 1577 __ b(lt, &build_args); |
| 1578 |
| 1579 // Check that applicand.apply is Function.prototype.apply. |
| 1580 __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1581 __ BranchOnSmi(r0, &build_args); |
| 1582 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); |
| 1583 __ b(ne, &build_args); |
| 1584 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |
| 1585 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); |
| 1586 __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset)); |
| 1587 __ cmp(r1, Operand(apply_code)); |
| 1588 __ b(ne, &build_args); |
| 1589 |
| 1590 // Check that applicand is a function. |
| 1591 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); |
| 1592 __ BranchOnSmi(r1, &build_args); |
| 1593 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE); |
| 1594 __ b(ne, &build_args); |
| 1595 |
| 1596 // Copy the arguments to this function possibly from the |
| 1597 // adaptor frame below it. |
| 1598 Label invoke, adapted; |
| 1599 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1600 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 1601 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1602 __ b(eq, &adapted); |
| 1603 |
| 1604 // No arguments adaptor frame. Copy fixed number of arguments. |
| 1605 __ mov(r0, Operand(scope()->num_parameters())); |
| 1606 for (int i = 0; i < scope()->num_parameters(); i++) { |
| 1607 __ ldr(r2, frame_->ParameterAt(i)); |
| 1608 __ push(r2); |
| 1609 } |
| 1610 __ jmp(&invoke); |
| 1611 |
| 1612 // Arguments adaptor frame present. Copy arguments from there, but |
| 1613 // avoid copying too many arguments to avoid stack overflows. |
| 1614 __ bind(&adapted); |
| 1615 static const uint32_t kArgumentsLimit = 1 * KB; |
| 1616 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1617 __ mov(r0, Operand(r0, LSR, kSmiTagSize)); |
| 1618 __ mov(r3, r0); |
| 1619 __ cmp(r0, Operand(kArgumentsLimit)); |
| 1620 __ b(gt, &build_args); |
| 1621 |
| 1622 // Loop through the arguments pushing them onto the execution |
| 1623 // stack. We don't inform the virtual frame of the push, so we don't |
| 1624 // have to worry about getting rid of the elements from the virtual |
| 1625 // frame. |
| 1626 Label loop; |
| 1627 // r3 is a small non-negative integer, due to the test above. |
| 1628 __ cmp(r3, Operand(0)); |
| 1629 __ b(eq, &invoke); |
| 1630 // Compute the address of the first argument. |
| 1631 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); |
| 1632 __ add(r2, r2, Operand(kPointerSize)); |
| 1633 __ bind(&loop); |
| 1634 // Post-decrement argument address by kPointerSize on each iteration. |
| 1635 __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex)); |
| 1636 __ push(r4); |
| 1637 __ sub(r3, r3, Operand(1), SetCC); |
| 1638 __ b(gt, &loop); |
| 1639 |
| 1640 // Invoke the function. |
| 1641 __ bind(&invoke); |
| 1642 ParameterCount actual(r0); |
| 1643 __ InvokeFunction(r1, actual, CALL_FUNCTION); |
| 1644 // Drop applicand.apply and applicand from the stack, and push |
| 1645 // the result of the function call, but leave the spilled frame |
| 1646 // unchanged, with 3 elements, so it is correct when we compile the |
| 1647 // slow-case code. |
| 1648 __ add(sp, sp, Operand(2 * kPointerSize)); |
| 1649 __ push(r0); |
| 1650 // Stack now has 1 element: |
| 1651 // sp[0]: result |
| 1652 __ jmp(&done); |
| 1653 |
| 1654 // Slow-case: Allocate the arguments object since we know it isn't |
| 1655 // there, and fall through to the slow case where we call |
| 1656 // applicand.apply. |
| 1657 __ bind(&build_args); |
| 1658 // Stack now has 3 elements, because we jumped here from a point where: |
| 1659 // sp[0]: receiver |
| 1660 // sp[1]: applicand.apply |
| 1661 // sp[2]: applicand. |
| 1662 StoreArgumentsObject(false); |
| 1663 |
| 1664 // Stack and frame now have 4 elements. |
| 1665 __ bind(&slow); |
| 1666 |
| 1667 // Generic computation of x.apply(y, args) with no special optimization. |
| 1668 // Flip applicand.apply and applicand on the stack, so |
| 1669 // applicand looks like the receiver of the applicand.apply call. |
| 1670 // Then process it as a normal function call. |
| 1671 __ ldr(r0, MemOperand(sp, 3 * kPointerSize)); |
| 1672 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); |
| 1673 __ str(r0, MemOperand(sp, 2 * kPointerSize)); |
| 1674 __ str(r1, MemOperand(sp, 3 * kPointerSize)); |
| 1675 |
| 1676 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS); |
| 1677 frame_->CallStub(&call_function, 3); |
| 1678 // The function and its two arguments have been dropped. |
| 1679 frame_->Drop(); // Drop the receiver as well. |
| 1680 frame_->EmitPush(r0); |
| 1681 // Stack now has 1 element: |
| 1682 // sp[0]: result |
| 1683 __ bind(&done); |
| 1684 |
| 1685 // Restore the context register after a call. |
| 1686 __ ldr(cp, frame_->Context()); |
| 1687 } |
| 1688 |
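To make the target of this optimization concrete, a hypothetical example of the source pattern (TypeScript; x, y, and forward are invented names, not from the patch): when x.apply is the built-in Function.prototype.apply and the arguments object has not been materialized, the fast path copies the caller's actual parameters from the stack and calls x directly with y as the receiver, so 'arguments' is never allocated; otherwise it falls back to the generic apply call in the slow case.

    const y = { label: "receiver" };

    const x: Function = function (this: { label: string },
                                  a?: unknown,
                                  b?: unknown) {
      return [this.label, a, b];
    };

    function forward(): unknown {
      // The pattern CallApplyLazy recognizes:
      // applicand.apply(receiver, arguments).
      return x.apply(y, arguments);
    }

    // Calling forward with (1, 2) behaves like x.call(y, 1, 2) and yields
    // ["receiver", 1, 2], whether or not the fast path is taken.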
| 1689 |
1467 void CodeGenerator::Branch(bool if_true, JumpTarget* target) { | 1690 void CodeGenerator::Branch(bool if_true, JumpTarget* target) { |
1468 VirtualFrame::SpilledScope spilled_scope(frame_); | 1691 VirtualFrame::SpilledScope spilled_scope(frame_); |
1469 ASSERT(has_cc()); | 1692 ASSERT(has_cc()); |
1470 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_); | 1693 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_); |
1471 target->Branch(cc); | 1694 target->Branch(cc); |
1472 cc_reg_ = al; | 1695 cc_reg_ = al; |
1473 } | 1696 } |
1474 | 1697 |
1475 | 1698 |
1476 void CodeGenerator::CheckStack() { | 1699 void CodeGenerator::CheckStack() { |
(...skipping 1316 matching lines...)
2793 frame_->EmitPop(scratch); | 3016 frame_->EmitPop(scratch); |
2794 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 3017 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
2795 __ cmp(scratch, ip); | 3018 __ cmp(scratch, ip); |
2796 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq); | 3019 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq); |
2797 frame_->EmitPush(scratch); | 3020 frame_->EmitPush(scratch); |
2798 } | 3021 } |
2799 } | 3022 } |
2800 } | 3023 } |
2801 | 3024 |
2802 | 3025 |
| 3026 void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot, |
| 3027 TypeofState state) { |
| 3028 LoadFromSlot(slot, state); |
| 3029 |
| 3030 // Bail out quickly if we're not using lazy arguments allocation. |
| 3031 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return; |
| 3032 |
| 3033 // ... or if the slot isn't a non-parameter arguments slot. |
| 3034 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return; |
| 3035 |
| 3036 VirtualFrame::SpilledScope spilled_scope(frame_); |
| 3037 |
| 3038 // Copy the value just loaded (it is on top of the stack) into r0, |
| 3039 // but leave it on the stack. |
| 3040 __ ldr(r0, MemOperand(sp, 0)); |
| 3041 |
| 3042 // If the loaded value is the sentinel that indicates that we |
| 3043 // haven't loaded the arguments object yet, we need to do it now. |
| 3044 JumpTarget exit; |
| 3045 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 3046 __ cmp(r0, ip); |
| 3047 exit.Branch(ne); |
| 3048 frame_->Drop(); |
| 3049 StoreArgumentsObject(false); |
| 3050 exit.Bind(); |
| 3051 } |
| 3052 |
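For context, a hypothetical JavaScript-level example of the situation this check handles under lazy allocation (TypeScript; pickFirst is an invented name): the first read of 'arguments' is what triggers allocation of the object, and later reads must see that same, already materialized object rather than the hole sentinel.

    function pickFirst(): unknown {
      // Under lazy allocation the slot initially holds the hole; this first
      // read is where the arguments object actually gets built.
      if (arguments.length === 0) return undefined;
      // This read sees the object materialized above, not the hole.
      return arguments[0];
    }

    // Example: calling pickFirst with ("a", "b") yields "a"; with no
    // arguments it yields undefined.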
| 3053 |
2803 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { | 3054 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { |
2804 ASSERT(slot != NULL); | 3055 ASSERT(slot != NULL); |
2805 if (slot->type() == Slot::LOOKUP) { | 3056 if (slot->type() == Slot::LOOKUP) { |
2806 VirtualFrame::SpilledScope spilled_scope(frame_); | 3057 VirtualFrame::SpilledScope spilled_scope(frame_); |
2807 ASSERT(slot->var()->is_dynamic()); | 3058 ASSERT(slot->var()->is_dynamic()); |
2808 | 3059 |
2809 // For now, just do a runtime call. | 3060 // For now, just do a runtime call. |
2810 frame_->EmitPush(cp); | 3061 frame_->EmitPush(cp); |
2811 __ mov(r0, Operand(slot->var()->name())); | 3062 __ mov(r0, Operand(slot->var()->name())); |
2812 frame_->EmitPush(r0); | 3063 frame_->EmitPush(r0); |
(...skipping 135 matching lines...)
2948 // Drop the global object. The result is in r0. | 3199 // Drop the global object. The result is in r0. |
2949 frame_->Drop(); | 3200 frame_->Drop(); |
2950 } | 3201 } |
2951 | 3202 |
2952 | 3203 |
2953 void CodeGenerator::VisitSlot(Slot* node) { | 3204 void CodeGenerator::VisitSlot(Slot* node) { |
2954 #ifdef DEBUG | 3205 #ifdef DEBUG |
2955 int original_height = frame_->height(); | 3206 int original_height = frame_->height(); |
2956 #endif | 3207 #endif |
2957 Comment cmnt(masm_, "[ Slot"); | 3208 Comment cmnt(masm_, "[ Slot"); |
2958 LoadFromSlot(node, NOT_INSIDE_TYPEOF); | 3209 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF); |
2959 ASSERT(frame_->height() == original_height + 1); | 3210 ASSERT(frame_->height() == original_height + 1); |
2960 } | 3211 } |
2961 | 3212 |
2962 | 3213 |
2963 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { | 3214 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { |
2964 #ifdef DEBUG | 3215 #ifdef DEBUG |
2965 int original_height = frame_->height(); | 3216 int original_height = frame_->height(); |
2966 #endif | 3217 #endif |
2967 Comment cmnt(masm_, "[ VariableProxy"); | 3218 Comment cmnt(masm_, "[ VariableProxy"); |
2968 | 3219 |
(...skipping 437 matching lines...)
3406 | 3657 |
3407 } else if (property != NULL) { | 3658 } else if (property != NULL) { |
3408 // Check if the key is a literal string. | 3659 // Check if the key is a literal string. |
3409 Literal* literal = property->key()->AsLiteral(); | 3660 Literal* literal = property->key()->AsLiteral(); |
3410 | 3661 |
3411 if (literal != NULL && literal->handle()->IsSymbol()) { | 3662 if (literal != NULL && literal->handle()->IsSymbol()) { |
3412 // ------------------------------------------------------------------ | 3663 // ------------------------------------------------------------------ |
3413 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)' | 3664 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)' |
3414 // ------------------------------------------------------------------ | 3665 // ------------------------------------------------------------------ |
3415 | 3666 |
3416 LoadAndSpill(property->obj()); // Receiver. | 3667 Handle<String> name = Handle<String>::cast(literal->handle()); |
3417 // Load the arguments. | 3668 |
3418 int arg_count = args->length(); | 3669 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION && |
3419 for (int i = 0; i < arg_count; i++) { | 3670 name->IsEqualTo(CStrVector("apply")) && |
3420 LoadAndSpill(args->at(i)); | 3671 args->length() == 2 && |
| 3672 args->at(1)->AsVariableProxy() != NULL && |
| 3673 args->at(1)->AsVariableProxy()->IsArguments()) { |
| 3674 // Use the optimized Function.prototype.apply that avoids |
| 3675 // allocating lazily allocated arguments objects. |
| 3676 CallApplyLazy(property->obj(), |
| 3677 args->at(0), |
| 3678 args->at(1)->AsVariableProxy(), |
| 3679 node->position()); |
| 3680 |
| 3681 } else { |
| 3682 LoadAndSpill(property->obj()); // Receiver. |
| 3683 // Load the arguments. |
| 3684 int arg_count = args->length(); |
| 3685 for (int i = 0; i < arg_count; i++) { |
| 3686 LoadAndSpill(args->at(i)); |
| 3687 } |
| 3688 |
| 3689 // Set the name register and call the IC initialization code. |
| 3690 __ mov(r2, Operand(name)); |
| 3691 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
| 3692 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop); |
| 3693 CodeForSourcePosition(node->position()); |
| 3694 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1); |
| 3695 __ ldr(cp, frame_->Context()); |
| 3696 frame_->EmitPush(r0); |
3421 } | 3697 } |
3422 | 3698 |
3423 // Set the name register and call the IC initialization code. | |
3424 __ mov(r2, Operand(literal->handle())); | |
3425 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; | |
3426 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop); | |
3427 CodeForSourcePosition(node->position()); | |
3428 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1); | |
3429 __ ldr(cp, frame_->Context()); | |
3430 frame_->EmitPush(r0); | |
3431 | |
3432 } else { | 3699 } else { |
3433 // ------------------------------------------- | 3700 // ------------------------------------------- |
3434 // JavaScript example: 'array[index](1, 2, 3)' | 3701 // JavaScript example: 'array[index](1, 2, 3)' |
3435 // ------------------------------------------- | 3702 // ------------------------------------------- |
3436 | 3703 |
3437 LoadAndSpill(property->obj()); | 3704 LoadAndSpill(property->obj()); |
3438 LoadAndSpill(property->key()); | 3705 LoadAndSpill(property->key()); |
3439 EmitKeyedLoad(false); | 3706 EmitKeyedLoad(false); |
3440 frame_->Drop(); // key | 3707 frame_->Drop(); // key |
3441 // Put the function below the receiver. | 3708 // Put the function below the receiver. |
(...skipping 1607 matching lines...)
5049 Property* property = expression_->AsProperty(); | 5316 Property* property = expression_->AsProperty(); |
5050 if (property != NULL) { | 5317 if (property != NULL) { |
5051 cgen_->CodeForSourcePosition(property->position()); | 5318 cgen_->CodeForSourcePosition(property->position()); |
5052 } | 5319 } |
5053 | 5320 |
5054 switch (type_) { | 5321 switch (type_) { |
5055 case SLOT: { | 5322 case SLOT: { |
5056 Comment cmnt(masm, "[ Load from Slot"); | 5323 Comment cmnt(masm, "[ Load from Slot"); |
5057 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); | 5324 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
5058 ASSERT(slot != NULL); | 5325 ASSERT(slot != NULL); |
5059 cgen_->LoadFromSlot(slot, NOT_INSIDE_TYPEOF); | 5326 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); |
5060 break; | 5327 break; |
5061 } | 5328 } |
5062 | 5329 |
5063 case NAMED: { | 5330 case NAMED: { |
5064 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 5331 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
5065 bool is_global = var != NULL; | 5332 bool is_global = var != NULL; |
5066 ASSERT(!is_global || var->is_global()); | 5333 ASSERT(!is_global || var->is_global()); |
5067 cgen_->EmitNamedLoad(GetName(), is_global); | 5334 cgen_->EmitNamedLoad(GetName(), is_global); |
5068 cgen_->frame()->EmitPush(r0); | 5335 cgen_->frame()->EmitPush(r0); |
5069 break; | 5336 break; |
(...skipping 3947 matching lines...)
9017 | 9284 |
9018 // Just jump to runtime to add the two strings. | 9285 // Just jump to runtime to add the two strings. |
9019 __ bind(&string_add_runtime); | 9286 __ bind(&string_add_runtime); |
9020 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9287 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
9021 } | 9288 } |
9022 | 9289 |
9023 | 9290 |
9024 #undef __ | 9291 #undef __ |
9025 | 9292 |
9026 } } // namespace v8::internal | 9293 } } // namespace v8::internal |