Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 26 matching lines...)
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 #include "arm/code-stubs-arm.h" 41 #include "arm/code-stubs-arm.h"
42 42
43 namespace v8 { 43 namespace v8 {
44 namespace internal { 44 namespace internal {
45 45
46 #define __ ACCESS_MASM(masm_) 46 #define __ ACCESS_MASM(masm_)
47 47
48
49 // A patch site is a location in the code that can be patched. This class
50 // has a number of methods to emit the patchable code and the method
51 // EmitPatchInfo to record a marker back to the patchable code. This marker
52 // is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (the raw 12-bit
53 // immediate value) is the delta from the pc to the first instruction of the
54 // patchable code.
55 class JumpPatchSite BASE_EMBEDDED {
56 public:
57 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
58 #ifdef DEBUG
59 info_emitted_ = false;
60 #endif
61 }
62
63 ~JumpPatchSite() {
64 ASSERT(patch_site_.is_bound() == info_emitted_);
65 }
66
67 // When initially emitting this, ensure that a jump is always generated to skip
68 // the inlined smi code.
69 void EmitJumpIfNotSmi(Register reg, Label* target) {
70 ASSERT(!patch_site_.is_bound() && !info_emitted_);
71 __ bind(&patch_site_);
72 __ cmp(reg, Operand(reg));
73 // Don't use b(al, ...) as that might emit the constant pool right after the
74 // branch. After patching, when the branch is no longer unconditional,
75 // execution can continue into the constant pool.
76 __ b(eq, target); // Always taken before patched.
77 }
78
79 // When initially emitting this, ensure that a jump is never generated to skip
80 // the inlined smi code.
81 void EmitJumpIfSmi(Register reg, Label* target) {
82 ASSERT(!patch_site_.is_bound() && !info_emitted_);
83 __ bind(&patch_site_);
84 __ cmp(reg, Operand(reg));
85 __ b(ne, target); // Never taken before patched.
86 }
87
88 void EmitPatchInfo() {
89 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
90 Register reg;
91 reg.set_code(delta_to_patch_site / kOff12Mask);
92 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
93 #ifdef DEBUG
94 info_emitted_ = true;
95 #endif
96 }
97
98 bool is_bound() const { return patch_site_.is_bound(); }
99
100 private:
101 MacroAssembler* masm_;
102 Label patch_site_;
103 #ifdef DEBUG
104 bool info_emitted_;
105 #endif
106 };
107
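
For reference, the EmitPatchInfo encoding above can be exercised in isolation: the marker's register code carries delta / kOff12Mask and its raw 12-bit immediate carries delta % kOff12Mask. A minimal standalone sketch of that round trip, assuming kOff12Mask is 0xfff as the comment above states (with a 4-bit register code this covers deltas up to 15 * 0xfff + 0xffe):

    #include <cassert>
    #include <cstdint>

    // Assumed constant: the raw 12-bit immediate mask of the ARM assembler.
    static const uint32_t kOff12Mask = 0xfff;

    // Pack a pc delta the way EmitPatchInfo does: a register code x and an
    // immediate yyy such that delta == x * kOff12Mask + yyy.
    void EncodePatchDelta(uint32_t delta, uint32_t* reg_code, uint32_t* imm12) {
      *reg_code = delta / kOff12Mask;  // becomes the rx of "cmp rx, #yyy"
      *imm12 = delta % kOff12Mask;     // becomes the raw 12-bit immediate
    }

    // Recover the delta from a patch-info marker.
    uint32_t DecodePatchDelta(uint32_t reg_code, uint32_t imm12) {
      return reg_code * kOff12Mask + imm12;
    }

    int main() {
      for (uint32_t delta = 0; delta < 4 * kOff12Mask; delta += 7) {
        uint32_t x = 0, yyy = 0;
        EncodePatchDelta(delta, &x, &yyy);
        assert(DecodePatchDelta(x, yyy) == delta);
      }
      return 0;
    }
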
108
48 // Generate code for a JS function. On entry to the function the receiver 109 // Generate code for a JS function. On entry to the function the receiver
49 // and arguments have been pushed on the stack left to right. The actual 110 // and arguments have been pushed on the stack left to right. The actual
50 // argument count matches the formal parameter count expected by the 111 // argument count matches the formal parameter count expected by the
51 // function. 112 // function.
52 // 113 //
53 // The live registers are: 114 // The live registers are:
54 // o r1: the JS function object being called (ie, ourselves) 115 // o r1: the JS function object being called (ie, ourselves)
55 // o cp: our context 116 // o cp: our context
56 // o fp: our caller's frame pointer 117 // o fp: our caller's frame pointer
57 // o sp: stack pointer 118 // o sp: stack pointer
(...skipping 159 matching lines...)
217 278
218 279
219 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { 280 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
220 Comment cmnt(masm_, "[ Stack check"); 281 Comment cmnt(masm_, "[ Stack check");
221 Label ok; 282 Label ok;
222 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 283 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
223 __ cmp(sp, Operand(ip)); 284 __ cmp(sp, Operand(ip));
224 __ b(hs, &ok); 285 __ b(hs, &ok);
225 StackCheckStub stub; 286 StackCheckStub stub;
226 __ CallStub(&stub); 287 __ CallStub(&stub);
288 // Record a mapping of this PC offset to the OSR id. This is used to find
289 // the AST id from the unoptimized code in order to use it as a key into
290 // the deoptimization input data found in the optimized code.
291 RecordStackCheck(stmt->OsrEntryId());
292
227 __ bind(&ok); 293 __ bind(&ok);
228 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 294 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
295 // Record a mapping of the OSR id to this PC. This is used if the OSR
296 // entry becomes the target of a bailout. We don't expect it to be, but
297 // we want it to work if it is.
229 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 298 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
230 RecordStackCheck(stmt->OsrEntryId());
231 } 299 }
232 300
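
The two comments in the new code above describe a mapping between PC offsets and OSR/AST ids. A hypothetical sketch of the kind of table RecordStackCheck could be appending to; the names and layout here are illustrative, not V8's actual data structures:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Each stack check records (pc offset, OSR/AST id) so the runtime can map
    // a return address in unoptimized code back to an AST id, which then keys
    // into the optimized code's deoptimization input data.
    struct StackCheckEntry {
      uint32_t pc_offset;
      int ast_id;
    };

    class StackCheckTable {
     public:
      void Record(uint32_t pc_offset, int ast_id) {
        entries_.push_back(StackCheckEntry{pc_offset, ast_id});
      }
      // Returns the AST id recorded at pc_offset, or -1 if none was recorded.
      int Lookup(uint32_t pc_offset) const {
        for (size_t i = 0; i < entries_.size(); i++)
          if (entries_[i].pc_offset == pc_offset) return entries_[i].ast_id;
        return -1;
      }
     private:
      std::vector<StackCheckEntry> entries_;
    };

    int main() {
      StackCheckTable table;
      table.Record(0x48, 17);  // stack check at pc offset 0x48, OSR id 17
      assert(table.Lookup(0x48) == 17);
      assert(table.Lookup(0x4c) == -1);
      return 0;
    }
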
233 301
234 void FullCodeGenerator::EmitReturnSequence() { 302 void FullCodeGenerator::EmitReturnSequence() {
235 Comment cmnt(masm_, "[ Return sequence"); 303 Comment cmnt(masm_, "[ Return sequence");
236 if (return_label_.is_bound()) { 304 if (return_label_.is_bound()) {
237 __ b(&return_label_); 305 __ b(&return_label_);
238 } else { 306 } else {
239 __ bind(&return_label_); 307 __ bind(&return_label_);
240 if (FLAG_trace) { 308 if (FLAG_trace) {
(...skipping 16 matching lines...)
257 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; 325 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
258 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 326 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
259 __ RecordJSReturn(); 327 __ RecordJSReturn();
260 masm_->mov(sp, fp); 328 masm_->mov(sp, fp);
261 masm_->ldm(ia_w, sp, fp.bit() | lr.bit()); 329 masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
262 masm_->add(sp, sp, Operand(sp_delta)); 330 masm_->add(sp, sp, Operand(sp_delta));
263 masm_->Jump(lr); 331 masm_->Jump(lr);
264 } 332 }
265 333
266 #ifdef DEBUG 334 #ifdef DEBUG
267 // Check that the size of the code used for returning matches what is 335 // Check that the size of the code used for returning is large enough
268 // expected by the debugger. If the sp_delta above cannot be encoded in the 336 // for the debugger's requirements.
269 // add instruction, the add will generate two instructions. 337 ASSERT(Assembler::kJSReturnSequenceInstructions <=
270 int return_sequence_length = 338 masm_->InstructionsGeneratedSince(&check_exit_codesize));
271 masm_->InstructionsGeneratedSince(&check_exit_codesize);
272 CHECK(return_sequence_length ==
273 Assembler::kJSReturnSequenceInstructions ||
274 return_sequence_length ==
275 Assembler::kJSReturnSequenceInstructions + 1);
276 #endif 339 #endif
277 } 340 }
278 } 341 }
279 342
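
The deleted comment above alludes to ARM's operand-2 immediate format: an 8-bit value rotated right by an even amount. A sketch, not taken from V8, of the encodability test that decides whether the add of sp_delta fits in one instruction or needs two:

    #include <cstdio>
    #include <cstdint>

    // An ARM data-processing immediate is an 8-bit value rotated right by an
    // even amount 0..30. If sp_delta fits, "add sp, sp, #delta" is a single
    // instruction; otherwise the assembler must emit a second instruction.
    bool FitsArmImmediate(uint32_t value) {
      for (int rot = 0; rot < 32; rot += 2) {
        // Rotate left by rot to undo a rotate-right-by-rot encoding.
        uint32_t rotated = (value << rot) | (value >> ((32 - rot) & 31));
        if (rotated <= 0xff) return true;
      }
      return false;
    }

    int main() {
      printf("%d\n", FitsArmImmediate(8));       // 1: small sp_delta, one add
      printf("%d\n", FitsArmImmediate(0x1234));  // 0: needs two instructions
      return 0;
    }
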
280 343
281 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( 344 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
282 Token::Value op, Expression* left, Expression* right) { 345 Token::Value op, Expression* left, Expression* right) {
283 ASSERT(ShouldInlineSmiCase(op)); 346 ASSERT(ShouldInlineSmiCase(op));
284 return kNoConstants; 347 return kNoConstants;
285 } 348 }
(...skipping 220 matching lines...)
506 // Call the runtime to find the boolean value of the source and then 569 // Call the runtime to find the boolean value of the source and then
507 // translate it into control flow to the pair of labels. 570 // translate it into control flow to the pair of labels.
508 __ push(result_register()); 571 __ push(result_register());
509 __ CallRuntime(Runtime::kToBool, 1); 572 __ CallRuntime(Runtime::kToBool, 1);
510 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 573 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
511 __ cmp(r0, ip); 574 __ cmp(r0, ip);
512 Split(eq, if_true, if_false, fall_through); 575 Split(eq, if_true, if_false, fall_through);
513 } 576 }
514 577
515 578
516 void FullCodeGenerator::Split(Condition cc, 579 void FullCodeGenerator::Split(Condition cond,
517 Label* if_true, 580 Label* if_true,
518 Label* if_false, 581 Label* if_false,
519 Label* fall_through) { 582 Label* fall_through) {
520 if (if_false == fall_through) { 583 if (if_false == fall_through) {
521 __ b(cc, if_true); 584 __ b(cond, if_true);
522 } else if (if_true == fall_through) { 585 } else if (if_true == fall_through) {
523 __ b(NegateCondition(cc), if_false); 586 __ b(NegateCondition(cond), if_false);
524 } else { 587 } else {
525 __ b(cc, if_true); 588 __ b(cond, if_true);
526 __ b(if_false); 589 __ b(if_false);
527 } 590 }
528 } 591 }
529 592
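
The branch-selection logic in Split is self-contained enough to demonstrate with a toy emitter. This sketch (illustrative, not V8 code) reproduces the three cases: fall through to if_false, fall through to if_true with a negated condition, or emit both branches:

    #include <cstdio>

    enum Condition { kEq, kNe };
    Condition Negate(Condition c) { return c == kEq ? kNe : kEq; }

    void EmitBranch(Condition c, const char* target) {
      printf("b%s %s\n", c == kEq ? "eq" : "ne", target);
    }
    void EmitJump(const char* target) { printf("b %s\n", target); }

    void Split(Condition cond, const char* if_true, const char* if_false,
               const char* fall_through) {
      if (if_false == fall_through) {
        EmitBranch(cond, if_true);           // fall through to if_false
      } else if (if_true == fall_through) {
        EmitBranch(Negate(cond), if_false);  // fall through to if_true
      } else {
        EmitBranch(cond, if_true);           // neither is next: need both
        EmitJump(if_false);
      }
    }

    int main() {
      const char* t = "L_true";
      const char* f = "L_false";
      Split(kEq, t, f, f);        // beq L_true
      Split(kEq, t, f, t);        // bne L_false
      Split(kEq, t, f, nullptr);  // beq L_true ; b L_false
      return 0;
    }
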
530 593
531 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { 594 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
532 switch (slot->type()) { 595 switch (slot->type()) {
533 case Slot::PARAMETER: 596 case Slot::PARAMETER:
534 case Slot::LOCAL: 597 case Slot::LOCAL:
535 return MemOperand(fp, SlotOffset(slot)); 598 return MemOperand(fp, SlotOffset(slot));
(...skipping 140 matching lines...)
676 __ Push(cp, r2, r1, r0); 739 __ Push(cp, r2, r1, r0);
677 } 740 }
678 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 741 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
679 break; 742 break;
680 } 743 }
681 } 744 }
682 745
683 } else if (prop != NULL) { 746 } else if (prop != NULL) {
684 if (function != NULL || mode == Variable::CONST) { 747 if (function != NULL || mode == Variable::CONST) {
685 // We are declaring a function or constant that rewrites to a 748 // We are declaring a function or constant that rewrites to a
686 // property. Use (keyed) IC to set the initial value. 749 // property. Use (keyed) IC to set the initial value. We
687 VisitForStackValue(prop->obj()); 750 // cannot visit the rewrite because it's shared and we risk
751 // recording duplicate AST IDs for bailouts from optimized code.
752 ASSERT(prop->obj()->AsVariableProxy() != NULL);
753 { AccumulatorValueContext for_object(this);
754 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
755 }
688 if (function != NULL) { 756 if (function != NULL) {
689 VisitForStackValue(prop->key()); 757 __ push(r0);
690 VisitForAccumulatorValue(function); 758 VisitForAccumulatorValue(function);
691 __ pop(r1); // Key. 759 __ pop(r2);
692 } else { 760 } else {
693 VisitForAccumulatorValue(prop->key()); 761 __ mov(r2, r0);
694 __ mov(r1, result_register()); // Key. 762 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
695 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
696 } 763 }
697 __ pop(r2); // Receiver. 764 ASSERT(prop->key()->AsLiteral() != NULL &&
765 prop->key()->AsLiteral()->handle()->IsSmi());
766 __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
698 767
699 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 768 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
700 EmitCallIC(ic, RelocInfo::CODE_TARGET); 769 EmitCallIC(ic, RelocInfo::CODE_TARGET);
701 // Value in r0 is ignored (declarations are statements). 770 // Value in r0 is ignored (declarations are statements).
702 } 771 }
703 } 772 }
704 } 773 }
705 774
706 775
707 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 776 void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
(...skipping 21 matching lines...)
729 798
730 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 799 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
731 800
732 ZoneList<CaseClause*>* clauses = stmt->cases(); 801 ZoneList<CaseClause*>* clauses = stmt->cases();
733 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 802 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
734 803
735 Label next_test; // Recycled for each test. 804 Label next_test; // Recycled for each test.
736 // Compile all the tests with branches to their bodies. 805 // Compile all the tests with branches to their bodies.
737 for (int i = 0; i < clauses->length(); i++) { 806 for (int i = 0; i < clauses->length(); i++) {
738 CaseClause* clause = clauses->at(i); 807 CaseClause* clause = clauses->at(i);
808 clause->body_target()->entry_label()->Unuse();
809
739 // The default is not a test, but remember it as final fall through. 810 // The default is not a test, but remember it as final fall through.
740 if (clause->is_default()) { 811 if (clause->is_default()) {
741 default_clause = clause; 812 default_clause = clause;
742 continue; 813 continue;
743 } 814 }
744 815
745 Comment cmnt(masm_, "[ Case comparison"); 816 Comment cmnt(masm_, "[ Case comparison");
746 __ bind(&next_test); 817 __ bind(&next_test);
747 next_test.Unuse(); 818 next_test.Unuse();
748 819
749 // Compile the label expression. 820 // Compile the label expression.
750 VisitForAccumulatorValue(clause->label()); 821 VisitForAccumulatorValue(clause->label());
751 822
752 // Perform the comparison as if via '==='. 823 // Perform the comparison as if via '==='.
753 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 824 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
754 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 825 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
826 JumpPatchSite patch_site(masm_);
755 if (inline_smi_code) { 827 if (inline_smi_code) {
756 Label slow_case; 828 Label slow_case;
757 __ orr(r2, r1, r0); 829 __ orr(r2, r1, r0);
758 __ tst(r2, Operand(kSmiTagMask)); 830 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
759 __ b(ne, &slow_case); 831
760 __ cmp(r1, r0); 832 __ cmp(r1, r0);
761 __ b(ne, &next_test); 833 __ b(ne, &next_test);
762 __ Drop(1); // Switch value is no longer needed. 834 __ Drop(1); // Switch value is no longer needed.
763 __ b(clause->body_target()->entry_label()); 835 __ b(clause->body_target()->entry_label());
764 __ bind(&slow_case); 836 __ bind(&slow_case);
765 } 837 }
766 838
767 CompareFlags flags = inline_smi_code 839 // Record position before stub call for type feedback.
768 ? NO_SMI_COMPARE_IN_STUB 840 SetSourcePosition(clause->position());
769 : NO_COMPARE_FLAGS; 841 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
770 CompareStub stub(eq, true, flags, r1, r0); 842 EmitCallIC(ic, &patch_site);
771 __ CallStub(&stub); 843 __ cmp(r0, Operand(0));
772 __ cmp(r0, Operand(0, RelocInfo::NONE));
773 __ b(ne, &next_test); 844 __ b(ne, &next_test);
774 __ Drop(1); // Switch value is no longer needed. 845 __ Drop(1); // Switch value is no longer needed.
775 __ b(clause->body_target()->entry_label()); 846 __ b(clause->body_target()->entry_label());
776 } 847 }
777 848
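
The inline fast path in the loop above ORs the two operands into r2 and hands the result to EmitJumpIfNotSmi. That works because of the tagging scheme: assuming 32-bit V8 tagging with kSmiTag == 0 and kSmiTagMask == 1, the OR of two words has a clear low bit only when both are smis, so a single test covers both operands. A small sketch under those assumptions:

    #include <cassert>
    #include <cstdint>

    // Smis carry a 0 tag in the low bit, so (a | b) is smi-tagged only when
    // both a and b are smis.
    const uint32_t kSmiTagMask = 1;

    bool IsSmi(uint32_t tagged) { return (tagged & kSmiTagMask) == 0; }
    bool BothSmi(uint32_t a, uint32_t b) { return IsSmi(a | b); }

    uint32_t SmiTag(int32_t value) { return static_cast<uint32_t>(value) << 1; }

    int main() {
      uint32_t smi_a = SmiTag(21), smi_b = SmiTag(-4);
      uint32_t heap_obj = 0x1235;  // odd: low bit set, a heap object pointer
      assert(BothSmi(smi_a, smi_b));
      assert(!BothSmi(smi_a, heap_obj));
      return 0;
    }
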
778 // Discard the test value and jump to the default if present, otherwise to 849 // Discard the test value and jump to the default if present, otherwise to
779 // the end of the statement. 850 // the end of the statement.
780 __ bind(&next_test); 851 __ bind(&next_test);
781 __ Drop(1); // Switch value is no longer needed. 852 __ Drop(1); // Switch value is no longer needed.
782 if (default_clause == NULL) { 853 if (default_clause == NULL) {
(...skipping 29 matching lines...)
812 VisitForAccumulatorValue(stmt->enumerable()); 883 VisitForAccumulatorValue(stmt->enumerable());
813 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 884 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
814 __ cmp(r0, ip); 885 __ cmp(r0, ip);
815 __ b(eq, &exit); 886 __ b(eq, &exit);
816 __ LoadRoot(ip, Heap::kNullValueRootIndex); 887 __ LoadRoot(ip, Heap::kNullValueRootIndex);
817 __ cmp(r0, ip); 888 __ cmp(r0, ip);
818 __ b(eq, &exit); 889 __ b(eq, &exit);
819 890
820 // Convert the object to a JS object. 891 // Convert the object to a JS object.
821 Label convert, done_convert; 892 Label convert, done_convert;
822 __ BranchOnSmi(r0, &convert); 893 __ JumpIfSmi(r0, &convert);
823 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE); 894 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
824 __ b(hs, &done_convert); 895 __ b(hs, &done_convert);
825 __ bind(&convert); 896 __ bind(&convert);
826 __ push(r0); 897 __ push(r0);
827 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS); 898 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
828 __ bind(&done_convert); 899 __ bind(&done_convert);
829 __ push(r0); 900 __ push(r0);
830 901
831 // BUG(867): Check cache validity in generated code. This is a fast 902 // BUG(867): Check cache validity in generated code. This is a fast
832 // case for the JSObject::IsSimpleEnum cache validity checks. If we 903 // case for the JSObject::IsSimpleEnum cache validity checks. If we
(...skipping 712 matching lines...)
1545 Expression* right, 1616 Expression* right,
1546 ConstantOperand constant) { 1617 ConstantOperand constant) {
1547 ASSERT(constant == kNoConstants); // Only handled case. 1618 ASSERT(constant == kNoConstants); // Only handled case.
1548 EmitBinaryOp(op, mode); 1619 EmitBinaryOp(op, mode);
1549 } 1620 }
1550 1621
1551 1622
1552 void FullCodeGenerator::EmitBinaryOp(Token::Value op, 1623 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1553 OverwriteMode mode) { 1624 OverwriteMode mode) {
1554 __ pop(r1); 1625 __ pop(r1);
1555 GenericBinaryOpStub stub(op, mode, r1, r0); 1626 TypeRecordingBinaryOpStub stub(op, mode);
1556 __ CallStub(&stub); 1627 EmitCallIC(stub.GetCode(), NULL);
1557 context()->Plug(r0); 1628 context()->Plug(r0);
1558 } 1629 }
1559 1630
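
The switch from GenericBinaryOpStub to TypeRecordingBinaryOpStub above means the stub observes its operand types and repatches itself to a specialized version. The states and transitions below are an illustrative guess at such a lattice, not the stub's actual implementation:

    #include <cstdio>

    // The stub starts uninitialized and widens its recorded operand type as
    // it sees new inputs, falling back to a generic case on mixed types.
    enum class OperandType { kUninitialized, kSmi, kHeapNumber, kGeneric };

    OperandType Widen(OperandType current, OperandType seen) {
      if (current == OperandType::kUninitialized) return seen;
      if (current == seen) return current;
      return OperandType::kGeneric;  // mixed inputs: give up on specializing
    }

    int main() {
      OperandType state = OperandType::kUninitialized;
      state = Widen(state, OperandType::kSmi);         // first call: smis
      state = Widen(state, OperandType::kSmi);         // stays specialized
      state = Widen(state, OperandType::kHeapNumber);  // mixed -> generic
      printf("%d\n", static_cast<int>(state));         // 3 (kGeneric)
      return 0;
    }
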
1560 1631
1561 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { 1632 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1562 // Invalid left-hand sides are rewritten to have a 'throw 1633 // Invalid left-hand sides are rewritten to have a 'throw
1563 // ReferenceError' on the left-hand side. 1634 // ReferenceError' on the left-hand side.
1564 if (!expr->IsValidLeftHandSide()) { 1635 if (!expr->IsValidLeftHandSide()) {
1565 VisitForEffect(expr); 1636 VisitForEffect(expr);
1566 return; 1637 return;
(...skipping 22 matching lines...)
1589 VisitForAccumulatorValue(prop->obj()); 1660 VisitForAccumulatorValue(prop->obj());
1590 __ mov(r1, r0); 1661 __ mov(r1, r0);
1591 __ pop(r0); // Restore value. 1662 __ pop(r0); // Restore value.
1592 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 1663 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1593 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 1664 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1594 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1665 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1595 break; 1666 break;
1596 } 1667 }
1597 case KEYED_PROPERTY: { 1668 case KEYED_PROPERTY: {
1598 __ push(r0); // Preserve value. 1669 __ push(r0); // Preserve value.
1599 VisitForStackValue(prop->obj()); 1670 if (prop->is_synthetic()) {
1600 VisitForAccumulatorValue(prop->key()); 1671 ASSERT(prop->obj()->AsVariableProxy() != NULL);
1601 __ mov(r1, r0); 1672 ASSERT(prop->key()->AsLiteral() != NULL);
1602 __ pop(r2); 1673 { AccumulatorValueContext for_object(this);
1674 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1675 }
1676 __ mov(r2, r0);
1677 __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
1678 } else {
1679 VisitForStackValue(prop->obj());
1680 VisitForAccumulatorValue(prop->key());
1681 __ mov(r1, r0);
1682 __ pop(r2);
1683 }
1603 __ pop(r0); // Restore value. 1684 __ pop(r0); // Restore value.
1604 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 1685 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1605 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1686 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1606 break; 1687 break;
1607 } 1688 }
1608 } 1689 }
1609 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1690 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1610 context()->Plug(r0); 1691 context()->Plug(r0);
1611 } 1692 }
1612 1693
1613 1694
1614 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1695 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1615 Token::Value op) { 1696 Token::Value op) {
1616 // Left-hand sides that rewrite to explicit property accesses do not reach 1697 // Left-hand sides that rewrite to explicit property accesses do not reach
1617 // here. 1698 // here.
1618 ASSERT(var != NULL); 1699 ASSERT(var != NULL);
1619 ASSERT(var->is_global() || var->AsSlot() != NULL); 1700 ASSERT(var->is_global() || var->AsSlot() != NULL);
1620 1701
1621 if (var->is_global()) { 1702 if (var->is_global()) {
1622 ASSERT(!var->is_this()); 1703 ASSERT(!var->is_this());
1623 // Assignment to a global variable. Use inline caching for the 1704 // Assignment to a global variable. Use inline caching for the
1624 // assignment. Right-hand-side value is passed in r0, variable name in 1705 // assignment. Right-hand-side value is passed in r0, variable name in
1625 // r2, and the global object in r1. 1706 // r2, and the global object in r1.
1626 __ mov(r2, Operand(var->name())); 1707 __ mov(r2, Operand(var->name()));
1627 __ ldr(r1, GlobalObjectOperand()); 1708 __ ldr(r1, GlobalObjectOperand());
1628 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 1709 Handle<Code> ic(Builtins::builtin(is_strict()
1629 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1710 ? Builtins::StoreIC_Initialize_Strict
1711 : Builtins::StoreIC_Initialize));
1712 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1630 1713
1631 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) { 1714 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
1632 // Perform the assignment for non-const variables and for initialization 1715 // Perform the assignment for non-const variables and for initialization
1633 // of const variables. Const assignments are simply skipped. 1716 // of const variables. Const assignments are simply skipped.
1634 Label done; 1717 Label done;
1635 Slot* slot = var->AsSlot(); 1718 Slot* slot = var->AsSlot();
1636 switch (slot->type()) { 1719 switch (slot->type()) {
1637 case Slot::PARAMETER: 1720 case Slot::PARAMETER:
1638 case Slot::LOCAL: 1721 case Slot::LOCAL:
1639 if (op == Token::INIT_CONST) { 1722 if (op == Token::INIT_CONST) {
(...skipping 271 matching lines...)
1911 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 1994 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
1912 __ push(r1); 1995 __ push(r1);
1913 } else { 1996 } else {
1914 __ push(r2); 1997 __ push(r2);
1915 } 1998 }
1916 1999
1917 // Push the receiver of the enclosing function and do runtime call. 2000 // Push the receiver of the enclosing function and do runtime call.
1918 __ ldr(r1, 2001 __ ldr(r1,
1919 MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize)); 2002 MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
1920 __ push(r1); 2003 __ push(r1);
1921 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); 2004 // Push the strict mode flag.
2005 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
2006 __ push(r1);
2007 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
1922 2008
1923 // The runtime call returns a pair of values in r0 (function) and 2009 // The runtime call returns a pair of values in r0 (function) and
1924 // r1 (receiver). Touch up the stack with the right values. 2010 // r1 (receiver). Touch up the stack with the right values.
1925 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2011 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
1926 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 2012 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
1927 } 2013 }
1928 2014
1929 // Record source position for debugger. 2015 // Record source position for debugger.
1930 SetSourcePosition(expr->position()); 2016 SetSourcePosition(expr->position());
1931 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2017 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
(...skipping 55 matching lines...)
1987 if (key != NULL && key->handle()->IsSymbol()) { 2073 if (key != NULL && key->handle()->IsSymbol()) {
1988 // Call to a named property, use call IC. 2074 // Call to a named property, use call IC.
1989 { PreservePositionScope scope(masm()->positions_recorder()); 2075 { PreservePositionScope scope(masm()->positions_recorder());
1990 VisitForStackValue(prop->obj()); 2076 VisitForStackValue(prop->obj());
1991 } 2077 }
1992 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); 2078 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
1993 } else { 2079 } else {
1994 // Call to a keyed property. 2080 // Call to a keyed property.
1995 // For a synthetic property use keyed load IC followed by function call, 2081 // For a synthetic property use keyed load IC followed by function call,
1996 // for a regular property use keyed CallIC. 2082 // for a regular property use keyed CallIC.
1997 { PreservePositionScope scope(masm()->positions_recorder());
1998 VisitForStackValue(prop->obj());
1999 }
2000 if (prop->is_synthetic()) { 2083 if (prop->is_synthetic()) {
2001 { PreservePositionScope scope(masm()->positions_recorder()); 2084 // Do not visit the object and key subexpressions (they are shared
2002 VisitForAccumulatorValue(prop->key()); 2085 // by all occurrences of the same rewritten parameter).
2003 } 2086 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2087 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2088 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2089 MemOperand operand = EmitSlotSearch(slot, r1);
2090 __ ldr(r1, operand);
2091
2092 ASSERT(prop->key()->AsLiteral() != NULL);
2093 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2094 __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
2095
2004 // Record source code position for IC call. 2096 // Record source code position for IC call.
2005 SetSourcePosition(prop->position()); 2097 SetSourcePosition(prop->position());
2006 __ pop(r1); // We do not need to keep the receiver.
2007 2098
2008 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 2099 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2009 EmitCallIC(ic, RelocInfo::CODE_TARGET); 2100 EmitCallIC(ic, RelocInfo::CODE_TARGET);
2010 __ ldr(r1, GlobalObjectOperand()); 2101 __ ldr(r1, GlobalObjectOperand());
2011 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); 2102 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2012 __ Push(r0, r1); // Function, receiver. 2103 __ Push(r0, r1); // Function, receiver.
2013 EmitCallWithStub(expr); 2104 EmitCallWithStub(expr);
2014 } else { 2105 } else {
2106 { PreservePositionScope scope(masm()->positions_recorder());
2107 VisitForStackValue(prop->obj());
2108 }
2015 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET); 2109 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2016 } 2110 }
2017 } 2111 }
2018 } else { 2112 } else {
2019 // Call to some other expression. If the expression is an anonymous 2113 // Call to some other expression. If the expression is an anonymous
2020 // function literal not called in a loop, mark it as one that should 2114 // function literal not called in a loop, mark it as one that should
2021 // also use the fast code generator. 2115 // also use the fast code generator.
2022 FunctionLiteral* lit = fun->AsFunctionLiteral(); 2116 FunctionLiteral* lit = fun->AsFunctionLiteral();
2023 if (lit != NULL && 2117 if (lit != NULL &&
2024 lit->name()->Equals(Heap::empty_string()) && 2118 lit->name()->Equals(Heap::empty_string()) &&
(...skipping 96 matching lines...)
2121 2215
2122 VisitForAccumulatorValue(args->at(0)); 2216 VisitForAccumulatorValue(args->at(0));
2123 2217
2124 Label materialize_true, materialize_false; 2218 Label materialize_true, materialize_false;
2125 Label* if_true = NULL; 2219 Label* if_true = NULL;
2126 Label* if_false = NULL; 2220 Label* if_false = NULL;
2127 Label* fall_through = NULL; 2221 Label* fall_through = NULL;
2128 context()->PrepareTest(&materialize_true, &materialize_false, 2222 context()->PrepareTest(&materialize_true, &materialize_false,
2129 &if_true, &if_false, &fall_through); 2223 &if_true, &if_false, &fall_through);
2130 2224
2131 __ BranchOnSmi(r0, if_false); 2225 __ JumpIfSmi(r0, if_false);
2132 __ LoadRoot(ip, Heap::kNullValueRootIndex); 2226 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2133 __ cmp(r0, ip); 2227 __ cmp(r0, ip);
2134 __ b(eq, if_true); 2228 __ b(eq, if_true);
2135 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 2229 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2136 // Undetectable objects behave like undefined when tested with typeof. 2230 // Undetectable objects behave like undefined when tested with typeof.
2137 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 2231 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2138 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2232 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2139 __ b(ne, if_false); 2233 __ b(ne, if_false);
2140 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 2234 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2141 __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE)); 2235 __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
(...skipping 11 matching lines...)
2153 2247
2154 VisitForAccumulatorValue(args->at(0)); 2248 VisitForAccumulatorValue(args->at(0));
2155 2249
2156 Label materialize_true, materialize_false; 2250 Label materialize_true, materialize_false;
2157 Label* if_true = NULL; 2251 Label* if_true = NULL;
2158 Label* if_false = NULL; 2252 Label* if_false = NULL;
2159 Label* fall_through = NULL; 2253 Label* fall_through = NULL;
2160 context()->PrepareTest(&materialize_true, &materialize_false, 2254 context()->PrepareTest(&materialize_true, &materialize_false,
2161 &if_true, &if_false, &fall_through); 2255 &if_true, &if_false, &fall_through);
2162 2256
2163 __ BranchOnSmi(r0, if_false); 2257 __ JumpIfSmi(r0, if_false);
2164 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE); 2258 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2165 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 2259 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2166 Split(ge, if_true, if_false, fall_through); 2260 Split(ge, if_true, if_false, fall_through);
2167 2261
2168 context()->Plug(if_true, if_false); 2262 context()->Plug(if_true, if_false);
2169 } 2263 }
2170 2264
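
Undetectable objects (such as document.all) must answer false in EmitIsObject even though they are heap objects, which is what the Map::kIsUndetectable test above implements. A sketch of the bit-field test, with an assumed bit position (the real constant lives in V8's Map class):

    #include <cassert>
    #include <cstdint>

    const int kIsUndetectable = 5;  // illustrative bit position only

    bool IsUndetectable(uint8_t map_bit_field) {
      return (map_bit_field & (1 << kIsUndetectable)) != 0;
    }

    int main() {
      assert(!IsUndetectable(0));
      assert(IsUndetectable(1 << kIsUndetectable));
      return 0;
    }
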
2171 2265
2172 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { 2266 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2173 ASSERT(args->length() == 1); 2267 ASSERT(args->length() == 1);
2174 2268
2175 VisitForAccumulatorValue(args->at(0)); 2269 VisitForAccumulatorValue(args->at(0));
2176 2270
2177 Label materialize_true, materialize_false; 2271 Label materialize_true, materialize_false;
2178 Label* if_true = NULL; 2272 Label* if_true = NULL;
2179 Label* if_false = NULL; 2273 Label* if_false = NULL;
2180 Label* fall_through = NULL; 2274 Label* fall_through = NULL;
2181 context()->PrepareTest(&materialize_true, &materialize_false, 2275 context()->PrepareTest(&materialize_true, &materialize_false,
2182 &if_true, &if_false, &fall_through); 2276 &if_true, &if_false, &fall_through);
2183 2277
2184 __ BranchOnSmi(r0, if_false); 2278 __ JumpIfSmi(r0, if_false);
2185 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2279 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2186 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 2280 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
2187 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2281 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2188 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 2282 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2189 Split(ne, if_true, if_false, fall_through); 2283 Split(ne, if_true, if_false, fall_through);
2190 2284
2191 context()->Plug(if_true, if_false); 2285 context()->Plug(if_true, if_false);
2192 } 2286 }
2193 2287
2194 2288
(...skipping 25 matching lines...)
2220 2314
2221 VisitForAccumulatorValue(args->at(0)); 2315 VisitForAccumulatorValue(args->at(0));
2222 2316
2223 Label materialize_true, materialize_false; 2317 Label materialize_true, materialize_false;
2224 Label* if_true = NULL; 2318 Label* if_true = NULL;
2225 Label* if_false = NULL; 2319 Label* if_false = NULL;
2226 Label* fall_through = NULL; 2320 Label* fall_through = NULL;
2227 context()->PrepareTest(&materialize_true, &materialize_false, 2321 context()->PrepareTest(&materialize_true, &materialize_false,
2228 &if_true, &if_false, &fall_through); 2322 &if_true, &if_false, &fall_through);
2229 2323
2230 __ BranchOnSmi(r0, if_false); 2324 __ JumpIfSmi(r0, if_false);
2231 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 2325 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2232 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 2326 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2233 Split(eq, if_true, if_false, fall_through); 2327 Split(eq, if_true, if_false, fall_through);
2234 2328
2235 context()->Plug(if_true, if_false); 2329 context()->Plug(if_true, if_false);
2236 } 2330 }
2237 2331
2238 2332
2239 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { 2333 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2240 ASSERT(args->length() == 1); 2334 ASSERT(args->length() == 1);
2241 2335
2242 VisitForAccumulatorValue(args->at(0)); 2336 VisitForAccumulatorValue(args->at(0));
2243 2337
2244 Label materialize_true, materialize_false; 2338 Label materialize_true, materialize_false;
2245 Label* if_true = NULL; 2339 Label* if_true = NULL;
2246 Label* if_false = NULL; 2340 Label* if_false = NULL;
2247 Label* fall_through = NULL; 2341 Label* fall_through = NULL;
2248 context()->PrepareTest(&materialize_true, &materialize_false, 2342 context()->PrepareTest(&materialize_true, &materialize_false,
2249 &if_true, &if_false, &fall_through); 2343 &if_true, &if_false, &fall_through);
2250 2344
2251 __ BranchOnSmi(r0, if_false); 2345 __ JumpIfSmi(r0, if_false);
2252 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 2346 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2253 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 2347 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2254 Split(eq, if_true, if_false, fall_through); 2348 Split(eq, if_true, if_false, fall_through);
2255 2349
2256 context()->Plug(if_true, if_false); 2350 context()->Plug(if_true, if_false);
2257 } 2351 }
2258 2352
2259 2353
2260 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { 2354 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2261 ASSERT(args->length() == 1); 2355 ASSERT(args->length() == 1);
2262 2356
2263 VisitForAccumulatorValue(args->at(0)); 2357 VisitForAccumulatorValue(args->at(0));
2264 2358
2265 Label materialize_true, materialize_false; 2359 Label materialize_true, materialize_false;
2266 Label* if_true = NULL; 2360 Label* if_true = NULL;
2267 Label* if_false = NULL; 2361 Label* if_false = NULL;
2268 Label* fall_through = NULL; 2362 Label* fall_through = NULL;
2269 context()->PrepareTest(&materialize_true, &materialize_false, 2363 context()->PrepareTest(&materialize_true, &materialize_false,
2270 &if_true, &if_false, &fall_through); 2364 &if_true, &if_false, &fall_through);
2271 2365
2272 __ BranchOnSmi(r0, if_false); 2366 __ JumpIfSmi(r0, if_false);
2273 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 2367 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2274 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 2368 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2275 Split(eq, if_true, if_false, fall_through); 2369 Split(eq, if_true, if_false, fall_through);
2276 2370
2277 context()->Plug(if_true, if_false); 2371 context()->Plug(if_true, if_false);
2278 } 2372 }
2279 2373
2280 2374
2281 2375
2282 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { 2376 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
(...skipping 86 matching lines...)
2369 } 2463 }
2370 2464
2371 2465
2372 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) { 2466 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2373 ASSERT(args->length() == 1); 2467 ASSERT(args->length() == 1);
2374 Label done, null, function, non_function_constructor; 2468 Label done, null, function, non_function_constructor;
2375 2469
2376 VisitForAccumulatorValue(args->at(0)); 2470 VisitForAccumulatorValue(args->at(0));
2377 2471
2378 // If the object is a smi, we return null. 2472 // If the object is a smi, we return null.
2379 __ BranchOnSmi(r0, &null); 2473 __ JumpIfSmi(r0, &null);
2380 2474
2381 // Check that the object is a JS object but take special care of JS 2475 // Check that the object is a JS object but take special care of JS
2382 // functions to make sure they have 'Function' as their class. 2476 // functions to make sure they have 'Function' as their class.
2383 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0. 2477 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0.
2384 __ b(lt, &null); 2478 __ b(lt, &null);
2385 2479
2386 // As long as JS_FUNCTION_TYPE is the last instance type and it is 2480 // As long as JS_FUNCTION_TYPE is the last instance type and it is
2387 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for 2481 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2388 // LAST_JS_OBJECT_TYPE. 2482 // LAST_JS_OBJECT_TYPE.
2389 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 2483 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
(...skipping 130 matching lines...)
2520 } 2614 }
2521 2615
2522 2616
2523 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) { 2617 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2524 ASSERT(args->length() == 1); 2618 ASSERT(args->length() == 1);
2525 2619
2526 VisitForAccumulatorValue(args->at(0)); // Load the object. 2620 VisitForAccumulatorValue(args->at(0)); // Load the object.
2527 2621
2528 Label done; 2622 Label done;
2529 // If the object is a smi return the object. 2623 // If the object is a smi return the object.
2530 __ BranchOnSmi(r0, &done); 2624 __ JumpIfSmi(r0, &done);
2531 // If the object is not a value type, return the object. 2625 // If the object is not a value type, return the object.
2532 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 2626 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
2533 __ b(ne, &done); 2627 __ b(ne, &done);
2534 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); 2628 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
2535 2629
2536 __ bind(&done); 2630 __ bind(&done);
2537 context()->Plug(r0); 2631 context()->Plug(r0);
2538 } 2632 }
2539 2633
2540 2634
2541 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) { 2635 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2542 // Load the arguments on the stack and call the runtime function. 2636 // Load the arguments on the stack and call the runtime function.
2543 ASSERT(args->length() == 2); 2637 ASSERT(args->length() == 2);
2544 VisitForStackValue(args->at(0)); 2638 VisitForStackValue(args->at(0));
2545 VisitForStackValue(args->at(1)); 2639 VisitForStackValue(args->at(1));
2546 __ CallRuntime(Runtime::kMath_pow, 2); 2640 __ CallRuntime(Runtime::kMath_pow, 2);
2547 context()->Plug(r0); 2641 context()->Plug(r0);
2548 } 2642 }
2549 2643
2550 2644
2551 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { 2645 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2552 ASSERT(args->length() == 2); 2646 ASSERT(args->length() == 2);
2553 2647
2554 VisitForStackValue(args->at(0)); // Load the object. 2648 VisitForStackValue(args->at(0)); // Load the object.
2555 VisitForAccumulatorValue(args->at(1)); // Load the value. 2649 VisitForAccumulatorValue(args->at(1)); // Load the value.
2556 __ pop(r1); // r0 = value. r1 = object. 2650 __ pop(r1); // r0 = value. r1 = object.
2557 2651
2558 Label done; 2652 Label done;
2559 // If the object is a smi, return the value. 2653 // If the object is a smi, return the value.
2560 __ BranchOnSmi(r1, &done); 2654 __ JumpIfSmi(r1, &done);
2561 2655
2562 // If the object is not a value type, return the value. 2656 // If the object is not a value type, return the value.
2563 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 2657 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
2564 __ b(ne, &done); 2658 __ b(ne, &done);
2565 2659
2566 // Store the value. 2660 // Store the value.
2567 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 2661 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
2568 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER 2662 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER
2569 // Update the write barrier. Save the value as it will be 2663 // Update the write barrier. Save the value as it will be
2570 // overwritten by the write barrier code and is needed afterward. 2664 // overwritten by the write barrier code and is needed afterward.
(...skipping 396 matching lines...)
2967 context()->Plug(r0); 3061 context()->Plug(r0);
2968 } 3062 }
2969 3063
2970 3064
2971 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3065 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2972 switch (expr->op()) { 3066 switch (expr->op()) {
2973 case Token::DELETE: { 3067 case Token::DELETE: {
2974 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3068 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2975 Property* prop = expr->expression()->AsProperty(); 3069 Property* prop = expr->expression()->AsProperty();
2976 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3070 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
2977 if (prop == NULL && var == NULL) { 3071
3072 if (prop != NULL) {
3073 if (prop->is_synthetic()) {
3074 // Result of deleting parameters is false, even when they rewrite
3075 // to accesses on the arguments object.
3076 context()->Plug(false);
3077 } else {
3078 VisitForStackValue(prop->obj());
3079 VisitForStackValue(prop->key());
3080 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
3081 __ push(r1);
3082 __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
3083 context()->Plug(r0);
3084 }
3085 } else if (var != NULL) {
3086 // Delete of an unqualified identifier is disallowed in strict mode
3087 // so this code can only be reached in non-strict mode.
3088 ASSERT(strict_mode_flag() == kNonStrictMode);
3089 if (var->is_global()) {
3090 __ ldr(r2, GlobalObjectOperand());
3091 __ mov(r1, Operand(var->name()));
3092 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3093 __ Push(r2, r1, r0);
3094 __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
3095 context()->Plug(r0);
3096 } else if (var->AsSlot() != NULL &&
3097 var->AsSlot()->type() != Slot::LOOKUP) {
3098 // Result of deleting non-global, non-dynamic variables is false.
3099 // The subexpression does not have side effects.
3100 context()->Plug(false);
3101 } else {
3102 // Non-global variable. Call the runtime to try to delete from the
3103 // context where the variable was introduced.
3104 __ push(context_register());
3105 __ mov(r2, Operand(var->name()));
3106 __ push(r2);
3107 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3108 context()->Plug(r0);
3109 }
3110 } else {
2978 // Result of deleting non-property, non-variable reference is true. 3111 // Result of deleting non-property, non-variable reference is true.
2979 // The subexpression may have side effects. 3112 // The subexpression may have side effects.
2980 VisitForEffect(expr->expression()); 3113 VisitForEffect(expr->expression());
2981 context()->Plug(true); 3114 context()->Plug(true);
2982 } else if (var != NULL &&
2983 !var->is_global() &&
2984 var->AsSlot() != NULL &&
2985 var->AsSlot()->type() != Slot::LOOKUP) {
2986 // Result of deleting non-global, non-dynamic variables is false.
2987 // The subexpression does not have side effects.
2988 context()->Plug(false);
2989 } else {
2990 // Property or variable reference. Call the delete builtin with
2991 // object and property name as arguments.
2992 if (prop != NULL) {
2993 VisitForStackValue(prop->obj());
2994 VisitForStackValue(prop->key());
2995 } else if (var->is_global()) {
2996 __ ldr(r1, GlobalObjectOperand());
2997 __ mov(r0, Operand(var->name()));
2998 __ Push(r1, r0);
2999 } else {
3000 // Non-global variable. Call the runtime to look up the context
3001 // where the variable was introduced.
3002 __ push(context_register());
3003 __ mov(r2, Operand(var->name()));
3004 __ push(r2);
3005 __ CallRuntime(Runtime::kLookupContext, 2);
3006 __ push(r0);
3007 __ mov(r2, Operand(var->name()));
3008 __ push(r2);
3009 }
3010 __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
3011 context()->Plug(r0);
3012 } 3115 }
3013 break; 3116 break;
3014 } 3117 }
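
The rewritten DELETE case above encodes several ECMAScript rules: synthetic properties and non-global, non-dynamic variables delete to false, real properties and globals go through Builtins::DELETE, lookup-slot variables go through the runtime, and anything else is true. This decision-table sketch (illustrative only) mirrors those branches:

    #include <cstdio>

    enum DeleteResult {
      kAlwaysFalse,  // synthetic property / non-global, non-dynamic variable
      kCallBuiltin,  // real property or global variable: Builtins::DELETE
      kCallRuntime,  // lookup-slot variable: Runtime::kDeleteContextSlot
      kAlwaysTrue    // non-property, non-variable reference
    };

    DeleteResult ClassifyDelete(bool is_property, bool is_synthetic,
                                bool is_variable, bool is_global,
                                bool is_lookup_slot) {
      if (is_property) return is_synthetic ? kAlwaysFalse : kCallBuiltin;
      if (is_variable) {
        if (is_global) return kCallBuiltin;
        if (!is_lookup_slot) return kAlwaysFalse;
        return kCallRuntime;
      }
      return kAlwaysTrue;
    }

    int main() {
      // delete obj.x -> builtin; delete local -> false; delete 1 -> true.
      printf("%d %d %d\n",
             ClassifyDelete(true, false, false, false, false),
             ClassifyDelete(false, false, true, false, false),
             ClassifyDelete(false, false, false, false, false));
      return 0;
    }
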
3015 3118
3016 case Token::VOID: { 3119 case Token::VOID: {
3017 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 3120 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3018 VisitForEffect(expr->expression()); 3121 VisitForEffect(expr->expression());
3019 context()->Plug(Heap::kUndefinedValueRootIndex); 3122 context()->Plug(Heap::kUndefinedValueRootIndex);
3020 break; 3123 break;
3021 } 3124 }
(...skipping 23 matching lines...)
3045 context()->Plug(r0); 3148 context()->Plug(r0);
3046 break; 3149 break;
3047 } 3150 }
3048 3151
3049 case Token::ADD: { 3152 case Token::ADD: {
3050 Comment cmt(masm_, "[ UnaryOperation (ADD)"); 3153 Comment cmt(masm_, "[ UnaryOperation (ADD)");
3051 VisitForAccumulatorValue(expr->expression()); 3154 VisitForAccumulatorValue(expr->expression());
3052 Label no_conversion; 3155 Label no_conversion;
3053 __ tst(result_register(), Operand(kSmiTagMask)); 3156 __ tst(result_register(), Operand(kSmiTagMask));
3054 __ b(eq, &no_conversion); 3157 __ b(eq, &no_conversion);
3055 __ push(r0); 3158 ToNumberStub convert_stub;
3056 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS); 3159 __ CallStub(&convert_stub);
3057 __ bind(&no_conversion); 3160 __ bind(&no_conversion);
3058 context()->Plug(result_register()); 3161 context()->Plug(result_register());
3059 break; 3162 break;
3060 } 3163 }
3061 3164
3062 case Token::SUB: { 3165 case Token::SUB: {
3063 Comment cmt(masm_, "[ UnaryOperation (SUB)"); 3166 Comment cmt(masm_, "[ UnaryOperation (SUB)");
3064 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 3167 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3065 UnaryOverwriteMode overwrite = 3168 UnaryOverwriteMode overwrite =
3066 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 3169 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
(...skipping 10 matching lines...)
3077 3180
3078 case Token::BIT_NOT: { 3181 case Token::BIT_NOT: {
3079 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)"); 3182 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
3080 // The generic unary operation stub expects the argument to be 3183 // The generic unary operation stub expects the argument to be
3081 // in the accumulator register r0. 3184 // in the accumulator register r0.
3082 VisitForAccumulatorValue(expr->expression()); 3185 VisitForAccumulatorValue(expr->expression());
3083 Label done; 3186 Label done;
3084 bool inline_smi_code = ShouldInlineSmiCase(expr->op()); 3187 bool inline_smi_code = ShouldInlineSmiCase(expr->op());
3085 if (inline_smi_code) { 3188 if (inline_smi_code) {
3086 Label call_stub; 3189 Label call_stub;
3087 __ BranchOnNotSmi(r0, &call_stub); 3190 __ JumpIfNotSmi(r0, &call_stub);
3088 __ mvn(r0, Operand(r0)); 3191 __ mvn(r0, Operand(r0));
3089 // Bit-clear inverted smi-tag. 3192 // Bit-clear inverted smi-tag.
3090 __ bic(r0, r0, Operand(kSmiTagMask)); 3193 __ bic(r0, r0, Operand(kSmiTagMask));
3091 __ b(&done); 3194 __ b(&done);
3092 __ bind(&call_stub); 3195 __ bind(&call_stub);
3093 } 3196 }
3094 bool overwrite = expr->expression()->ResultOverwriteAllowed(); 3197 bool overwrite = expr->expression()->ResultOverwriteAllowed();
3095 UnaryOpFlags flags = inline_smi_code 3198 UnaryOpFlags flags = inline_smi_code
3096 ? NO_UNARY_SMI_CODE_IN_STUB 3199 ? NO_UNARY_SMI_CODE_IN_STUB
3097 : NO_UNARY_FLAGS; 3200 : NO_UNARY_FLAGS;
(...skipping 66 matching lines...)
3164 EmitKeyedPropertyLoad(prop); 3267 EmitKeyedPropertyLoad(prop);
3165 } 3268 }
3166 } 3269 }
3167 3270
3168 // We need a second deoptimization point after loading the value 3271 // We need a second deoptimization point after loading the value
3169 // in case evaluating the property load may have a side effect. 3272 // in case evaluating the property load may have a side effect.
3170 PrepareForBailout(expr->increment(), TOS_REG); 3273 PrepareForBailout(expr->increment(), TOS_REG);
3171 3274
3172 // Call ToNumber only if operand is not a smi. 3275 // Call ToNumber only if operand is not a smi.
3173 Label no_conversion; 3276 Label no_conversion;
3174 __ BranchOnSmi(r0, &no_conversion); 3277 __ JumpIfSmi(r0, &no_conversion);
3175 __ push(r0); 3278 ToNumberStub convert_stub;
3176 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS); 3279 __ CallStub(&convert_stub);
3177 __ bind(&no_conversion); 3280 __ bind(&no_conversion);
3178 3281
3179 // Save result for postfix expressions. 3282 // Save result for postfix expressions.
3180 if (expr->is_postfix()) { 3283 if (expr->is_postfix()) {
3181 if (!context()->IsEffect()) { 3284 if (!context()->IsEffect()) {
3182 // Save the result on the stack. If we have a named or keyed property 3285 // Save the result on the stack. If we have a named or keyed property
3183 // we store the result under the receiver that is currently on top 3286 // we store the result under the receiver that is currently on top
3184 // of the stack. 3287 // of the stack.
3185 switch (assign_type) { 3288 switch (assign_type) {
3186 case VARIABLE: 3289 case VARIABLE:
(...skipping 11 matching lines...)
3198 3301
3199 3302
3200 // Inline smi case if we are in a loop. 3303 // Inline smi case if we are in a loop.
3201 Label stub_call, done; 3304 Label stub_call, done;
3202 int count_value = expr->op() == Token::INC ? 1 : -1; 3305 int count_value = expr->op() == Token::INC ? 1 : -1;
3203 if (ShouldInlineSmiCase(expr->op())) { 3306 if (ShouldInlineSmiCase(expr->op())) {
3204 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 3307 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
3205 __ b(vs, &stub_call); 3308 __ b(vs, &stub_call);
3206 // We could eliminate this smi check if we split the code at 3309 // We could eliminate this smi check if we split the code at
3207 // the first smi check before calling ToNumber. 3310 // the first smi check before calling ToNumber.
3208 __ BranchOnSmi(r0, &done); 3311 __ JumpIfSmi(r0, &done);
3209 __ bind(&stub_call); 3312 __ bind(&stub_call);
3210 // Call stub. Undo operation first. 3313 // Call stub. Undo operation first.
3211 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 3314 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
3212 } 3315 }
3213 __ mov(r1, Operand(Smi::FromInt(count_value))); 3316 __ mov(r1, Operand(Smi::FromInt(count_value)));
3214 3317
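
The inline count operation above adds the tagged count value with SetCC and branches to the stub on the overflow (vs) condition. A sketch of the same check with 32-bit smis, where the tag is a left shift by one (as the un-smi-tag ASR at the end of this file also assumes):

    #include <cassert>
    #include <cstdint>

    int32_t SmiTag(int32_t v) { return v << 1; }
    int32_t SmiUntag(int32_t t) { return t >> 1; }

    // Adds two tagged smis; returns true on signed overflow, i.e. the case
    // where the generated code's "b vs" branch takes the stub path.
    bool SmiAddOverflows(int32_t a, int32_t b, int32_t* result) {
      int64_t wide = static_cast<int64_t>(a) + b;
      if (wide != static_cast<int32_t>(wide)) return true;
      *result = static_cast<int32_t>(wide);
      return false;
    }

    int main() {
      int32_t r;
      assert(!SmiAddOverflows(SmiTag(41), SmiTag(1), &r) && SmiUntag(r) == 42);
      // The largest 31-bit smi plus one overflows and must use the stub.
      assert(SmiAddOverflows(SmiTag(0x3fffffff), SmiTag(1), &r));
      return 0;
    }
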
3215 // Record position before stub call. 3318 // Record position before stub call.
3216 SetSourcePosition(expr->position()); 3319 SetSourcePosition(expr->position());
3217 3320
3218 GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0); 3321 GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0);
(...skipping 232 matching lines...)
3451 __ CallStub(&stub); 3554 __ CallStub(&stub);
3452 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 3555 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3453 // The stub returns 0 for true. 3556 // The stub returns 0 for true.
3454 __ tst(r0, r0); 3557 __ tst(r0, r0);
3455 Split(eq, if_true, if_false, fall_through); 3558 Split(eq, if_true, if_false, fall_through);
3456 break; 3559 break;
3457 } 3560 }
3458 3561
3459 default: { 3562 default: {
3460 VisitForAccumulatorValue(expr->right()); 3563 VisitForAccumulatorValue(expr->right());
3461 Condition cc = eq; 3564 Condition cond = eq;
3462 bool strict = false; 3565 bool strict = false;
3463 switch (op) { 3566 switch (op) {
3464 case Token::EQ_STRICT: 3567 case Token::EQ_STRICT:
3465 strict = true; 3568 strict = true;
3466 // Fall through 3569 // Fall through
3467 case Token::EQ: 3570 case Token::EQ:
3468 cc = eq; 3571 cond = eq;
3469 __ pop(r1); 3572 __ pop(r1);
3470 break; 3573 break;
3471 case Token::LT: 3574 case Token::LT:
3472 cc = lt; 3575 cond = lt;
3473 __ pop(r1); 3576 __ pop(r1);
3474 break; 3577 break;
3475 case Token::GT: 3578 case Token::GT:
3476 // Reverse left and right sides to obtain ECMA-262 conversion order. 3579 // Reverse left and right sides to obtain ECMA-262 conversion order.
3477 cc = lt; 3580 cond = lt;
3478 __ mov(r1, result_register()); 3581 __ mov(r1, result_register());
3479 __ pop(r0); 3582 __ pop(r0);
3480 break; 3583 break;
3481 case Token::LTE: 3584 case Token::LTE:
3482 // Reverse left and right sides to obtain ECMA-262 conversion order. 3585 // Reverse left and right sides to obtain ECMA-262 conversion order.
3483 cc = ge; 3586 cond = ge;
3484 __ mov(r1, result_register()); 3587 __ mov(r1, result_register());
3485 __ pop(r0); 3588 __ pop(r0);
3486 break; 3589 break;
3487 case Token::GTE: 3590 case Token::GTE:
3488 cc = ge; 3591 cond = ge;
3489 __ pop(r1); 3592 __ pop(r1);
3490 break; 3593 break;
3491 case Token::IN: 3594 case Token::IN:
3492 case Token::INSTANCEOF: 3595 case Token::INSTANCEOF:
3493 default: 3596 default:
3494 UNREACHABLE(); 3597 UNREACHABLE();
3495 } 3598 }
3496 3599
3497 bool inline_smi_code = ShouldInlineSmiCase(op); 3600 bool inline_smi_code = ShouldInlineSmiCase(op);
3601 JumpPatchSite patch_site(masm_);
3498 if (inline_smi_code) { 3602 if (inline_smi_code) {
3499 Label slow_case; 3603 Label slow_case;
3500 __ orr(r2, r0, Operand(r1)); 3604 __ orr(r2, r0, Operand(r1));
3501 __ BranchOnNotSmi(r2, &slow_case); 3605 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
3502 __ cmp(r1, r0); 3606 __ cmp(r1, r0);
3503 Split(cc, if_true, if_false, NULL); 3607 Split(cond, if_true, if_false, NULL);
3504 __ bind(&slow_case); 3608 __ bind(&slow_case);
3505 } 3609 }
3506 CompareFlags flags = inline_smi_code 3610
3507 ? NO_SMI_COMPARE_IN_STUB 3611 // Record position and call the compare IC.
3508 : NO_COMPARE_FLAGS; 3612 SetSourcePosition(expr->position());
3509 CompareStub stub(cc, strict, flags, r1, r0); 3613 Handle<Code> ic = CompareIC::GetUninitialized(op);
3510 __ CallStub(&stub); 3614 EmitCallIC(ic, &patch_site);
3511 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 3615 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3512 __ cmp(r0, Operand(0, RelocInfo::NONE)); 3616 __ cmp(r0, Operand(0));
3513 Split(cc, if_true, if_false, fall_through); 3617 Split(cond, if_true, if_false, fall_through);
3514 } 3618 }
3515 } 3619 }
3516 3620
3517 // Convert the result of the comparison into one expected for this 3621 // Convert the result of the comparison into one expected for this
3518 // expression's context. 3622 // expression's context.
3519 context()->Plug(if_true, if_false); 3623 context()->Plug(if_true, if_false);
3520 } 3624 }
3521 3625
3522 3626
3523 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { 3627 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
(...skipping 45 matching lines...)
3569 } 3673 }
3570 3674
3571 3675
3572 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { 3676 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
3573 ASSERT(mode == RelocInfo::CODE_TARGET || 3677 ASSERT(mode == RelocInfo::CODE_TARGET ||
3574 mode == RelocInfo::CODE_TARGET_CONTEXT); 3678 mode == RelocInfo::CODE_TARGET_CONTEXT);
3575 __ Call(ic, mode); 3679 __ Call(ic, mode);
3576 } 3680 }
3577 3681
3578 3682
3683 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
3684 __ Call(ic, RelocInfo::CODE_TARGET);
3685 if (patch_site != NULL && patch_site->is_bound()) {
3686 patch_site->EmitPatchInfo();
3687 } else {
3688 __ nop(); // Signals no inlined code.
3689 }
3690 }
3691
3692
3579 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 3693 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3580 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 3694 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3581 __ str(value, MemOperand(fp, frame_offset)); 3695 __ str(value, MemOperand(fp, frame_offset));
3582 } 3696 }
3583 3697
3584 3698
3585 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 3699 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3586 __ ldr(dst, ContextOperand(cp, context_index)); 3700 __ ldr(dst, ContextOperand(cp, context_index));
3587 } 3701 }
3588 3702
(...skipping 24 matching lines...)
3613 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. 3727 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
3614 __ add(pc, r1, Operand(masm_->CodeObject())); 3728 __ add(pc, r1, Operand(masm_->CodeObject()));
3615 } 3729 }
3616 3730
3617 3731
3618 #undef __ 3732 #undef __
3619 3733
3620 } } // namespace v8::internal 3734 } } // namespace v8::internal
3621 3735
3622 #endif // V8_TARGET_ARCH_ARM 3736 #endif // V8_TARGET_ARCH_ARM