Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 129 matching lines...)
140 Label ok; 140 Label ok;
141 __ testq(rcx, rcx); 141 __ testq(rcx, rcx);
142 __ j(zero, &ok, Label::kNear); 142 __ j(zero, &ok, Label::kNear);
143 // +1 for return address. 143 // +1 for return address.
144 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize; 144 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
145 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 145 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
146 __ movq(Operand(rsp, receiver_offset), kScratchRegister); 146 __ movq(Operand(rsp, receiver_offset), kScratchRegister);
147 __ bind(&ok); 147 __ bind(&ok);
148 } 148 }
149 149
150 // Open a frame scope to indicate that there is a frame on the stack. The
151 // MANUAL indicates that the scope shouldn't actually generate code to set up
152 // the frame (that is done below).
153 FrameScope frame_scope(masm_, StackFrame::MANUAL);
154
150 __ push(rbp); // Caller's frame pointer. 155 __ push(rbp); // Caller's frame pointer.
151 __ movq(rbp, rsp); 156 __ movq(rbp, rsp);
152 __ push(rsi); // Callee's context. 157 __ push(rsi); // Callee's context.
153 __ push(rdi); // Callee's JS Function. 158 __ push(rdi); // Callee's JS Function.
154 159
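
The prologue above now opens a FrameScope before pushing rbp and building the frame; per the patch's own comment, StackFrame::MANUAL means the scope emits no frame-setup code itself (the pushes do that) and presumably only records that a frame is live so frame-dependent emitters can be checked against it. A minimal, self-contained RAII sketch of that idea, using invented ToyAssembler/ToyFrameScope names rather than V8's real classes:

#include <cassert>
#include <cstdio>

// Toy stand-in for an assembler that only tracks whether a frame is "open".
struct ToyAssembler {
  bool has_frame = false;
};

// RAII marker in the spirit of FrameScope(masm_, StackFrame::MANUAL): it
// emits nothing, it just flips a flag so frame-dependent emitters can assert
// that they run inside a frame. All names here are invented.
class ToyFrameScope {
 public:
  explicit ToyFrameScope(ToyAssembler* masm) : masm_(masm) {
    assert(!masm_->has_frame);
    masm_->has_frame = true;  // MANUAL: the caller builds the frame itself
  }
  ~ToyFrameScope() { masm_->has_frame = false; }

 private:
  ToyAssembler* masm_;
};

void EmitFrameDependentCode(ToyAssembler* masm) {
  assert(masm->has_frame && "must run with a frame on the stack");
  std::puts("emitting code that relies on rbp being set up");
}

int main() {
  ToyAssembler masm;
  {
    ToyFrameScope scope(&masm);  // analogous to the FrameScope in the hunk
    // ... push rbp; movq rbp, rsp; push rsi; push rdi would be emitted here ...
    EmitFrameDependentCode(&masm);
  }
  return 0;
}
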
155 { Comment cmnt(masm_, "[ Allocate locals"); 160 { Comment cmnt(masm_, "[ Allocate locals");
156 int locals_count = info->scope()->num_stack_slots(); 161 int locals_count = info->scope()->num_stack_slots();
157 if (locals_count == 1) { 162 if (locals_count == 1) {
158 __ PushRoot(Heap::kUndefinedValueRootIndex); 163 __ PushRoot(Heap::kUndefinedValueRootIndex);
159 } else if (locals_count > 1) { 164 } else if (locals_count > 1) {
(...skipping 28 matching lines...)
188 for (int i = 0; i < num_parameters; i++) { 193 for (int i = 0; i < num_parameters; i++) {
189 Variable* var = scope()->parameter(i); 194 Variable* var = scope()->parameter(i);
190 if (var->IsContextSlot()) { 195 if (var->IsContextSlot()) {
191 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 196 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
192 (num_parameters - 1 - i) * kPointerSize; 197 (num_parameters - 1 - i) * kPointerSize;
193 // Load parameter from stack. 198 // Load parameter from stack.
194 __ movq(rax, Operand(rbp, parameter_offset)); 199 __ movq(rax, Operand(rbp, parameter_offset));
195 // Store it in the context. 200 // Store it in the context.
196 int context_offset = Context::SlotOffset(var->index()); 201 int context_offset = Context::SlotOffset(var->index());
197 __ movq(Operand(rsi, context_offset), rax); 202 __ movq(Operand(rsi, context_offset), rax);
198 // Update the write barrier. This clobbers all involved 203 // Update the write barrier. This clobbers rax and rbx.
199 // registers, so we have use a third register to avoid 204 __ RecordWriteContextSlot(
200 // clobbering rsi. 205 rsi, context_offset, rax, rbx, kDontSaveFPRegs);
201 __ movq(rcx, rsi);
202 __ RecordWrite(rcx, context_offset, rax, rbx);
203 } 206 }
204 } 207 }
205 } 208 }
206 209
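
The parameter-copy loop above switches from the old RecordWrite to RecordWriteContextSlot, passing kDontSaveFPRegs (the barrier stub need not preserve floating-point registers here); later hunks in this file also pass EMIT_REMEMBERED_SET and OMIT_SMI_CHECK when the stored value is statically known to be a heap object. A self-contained sketch of what such a generational write barrier records, with invented types that do not match V8's heap layout:

#include <cstdint>
#include <unordered_set>

// Toy generational write barrier; types and tagging are invented.
enum class Space { kNew, kOld };

struct ToyObject {
  Space space;
  ToyObject* slots[4];
};

// Remembered set: slots in old-space objects that may point into new space.
static std::unordered_set<ToyObject**> remembered_set;

// Pretend the low tag bit marks an integer ("smi") rather than a heap object.
bool IsSmi(const ToyObject* value) {
  return (reinterpret_cast<uintptr_t>(value) & 1) != 0;
}

// Runs after the raw store, mirroring how the generated code does the movq
// first and then calls RecordWriteContextSlot.
void WriteBarrier(ToyObject* host, int index, ToyObject* value,
                  bool omit_smi_check) {
  host->slots[index] = value;                    // the store itself
  if (!omit_smi_check && IsSmi(value)) return;   // smis never need a barrier
  if (host->space == Space::kOld && value->space == Space::kNew) {
    remembered_set.insert(&host->slots[index]);  // EMIT_REMEMBERED_SET analogue
  }
}

int main() {
  ToyObject context{Space::kOld, {}};
  ToyObject closure{Space::kNew, {}};
  // Like the function-declaration hunk later in this file: the stored value
  // is known to be a heap object, so the smi check can be omitted.
  WriteBarrier(&context, 0, &closure, /*omit_smi_check=*/true);
  return remembered_set.size() == 1 ? 0 : 1;
}
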
207 // Possibly allocate an arguments object. 210 // Possibly allocate an arguments object.
208 Variable* arguments = scope()->arguments(); 211 Variable* arguments = scope()->arguments();
209 if (arguments != NULL) { 212 if (arguments != NULL) {
210 // Arguments object must be allocated after the context object, in 213 // Arguments object must be allocated after the context object, in
211 // case the "arguments" or ".arguments" variables are in the context. 214 // case the "arguments" or ".arguments" variables are in the context.
212 Comment cmnt(masm_, "[ Allocate arguments object"); 215 Comment cmnt(masm_, "[ Allocate arguments object");
(...skipping 418 matching lines...)
631 void FullCodeGenerator::SetVar(Variable* var, 634 void FullCodeGenerator::SetVar(Variable* var,
632 Register src, 635 Register src,
633 Register scratch0, 636 Register scratch0,
634 Register scratch1) { 637 Register scratch1) {
635 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 638 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
636 ASSERT(!scratch0.is(src)); 639 ASSERT(!scratch0.is(src));
637 ASSERT(!scratch0.is(scratch1)); 640 ASSERT(!scratch0.is(scratch1));
638 ASSERT(!scratch1.is(src)); 641 ASSERT(!scratch1.is(src));
639 MemOperand location = VarOperand(var, scratch0); 642 MemOperand location = VarOperand(var, scratch0);
640 __ movq(location, src); 643 __ movq(location, src);
644
641 // Emit the write barrier code if the location is in the heap. 645 // Emit the write barrier code if the location is in the heap.
642 if (var->IsContextSlot()) { 646 if (var->IsContextSlot()) {
643 int offset = Context::SlotOffset(var->index()); 647 int offset = Context::SlotOffset(var->index());
644 __ RecordWrite(scratch0, offset, src, scratch1); 648 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
645 } 649 }
646 } 650 }
647 651
648 652
649 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 653 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
650 bool should_normalize, 654 bool should_normalize,
651 Label* if_true, 655 Label* if_true,
652 Label* if_false) { 656 Label* if_false) {
653 // Only prepare for bailouts before splits if we're in a test 657 // Only prepare for bailouts before splits if we're in a test
654 // context. Otherwise, we let the Visit function deal with the 658 // context. Otherwise, we let the Visit function deal with the
(...skipping 53 matching lines...)
708 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); 712 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
709 __ Check(not_equal, "Declaration in with context."); 713 __ Check(not_equal, "Declaration in with context.");
710 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); 714 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
711 __ Check(not_equal, "Declaration in catch context."); 715 __ Check(not_equal, "Declaration in catch context.");
712 } 716 }
713 if (function != NULL) { 717 if (function != NULL) {
714 Comment cmnt(masm_, "[ Declaration"); 718 Comment cmnt(masm_, "[ Declaration");
715 VisitForAccumulatorValue(function); 719 VisitForAccumulatorValue(function);
716 __ movq(ContextOperand(rsi, variable->index()), result_register()); 720 __ movq(ContextOperand(rsi, variable->index()), result_register());
717 int offset = Context::SlotOffset(variable->index()); 721 int offset = Context::SlotOffset(variable->index());
718 __ movq(rbx, rsi); 722 // We know that we have written a function, which is not a smi.
719 __ RecordWrite(rbx, offset, result_register(), rcx); 723 __ RecordWriteContextSlot(rsi,
724 offset,
725 result_register(),
726 rcx,
727 kDontSaveFPRegs,
728 EMIT_REMEMBERED_SET,
729 OMIT_SMI_CHECK);
720 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 730 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
721 } else if (mode == Variable::CONST || mode == Variable::LET) { 731 } else if (mode == Variable::CONST || mode == Variable::LET) {
722 Comment cmnt(masm_, "[ Declaration"); 732 Comment cmnt(masm_, "[ Declaration");
723 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 733 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
724 __ movq(ContextOperand(rsi, variable->index()), kScratchRegister); 734 __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
725 // No write barrier since the hole value is in old space. 735 // No write barrier since the hole value is in old space.
726 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 736 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
727 } 737 }
728 break; 738 break;
729 739
(...skipping 708 matching lines...)
1438 continue; 1448 continue;
1439 } 1449 }
1440 1450
1441 if (!result_saved) { 1451 if (!result_saved) {
1442 __ push(rax); 1452 __ push(rax);
1443 result_saved = true; 1453 result_saved = true;
1444 } 1454 }
1445 VisitForAccumulatorValue(subexpr); 1455 VisitForAccumulatorValue(subexpr);
1446 1456
1447 // Store the subexpression value in the array's elements. 1457 // Store the subexpression value in the array's elements.
1448 __ movq(rbx, Operand(rsp, 0)); // Copy of array literal. 1458 __ movq(r8, Operand(rsp, 0)); // Copy of array literal.
1449 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); 1459 __ movq(rbx, FieldOperand(r8, JSObject::kElementsOffset));
1450 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1460 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1451 __ movq(FieldOperand(rbx, offset), result_register()); 1461 __ movq(FieldOperand(rbx, offset), result_register());
1452 1462
1463 Label no_map_change;
1464 __ JumpIfSmi(result_register(), &no_map_change);
1453 // Update the write barrier for the array store. 1465 // Update the write barrier for the array store.
1454 __ RecordWrite(rbx, offset, result_register(), rcx); 1466 __ RecordWriteField(rbx, offset, result_register(), rcx,
1467 kDontSaveFPRegs,
1468 EMIT_REMEMBERED_SET,
1469 OMIT_SMI_CHECK);
1470 if (FLAG_smi_only_arrays) {
1471 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
1472 __ CheckFastSmiOnlyElements(rdi, &no_map_change, Label::kNear);
1473 __ push(r8);
1474 __ CallRuntime(Runtime::kNonSmiElementStored, 1);
1475 }
1476 __ bind(&no_map_change);
1455 1477
1456 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1478 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1457 } 1479 }
1458 1480
1459 if (result_saved) { 1481 if (result_saved) {
1460 context()->PlugTOS(); 1482 context()->PlugTOS();
1461 } else { 1483 } else {
1462 context()->Plug(rax); 1484 context()->Plug(rax);
1463 } 1485 }
1464 } 1486 }
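
The array-literal hunk above now stores the element first and then, when FLAG_smi_only_arrays is on and a non-smi landed in a backing store still marked as fast smi-only elements, calls Runtime::kNonSmiElementStored to generalize the elements kind. A small sketch of that store-then-transition pattern, with invented ToyArray types rather than V8's ElementsKind machinery:

#include <cstddef>
#include <cstdio>
#include <vector>

// Toy model of "smi-only" elements tracking; types invented, not V8's.
enum class ElementsKind { kFastSmiOnly, kFastObject };

// A value is either a small integer ("smi") or some heap object.
struct Value {
  bool is_smi;
  int smi;             // valid when is_smi
  const char* object;  // valid when !is_smi
};

Value Smi(int n) { return Value{true, n, nullptr}; }
Value HeapObject(const char* o) { return Value{false, 0, o}; }

struct ToyArray {
  ElementsKind kind = ElementsKind::kFastSmiOnly;
  std::vector<Value> elements;
};

// Mirrors the patched literal code: do the store first, then, if a non-smi
// landed in a backing store still marked smi-only, take the slow path that
// generalizes the elements kind (Runtime::kNonSmiElementStored in the hunk).
void StoreElement(ToyArray& array, std::size_t index, const Value& value) {
  if (array.elements.size() <= index) array.elements.resize(index + 1, Smi(0));
  array.elements[index] = value;             // the store itself
  if (value.is_smi) return;                  // smi: nothing to do
  if (array.kind == ElementsKind::kFastSmiOnly) {
    array.kind = ElementsKind::kFastObject;  // one-way transition
  }
}

int main() {
  ToyArray a;
  StoreElement(a, 0, Smi(1));                  // stays smi-only
  StoreElement(a, 1, HeapObject("a string"));  // triggers the transition
  std::printf("kind is fast-object: %d\n", a.kind == ElementsKind::kFastObject);
  return 0;
}
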
(...skipping 305 matching lines...)
1770 MemOperand location = VarOperand(var, rcx); 1792 MemOperand location = VarOperand(var, rcx);
1771 __ movq(rdx, location); 1793 __ movq(rdx, location);
1772 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 1794 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1773 __ j(not_equal, &assign, Label::kNear); 1795 __ j(not_equal, &assign, Label::kNear);
1774 __ Push(var->name()); 1796 __ Push(var->name());
1775 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1797 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1776 __ bind(&assign); 1798 __ bind(&assign);
1777 __ movq(location, rax); 1799 __ movq(location, rax);
1778 if (var->IsContextSlot()) { 1800 if (var->IsContextSlot()) {
1779 __ movq(rdx, rax); 1801 __ movq(rdx, rax);
1780 __ RecordWrite(rcx, Context::SlotOffset(var->index()), rdx, rbx); 1802 __ RecordWriteContextSlot(
1803 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1781 } 1804 }
1782 } 1805 }
1783 1806
1784 } else if (var->mode() != Variable::CONST) { 1807 } else if (var->mode() != Variable::CONST) {
1785 // Assignment to var or initializing assignment to let. 1808 // Assignment to var or initializing assignment to let.
1786 if (var->IsStackAllocated() || var->IsContextSlot()) { 1809 if (var->IsStackAllocated() || var->IsContextSlot()) {
1787 MemOperand location = VarOperand(var, rcx); 1810 MemOperand location = VarOperand(var, rcx);
1788 if (FLAG_debug_code && op == Token::INIT_LET) { 1811 if (FLAG_debug_code && op == Token::INIT_LET) {
1789 // Check for an uninitialized let binding. 1812 // Check for an uninitialized let binding.
1790 __ movq(rdx, location); 1813 __ movq(rdx, location);
1791 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 1814 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1792 __ Check(equal, "Let binding re-initialization."); 1815 __ Check(equal, "Let binding re-initialization.");
1793 } 1816 }
1794 // Perform the assignment. 1817 // Perform the assignment.
1795 __ movq(location, rax); 1818 __ movq(location, rax);
1796 if (var->IsContextSlot()) { 1819 if (var->IsContextSlot()) {
1797 __ movq(rdx, rax); 1820 __ movq(rdx, rax);
1798 __ RecordWrite(rcx, Context::SlotOffset(var->index()), rdx, rbx); 1821 __ RecordWriteContextSlot(
1822 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1799 } 1823 }
1800 } else { 1824 } else {
1801 ASSERT(var->IsLookupSlot()); 1825 ASSERT(var->IsLookupSlot());
1802 __ push(rax); // Value. 1826 __ push(rax); // Value.
1803 __ push(rsi); // Context. 1827 __ push(rsi); // Context.
1804 __ Push(var->name()); 1828 __ Push(var->name());
1805 __ Push(Smi::FromInt(strict_mode_flag())); 1829 __ Push(Smi::FromInt(strict_mode_flag()));
1806 __ CallRuntime(Runtime::kStoreContextSlot, 4); 1830 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1807 } 1831 }
1808 } 1832 }
(...skipping 729 matching lines...)
2538 ASSERT(args->length() == 1); 2562 ASSERT(args->length() == 1);
2539 Label done, null, function, non_function_constructor; 2563 Label done, null, function, non_function_constructor;
2540 2564
2541 VisitForAccumulatorValue(args->at(0)); 2565 VisitForAccumulatorValue(args->at(0));
2542 2566
2543 // If the object is a smi, we return null. 2567 // If the object is a smi, we return null.
2544 __ JumpIfSmi(rax, &null); 2568 __ JumpIfSmi(rax, &null);
2545 2569
2546 // Check that the object is a JS object but take special care of JS 2570 // Check that the object is a JS object but take special care of JS
2547 // functions to make sure they have 'Function' as their class. 2571 // functions to make sure they have 'Function' as their class.
2572 // Assume that there are only two callable types, and one of them is at
2573 // either end of the type range for JS object types. Saves extra comparisons.
2574 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2548 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); 2575 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2549 // Map is now in rax. 2576 // Map is now in rax.
2550 __ j(below, &null); 2577 __ j(below, &null);
2578 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2579 FIRST_SPEC_OBJECT_TYPE + 1);
2580 __ j(equal, &function);
2551 2581
2552 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and 2582 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
2553 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after 2583 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2554 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter. 2584 LAST_SPEC_OBJECT_TYPE - 1);
2555 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE); 2585 __ j(equal, &function);
2556 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE == 2586 // Assume that there is no larger type.
2557 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1); 2587 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2558 __ CmpInstanceType(rax, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
2559 __ j(above_equal, &function);
2560 2588
2561 // Check if the constructor in the map is a function. 2589 // Check if the constructor in the map is a JS function.
2562 __ movq(rax, FieldOperand(rax, Map::kConstructorOffset)); 2590 __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2563 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); 2591 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2564 __ j(not_equal, &non_function_constructor); 2592 __ j(not_equal, &non_function_constructor);
2565 2593
2566 // rax now contains the constructor function. Grab the 2594 // rax now contains the constructor function. Grab the
2567 // instance class name from there. 2595 // instance class name from there.
2568 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 2596 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2569 __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset)); 2597 __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2570 __ jmp(&done); 2598 __ jmp(&done);
2571 2599
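
EmitClassOf above replaces the old callable-range comparison with two equality checks, relying on the STATIC_ASSERTed layout in which exactly two callable types bracket the spec-object instance-type range. A sketch of that layout assumption with an invented instance-type numbering (the real V8 values differ):

#include <cassert>

// Invented numbering that mimics the layout the patch asserts: the two
// callable types sit at the two ends of the spec-object range, so
// "is this function-like?" needs only two equality comparisons.
enum InstanceType {
  FIRST_SPEC_OBJECT_TYPE = 10,  // the function proxy type in this toy layout
  JS_FUNCTION_PROXY_TYPE = FIRST_SPEC_OBJECT_TYPE,
  FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
  JS_OBJECT_TYPE = FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
  JS_ARRAY_TYPE,
  LAST_NONCALLABLE_SPEC_OBJECT_TYPE = JS_ARRAY_TYPE,
  JS_FUNCTION_TYPE,
  LAST_SPEC_OBJECT_TYPE = JS_FUNCTION_TYPE,
  LAST_TYPE = LAST_SPEC_OBJECT_TYPE,
};

// The same layout assumptions the patch expresses with STATIC_ASSERT.
static_assert(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == FIRST_SPEC_OBJECT_TYPE + 1,
              "first spec object type is callable");
static_assert(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1,
              "last spec object type is callable");

bool IsSpecObject(InstanceType t) { return t >= FIRST_SPEC_OBJECT_TYPE; }

bool IsCallable(InstanceType t) {
  // Two equality checks instead of two range checks, because the only two
  // callable types bracket the range.
  return t == FIRST_SPEC_OBJECT_TYPE || t == LAST_SPEC_OBJECT_TYPE;
}

int main() {
  assert(IsCallable(JS_FUNCTION_TYPE));
  assert(IsCallable(JS_FUNCTION_PROXY_TYPE));
  assert(!IsCallable(JS_OBJECT_TYPE));
  assert(IsSpecObject(JS_ARRAY_TYPE));
  return 0;
}
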
(...skipping 147 matching lines...)
2719 2747
2720 // If the object is not a value type, return the value. 2748 // If the object is not a value type, return the value.
2721 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx); 2749 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
2722 __ j(not_equal, &done); 2750 __ j(not_equal, &done);
2723 2751
2724 // Store the value. 2752 // Store the value.
2725 __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax); 2753 __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
2726 // Update the write barrier. Save the value as it will be 2754 // Update the write barrier. Save the value as it will be
2727 // overwritten by the write barrier code and is needed afterward. 2755 // overwritten by the write barrier code and is needed afterward.
2728 __ movq(rdx, rax); 2756 __ movq(rdx, rax);
2729 __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx); 2757 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
2730 2758
2731 __ bind(&done); 2759 __ bind(&done);
2732 context()->Plug(rax); 2760 context()->Plug(rax);
2733 } 2761 }
2734 2762
2735 2763
2736 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) { 2764 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2737 ASSERT_EQ(args->length(), 1); 2765 ASSERT_EQ(args->length(), 1);
2738 2766
2739 // Load the argument on the stack and call the stub. 2767 // Load the argument on the stack and call the stub.
(...skipping 263 matching lines...)
3003 FixedArray::kHeaderSize)); 3031 FixedArray::kHeaderSize));
3004 __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size, 3032 __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
3005 FixedArray::kHeaderSize)); 3033 FixedArray::kHeaderSize));
3006 3034
3007 // Swap elements. Use object and temp as scratch registers. 3035 // Swap elements. Use object and temp as scratch registers.
3008 __ movq(object, Operand(index_1, 0)); 3036 __ movq(object, Operand(index_1, 0));
3009 __ movq(temp, Operand(index_2, 0)); 3037 __ movq(temp, Operand(index_2, 0));
3010 __ movq(Operand(index_2, 0), object); 3038 __ movq(Operand(index_2, 0), object);
3011 __ movq(Operand(index_1, 0), temp); 3039 __ movq(Operand(index_1, 0), temp);
3012 3040
3013 Label new_space; 3041 Label no_remembered_set;
3014 __ InNewSpace(elements, temp, equal, &new_space); 3042 __ CheckPageFlag(elements,
3043 temp,
3044 1 << MemoryChunk::SCAN_ON_SCAVENGE,
3045 not_zero,
3046 &no_remembered_set,
3047 Label::kNear);
3048 // Possible optimization: do a check that both values are Smis
3049 // (or them and test against Smi mask.)
3015 3050
3016 __ movq(object, elements); 3051 // We are swapping two objects in an array and the incremental marker never
3017 __ RecordWriteHelper(object, index_1, temp); 3052 // pauses in the middle of scanning a single object. Therefore the
3018 __ RecordWriteHelper(elements, index_2, temp); 3053 // incremental marker is not disturbed, so we don't need to call the
3054 // RecordWrite stub that notifies the incremental marker.
3055 __ RememberedSetHelper(elements,
3056 index_1,
3057 temp,
3058 kDontSaveFPRegs,
3059 MacroAssembler::kFallThroughAtEnd);
3060 __ RememberedSetHelper(elements,
3061 index_2,
3062 temp,
3063 kDontSaveFPRegs,
3064 MacroAssembler::kFallThroughAtEnd);
3019 3065
3020 __ bind(&new_space); 3066 __ bind(&no_remembered_set);
3067
3021 // We are done. Drop elements from the stack, and return undefined. 3068 // We are done. Drop elements from the stack, and return undefined.
3022 __ addq(rsp, Immediate(3 * kPointerSize)); 3069 __ addq(rsp, Immediate(3 * kPointerSize));
3023 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 3070 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3024 __ jmp(&done); 3071 __ jmp(&done);
3025 3072
3026 __ bind(&slow_case); 3073 __ bind(&slow_case);
3027 __ CallRuntime(Runtime::kSwapElements, 3); 3074 __ CallRuntime(Runtime::kSwapElements, 3);
3028 3075
3029 __ bind(&done); 3076 __ bind(&done);
3030 context()->Plug(rax); 3077 context()->Plug(rax);
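
The swap-elements hunk above no longer calls the full write-barrier helper: per the patch's comment, the incremental marker never pauses in the middle of a single object, so swapping two slots of the same array only needs the remembered set refreshed, and even that is skipped when the page is already flagged SCAN_ON_SCAVENGE. A toy model of that shortcut, with invented page and remembered-set types rather than V8's MemoryChunk layout:

#include <unordered_set>

// Toy model of the "scan on scavenge" shortcut; field names are invented.
struct ToyPage {
  bool scan_on_scavenge = false;  // the whole page will be rescanned anyway
  std::unordered_set<void**> remembered_slots;
};

// After a store into `slot` on `page`, make sure the scavenger will find it.
// If the page is already flagged SCAN_ON_SCAVENGE the individual slot does
// not need recording, which is what the CheckPageFlag branch skips over.
void RememberSlot(ToyPage& page, void** slot) {
  if (page.scan_on_scavenge) return;   // page-level flag already covers it
  page.remembered_slots.insert(slot);  // RememberedSetHelper analogue
}

int main() {
  ToyPage page;
  void* a = nullptr;
  void* b = nullptr;
  // Swapping two element slots of one array: only existing values move
  // between slots, so no incremental-marking stub call is made; both slots
  // are simply added to the remembered set.
  RememberSlot(page, &a);
  RememberSlot(page, &b);
  return page.remembered_slots.size() == 2 ? 0 : 1;
}
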
(...skipping 795 matching lines...)
3826 3873
3827 context()->Plug(rax); 3874 context()->Plug(rax);
3828 } else { 3875 } else {
3829 // This expression cannot throw a reference error at the top level. 3876 // This expression cannot throw a reference error at the top level.
3830 VisitInCurrentContext(expr); 3877 VisitInCurrentContext(expr);
3831 } 3878 }
3832 } 3879 }
3833 3880
3834 3881
3835 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 3882 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3836 Handle<String> check, 3883 Handle<String> check) {
3837 Label* if_true, 3884 Label materialize_true, materialize_false;
3838 Label* if_false, 3885 Label* if_true = NULL;
3839 Label* fall_through) { 3886 Label* if_false = NULL;
3887 Label* fall_through = NULL;
3888 context()->PrepareTest(&materialize_true, &materialize_false,
3889 &if_true, &if_false, &fall_through);
3890
3840 { AccumulatorValueContext context(this); 3891 { AccumulatorValueContext context(this);
3841 VisitForTypeofValue(expr); 3892 VisitForTypeofValue(expr);
3842 } 3893 }
3843 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 3894 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3844 3895
3845 if (check->Equals(isolate()->heap()->number_symbol())) { 3896 if (check->Equals(isolate()->heap()->number_symbol())) {
3846 __ JumpIfSmi(rax, if_true); 3897 __ JumpIfSmi(rax, if_true);
3847 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); 3898 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
3848 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); 3899 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
3849 Split(equal, if_true, if_false, fall_through); 3900 Split(equal, if_true, if_false, fall_through);
(...skipping 18 matching lines...)
3868 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); 3919 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
3869 __ j(equal, if_true); 3920 __ j(equal, if_true);
3870 __ JumpIfSmi(rax, if_false); 3921 __ JumpIfSmi(rax, if_false);
3871 // Check for undetectable objects => true. 3922 // Check for undetectable objects => true.
3872 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); 3923 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
3873 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), 3924 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
3874 Immediate(1 << Map::kIsUndetectable)); 3925 Immediate(1 << Map::kIsUndetectable));
3875 Split(not_zero, if_true, if_false, fall_through); 3926 Split(not_zero, if_true, if_false, fall_through);
3876 } else if (check->Equals(isolate()->heap()->function_symbol())) { 3927 } else if (check->Equals(isolate()->heap()->function_symbol())) {
3877 __ JumpIfSmi(rax, if_false); 3928 __ JumpIfSmi(rax, if_false);
3878 STATIC_ASSERT(LAST_CALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE); 3929 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3879 __ CmpObjectType(rax, FIRST_CALLABLE_SPEC_OBJECT_TYPE, rdx); 3930 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
3880 Split(above_equal, if_true, if_false, fall_through); 3931 __ j(equal, if_true);
3932 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
3933 Split(equal, if_true, if_false, fall_through);
3881 } else if (check->Equals(isolate()->heap()->object_symbol())) { 3934 } else if (check->Equals(isolate()->heap()->object_symbol())) {
3882 __ JumpIfSmi(rax, if_false); 3935 __ JumpIfSmi(rax, if_false);
3883 if (!FLAG_harmony_typeof) { 3936 if (!FLAG_harmony_typeof) {
3884 __ CompareRoot(rax, Heap::kNullValueRootIndex); 3937 __ CompareRoot(rax, Heap::kNullValueRootIndex);
3885 __ j(equal, if_true); 3938 __ j(equal, if_true);
3886 } 3939 }
3887 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx); 3940 __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
3888 __ j(below, if_false); 3941 __ j(below, if_false);
3889 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 3942 __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3890 __ j(above, if_false); 3943 __ j(above, if_false);
3891 // Check for undetectable objects => false. 3944 // Check for undetectable objects => false.
3892 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), 3945 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
3893 Immediate(1 << Map::kIsUndetectable)); 3946 Immediate(1 << Map::kIsUndetectable));
3894 Split(zero, if_true, if_false, fall_through); 3947 Split(zero, if_true, if_false, fall_through);
3895 } else { 3948 } else {
3896 if (if_false != fall_through) __ jmp(if_false); 3949 if (if_false != fall_through) __ jmp(if_false);
3897 } 3950 }
3898 } 3951 context()->Plug(if_true, if_false);
3899
3900
3901 void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
3902 Label* if_true,
3903 Label* if_false,
3904 Label* fall_through) {
3905 VisitForAccumulatorValue(expr);
3906 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3907
3908 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
3909 Split(equal, if_true, if_false, fall_through);
3910 } 3952 }
3911 3953
3912 3954
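
In the typeof "function" case above, the check now compares against JS_FUNCTION_TYPE and JS_FUNCTION_PROXY_TYPE individually instead of an open-ended range, so function proxies also report "function". A toy classifier showing the intended observable result, with invented type tags rather than V8's instance types:

#include <cassert>
#include <cstring>

// Invented type tags; only the mapping to typeof strings matters here.
enum class ToyType { kSmi, kString, kObject, kFunction, kFunctionProxy };

const char* TypeofString(ToyType t) {
  switch (t) {
    case ToyType::kSmi:            return "number";
    case ToyType::kString:         return "string";
    case ToyType::kFunction:       // fall through: both are callable
    case ToyType::kFunctionProxy:  return "function";
    default:                       return "object";
  }
}

int main() {
  assert(std::strcmp(TypeofString(ToyType::kFunction), "function") == 0);
  assert(std::strcmp(TypeofString(ToyType::kFunctionProxy), "function") == 0);
  assert(std::strcmp(TypeofString(ToyType::kObject), "object") == 0);
  return 0;
}
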
3913 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 3955 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3914 Comment cmnt(masm_, "[ CompareOperation"); 3956 Comment cmnt(masm_, "[ CompareOperation");
3915 SetSourcePosition(expr->position()); 3957 SetSourcePosition(expr->position());
3916 3958
3959 // First we try a fast inlined version of the compare when one of
3960 // the operands is a literal.
3961 if (TryLiteralCompare(expr)) return;
3962
3917 // Always perform the comparison for its control flow. Pack the result 3963 // Always perform the comparison for its control flow. Pack the result
3918 // into the expression's context after the comparison is performed. 3964 // into the expression's context after the comparison is performed.
3919 Label materialize_true, materialize_false; 3965 Label materialize_true, materialize_false;
3920 Label* if_true = NULL; 3966 Label* if_true = NULL;
3921 Label* if_false = NULL; 3967 Label* if_false = NULL;
3922 Label* fall_through = NULL; 3968 Label* fall_through = NULL;
3923 context()->PrepareTest(&materialize_true, &materialize_false, 3969 context()->PrepareTest(&materialize_true, &materialize_false,
3924 &if_true, &if_false, &fall_through); 3970 &if_true, &if_false, &fall_through);
3925 3971
3926 // First we try a fast inlined version of the compare when one of
3927 // the operands is a literal.
3928 if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
3929 context()->Plug(if_true, if_false);
3930 return;
3931 }
3932
3933 Token::Value op = expr->op(); 3972 Token::Value op = expr->op();
3934 VisitForStackValue(expr->left()); 3973 VisitForStackValue(expr->left());
3935 switch (op) { 3974 switch (op) {
3936 case Token::IN: 3975 case Token::IN:
3937 VisitForStackValue(expr->right()); 3976 VisitForStackValue(expr->right());
3938 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 3977 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
3939 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 3978 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
3940 __ CompareRoot(rax, Heap::kTrueValueRootIndex); 3979 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
3941 Split(equal, if_true, if_false, fall_through); 3980 Split(equal, if_true, if_false, fall_through);
3942 break; 3981 break;
3943 3982
3944 case Token::INSTANCEOF: { 3983 case Token::INSTANCEOF: {
3945 VisitForStackValue(expr->right()); 3984 VisitForStackValue(expr->right());
3946 InstanceofStub stub(InstanceofStub::kNoFlags); 3985 InstanceofStub stub(InstanceofStub::kNoFlags);
3947 __ CallStub(&stub); 3986 __ CallStub(&stub);
3948 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 3987 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3949 __ testq(rax, rax); 3988 __ testq(rax, rax);
3950 // The stub returns 0 for true. 3989 // The stub returns 0 for true.
3951 Split(zero, if_true, if_false, fall_through); 3990 Split(zero, if_true, if_false, fall_through);
3952 break; 3991 break;
3953 } 3992 }
3954 3993
3955 default: { 3994 default: {
3956 VisitForAccumulatorValue(expr->right()); 3995 VisitForAccumulatorValue(expr->right());
3957 Condition cc = no_condition; 3996 Condition cc = no_condition;
3958 switch (op) { 3997 switch (op) {
3959 case Token::EQ_STRICT: 3998 case Token::EQ_STRICT:
3960 // Fall through.
3961 case Token::EQ: 3999 case Token::EQ:
3962 cc = equal; 4000 cc = equal;
3963 __ pop(rdx); 4001 __ pop(rdx);
3964 break; 4002 break;
3965 case Token::LT: 4003 case Token::LT:
3966 cc = less; 4004 cc = less;
3967 __ pop(rdx); 4005 __ pop(rdx);
3968 break; 4006 break;
3969 case Token::GT: 4007 case Token::GT:
3970 // Reverse left and right sizes to obtain ECMA-262 conversion order. 4008 // Reverse left and right sizes to obtain ECMA-262 conversion order.
(...skipping 40 matching lines...)
4011 Split(cc, if_true, if_false, fall_through); 4049 Split(cc, if_true, if_false, fall_through);
4012 } 4050 }
4013 } 4051 }
4014 4052
4015 // Convert the result of the comparison into one expected for this 4053 // Convert the result of the comparison into one expected for this
4016 // expression's context. 4054 // expression's context.
4017 context()->Plug(if_true, if_false); 4055 context()->Plug(if_true, if_false);
4018 } 4056 }
4019 4057
4020 4058
4021 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { 4059 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4022 Comment cmnt(masm_, "[ CompareToNull"); 4060 Expression* sub_expr,
4061 NilValue nil) {
4023 Label materialize_true, materialize_false; 4062 Label materialize_true, materialize_false;
4024 Label* if_true = NULL; 4063 Label* if_true = NULL;
4025 Label* if_false = NULL; 4064 Label* if_false = NULL;
4026 Label* fall_through = NULL; 4065 Label* fall_through = NULL;
4027 context()->PrepareTest(&materialize_true, &materialize_false, 4066 context()->PrepareTest(&materialize_true, &materialize_false,
4028 &if_true, &if_false, &fall_through); 4067 &if_true, &if_false, &fall_through);
4029 4068
4030 VisitForAccumulatorValue(expr->expression()); 4069 VisitForAccumulatorValue(sub_expr);
4031 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 4070 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4032 __ CompareRoot(rax, Heap::kNullValueRootIndex); 4071 Heap::RootListIndex nil_value = nil == kNullValue ?
4033 if (expr->is_strict()) { 4072 Heap::kNullValueRootIndex :
4073 Heap::kUndefinedValueRootIndex;
4074 __ CompareRoot(rax, nil_value);
4075 if (expr->op() == Token::EQ_STRICT) {
4034 Split(equal, if_true, if_false, fall_through); 4076 Split(equal, if_true, if_false, fall_through);
4035 } else { 4077 } else {
4078 Heap::RootListIndex other_nil_value = nil == kNullValue ?
4079 Heap::kUndefinedValueRootIndex :
4080 Heap::kNullValueRootIndex;
4036 __ j(equal, if_true); 4081 __ j(equal, if_true);
4037 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); 4082 __ CompareRoot(rax, other_nil_value);
4038 __ j(equal, if_true); 4083 __ j(equal, if_true);
4039 __ JumpIfSmi(rax, if_false); 4084 __ JumpIfSmi(rax, if_false);
4040 // It can be an undetectable object. 4085 // It can be an undetectable object.
4041 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); 4086 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4042 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), 4087 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4043 Immediate(1 << Map::kIsUndetectable)); 4088 Immediate(1 << Map::kIsUndetectable));
4044 Split(not_zero, if_true, if_false, fall_through); 4089 Split(not_zero, if_true, if_false, fall_through);
4045 } 4090 }
4046 context()->Plug(if_true, if_false); 4091 context()->Plug(if_true, if_false);
4047 } 4092 }
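
EmitLiteralCompareNil above generalizes the old CompareToNull: it handles comparison against either null or undefined, in both strict and loose forms, with the loose form also accepting undetectable objects. A small sketch of those semantics with an invented value representation:

#include <cassert>

// Toy model of the generalized nil comparison; the representation is invented.
enum class Nil { kNull, kUndefined };

struct ToyValue {
  bool is_null = false;
  bool is_undefined = false;
  bool is_undetectable = false;  // objects that compare loosely equal to nil
};

bool CompareNil(const ToyValue& v, Nil nil, bool strict) {
  const bool matches_nil = (nil == Nil::kNull) ? v.is_null : v.is_undefined;
  if (strict) return matches_nil;  // === null / === undefined
  // Loose comparison: null, undefined, and undetectable objects all match,
  // regardless of which nil literal was written in the source.
  return v.is_null || v.is_undefined || v.is_undetectable;
}

int main() {
  ToyValue undef;
  undef.is_undefined = true;
  ToyValue null_v;
  null_v.is_null = true;
  assert(CompareNil(undef, Nil::kNull, /*strict=*/false));   // undefined == null
  assert(!CompareNil(undef, Nil::kNull, /*strict=*/true));   // undefined !== null
  assert(CompareNil(null_v, Nil::kUndefined, /*strict=*/false));
  return 0;
}
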
(...skipping 103 matching lines...)
4151 *context_length = 0; 4196 *context_length = 0;
4152 return previous_; 4197 return previous_;
4153 } 4198 }
4154 4199
4155 4200
4156 #undef __ 4201 #undef __
4157 4202
4158 } } // namespace v8::internal 4203 } } // namespace v8::internal
4159 4204
4160 #endif // V8_TARGET_ARCH_X64 4205 #endif // V8_TARGET_ARCH_X64