Chromium Code Reviews

Diff: src/x64/macro-assembler-x64.cc

Issue 105503006: Replace movq with movp for X64 when the operand size is kPointerSize (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge Created 6 years, 11 months ago
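
A note for orientation: per the issue title, the change is mechanical — every movq whose operand is pointer-sized becomes movp, while moves of raw 64-bit values keep movq. A minimal sketch of the distinction in plain C++ (illustration only, not the v8 Assembler implementation):

#include <cstdint>

// "movp" promises a pointer-width move. On the current x64 port
// kPointerSize == 8, so every movp in this patch encodes exactly as the
// movq it replaces; the alias simply lets a future 32-bit-pointer
// (x32-style) port emit 4-byte moves without revisiting every call site.
constexpr int kPointerSize = sizeof(void*);
constexpr int kInt64Size = sizeof(int64_t);
static_assert(kPointerSize == kInt64Size,
              "on an LP64 x64 host, movp and movq are the same instruction");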
     1  // Copyright 2012 the V8 project authors. All rights reserved.
     2  // Redistribution and use in source and binary forms, with or without
     3  // modification, are permitted provided that the following conditions are
     4  // met:
     5  //
     6  //     * Redistributions of source code must retain the above copyright
     7  //       notice, this list of conditions and the following disclaimer.
     8  //     * Redistributions in binary form must reproduce the above
     9  //       copyright notice, this list of conditions and the following
    10  //       disclaimer in the documentation and/or other materials provided
(...skipping 71 matching lines...)
    82    Move(scratch, target);
    83    return Operand(scratch, 0);
    84  }
    85
    86
    87  void MacroAssembler::Load(Register destination, ExternalReference source) {
    88    if (root_array_available_ && !Serializer::enabled()) {
    89      intptr_t delta = RootRegisterDelta(source);
    90      if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
    91        Serializer::TooLateToEnableNow();
-   92        movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
+   92        movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
    93        return;
    94      }
    95    }
    96    // Safe code.
    97    if (destination.is(rax)) {
    98      load_rax(source);
    99    } else {
   100      Move(kScratchRegister, source);
-  101      movq(destination, Operand(kScratchRegister, 0));
+  101      movp(destination, Operand(kScratchRegister, 0));
   102    }
   103  }
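
Load (and Store below) take the root-register shortcut only when the external reference lies within a signed 32-bit displacement of kRootRegister. A runnable restatement of that reachability test (simplified; the helper name is assumed):

#include <cassert>
#include <cstdint>

// An address is reachable as Operand(kRootRegister, delta) only if the
// delta survives a round trip through int32_t, i.e. is_int32(delta).
bool ReachableViaRootRegister(intptr_t address, intptr_t roots_base) {
  intptr_t delta = address - roots_base;
  return delta == static_cast<intptr_t>(static_cast<int32_t>(delta));
}

int main() {
  assert(ReachableViaRootRegister(0x100000, 0x0));           // small delta
  assert(!ReachableViaRootRegister(int64_t{1} << 40, 0x0));  // > 2GB away
}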
   104
   105
   106  void MacroAssembler::Store(ExternalReference destination, Register source) {
   107    if (root_array_available_ && !Serializer::enabled()) {
   108      intptr_t delta = RootRegisterDelta(destination);
   109      if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
   110        Serializer::TooLateToEnableNow();
-  111        movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
+  111        movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
   112        return;
   113      }
   114    }
   115    // Safe code.
   116    if (source.is(rax)) {
   117      store_rax(destination);
   118    } else {
   119      Move(kScratchRegister, destination);
-  120      movq(Operand(kScratchRegister, 0), source);
+  120      movp(Operand(kScratchRegister, 0), source);
   121    }
   122  }
   123
   124
   125  void MacroAssembler::LoadAddress(Register destination,
   126                                   ExternalReference source) {
   127    if (root_array_available_ && !Serializer::enabled()) {
   128      intptr_t delta = RootRegisterDelta(source);
   129      if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
   130        Serializer::TooLateToEnableNow();
(...skipping 16 matching lines...)
   147        Serializer::TooLateToEnableNow();
   148        // Operand is lea(scratch, Operand(kRootRegister, delta));
   149        // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
   150        int size = 4;
   151        if (!is_int8(static_cast<int32_t>(delta))) {
   152          size += 3;  // Need full four-byte displacement in lea.
   153        }
   154        return size;
   155      }
   156    }
-  157    // Size of movq(destination, src);
+  157    // Size of movp(destination, src);
   158    return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
   159  }
   160
   161
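The 4-or-7 computation above follows directly from the encoding named in the comment: REX.W + 8D + ModRM is 3 bytes, plus a 1-byte disp8 or a 4-byte disp32. A small runnable check of the arithmetic (constants taken from the comment, not from the assembler):

#include <cassert>
#include <cstdint>

int LeaRootRelativeSize(int32_t delta) {
  int size = 4;  // REX.W (1) + 8D (1) + ModRM (1) + disp8 (1)
  const bool fits_int8 = delta == static_cast<int8_t>(delta);
  if (!fits_int8) size += 3;  // disp32 replaces disp8: 3 + 4 == 7
  return size;
}

int main() {
  assert(LeaRootRelativeSize(0x10) == 4);
  assert(LeaRootRelativeSize(0x1000) == 7);
}
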
   162  void MacroAssembler::PushAddress(ExternalReference source) {
   163    int64_t address = reinterpret_cast<int64_t>(source.address());
   164    if (is_int32(address) && !Serializer::enabled()) {
   165      if (emit_debug_code()) {
   166        Move(kScratchRegister, kZapValue, RelocInfo::NONE64);
   167      }
   168      push(Immediate(static_cast<int32_t>(address)));
   169      return;
   170    }
   171    LoadAddress(kScratchRegister, source);
   172    push(kScratchRegister);
   173  }
   174
   175
   176  void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
   177    ASSERT(root_array_available_);
-  178    movq(destination, Operand(kRootRegister,
+  178    movp(destination, Operand(kRootRegister,
   179                              (index << kPointerSizeLog2) - kRootRegisterBias));
   180  }
   181
   182
   183  void MacroAssembler::LoadRootIndexed(Register destination,
   184                                       Register variable_offset,
   185                                       int fixed_offset) {
   186    ASSERT(root_array_available_);
-  187    movq(destination,
+  187    movp(destination,
   188         Operand(kRootRegister,
   189                 variable_offset, times_pointer_size,
   190                 (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
   191  }
   192
   193
   194  void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
   195    ASSERT(root_array_available_);
-  196    movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
+  196    movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
   197         source);
   198  }
   199
   200
   201  void MacroAssembler::PushRoot(Heap::RootListIndex index) {
   202    ASSERT(root_array_available_);
   203    push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
   204  }
   205
   206
(...skipping 20 matching lines...)
   227                                         RememberedSetFinalAction and_then) {
   228    if (emit_debug_code()) {
   229      Label ok;
   230      JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
   231      int3();
   232      bind(&ok);
   233    }
   234    // Load store buffer top.
   235    LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
   236    // Store pointer to buffer.
-  237    movq(Operand(scratch, 0), addr);
+  237    movp(Operand(scratch, 0), addr);
   238    // Increment buffer top.
   239    addq(scratch, Immediate(kPointerSize));
   240    // Write back new top of buffer.
   241    StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
   242    // Call stub on end of buffer.
   243    Label done;
   244    // Check for end of buffer.
   245    testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
   246    if (and_then == kReturnAtEnd) {
   247      Label buffer_overflowed;
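
The store-buffer sequence above is: write the slot address at the buffer top, bump the top by kPointerSize, then test an alignment bit in the new top to detect overflow. A hedged C++ model (the overflow-bit value below is illustrative, not v8's):

#include <cstdint>

// Assumed model: the store buffer is power-of-two aligned, so crossing
// its end flips a single known bit in the top pointer's address, which
// testq can check more cheaply than a compare against a limit.
constexpr uintptr_t kStoreBufferOverflowBit = uintptr_t{1} << 16;  // illustrative

bool RecordSlot(uintptr_t*& top, uintptr_t slot_address) {
  *top++ = slot_address;  // movp [scratch], addr; addq scratch, kPointerSize
  return (reinterpret_cast<uintptr_t>(top) & kStoreBufferOverflowBit) != 0;
}

int main() {
  uintptr_t slots[4] = {};
  uintptr_t* top = slots;
  RecordSlot(top, 0x1000);  // one slot recorded, top advanced by 8
  return top == slots + 1 ? 0 : 1;
}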
(...skipping 446 matching lines...)
   694        ExternalReference::handle_scope_level_address(isolate()),
   695        next_address);
   696    ExternalReference scheduled_exception_address =
   697        ExternalReference::scheduled_exception_address(isolate());
   698
   699    // Allocate HandleScope in callee-save registers.
   700    Register prev_next_address_reg = r14;
   701    Register prev_limit_reg = rbx;
   702    Register base_reg = r15;
   703    Move(base_reg, next_address);
-  704    movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
-  705    movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
+  704    movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
+  705    movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
   706    addl(Operand(base_reg, kLevelOffset), Immediate(1));
   707
   708    if (FLAG_log_timer_events) {
   709      FrameScope frame(this, StackFrame::MANUAL);
   710      PushSafepointRegisters();
   711      PrepareCallCFunction(1);
   712      LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
   713      CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
   714      PopSafepointRegisters();
   715    }
(...skipping 26 matching lines...)
   742    if (FLAG_log_timer_events) {
   743      FrameScope frame(this, StackFrame::MANUAL);
   744      PushSafepointRegisters();
   745      PrepareCallCFunction(1);
   746      LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
   747      CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
   748      PopSafepointRegisters();
   749    }
   750
   751    // Load the value from ReturnValue
-  752    movq(rax, return_value_operand);
+  752    movp(rax, return_value_operand);
   753    bind(&prologue);
   754
   755    // No more valid handles (the result handle was the last one). Restore
   756    // previous handle scope.
   757    subl(Operand(base_reg, kLevelOffset), Immediate(1));
-  758    movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
+  758    movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
   759    cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
   760    j(not_equal, &delete_allocated_handles);
   761    bind(&leave_exit_frame);
   762
   763    // Check if the function scheduled an exception.
   764    Move(rsi, scheduled_exception_address);
   765    Cmp(Operand(rsi, 0), factory->the_hole_value());
   766    j(not_equal, &promote_scheduled_exception);
   767    bind(&exception_handled);
   768
   769  #if ENABLE_EXTRA_CHECKS
   770    // Check if the function returned a valid JavaScript value.
   771    Label ok;
   772    Register return_value = rax;
   773    Register map = rcx;
   774
   775    JumpIfSmi(return_value, &ok, Label::kNear);
-  776    movq(map, FieldOperand(return_value, HeapObject::kMapOffset));
+  776    movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
   777
   778    CmpInstanceType(map, FIRST_NONSTRING_TYPE);
   779    j(below, &ok, Label::kNear);
   780
   781    CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
   782    j(above_equal, &ok, Label::kNear);
   783
   784    CompareRoot(map, Heap::kHeapNumberMapRootIndex);
   785    j(equal, &ok, Label::kNear);
   786
   787    CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
   788    j(equal, &ok, Label::kNear);
   789
   790    CompareRoot(return_value, Heap::kTrueValueRootIndex);
   791    j(equal, &ok, Label::kNear);
   792
   793    CompareRoot(return_value, Heap::kFalseValueRootIndex);
   794    j(equal, &ok, Label::kNear);
   795
   796    CompareRoot(return_value, Heap::kNullValueRootIndex);
   797    j(equal, &ok, Label::kNear);
   798
   799    Abort(kAPICallReturnedInvalidObject);
   800
   801    bind(&ok);
   802  #endif
   803
   804    bool restore_context = context_restore_operand != NULL;
   805    if (restore_context) {
-  806      movq(rsi, *context_restore_operand);
+  806      movp(rsi, *context_restore_operand);
   807    }
   808    LeaveApiExitFrame(!restore_context);
   809    ret(stack_space * kPointerSize);
   810
   811    bind(&promote_scheduled_exception);
   812    {
   813      FrameScope frame(this, StackFrame::INTERNAL);
   814      CallRuntime(Runtime::kPromoteScheduledException, 0);
   815    }
   816    jmp(&exception_handled);
   817
   818    // HandleScope limit has changed. Delete allocated extensions.
   819    bind(&delete_allocated_handles);
-  820    movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
-  821    movq(prev_limit_reg, rax);
+  820    movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
+  821    movp(prev_limit_reg, rax);
   822    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
   823    LoadAddress(rax,
   824                ExternalReference::delete_handle_scope_extensions(isolate()));
   825    call(rax);
-  826    movq(rax, prev_limit_reg);
+  826    movp(rax, prev_limit_reg);
   827    jmp(&leave_exit_frame);
   828  }
   829
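For readers new to this code, the HandleScope bookkeeping that the function above maintains reduces to a save/bump/restore of three fields read via fixed offsets off base_reg, with a slow path only when limit moved. A compact model under assumed field names:

#include <cstdint>

// Field names and layout assumed for illustration; the assembly reads
// them via kNextOffset, kLimitOffset, and kLevelOffset off base_reg.
struct HandleScopeData {
  uintptr_t* next;
  uintptr_t* limit;
  int level;
};

void Enter(HandleScopeData& d, uintptr_t*& saved_next, uintptr_t*& saved_limit) {
  saved_next = d.next;    // movp prev_next_address_reg, [base + kNextOffset]
  saved_limit = d.limit;  // movp prev_limit_reg, [base + kLimitOffset]
  d.level++;              // addl [base + kLevelOffset], 1
}

bool Leave(HandleScopeData& d, uintptr_t* saved_next, uintptr_t* saved_limit) {
  d.level--;              // subl [base + kLevelOffset], 1
  d.next = saved_next;    // movp [base + kNextOffset], prev_next_address_reg
  return d.limit != saved_limit;  // cmpq: slow path deletes extensions
}

int main() {
  HandleScopeData d = {nullptr, nullptr, 0};
  uintptr_t *saved_next, *saved_limit;
  Enter(d, saved_next, saved_limit);
  return Leave(d, saved_next, saved_limit) ? 1 : 0;  // 0: limit unchanged
}
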
   830
   831  void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
   832                                               int result_size) {
   833    // Set the entry point and jump to the C entry runtime stub.
   834    LoadAddress(rbx, ext);
   835    CEntryStub ces(result_size);
   836    jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
(...skipping 11 matching lines...)
   848    // parameter count to avoid emitting code to do the check.
   849    ParameterCount expected(0);
   850    GetBuiltinEntry(rdx, id);
   851    InvokeCode(rdx, expected, expected, flag, call_wrapper);
   852  }
   853
   854
   855  void MacroAssembler::GetBuiltinFunction(Register target,
   856                                          Builtins::JavaScript id) {
   857    // Load the builtins object into target register.
-  858    movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  859    movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
-  860    movq(target, FieldOperand(target,
+  858    movp(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  859    movp(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
+  860    movp(target, FieldOperand(target,
   861                              JSBuiltinsObject::OffsetOfFunctionWithId(id)));
   862  }
   863
   864
   865  void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
   866    ASSERT(!target.is(rdi));
   867    // Load the JavaScript builtin function from the builtins object.
   868    GetBuiltinFunction(rdi, id);
-  869    movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+  869    movp(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   870  }
   871
   872
   873  #define REG(Name) { kRegister_ ## Name ## _Code }
   874
   875  static const Register saved_regs[] = {
   876    REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
   877    REG(r9), REG(r10), REG(r11)
   878  };
   879
(...skipping 64 matching lines...)
   944      movsxbq(dst, src);
   945    } else if (r.IsUInteger8()) {
   946      movzxbl(dst, src);
   947    } else if (r.IsInteger16()) {
   948      movsxwq(dst, src);
   949    } else if (r.IsUInteger16()) {
   950      movzxwl(dst, src);
   951    } else if (r.IsInteger32()) {
   952      movl(dst, src);
   953    } else {
-  954      movq(dst, src);
+  954      movp(dst, src);
   955    }
   956  }
   957
   958
   959  void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
   960    ASSERT(!r.IsDouble());
   961    if (r.IsInteger8() || r.IsUInteger8()) {
   962      movb(dst, src);
   963    } else if (r.IsInteger16() || r.IsUInteger16()) {
   964      movw(dst, src);
   965    } else if (r.IsInteger32()) {
   966      movl(dst, src);
   967    } else {
-  968      movq(dst, src);
+  968      movp(dst, src);
   969    }
   970  }
   971
   972
   973  void MacroAssembler::Set(Register dst, int64_t x) {
   974    if (x == 0) {
   975      xorl(dst, dst);
   976    } else if (is_uint32(x)) {
   977      movl(dst, Immediate(static_cast<uint32_t>(x)));
   978    } else if (is_int32(x)) {
(...skipping 91 matching lines...)
  1070      case 5:
  1071        lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
  1072        break;
  1073      case 3:
  1074        lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
  1075        break;
  1076      case 2:
  1077        lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
  1078        break;
  1079      case 1:
- 1080        movq(dst, kSmiConstantRegister);
+ 1080        movp(dst, kSmiConstantRegister);
  1081        break;
  1082      case 0:
  1083        UNREACHABLE();
  1084        return;
  1085      default:
  1086        Move(dst, source, RelocInfo::NONE64);
  1087        return;
  1088    }
  1089    if (negative) {
  1090      neg(dst);
(...skipping 31 matching lines...)
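
The LoadSmiConstant switch above builds small smi constants from kSmiConstantRegister, which is assumed here to hold Smi(1); lea with a scaled index then yields 2x, 3x, or 5x of it in one instruction. A runnable spot check of the arithmetic (kSmiShift == 32):

#include <cassert>
#include <cstdint>

int main() {
  const int64_t kSmiOne = int64_t{1} << 32;       // Smi(1), kSmiShift == 32
  assert(kSmiOne + kSmiOne * 4 == 5 * kSmiOne);   // times_4 -> Smi(5)
  assert(kSmiOne + kSmiOne * 2 == 3 * kSmiOne);   // times_2 -> Smi(3)
  assert(kSmiOne + kSmiOne * 1 == 2 * kSmiOne);   // times_1 -> Smi(2)
}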
  1122    } else {
  1123      leal(dst, Operand(src, constant));
  1124    }
  1125    shl(dst, Immediate(kSmiShift));
  1126  }
  1127
  1128
  1129  void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  1130    STATIC_ASSERT(kSmiTag == 0);
  1131    if (!dst.is(src)) {
- 1132      movq(dst, src);
+ 1132      movp(dst, src);
  1133    }
  1134    shr(dst, Immediate(kSmiShift));
  1135  }
  1136
  1137
  1138  void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  1139    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
  1140  }
  1141
  1142
  1143  void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  1144    STATIC_ASSERT(kSmiTag == 0);
  1145    if (!dst.is(src)) {
- 1146      movq(dst, src);
+ 1146      movp(dst, src);
  1147    }
  1148    sar(dst, Immediate(kSmiShift));
  1149  }
  1150
  1151
  1152  void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  1153    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
  1154  }
  1155
  1156
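The smi helpers above all rely on the same layout: with kSmiTag == 0 and kSmiShift == 32, the 32-bit payload lives in the upper half of the word, so an in-memory smi can even be read by loading only its high 4 bytes (kSmiShift / kBitsPerByte == 4). A worked example:

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const int32_t value = -7;
  // Integer32ToSmi: payload moves into the upper 32 bits (shl by kSmiShift).
  const int64_t smi = static_cast<int64_t>(
      static_cast<uint64_t>(static_cast<int64_t>(value)) << 32);
  assert((smi & 1) == 0);                            // kSmiTag == 0: tag bit clear
  assert(static_cast<int32_t>(smi >> 32) == value);  // SmiToInteger32/64
  // The Operand forms load only the high 4 bytes (little-endian x64).
  int32_t high_half;
  std::memcpy(&high_half, reinterpret_cast<const char*>(&smi) + 4, 4);
  assert(high_half == value);
}
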
(...skipping 63 matching lines...)
  1220  void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
  1221                                                             Register src,
  1222                                                             int power) {
  1223    ASSERT(power >= 0);
  1224    ASSERT(power < 64);
  1225    if (power == 0) {
  1226      SmiToInteger64(dst, src);
  1227      return;
  1228    }
  1229    if (!dst.is(src)) {
- 1230      movq(dst, src);
+ 1230      movp(dst, src);
  1231    }
  1232    if (power < kSmiShift) {
  1233      sar(dst, Immediate(kSmiShift - power));
  1234    } else if (power > kSmiShift) {
  1235      shl(dst, Immediate(power - kSmiShift));
  1236    }
  1237  }
  1238
  1239
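PositiveSmiTimesPowerOfTwoToInteger64 fuses "untag, then multiply by 2^power" into a single shift: (smi >> 32) << power equals smi >> (32 - power) when power < 32, and smi << (power - 32) when power > 32, provided the smi is non-negative as the name requires. A spot check:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t smi = int64_t{6} << 32;  // Smi(6), non-negative as required
  const int power = 3;
  // sar(dst, kSmiShift - power) untags and multiplies in one step:
  assert((smi >> (32 - power)) == (int64_t{6} << power));  // 6 * 2^3 == 48
}
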
  1240  void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
  1241                                                           Register src,
  1242                                                           int power) {
  1243    ASSERT((0 <= power) && (power < 32));
  1244    if (dst.is(src)) {
  1245      shr(dst, Immediate(power + kSmiShift));
  1246    } else {
  1247      UNIMPLEMENTED();  // Not used.
  1248    }
  1249  }
  1250
  1251
  1252  void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
  1253                                   Label* on_not_smis,
  1254                                   Label::Distance near_jump) {
  1255    if (dst.is(src1) || dst.is(src2)) {
  1256      ASSERT(!src1.is(kScratchRegister));
  1257      ASSERT(!src2.is(kScratchRegister));
- 1258      movq(kScratchRegister, src1);
+ 1258      movp(kScratchRegister, src1);
  1259      or_(kScratchRegister, src2);
  1260      JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
- 1261      movq(dst, kScratchRegister);
+ 1261      movp(dst, kScratchRegister);
  1262    } else {
- 1263      movq(dst, src1);
+ 1263      movp(dst, src1);
  1264      or_(dst, src2);
  1265      JumpIfNotSmi(dst, on_not_smis, near_jump);
  1266    }
  1267  }
  1268
  1269
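SmiOrIfSmis leans on kSmiTag == 0: the tag bit of (src1 | src2) is set iff at least one operand is not a smi, so one JumpIfNotSmi validates both operands and the OR is already the desired result. Runnable illustration:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t smi_a = int64_t{5} << 32, smi_b = int64_t{9} << 32;
  const int64_t heap_ptr = 0x100001;      // heap object: tag bit (bit 0) set
  assert(((smi_a | smi_b) & 1) == 0);     // both smis: OR is a valid smi
  assert(((smi_a | heap_ptr) & 1) != 0);  // any non-smi taints the OR
}
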
  1270  Condition MacroAssembler::CheckSmi(Register src) {
  1271    STATIC_ASSERT(kSmiTag == 0);
  1272    testb(src, Immediate(kSmiTagMask));
  1273    return zero;
  1274  }
  1275
  1276
  1277  Condition MacroAssembler::CheckSmi(const Operand& src) {
  1278    STATIC_ASSERT(kSmiTag == 0);
  1279    testb(src, Immediate(kSmiTagMask));
  1280    return zero;
  1281  }
  1282
  1283
  1284  Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  1285    STATIC_ASSERT(kSmiTag == 0);
  1286    // Test that both bits of the mask 0x8000000000000001 are zero.
- 1287    movq(kScratchRegister, src);
+ 1287    movp(kScratchRegister, src);
  1288    rol(kScratchRegister, Immediate(1));
  1289    testb(kScratchRegister, Immediate(3));
  1290    return zero;
  1291  }
  1292
  1293
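CheckNonNegativeSmi tests the mask 0x8000000000000001 with a single rotate: rol by 1 moves the sign bit into bit 0 and the tag bit into bit 1, so testb against 3 checks "smi and non-negative" at once. Runnable illustration:

#include <cassert>
#include <cstdint>

uint64_t rol1(uint64_t x) { return (x << 1) | (x >> 63); }

int main() {
  const uint64_t pos_smi = uint64_t{42} << 32;
  const uint64_t neg_smi = static_cast<uint64_t>(int64_t{-42}) << 32;
  const uint64_t heap_ptr = 0x100001;  // tag bit set
  assert((rol1(pos_smi) & 3) == 0);    // non-negative smi passes
  assert((rol1(neg_smi) & 3) != 0);    // sign bit lands in bit 0
  assert((rol1(heap_ptr) & 3) != 0);   // tag bit lands in bit 1
}
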
  1294  Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  1295    if (first.is(second)) {
  1296      return CheckSmi(first);
  1297    }
  1298    STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  1299    leal(kScratchRegister, Operand(first, second, times_1, 0));
  1300    testb(kScratchRegister, Immediate(0x03));
  1301    return zero;
  1302  }
  1303
  1304
  1305  Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
  1306                                                    Register second) {
  1307    if (first.is(second)) {
  1308      return CheckNonNegativeSmi(first);
  1309    }
- 1310    movq(kScratchRegister, first);
+ 1310    movp(kScratchRegister, first);
  1311    or_(kScratchRegister, second);
  1312    rol(kScratchRegister, Immediate(1));
  1313    testl(kScratchRegister, Immediate(3));
  1314    return zero;
  1315  }
  1316
  1317
  1318  Condition MacroAssembler::CheckEitherSmi(Register first,
  1319                                           Register second,
  1320                                           Register scratch) {
(...skipping 119 matching lines...)
  1440                                                  Label* on_not_both_smi,
  1441                                                  Label::Distance near_jump) {
  1442    Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  1443    j(NegateCondition(both_smi), on_not_both_smi, near_jump);
  1444  }
  1445
  1446
  1447  void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  1448    if (constant->value() == 0) {
  1449      if (!dst.is(src)) {
- 1450        movq(dst, src);
+ 1450        movp(dst, src);
  1451      }
  1452      return;
  1453    } else if (dst.is(src)) {
  1454      ASSERT(!dst.is(kScratchRegister));
  1455      switch (constant->value()) {
  1456        case 1:
  1457          addq(dst, kSmiConstantRegister);
  1458          return;
  1459        case 2:
  1460          lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
(...skipping 40 matching lines...)
  1501
  1502
  1503  void MacroAssembler::SmiAddConstant(Register dst,
  1504                                      Register src,
  1505                                      Smi* constant,
  1506                                      SmiOperationExecutionMode mode,
  1507                                      Label* bailout_label,
  1508                                      Label::Distance near_jump) {
  1509    if (constant->value() == 0) {
  1510      if (!dst.is(src)) {
- 1511        movq(dst, src);
+ 1511        movp(dst, src);
  1512      }
  1513    } else if (dst.is(src)) {
  1514      ASSERT(!dst.is(kScratchRegister));
  1515      LoadSmiConstant(kScratchRegister, constant);
  1516      addq(dst, kScratchRegister);
  1517      if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
  1518        j(no_overflow, bailout_label, near_jump);
  1519        ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
  1520        subq(dst, kScratchRegister);
  1521      } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
(...skipping 16 matching lines...)
  1538      LoadSmiConstant(dst, constant);
  1539      addq(dst, src);
  1540      j(overflow, bailout_label, near_jump);
  1541    }
  1542  }
  1543
  1544
  1545  void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  1546    if (constant->value() == 0) {
  1547      if (!dst.is(src)) {
- 1548        movq(dst, src);
+ 1548        movp(dst, src);
  1549      }
  1550    } else if (dst.is(src)) {
  1551      ASSERT(!dst.is(kScratchRegister));
  1552      Register constant_reg = GetSmiConstant(constant);
  1553      subq(dst, constant_reg);
  1554    } else {
  1555      if (constant->value() == Smi::kMinValue) {
  1556        LoadSmiConstant(dst, constant);
  1557        // Adding and subtracting the min-value give the same result; they
  1558        // differ only in the overflow bit, which we don't check here.
  1559        addq(dst, src);
  1560      } else {
  1561        // Subtract by adding the negation.
  1562        LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
  1563        addq(dst, src);
  1564      }
  1565    }
  1566  }
  1567
  1568
  1569  void MacroAssembler::SmiSubConstant(Register dst,
  1570                                      Register src,
  1571                                      Smi* constant,
  1572                                      SmiOperationExecutionMode mode,
  1573                                      Label* bailout_label,
  1574                                      Label::Distance near_jump) {
  1575    if (constant->value() == 0) {
  1576      if (!dst.is(src)) {
- 1577        movq(dst, src);
+ 1577        movp(dst, src);
  1578      }
  1579    } else if (dst.is(src)) {
  1580      ASSERT(!dst.is(kScratchRegister));
  1581      LoadSmiConstant(kScratchRegister, constant);
  1582      subq(dst, kScratchRegister);
  1583      if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
  1584        j(no_overflow, bailout_label, near_jump);
  1585        ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
  1586        addq(dst, kScratchRegister);
  1587      } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
  1588        if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
  1589          Label done;
  1590          j(no_overflow, &done, Label::kNear);
  1591          addq(dst, kScratchRegister);
  1592          jmp(bailout_label, near_jump);
  1593          bind(&done);
  1594        } else {
  1595          // Bail out on overflow without preserving src.
  1596          j(overflow, bailout_label, near_jump);
  1597        }
  1598      } else {
  1599        CHECK(mode.IsEmpty());
  1600      }
  1601    } else {
  1602      ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER));
  1603      ASSERT(mode.Contains(BAILOUT_ON_OVERFLOW));
  1604      if (constant->value() == Smi::kMinValue) {
  1605        ASSERT(!dst.is(kScratchRegister));
- 1606        movq(dst, src);
+ 1606        movp(dst, src);
  1607        LoadSmiConstant(kScratchRegister, constant);
  1608        subq(dst, kScratchRegister);
  1609        j(overflow, bailout_label, near_jump);
  1610      } else {
  1611        // Subtract by adding the negation.
  1612        LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
  1613        addq(dst, src);
  1614        j(overflow, bailout_label, near_jump);
  1615      }
  1616    }
  1617  }
  1618
  1619
  1620  void MacroAssembler::SmiNeg(Register dst,
  1621                              Register src,
  1622                              Label* on_smi_result,
  1623                              Label::Distance near_jump) {
  1624    if (dst.is(src)) {
  1625      ASSERT(!dst.is(kScratchRegister));
- 1626      movq(kScratchRegister, src);
+ 1626      movp(kScratchRegister, src);
  1627      neg(dst);  // Low 32 bits are retained as zero by negation.
  1628      // Test if result is zero or Smi::kMinValue.
  1629      cmpq(dst, kScratchRegister);
  1630      j(not_equal, on_smi_result, near_jump);
- 1631      movq(src, kScratchRegister);
+ 1631      movp(src, kScratchRegister);
  1632    } else {
- 1633      movq(dst, src);
+ 1633      movp(dst, src);
  1634      neg(dst);
  1635      cmpq(dst, src);
  1636      // If the result is zero or Smi::kMinValue, negation failed to create a smi.
  1637      j(not_equal, on_smi_result, near_jump);
  1638    }
  1639  }
  1640
  1641
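SmiNeg bails out exactly when negation leaves the register unchanged: 0 (whose negation would be -0, not representable as a smi) and Smi::kMinValue (whose two's-complement negation overflows back to itself) — hence the cmpq against the original value. Modelled with unsigned wraparound, matching the neg instruction:

#include <cassert>
#include <cstdint>

uint64_t neg(uint64_t x) { return ~x + 1; }  // two's-complement, as neg does

int main() {
  const uint64_t smi_min = uint64_t{1} << 63;  // Smi::kMinValue bit pattern
  assert(neg(smi_min) == smi_min);             // overflow: negation is a no-op
  assert(neg(0) == 0);                         // would be -0: not a smi
  const uint64_t smi_5 = uint64_t{5} << 32;
  assert(neg(smi_5) != smi_5);                 // normal case: result is a smi
}
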
  1642  template<class T>
  1643  static void SmiAddHelper(MacroAssembler* masm,
  1644                           Register dst,
  1645                           Register src1,
  1646                           T src2,
  1647                           Label* on_not_smi_result,
  1648                           Label::Distance near_jump) {
  1649    if (dst.is(src1)) {
  1650      Label done;
  1651      masm->addq(dst, src2);
  1652      masm->j(no_overflow, &done, Label::kNear);
  1653      // Restore src1.
  1654      masm->subq(dst, src2);
  1655      masm->jmp(on_not_smi_result, near_jump);
  1656      masm->bind(&done);
  1657    } else {
- 1658      masm->movq(dst, src1);
+ 1658      masm->movp(dst, src1);
  1659      masm->addq(dst, src2);
  1660      masm->j(overflow, on_not_smi_result, near_jump);
  1661    }
  1662  }
  1663
  1664
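SmiAddHelper's in-place path (dst.is(src1)) adds first and, on overflow, subtracts src2 back so the caller still sees the original src1 at the bailout label. Two's-complement wraparound makes the restore exact even though the addition overflowed:

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t src2 = uint64_t{1} << 32;  // Smi(1)
  uint64_t dst = 0x7fffffff00000000;        // largest positive smi (src1)
  const uint64_t saved = dst;
  dst += src2;                              // addq: overflows into the sign bit
  dst -= src2;                              // subq on the bailout path
  assert(dst == saved);                     // src1 restored exactly
}
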
  1665  void MacroAssembler::SmiAdd(Register dst,
  1666                              Register src1,
  1667                              Register src2,
  1668                              Label* on_not_smi_result,
(...skipping 15 matching lines...)
  1684  }
  1685
  1686
  1687  void MacroAssembler::SmiAdd(Register dst,
  1688                              Register src1,
  1689                              Register src2) {
  1690    // No overflow checking. Use only when it's known that
  1691    // overflowing is impossible.
  1692    if (!dst.is(src1)) {
  1693      if (emit_debug_code()) {
- 1694        movq(kScratchRegister, src1);
+ 1694        movp(kScratchRegister, src1);
  1695        addq(kScratchRegister, src2);
  1696        Check(no_overflow, kSmiAdditionOverflow);
  1697      }
  1698      lea(dst, Operand(src1, src2, times_1, 0));
  1699    } else {
  1700      addq(dst, src2);
  1701      Assert(no_overflow, kSmiAdditionOverflow);
  1702    }
  1703  }
  1704
  1705
  1706  template<class T>
  1707  static void SmiSubHelper(MacroAssembler* masm,
  1708                           Register dst,
  1709                           Register src1,
  1710                           T src2,
  1711                           Label* on_not_smi_result,
  1712                           Label::Distance near_jump) {
  1713    if (dst.is(src1)) {
  1714      Label done;
  1715      masm->subq(dst, src2);
  1716      masm->j(no_overflow, &done, Label::kNear);
  1717      // Restore src1.
  1718      masm->addq(dst, src2);
  1719      masm->jmp(on_not_smi_result, near_jump);
  1720      masm->bind(&done);
  1721    } else {
- 1722      masm->movq(dst, src1);
+ 1722      masm->movp(dst, src1);
  1723      masm->subq(dst, src2);
  1724      masm->j(overflow, on_not_smi_result, near_jump);
  1725    }
  1726  }
  1727
  1728
  1729  void MacroAssembler::SmiSub(Register dst,
  1730                              Register src1,
  1731                              Register src2,
  1732                              Label* on_not_smi_result,
(...skipping 16 matching lines...)
  1749
  1750
  1751  template<class T>
  1752  static void SmiSubNoOverflowHelper(MacroAssembler* masm,
  1753                                     Register dst,
  1754                                     Register src1,
  1755                                     T src2) {
  1756    // No overflow checking. Use only when it's known that
  1757    // overflowing is impossible (e.g., subtracting two positive smis).
  1758    if (!dst.is(src1)) {
- 1759      masm->movq(dst, src1);
+ 1759      masm->movp(dst, src1);
  1760    }
  1761    masm->subq(dst, src2);
  1762    masm->Assert(no_overflow, kSmiSubtractionOverflow);
  1763  }
  1764
  1765
  1766  void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  1767    ASSERT(!dst.is(src2));
  1768    SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
  1769  }
(...skipping 11 matching lines...)
  1781                              Register src2,
  1782                              Label* on_not_smi_result,
  1783                              Label::Distance near_jump) {
  1784    ASSERT(!dst.is(src2));
  1785    ASSERT(!dst.is(kScratchRegister));
  1786    ASSERT(!src1.is(kScratchRegister));
  1787    ASSERT(!src2.is(kScratchRegister));
  1788
  1789    if (dst.is(src1)) {
  1790      Label failure, zero_correct_result;
- 1791      movq(kScratchRegister, src1);  // Create backup for later testing.
+ 1791      movp(kScratchRegister, src1);  // Create backup for later testing.
  1792      SmiToInteger64(dst, src1);
  1793      imul(dst, src2);
  1794      j(overflow, &failure, Label::kNear);
  1795
  1796      // Check for negative zero result. If product is zero, and one
  1797      // argument is negative, go to slow case.
  1798      Label correct_result;
  1799      testq(dst, dst);
  1800      j(not_zero, &correct_result, Label::kNear);
  1801
- 1802      movq(dst, kScratchRegister);
+ 1802      movp(dst, kScratchRegister);
  1803      xor_(dst, src2);
  1804      // Result was positive zero.
  1805      j(positive, &zero_correct_result, Label::kNear);
  1806
  1807      bind(&failure);  // Reused failure exit, restores src1.
- 1808      movq(src1, kScratchRegister);
+ 1808      movp(src1, kScratchRegister);
  1809      jmp(on_not_smi_result, near_jump);
  1810
  1811      bind(&zero_correct_result);
  1812      Set(dst, 0);
  1813
  1814      bind(&correct_result);
  1815    } else {
  1816      SmiToInteger64(dst, src1);
  1817      imul(dst, src2);
  1818      j(overflow, on_not_smi_result, near_jump);
  1819      // Check for negative zero result. If product is zero, and one
  1820      // argument is negative, go to slow case.
  1821      Label correct_result;
  1822      testq(dst, dst);
  1823      j(not_zero, &correct_result, Label::kNear);
  1824      // One of src1 and src2 is zero, so check whether the other is
  1825      // negative.
- 1826      movq(kScratchRegister, src1);
+ 1826      movp(kScratchRegister, src1);
  1827      xor_(kScratchRegister, src2);
  1828      j(negative, on_not_smi_result, near_jump);
  1829      bind(&correct_result);
  1830    }
  1831  }
  1832
  1833
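When the product is zero, SmiMul distinguishes +0 (a valid smi) from -0 (which needs a heap number) by XOR-ing the operands: the sign of (a ^ b) is negative iff exactly one operand was negative. Runnable illustration:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t two_pow_32 = int64_t{1} << 32;
  const int64_t zero = 0;
  const int64_t neg = int64_t{-5} * two_pow_32;  // Smi(-5)
  const int64_t pos = int64_t{5} * two_pow_32;   // Smi(5)
  assert((zero ^ neg) < 0);   // signs differ: exact result is -0, go slow
  assert((zero ^ pos) >= 0);  // +0 is a valid smi result
}
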
  1834  void MacroAssembler::SmiDiv(Register dst,
  1835                              Register src1,
  1836                              Register src2,
  1837                              Label* on_not_smi_result,
  1838                              Label::Distance near_jump) {
  1839    ASSERT(!src1.is(kScratchRegister));
  1840    ASSERT(!src2.is(kScratchRegister));
  1841    ASSERT(!dst.is(kScratchRegister));
  1842    ASSERT(!src2.is(rax));
  1843    ASSERT(!src2.is(rdx));
  1844    ASSERT(!src1.is(rdx));
  1845
  1846    // Check for 0 divisor (result is +/-Infinity).
  1847    testq(src2, src2);
  1848    j(zero, on_not_smi_result, near_jump);
  1849
  1850    if (src1.is(rax)) {
- 1851      movq(kScratchRegister, src1);
+ 1851      movp(kScratchRegister, src1);
  1852    }
  1853    SmiToInteger32(rax, src1);
  1854    // We need to rule out dividing Smi::kMinValue by -1, since that would
  1855    // overflow in idiv and raise an exception.
  1856    // We combine this with negative zero test (negative zero only happens
  1857    // when dividing zero by a negative number).
  1858
  1859    // We overshoot a little and go to slow case if we divide min-value
  1860    // by any negative value, not just -1.
  1861    Label safe_div;
  1862    testl(rax, Immediate(0x7fffffff));
  1863    j(not_zero, &safe_div, Label::kNear);
  1864    testq(src2, src2);
  1865    if (src1.is(rax)) {
  1866      j(positive, &safe_div, Label::kNear);
- 1867      movq(src1, kScratchRegister);
+ 1867      movp(src1, kScratchRegister);
  1868      jmp(on_not_smi_result, near_jump);
  1869    } else {
  1870      j(negative, on_not_smi_result, near_jump);
  1871    }
  1872    bind(&safe_div);
  1873
  1874    SmiToInteger32(src2, src2);
  1875    // Sign extend src1 into edx:eax.
  1876    cdq();
  1877    idivl(src2);
  1878    Integer32ToSmi(src2, src2);
  1879    // Check that the remainder is zero.
  1880    testl(rdx, rdx);
  1881    if (src1.is(rax)) {
  1882      Label smi_result;
  1883      j(zero, &smi_result, Label::kNear);
- 1884      movq(src1, kScratchRegister);
+ 1884      movp(src1, kScratchRegister);
  1885      jmp(on_not_smi_result, near_jump);
  1886      bind(&smi_result);
  1887    } else {
  1888      j(not_zero, on_not_smi_result, near_jump);
  1889    }
  1890    if (!dst.is(src1) && src1.is(rax)) {
- 1891      movq(src1, kScratchRegister);
+ 1891      movp(src1, kScratchRegister);
  1892    }
  1893    Integer32ToSmi(dst, rax);
  1894  }
  1895
  1896
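SmiDiv returns a smi only when the division is exact (remainder zero in rdx), and it must keep Smi::kMinValue / -1 away from idivl, whose quotient +2^31 does not fit in 32 bits and would raise #DE; the code above deliberately overshoots and goes slow for any negative divisor of the minimal value. Spot checks:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t min32 = -(int64_t{1} << 31);  // untagged Smi::kMinValue
  assert(-min32 > INT32_MAX);                 // min32 / -1 cannot fit: #DE in idivl
  assert(7 % 2 != 0);                         // inexact: 7 / 2 goes to the slow path
  assert(8 % 2 == 0);                         // exact: 8 / 2 == 4 stays a smi
}
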
  1897  void MacroAssembler::SmiMod(Register dst,
  1898                              Register src1,
  1899                              Register src2,
  1900                              Label* on_not_smi_result,
  1901                              Label::Distance near_jump) {
  1902    ASSERT(!dst.is(kScratchRegister));
  1903    ASSERT(!src1.is(kScratchRegister));
  1904    ASSERT(!src2.is(kScratchRegister));
  1905    ASSERT(!src2.is(rax));
  1906    ASSERT(!src2.is(rdx));
  1907    ASSERT(!src1.is(rdx));
  1908    ASSERT(!src1.is(src2));
  1909
  1910    testq(src2, src2);
  1911    j(zero, on_not_smi_result, near_jump);
  1912
  1913    if (src1.is(rax)) {
- 1914      movq(kScratchRegister, src1);
+ 1914      movp(kScratchRegister, src1);
  1915    }
  1916    SmiToInteger32(rax, src1);
  1917    SmiToInteger32(src2, src2);
  1918
  1919    // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  1920    Label safe_div;
  1921    cmpl(rax, Immediate(Smi::kMinValue));
  1922    j(not_equal, &safe_div, Label::kNear);
  1923    cmpl(src2, Immediate(-1));
  1924    j(not_equal, &safe_div, Label::kNear);
  1925    // Retag inputs and go slow case.
  1926    Integer32ToSmi(src2, src2);
  1927    if (src1.is(rax)) {
- 1928      movq(src1, kScratchRegister);
+ 1928      movp(src1, kScratchRegister);
  1929    }
  1930    jmp(on_not_smi_result, near_jump);
  1931    bind(&safe_div);
  1932
  1933    // Sign extend eax into edx:eax.
  1934    cdq();
  1935    idivl(src2);
  1936    // Restore smi tags on inputs.
  1937    Integer32ToSmi(src2, src2);
  1938    if (src1.is(rax)) {
- 1939      movq(src1, kScratchRegister);
+ 1939      movp(src1, kScratchRegister);
  1940    }
  1941    // Check for a negative zero result. If the result is zero, and the
  1942    // dividend is negative, go slow to return a floating point negative zero.
  1943    Label smi_result;
  1944    testl(rdx, rdx);
  1945    j(not_zero, &smi_result, Label::kNear);
  1946    testq(src1, src1);
  1947    j(negative, on_not_smi_result, near_jump);
  1948    bind(&smi_result);
  1949    Integer32ToSmi(dst, rdx);
(...skipping 10 matching lines...)
  1960    } else {
  1961      lea(dst, Operand(src, kScratchRegister, times_1, 0));
  1962    }
  1963    not_(dst);
  1964  }
  1965
  1966
  1967  void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  1968    ASSERT(!dst.is(src2));
  1969    if (!dst.is(src1)) {
- 1970      movq(dst, src1);
+ 1970      movp(dst, src1);
  1971    }
  1972    and_(dst, src2);
  1973  }
  1974
  1975
  1976  void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  1977    if (constant->value() == 0) {
  1978      Set(dst, 0);
  1979    } else if (dst.is(src)) {
  1980      ASSERT(!dst.is(kScratchRegister));
  1981      Register constant_reg = GetSmiConstant(constant);
  1982      and_(dst, constant_reg);
  1983    } else {
  1984      LoadSmiConstant(dst, constant);
  1985      and_(dst, src);
  1986    }
  1987  }
  1988
  1989
1990 void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) { 1990 void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1991 if (!dst.is(src1)) { 1991 if (!dst.is(src1)) {
1992 ASSERT(!src1.is(src2)); 1992 ASSERT(!src1.is(src2));
1993 movq(dst, src1); 1993 movp(dst, src1);
1994 } 1994 }
1995 or_(dst, src2); 1995 or_(dst, src2);
1996 } 1996 }
1997 1997
1998 1998
1999 void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) { 1999 void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
2000 if (dst.is(src)) { 2000 if (dst.is(src)) {
2001 ASSERT(!dst.is(kScratchRegister)); 2001 ASSERT(!dst.is(kScratchRegister));
2002 Register constant_reg = GetSmiConstant(constant); 2002 Register constant_reg = GetSmiConstant(constant);
2003 or_(dst, constant_reg); 2003 or_(dst, constant_reg);
2004 } else { 2004 } else {
2005 LoadSmiConstant(dst, constant); 2005 LoadSmiConstant(dst, constant);
2006 or_(dst, src); 2006 or_(dst, src);
2007 } 2007 }
2008 } 2008 }
2009 2009
2010 2010
2011 void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) { 2011 void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2012 if (!dst.is(src1)) { 2012 if (!dst.is(src1)) {
2013 ASSERT(!src1.is(src2)); 2013 ASSERT(!src1.is(src2));
2014 movq(dst, src1); 2014 movp(dst, src1);
2015 } 2015 }
2016 xor_(dst, src2); 2016 xor_(dst, src2);
2017 } 2017 }
2018 2018
2019 2019
2020 void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) { 2020 void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
2021 if (dst.is(src)) { 2021 if (dst.is(src)) {
2022 ASSERT(!dst.is(kScratchRegister)); 2022 ASSERT(!dst.is(kScratchRegister));
2023 Register constant_reg = GetSmiConstant(constant); 2023 Register constant_reg = GetSmiConstant(constant);
2024 xor_(dst, constant_reg); 2024 xor_(dst, constant_reg);
(...skipping 16 matching lines...)
2041 UNIMPLEMENTED(); // Not used. 2041 UNIMPLEMENTED(); // Not used.
2042 } 2042 }
2043 } 2043 }
2044 } 2044 }
2045 2045
2046 2046
2047 void MacroAssembler::SmiShiftLeftConstant(Register dst, 2047 void MacroAssembler::SmiShiftLeftConstant(Register dst,
2048 Register src, 2048 Register src,
2049 int shift_value) { 2049 int shift_value) {
2050 if (!dst.is(src)) { 2050 if (!dst.is(src)) {
2051 movq(dst, src); 2051 movp(dst, src);
2052 } 2052 }
2053 if (shift_value > 0) { 2053 if (shift_value > 0) {
2054 shl(dst, Immediate(shift_value)); 2054 shl(dst, Immediate(shift_value));
2055 } 2055 }
2056 } 2056 }
2057 2057
2058 2058
2059 void MacroAssembler::SmiShiftLogicalRightConstant( 2059 void MacroAssembler::SmiShiftLogicalRightConstant(
2060 Register dst, Register src, int shift_value, 2060 Register dst, Register src, int shift_value,
2061 Label* on_not_smi_result, Label::Distance near_jump) { 2061 Label* on_not_smi_result, Label::Distance near_jump) {
2062 // Logical right shift interprets its result as an *unsigned* number. 2062 // Logical right shift interprets its result as an *unsigned* number.
2063 if (dst.is(src)) { 2063 if (dst.is(src)) {
2064 UNIMPLEMENTED(); // Not used. 2064 UNIMPLEMENTED(); // Not used.
2065 } else { 2065 } else {
2066 movq(dst, src); 2066 movp(dst, src);
2067 if (shift_value == 0) { 2067 if (shift_value == 0) {
2068 testq(dst, dst); 2068 testq(dst, dst);
2069 j(negative, on_not_smi_result, near_jump); 2069 j(negative, on_not_smi_result, near_jump);
2070 } 2070 }
2071 shr(dst, Immediate(shift_value + kSmiShift)); 2071 shr(dst, Immediate(shift_value + kSmiShift));
2072 shl(dst, Immediate(kSmiShift)); 2072 shl(dst, Immediate(kSmiShift));
2073 } 2073 }
2074 } 2074 }
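
In C terms, the constant logical shift fuses untagging, shifting, and retagging into one pair of shifts; a sketch, assuming x64's 32-bit smi payload in the upper word (kSmiShift == 32). The shift_value == 0 case needs the negative check above because reinterpreting a negative payload as unsigned leaves the smi range:

    #include <cstdint>

    const int kSmiShift = 32;  // assumption: payload lives in the high bits

    // Logical right shift by shift_value, fused with untag + retag.
    uint64_t SmiShrConstantSketch(uint64_t smi, int shift_value) {
      return (smi >> (shift_value + kSmiShift)) << kSmiShift;
    }
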
2075 2075
2076 2076
(...skipping 52 matching lines...)
2129 2129
2130 2130
2131 void MacroAssembler::SmiShiftArithmeticRight(Register dst, 2131 void MacroAssembler::SmiShiftArithmeticRight(Register dst,
2132 Register src1, 2132 Register src1,
2133 Register src2) { 2133 Register src2) {
2134 ASSERT(!dst.is(kScratchRegister)); 2134 ASSERT(!dst.is(kScratchRegister));
2135 ASSERT(!src1.is(kScratchRegister)); 2135 ASSERT(!src1.is(kScratchRegister));
2136 ASSERT(!src2.is(kScratchRegister)); 2136 ASSERT(!src2.is(kScratchRegister));
2137 ASSERT(!dst.is(rcx)); 2137 ASSERT(!dst.is(rcx));
2138 if (src1.is(rcx)) { 2138 if (src1.is(rcx)) {
2139 movq(kScratchRegister, src1); 2139 movp(kScratchRegister, src1);
2140 } else if (src2.is(rcx)) { 2140 } else if (src2.is(rcx)) {
2141 movq(kScratchRegister, src2); 2141 movp(kScratchRegister, src2);
2142 } 2142 }
2143 if (!dst.is(src1)) { 2143 if (!dst.is(src1)) {
2144 movq(dst, src1); 2144 movp(dst, src1);
2145 } 2145 }
2146 SmiToInteger32(rcx, src2); 2146 SmiToInteger32(rcx, src2);
2147 orl(rcx, Immediate(kSmiShift)); 2147 orl(rcx, Immediate(kSmiShift));
2148 sar_cl(dst); // Shift by 32 + (original rcx & 0x1f). 2148 sar_cl(dst); // Shift by 32 + (original rcx & 0x1f).
2149 shl(dst, Immediate(kSmiShift)); 2149 shl(dst, Immediate(kSmiShift));
2150 if (src1.is(rcx)) { 2150 if (src1.is(rcx)) {
2151 movq(src1, kScratchRegister); 2151 movp(src1, kScratchRegister);
2152 } else if (src2.is(rcx)) { 2152 } else if (src2.is(rcx)) {
2153 movq(src2, kScratchRegister); 2153 movp(src2, kScratchRegister);
2154 } 2154 }
2155 } 2155 }
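
The orl(rcx, Immediate(kSmiShift)) line is the trick worth calling out: the smi shift count is in [0, 31], so or-ing in 32 produces 32 + count, letting a single sar both untag src1 and apply the shift. A hedged C++ equivalent:

    #include <cstdint>

    const int kSmiShift = 32;  // assumption: 32-bit payload in the high word

    // count must be in [0, 31]; signed >> is an arithmetic shift here.
    int64_t SmiSarSketch(int64_t smi, int count) {
      return (smi >> (kSmiShift | count)) << kSmiShift;  // sar, then retag
    }
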
2156 2156
2157 2157
2158 void MacroAssembler::SelectNonSmi(Register dst, 2158 void MacroAssembler::SelectNonSmi(Register dst,
2159 Register src1, 2159 Register src1,
2160 Register src2, 2160 Register src2,
2161 Label* on_not_smis, 2161 Label* on_not_smis,
2162 Label::Distance near_jump) { 2162 Label::Distance near_jump) {
2163 ASSERT(!dst.is(kScratchRegister)); 2163 ASSERT(!dst.is(kScratchRegister));
(...skipping 12 matching lines...)
2176 and_(kScratchRegister, src1); 2176 and_(kScratchRegister, src1);
2177 testl(kScratchRegister, src2); 2177 testl(kScratchRegister, src2);
2178 // If non-zero then both are smis. 2178 // If non-zero then both are smis.
2179 j(not_zero, on_not_smis, near_jump); 2179 j(not_zero, on_not_smis, near_jump);
2180 2180
2181 // Exactly one operand is a smi. 2181 // Exactly one operand is a smi.
2182 ASSERT_EQ(1, static_cast<int>(kSmiTagMask)); 2182 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
2183 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one. 2183 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
2184 subq(kScratchRegister, Immediate(1)); 2184 subq(kScratchRegister, Immediate(1));
2185 // If src1 is a smi, the scratch register is all 1s, else it is all 0s. 2185 // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
2186 movq(dst, src1); 2186 movp(dst, src1);
2187 xor_(dst, src2); 2187 xor_(dst, src2);
2188 and_(dst, kScratchRegister); 2188 and_(dst, kScratchRegister);
2189 // If src1 is a smi, dst holds src1 ^ src2, else it is zero. 2189 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
2190 xor_(dst, src1); 2190 xor_(dst, src1);
2191 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi. 2191 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
2192 } 2192 }
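
The branchless select above, restated as a C++ sketch (assuming kSmiTag == 0 and kSmiTagMask == 1, as the ASSERT_EQ checks): exactly one input is a smi, and the mask built from its tag bit picks the other one:

    #include <cstdint>

    uint64_t SelectNonSmiSketch(uint64_t a, uint64_t b) {
      uint64_t mask = (a & 1) - 1;   // all 1s if a is a smi, else all 0s
      return ((a ^ b) & mask) ^ a;   // yields b if a is a smi, otherwise a
    }
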
2193 2193
2194 2194
2195 SmiIndex MacroAssembler::SmiToIndex(Register dst, 2195 SmiIndex MacroAssembler::SmiToIndex(Register dst,
2196 Register src, 2196 Register src,
(...skipping 41 matching lines...)
2238 if (is_int32(smi)) { 2238 if (is_int32(smi)) {
2239 push(Immediate(static_cast<int32_t>(smi))); 2239 push(Immediate(static_cast<int32_t>(smi)));
2240 } else { 2240 } else {
2241 Register constant = GetSmiConstant(source); 2241 Register constant = GetSmiConstant(source);
2242 push(constant); 2242 push(constant);
2243 } 2243 }
2244 } 2244 }
2245 2245
2246 2246
2247 void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) { 2247 void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) {
2248 movq(scratch, src); 2248 movp(scratch, src);
2249 // High bits. 2249 // High bits.
2250 shr(src, Immediate(64 - kSmiShift)); 2250 shr(src, Immediate(64 - kSmiShift));
2251 shl(src, Immediate(kSmiShift)); 2251 shl(src, Immediate(kSmiShift));
2252 push(src); 2252 push(src);
2253 // Low bits. 2253 // Low bits.
2254 shl(scratch, Immediate(kSmiShift)); 2254 shl(scratch, Immediate(kSmiShift));
2255 push(scratch); 2255 push(scratch);
2256 } 2256 }
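
What PushInt64AsTwoSmis computes, as a sketch (kSmiShift == 32 assumed): each 32-bit half of the value is parked in the payload bits of a word that looks like a smi, so the GC will not scan it as a pointer:

    #include <cstdint>

    const int kSmiShift = 32;  // assumption

    void SplitInt64Sketch(uint64_t value, uint64_t* high, uint64_t* low) {
      *high = (value >> (64 - kSmiShift)) << kSmiShift;  // top half, tagged
      *low = value << kSmiShift;                         // bottom half, tagged
    }
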
2257 2257
2258 2258
(...skipping 54 matching lines...)
2313 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); 2313 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2314 and_(scratch, mask); 2314 and_(scratch, mask);
2315 // Each entry in the string cache consists of two pointer-sized 2315 // Each entry in the string cache consists of two pointer-sized
2316 // fields, but the times_twice_pointer_size (multiply by 16) scale 2316 // fields, but the times_twice_pointer_size (multiply by 16) scale
2317 // factor is not supported by x64 addressing modes. 2317 // factor is not supported by x64 addressing modes.
2318 // So we have to premultiply the entry index before the lookup. 2318 // So we have to premultiply the entry index before the lookup.
2319 shl(scratch, Immediate(kPointerSizeLog2 + 1)); 2319 shl(scratch, Immediate(kPointerSizeLog2 + 1));
2320 2320
2321 Register index = scratch; 2321 Register index = scratch;
2322 Register probe = mask; 2322 Register probe = mask;
2323 movq(probe, 2323 movp(probe,
2324 FieldOperand(number_string_cache, 2324 FieldOperand(number_string_cache,
2325 index, 2325 index,
2326 times_1, 2326 times_1,
2327 FixedArray::kHeaderSize)); 2327 FixedArray::kHeaderSize));
2328 JumpIfSmi(probe, not_found); 2328 JumpIfSmi(probe, not_found);
2329 movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); 2329 movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
2330 ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset)); 2330 ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
2331 j(parity_even, not_found); // Bail out if NaN is involved. 2331 j(parity_even, not_found); // Bail out if NaN is involved.
2332 j(not_equal, not_found); // The cache did not contain this value. 2332 j(not_equal, not_found); // The cache did not contain this value.
2333 jmp(&load_result_from_cache); 2333 jmp(&load_result_from_cache);
(...skipping 10 matching lines...)
2344 // Check if the entry is the smi we are looking for. 2344 // Check if the entry is the smi we are looking for.
2345 cmpq(object, 2345 cmpq(object,
2346 FieldOperand(number_string_cache, 2346 FieldOperand(number_string_cache,
2347 index, 2347 index,
2348 times_1, 2348 times_1,
2349 FixedArray::kHeaderSize)); 2349 FixedArray::kHeaderSize));
2350 j(not_equal, not_found); 2350 j(not_equal, not_found);
2351 2351
2352 // Get the result from the cache. 2352 // Get the result from the cache.
2353 bind(&load_result_from_cache); 2353 bind(&load_result_from_cache);
2354 movq(result, 2354 movp(result,
2355 FieldOperand(number_string_cache, 2355 FieldOperand(number_string_cache,
2356 index, 2356 index,
2357 times_1, 2357 times_1,
2358 FixedArray::kHeaderSize + kPointerSize)); 2358 FixedArray::kHeaderSize + kPointerSize));
2359 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); 2359 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
2360 } 2360 }
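
The heap-number probe above hashes the double by xor-ing its two 32-bit halves and masking; a sketch, assuming the skipped lines compute mask as (cache length / 2) - 1 and that pointers are 8 bytes:

    #include <cstdint>
    #include <cstring>

    // Byte offset of a cache entry: each entry is a (number, string)
    // pair, and the index is premultiplied because x64 addressing modes
    // have no times-16 scale factor.
    uint32_t CacheEntryOffsetSketch(double value, uint32_t mask) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      uint32_t hash = (static_cast<uint32_t>(bits) ^
                       static_cast<uint32_t>(bits >> 32)) & mask;
      return hash * 2 * 8;  // two pointer-sized fields per entry
    }
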
2361 2361
2362 2362
2363 void MacroAssembler::JumpIfNotString(Register object, 2363 void MacroAssembler::JumpIfNotString(Register object,
2364 Register object_map, 2364 Register object_map,
(...skipping 11 matching lines...)
2376 Register second_object, 2376 Register second_object,
2377 Register scratch1, 2377 Register scratch1,
2378 Register scratch2, 2378 Register scratch2,
2379 Label* on_fail, 2379 Label* on_fail,
2380 Label::Distance near_jump) { 2380 Label::Distance near_jump) {
2381 // Check that both objects are not smis. 2381 // Check that both objects are not smis.
2382 Condition either_smi = CheckEitherSmi(first_object, second_object); 2382 Condition either_smi = CheckEitherSmi(first_object, second_object);
2383 j(either_smi, on_fail, near_jump); 2383 j(either_smi, on_fail, near_jump);
2384 2384
2385 // Load instance type for both strings. 2385 // Load instance type for both strings.
2386 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset)); 2386 movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
2387 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset)); 2387 movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
2388 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); 2388 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2389 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); 2389 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2390 2390
2391 // Check that both are flat ASCII strings. 2391 // Check that both are flat ASCII strings.
2392 ASSERT(kNotStringTag != 0); 2392 ASSERT(kNotStringTag != 0);
2393 const int kFlatAsciiStringMask = 2393 const int kFlatAsciiStringMask =
2394 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; 2394 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2395 const int kFlatAsciiStringTag = 2395 const int kFlatAsciiStringTag =
2396 kStringTag | kOneByteStringTag | kSeqStringTag; 2396 kStringTag | kOneByteStringTag | kSeqStringTag;
2397 2397
(...skipping 27 matching lines...)
2425 2425
2426 2426
2427 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii( 2427 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
2428 Register first_object_instance_type, 2428 Register first_object_instance_type,
2429 Register second_object_instance_type, 2429 Register second_object_instance_type,
2430 Register scratch1, 2430 Register scratch1,
2431 Register scratch2, 2431 Register scratch2,
2432 Label* on_fail, 2432 Label* on_fail,
2433 Label::Distance near_jump) { 2433 Label::Distance near_jump) {
2434 // Load instance type for both strings. 2434 // Load instance type for both strings.
2435 movq(scratch1, first_object_instance_type); 2435 movp(scratch1, first_object_instance_type);
2436 movq(scratch2, second_object_instance_type); 2436 movp(scratch2, second_object_instance_type);
2437 2437
2438 // Check that both are flat ASCII strings. 2438 // Check that both are flat ASCII strings.
2439 ASSERT(kNotStringTag != 0); 2439 ASSERT(kNotStringTag != 0);
2440 const int kFlatAsciiStringMask = 2440 const int kFlatAsciiStringMask =
2441 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; 2441 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2442 const int kFlatAsciiStringTag = 2442 const int kFlatAsciiStringTag =
2443 kStringTag | kOneByteStringTag | kSeqStringTag; 2443 kStringTag | kOneByteStringTag | kSeqStringTag;
2444 2444
2445 andl(scratch1, Immediate(kFlatAsciiStringMask)); 2445 andl(scratch1, Immediate(kFlatAsciiStringMask));
2446 andl(scratch2, Immediate(kFlatAsciiStringMask)); 2446 andl(scratch2, Immediate(kFlatAsciiStringMask));
(...skipping 32 matching lines...)
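
For reference, the masked-compare idiom being set up here, sketched in C++ (the packing of both checks into one compare is an assumption about the skipped lines, in the spirit of the ia32 version; constants are illustrative):

    #include <cstdint>

    // One string: flat ASCII iff the masked instance type equals the tag.
    bool IsFlatAscii(uint32_t type, uint32_t mask, uint32_t tag) {
      return (type & mask) == tag;
    }

    // Two strings at once: shift the second masked type up and compare
    // both against the packed expected tags in a single comparison.
    bool BothFlatAscii(uint32_t t1, uint32_t t2, uint32_t mask, uint32_t tag) {
      return ((t1 & mask) + ((t2 & mask) << 3)) == (tag + (tag << 3));
    }
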
2479 2479
2480 void MacroAssembler::JumpIfNotUniqueName(Register reg, 2480 void MacroAssembler::JumpIfNotUniqueName(Register reg,
2481 Label* not_unique_name, 2481 Label* not_unique_name,
2482 Label::Distance distance) { 2482 Label::Distance distance) {
2483 JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance); 2483 JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
2484 } 2484 }
2485 2485
2486 2486
2487 void MacroAssembler::Move(Register dst, Register src) { 2487 void MacroAssembler::Move(Register dst, Register src) {
2488 if (!dst.is(src)) { 2488 if (!dst.is(src)) {
2489 movq(dst, src); 2489 movp(dst, src);
2490 } 2490 }
2491 } 2491 }
2492 2492
2493 2493
2494 void MacroAssembler::Move(Register dst, Handle<Object> source) { 2494 void MacroAssembler::Move(Register dst, Handle<Object> source) {
2495 AllowDeferredHandleDereference smi_check; 2495 AllowDeferredHandleDereference smi_check;
2496 if (source->IsSmi()) { 2496 if (source->IsSmi()) {
2497 Move(dst, Smi::cast(*source)); 2497 Move(dst, Smi::cast(*source));
2498 } else { 2498 } else {
2499 MoveHeapObject(dst, source); 2499 MoveHeapObject(dst, source);
2500 } 2500 }
2501 } 2501 }
2502 2502
2503 2503
2504 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) { 2504 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
2505 AllowDeferredHandleDereference smi_check; 2505 AllowDeferredHandleDereference smi_check;
2506 if (source->IsSmi()) { 2506 if (source->IsSmi()) {
2507 Move(dst, Smi::cast(*source)); 2507 Move(dst, Smi::cast(*source));
2508 } else { 2508 } else {
2509 MoveHeapObject(kScratchRegister, source); 2509 MoveHeapObject(kScratchRegister, source);
2510 movq(dst, kScratchRegister); 2510 movp(dst, kScratchRegister);
2511 } 2511 }
2512 } 2512 }
2513 2513
2514 2514
2515 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { 2515 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
2516 AllowDeferredHandleDereference smi_check; 2516 AllowDeferredHandleDereference smi_check;
2517 if (source->IsSmi()) { 2517 if (source->IsSmi()) {
2518 Cmp(dst, Smi::cast(*source)); 2518 Cmp(dst, Smi::cast(*source));
2519 } else { 2519 } else {
2520 MoveHeapObject(kScratchRegister, source); 2520 MoveHeapObject(kScratchRegister, source);
(...skipping 24 matching lines...)
2545 } 2545 }
2546 2546
2547 2547
2548 void MacroAssembler::MoveHeapObject(Register result, 2548 void MacroAssembler::MoveHeapObject(Register result,
2549 Handle<Object> object) { 2549 Handle<Object> object) {
2550 AllowDeferredHandleDereference using_raw_address; 2550 AllowDeferredHandleDereference using_raw_address;
2551 ASSERT(object->IsHeapObject()); 2551 ASSERT(object->IsHeapObject());
2552 if (isolate()->heap()->InNewSpace(*object)) { 2552 if (isolate()->heap()->InNewSpace(*object)) {
2553 Handle<Cell> cell = isolate()->factory()->NewCell(object); 2553 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2554 Move(result, cell, RelocInfo::CELL); 2554 Move(result, cell, RelocInfo::CELL);
2555 movq(result, Operand(result, 0)); 2555 movp(result, Operand(result, 0));
2556 } else { 2556 } else {
2557 Move(result, object, RelocInfo::EMBEDDED_OBJECT); 2557 Move(result, object, RelocInfo::EMBEDDED_OBJECT);
2558 } 2558 }
2559 } 2559 }
2560 2560
2561 2561
2562 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) { 2562 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
2563 if (dst.is(rax)) { 2563 if (dst.is(rax)) {
2564 AllowDeferredHandleDereference embedding_raw_address; 2564 AllowDeferredHandleDereference embedding_raw_address;
2565 load_rax(cell.location(), RelocInfo::CELL); 2565 load_rax(cell.location(), RelocInfo::CELL);
2566 } else { 2566 } else {
2567 Move(dst, cell, RelocInfo::CELL); 2567 Move(dst, cell, RelocInfo::CELL);
2568 movq(dst, Operand(dst, 0)); 2568 movp(dst, Operand(dst, 0));
2569 } 2569 }
2570 } 2570 }
2571 2571
2572 2572
2573 void MacroAssembler::Drop(int stack_elements) { 2573 void MacroAssembler::Drop(int stack_elements) {
2574 if (stack_elements > 0) { 2574 if (stack_elements > 0) {
2575 addq(rsp, Immediate(stack_elements * kPointerSize)); 2575 addq(rsp, Immediate(stack_elements * kPointerSize));
2576 } 2576 }
2577 } 2577 }
2578 2578
(...skipping 135 matching lines...)
2714 8, 2714 8,
2715 -1, 2715 -1,
2716 -1, 2716 -1,
2717 9, 2717 9,
2718 10 2718 10
2719 }; 2719 };
2720 2720
2721 2721
2722 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, 2722 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
2723 const Immediate& imm) { 2723 const Immediate& imm) {
2724 movq(SafepointRegisterSlot(dst), imm); 2724 movp(SafepointRegisterSlot(dst), imm);
2725 } 2725 }
2726 2726
2727 2727
2728 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) { 2728 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2729 movq(SafepointRegisterSlot(dst), src); 2729 movp(SafepointRegisterSlot(dst), src);
2730 } 2730 }
2731 2731
2732 2732
2733 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) { 2733 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2734 movq(dst, SafepointRegisterSlot(src)); 2734 movp(dst, SafepointRegisterSlot(src));
2735 } 2735 }
2736 2736
2737 2737
2738 Operand MacroAssembler::SafepointRegisterSlot(Register reg) { 2738 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2739 return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); 2739 return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2740 } 2740 }
2741 2741
2742 2742
2743 void MacroAssembler::PushTryHandler(StackHandler::Kind kind, 2743 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
2744 int handler_index) { 2744 int handler_index) {
(...skipping 23 matching lines...)
2768 unsigned state = 2768 unsigned state =
2769 StackHandler::IndexField::encode(handler_index) | 2769 StackHandler::IndexField::encode(handler_index) |
2770 StackHandler::KindField::encode(kind); 2770 StackHandler::KindField::encode(kind);
2771 push(Immediate(state)); 2771 push(Immediate(state));
2772 Push(CodeObject()); 2772 Push(CodeObject());
2773 2773
2774 // Link the current handler as the next handler. 2774 // Link the current handler as the next handler.
2775 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2775 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2776 push(ExternalOperand(handler_address)); 2776 push(ExternalOperand(handler_address));
2777 // Set this new handler as the current one. 2777 // Set this new handler as the current one.
2778 movq(ExternalOperand(handler_address), rsp); 2778 movp(ExternalOperand(handler_address), rsp);
2779 } 2779 }
2780 2780
2781 2781
2782 void MacroAssembler::PopTryHandler() { 2782 void MacroAssembler::PopTryHandler() {
2783 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 2783 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2784 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2784 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2785 pop(ExternalOperand(handler_address)); 2785 pop(ExternalOperand(handler_address));
2786 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 2786 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
2787 } 2787 }
2788 2788
2789 2789
2790 void MacroAssembler::JumpToHandlerEntry() { 2790 void MacroAssembler::JumpToHandlerEntry() {
2791 // Compute the handler entry address and jump to it. The handler table is 2791 // Compute the handler entry address and jump to it. The handler table is
2792 // a fixed array of (smi-tagged) code offsets. 2792 // a fixed array of (smi-tagged) code offsets.
2793 // rax = exception, rdi = code object, rdx = state. 2793 // rax = exception, rdi = code object, rdx = state.
2794 movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset)); 2794 movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
2795 shr(rdx, Immediate(StackHandler::kKindWidth)); 2795 shr(rdx, Immediate(StackHandler::kKindWidth));
2796 movq(rdx, 2796 movp(rdx,
2797 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); 2797 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
2798 SmiToInteger64(rdx, rdx); 2798 SmiToInteger64(rdx, rdx);
2799 lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize)); 2799 lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
2800 jmp(rdi); 2800 jmp(rdi);
2801 } 2801 }
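
JumpToHandlerEntry in pseudo-C (a sketch; the types are stand-ins, not V8's API): the handler table is a FixedArray of smi-tagged code offsets, so the entry is untagged and added to the start of the code object's instructions:

    #include <cstdint>

    typedef void (*HandlerEntry)();

    // table points at the offsets array, code_start at the first
    // instruction of the code object; kSmiShift == 32 assumed.
    HandlerEntry ComputeHandlerSketch(const int64_t* table, int index,
                                      const uint8_t* code_start) {
      int64_t offset = table[index] >> 32;  // SmiToInteger64
      return reinterpret_cast<HandlerEntry>(code_start + offset);
    }
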
2802 2802
2803 2803
2804 void MacroAssembler::Throw(Register value) { 2804 void MacroAssembler::Throw(Register value) {
2805 // Adjust this code if not the case. 2805 // Adjust this code if not the case.
2806 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize + 2806 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
2807 kFPOnStackSize); 2807 kFPOnStackSize);
2808 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 2808 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2809 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); 2809 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
2810 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); 2810 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
2811 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); 2811 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
2812 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); 2812 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
2813 2813
2814 // The exception is expected in rax. 2814 // The exception is expected in rax.
2815 if (!value.is(rax)) { 2815 if (!value.is(rax)) {
2816 movq(rax, value); 2816 movp(rax, value);
2817 } 2817 }
2818 // Drop the stack pointer to the top of the top handler. 2818 // Drop the stack pointer to the top of the top handler.
2819 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2819 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2820 movq(rsp, ExternalOperand(handler_address)); 2820 movp(rsp, ExternalOperand(handler_address));
2821 // Restore the next handler. 2821 // Restore the next handler.
2822 pop(ExternalOperand(handler_address)); 2822 pop(ExternalOperand(handler_address));
2823 2823
2824 // Remove the code object and state, compute the handler address in rdi. 2824 // Remove the code object and state, compute the handler address in rdi.
2825 pop(rdi); // Code object. 2825 pop(rdi); // Code object.
2826 pop(rdx); // Offset and state. 2826 pop(rdx); // Offset and state.
2827 2827
2828 // Restore the context and frame pointer. 2828 // Restore the context and frame pointer.
2829 pop(rsi); // Context. 2829 pop(rsi); // Context.
2830 pop(rbp); // Frame pointer. 2830 pop(rbp); // Frame pointer.
2831 2831
2832 // If the handler is a JS frame, restore the context to the frame. 2832 // If the handler is a JS frame, restore the context to the frame.
2833 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either 2833 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
2834 // rbp or rsi. 2834 // rbp or rsi.
2835 Label skip; 2835 Label skip;
2836 testq(rsi, rsi); 2836 testq(rsi, rsi);
2837 j(zero, &skip, Label::kNear); 2837 j(zero, &skip, Label::kNear);
2838 movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 2838 movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2839 bind(&skip); 2839 bind(&skip);
2840 2840
2841 JumpToHandlerEntry(); 2841 JumpToHandlerEntry();
2842 } 2842 }
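
The STATIC_ASSERTs at the top of Throw pin down the handler layout that the pops rely on; as a struct sketch (assuming kPointerSize == kFPOnStackSize == 8):

    #include <cstdint>

    struct StackHandlerSketch {
      void* next;        // kNextOffset    == 0, restored into the top slot
      void* code;        // kCodeOffset    == 1 * kPointerSize -> rdi
      uintptr_t state;   // kStateOffset   == 2 * kPointerSize -> rdx
      void* context;     // kContextOffset == 3 * kPointerSize -> rsi
      void* fp;          // kFPOffset      == 4 * kPointerSize -> rbp
    };  // sizeof == StackHandlerConstants::kSize == 40 bytes
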
2843 2843
2844 2844
2845 void MacroAssembler::ThrowUncatchable(Register value) { 2845 void MacroAssembler::ThrowUncatchable(Register value) {
2846 // Adjust this code if not the case. 2846 // Adjust this code if not the case.
2847 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize + 2847 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
2848 kFPOnStackSize); 2848 kFPOnStackSize);
2849 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 2849 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2850 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); 2850 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
2851 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); 2851 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
2852 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); 2852 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
2853 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); 2853 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
2854 2854
2855 // The exception is expected in rax. 2855 // The exception is expected in rax.
2856 if (!value.is(rax)) { 2856 if (!value.is(rax)) {
2857 movq(rax, value); 2857 movp(rax, value);
2858 } 2858 }
2859 // Drop the stack pointer to the top of the top stack handler. 2859 // Drop the stack pointer to the top of the top stack handler.
2860 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2860 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2861 Load(rsp, handler_address); 2861 Load(rsp, handler_address);
2862 2862
2863 // Unwind the handlers until the top ENTRY handler is found. 2863 // Unwind the handlers until the top ENTRY handler is found.
2864 Label fetch_next, check_kind; 2864 Label fetch_next, check_kind;
2865 jmp(&check_kind, Label::kNear); 2865 jmp(&check_kind, Label::kNear);
2866 bind(&fetch_next); 2866 bind(&fetch_next);
2867 movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset)); 2867 movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
2868 2868
2869 bind(&check_kind); 2869 bind(&check_kind);
2870 STATIC_ASSERT(StackHandler::JS_ENTRY == 0); 2870 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
2871 testl(Operand(rsp, StackHandlerConstants::kStateOffset), 2871 testl(Operand(rsp, StackHandlerConstants::kStateOffset),
2872 Immediate(StackHandler::KindField::kMask)); 2872 Immediate(StackHandler::KindField::kMask));
2873 j(not_zero, &fetch_next); 2873 j(not_zero, &fetch_next);
2874 2874
2875 // Set the top handler address to next handler past the top ENTRY handler. 2875 // Set the top handler address to next handler past the top ENTRY handler.
2876 pop(ExternalOperand(handler_address)); 2876 pop(ExternalOperand(handler_address));
2877 2877
(...skipping 28 matching lines...)
2906 2906
2907 void MacroAssembler::FCmp() { 2907 void MacroAssembler::FCmp() {
2908 fucomip(); 2908 fucomip();
2909 fstp(0); 2909 fstp(0);
2910 } 2910 }
2911 2911
2912 2912
2913 void MacroAssembler::CmpObjectType(Register heap_object, 2913 void MacroAssembler::CmpObjectType(Register heap_object,
2914 InstanceType type, 2914 InstanceType type,
2915 Register map) { 2915 Register map) {
2916 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 2916 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
2917 CmpInstanceType(map, type); 2917 CmpInstanceType(map, type);
2918 } 2918 }
2919 2919
2920 2920
2921 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { 2921 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
2922 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), 2922 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
2923 Immediate(static_cast<int8_t>(type))); 2923 Immediate(static_cast<int8_t>(type)));
2924 } 2924 }
2925 2925
2926 2926
(...skipping 294 matching lines...)
3221 Label L; 3221 Label L;
3222 j(NegateCondition(cc), &L); 3222 j(NegateCondition(cc), &L);
3223 Throw(reason); 3223 Throw(reason);
3224 // will not return here 3224 // will not return here
3225 bind(&L); 3225 bind(&L);
3226 } 3226 }
3227 3227
3228 3228
3229 void MacroAssembler::LoadInstanceDescriptors(Register map, 3229 void MacroAssembler::LoadInstanceDescriptors(Register map,
3230 Register descriptors) { 3230 Register descriptors) {
3231 movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset)); 3231 movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
3232 } 3232 }
3233 3233
3234 3234
3235 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) { 3235 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
3236 movq(dst, FieldOperand(map, Map::kBitField3Offset)); 3236 movp(dst, FieldOperand(map, Map::kBitField3Offset));
3237 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst); 3237 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
3238 } 3238 }
3239 3239
3240 3240
3241 void MacroAssembler::EnumLength(Register dst, Register map) { 3241 void MacroAssembler::EnumLength(Register dst, Register map) {
3242 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); 3242 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
3243 movq(dst, FieldOperand(map, Map::kBitField3Offset)); 3243 movp(dst, FieldOperand(map, Map::kBitField3Offset));
3244 Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask)); 3244 Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask));
3245 and_(dst, kScratchRegister); 3245 and_(dst, kScratchRegister);
3246 } 3246 }
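
Both decoders above are plain shift-and-mask operations on bit_field3; a sketch (EnumLengthBits::kShift == 0, so EnumLength is a bare mask, applied here as a smi-tagged constant so the result stays a valid smi):

    #include <cstdint>

    // Generic DecodeField<BitField>: shift the packed word down, then mask.
    uint32_t DecodeFieldSketch(uint32_t word, int shift, uint32_t mask) {
      return (word >> shift) & mask;
    }
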
3247 3247
3248 3248
3249 void MacroAssembler::DispatchMap(Register obj, 3249 void MacroAssembler::DispatchMap(Register obj,
3250 Register unused, 3250 Register unused,
3251 Handle<Map> map, 3251 Handle<Map> map,
3252 Handle<Code> success, 3252 Handle<Code> success,
3253 SmiCheckType smi_check_type) { 3253 SmiCheckType smi_check_type) {
(...skipping 53 matching lines...)
3307 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); 3307 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
3308 } 3308 }
3309 } 3309 }
3310 3310
3311 3311
3312 void MacroAssembler::AssertString(Register object) { 3312 void MacroAssembler::AssertString(Register object) {
3313 if (emit_debug_code()) { 3313 if (emit_debug_code()) {
3314 testb(object, Immediate(kSmiTagMask)); 3314 testb(object, Immediate(kSmiTagMask));
3315 Check(not_equal, kOperandIsASmiAndNotAString); 3315 Check(not_equal, kOperandIsASmiAndNotAString);
3316 push(object); 3316 push(object);
3317 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3317 movp(object, FieldOperand(object, HeapObject::kMapOffset));
3318 CmpInstanceType(object, FIRST_NONSTRING_TYPE); 3318 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
3319 pop(object); 3319 pop(object);
3320 Check(below, kOperandIsNotAString); 3320 Check(below, kOperandIsNotAString);
3321 } 3321 }
3322 } 3322 }
3323 3323
3324 3324
3325 void MacroAssembler::AssertName(Register object) { 3325 void MacroAssembler::AssertName(Register object) {
3326 if (emit_debug_code()) { 3326 if (emit_debug_code()) {
3327 testb(object, Immediate(kSmiTagMask)); 3327 testb(object, Immediate(kSmiTagMask));
3328 Check(not_equal, kOperandIsASmiAndNotAName); 3328 Check(not_equal, kOperandIsASmiAndNotAName);
3329 push(object); 3329 push(object);
3330 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3330 movp(object, FieldOperand(object, HeapObject::kMapOffset));
3331 CmpInstanceType(object, LAST_NAME_TYPE); 3331 CmpInstanceType(object, LAST_NAME_TYPE);
3332 pop(object); 3332 pop(object);
3333 Check(below_equal, kOperandIsNotAName); 3333 Check(below_equal, kOperandIsNotAName);
3334 } 3334 }
3335 } 3335 }
3336 3336
3337 3337
3338 void MacroAssembler::AssertRootValue(Register src, 3338 void MacroAssembler::AssertRootValue(Register src,
3339 Heap::RootListIndex root_value_index, 3339 Heap::RootListIndex root_value_index,
3340 BailoutReason reason) { 3340 BailoutReason reason) {
3341 if (emit_debug_code()) { 3341 if (emit_debug_code()) {
3342 ASSERT(!src.is(kScratchRegister)); 3342 ASSERT(!src.is(kScratchRegister));
3343 LoadRoot(kScratchRegister, root_value_index); 3343 LoadRoot(kScratchRegister, root_value_index);
3344 cmpq(src, kScratchRegister); 3344 cmpq(src, kScratchRegister);
3345 Check(equal, reason); 3345 Check(equal, reason);
3346 } 3346 }
3347 } 3347 }
3348 3348
3349 3349
3350 3350
3351 Condition MacroAssembler::IsObjectStringType(Register heap_object, 3351 Condition MacroAssembler::IsObjectStringType(Register heap_object,
3352 Register map, 3352 Register map,
3353 Register instance_type) { 3353 Register instance_type) {
3354 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 3354 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3355 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 3355 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3356 STATIC_ASSERT(kNotStringTag != 0); 3356 STATIC_ASSERT(kNotStringTag != 0);
3357 testb(instance_type, Immediate(kIsNotStringMask)); 3357 testb(instance_type, Immediate(kIsNotStringMask));
3358 return zero; 3358 return zero;
3359 } 3359 }
3360 3360
3361 3361
3362 Condition MacroAssembler::IsObjectNameType(Register heap_object, 3362 Condition MacroAssembler::IsObjectNameType(Register heap_object,
3363 Register map, 3363 Register map,
3364 Register instance_type) { 3364 Register instance_type) {
3365 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 3365 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3366 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 3366 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3367 cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE))); 3367 cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
3368 return below_equal; 3368 return below_equal;
3369 } 3369 }
3370 3370
3371 3371
3372 void MacroAssembler::TryGetFunctionPrototype(Register function, 3372 void MacroAssembler::TryGetFunctionPrototype(Register function,
3373 Register result, 3373 Register result,
3374 Label* miss, 3374 Label* miss,
3375 bool miss_on_bound_function) { 3375 bool miss_on_bound_function) {
3376 // Check that the receiver isn't a smi. 3376 // Check that the receiver isn't a smi.
3377 testl(function, Immediate(kSmiTagMask)); 3377 testl(function, Immediate(kSmiTagMask));
3378 j(zero, miss); 3378 j(zero, miss);
3379 3379
3380 // Check that the function really is a function. 3380 // Check that the function really is a function.
3381 CmpObjectType(function, JS_FUNCTION_TYPE, result); 3381 CmpObjectType(function, JS_FUNCTION_TYPE, result);
3382 j(not_equal, miss); 3382 j(not_equal, miss);
3383 3383
3384 if (miss_on_bound_function) { 3384 if (miss_on_bound_function) {
3385 movq(kScratchRegister, 3385 movp(kScratchRegister,
3386 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3386 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3387 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte 3387 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
3388 // field). 3388 // field).
3389 TestBit(FieldOperand(kScratchRegister, 3389 TestBit(FieldOperand(kScratchRegister,
3390 SharedFunctionInfo::kCompilerHintsOffset), 3390 SharedFunctionInfo::kCompilerHintsOffset),
3391 SharedFunctionInfo::kBoundFunction); 3391 SharedFunctionInfo::kBoundFunction);
3392 j(not_zero, miss); 3392 j(not_zero, miss);
3393 } 3393 }
3394 3394
3395 // Make sure that the function has an instance prototype. 3395 // Make sure that the function has an instance prototype.
3396 Label non_instance; 3396 Label non_instance;
3397 testb(FieldOperand(result, Map::kBitFieldOffset), 3397 testb(FieldOperand(result, Map::kBitFieldOffset),
3398 Immediate(1 << Map::kHasNonInstancePrototype)); 3398 Immediate(1 << Map::kHasNonInstancePrototype));
3399 j(not_zero, &non_instance, Label::kNear); 3399 j(not_zero, &non_instance, Label::kNear);
3400 3400
3401 // Get the prototype or initial map from the function. 3401 // Get the prototype or initial map from the function.
3402 movq(result, 3402 movp(result,
3403 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 3403 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3404 3404
3405 // If the prototype or initial map is the hole, don't return it and 3405 // If the prototype or initial map is the hole, don't return it and
3406 // simply miss the cache instead. This will allow us to allocate a 3406 // simply miss the cache instead. This will allow us to allocate a
3407 // prototype object on-demand in the runtime system. 3407 // prototype object on-demand in the runtime system.
3408 CompareRoot(result, Heap::kTheHoleValueRootIndex); 3408 CompareRoot(result, Heap::kTheHoleValueRootIndex);
3409 j(equal, miss); 3409 j(equal, miss);
3410 3410
3411 // If the function does not have an initial map, we're done. 3411 // If the function does not have an initial map, we're done.
3412 Label done; 3412 Label done;
3413 CmpObjectType(result, MAP_TYPE, kScratchRegister); 3413 CmpObjectType(result, MAP_TYPE, kScratchRegister);
3414 j(not_equal, &done, Label::kNear); 3414 j(not_equal, &done, Label::kNear);
3415 3415
3416 // Get the prototype from the initial map. 3416 // Get the prototype from the initial map.
3417 movq(result, FieldOperand(result, Map::kPrototypeOffset)); 3417 movp(result, FieldOperand(result, Map::kPrototypeOffset));
3418 jmp(&done, Label::kNear); 3418 jmp(&done, Label::kNear);
3419 3419
3420 // Non-instance prototype: Fetch prototype from constructor field 3420 // Non-instance prototype: Fetch prototype from constructor field
3421 // in initial map. 3421 // in initial map.
3422 bind(&non_instance); 3422 bind(&non_instance);
3423 movq(result, FieldOperand(result, Map::kConstructorOffset)); 3423 movp(result, FieldOperand(result, Map::kConstructorOffset));
3424 3424
3425 // All done. 3425 // All done.
3426 bind(&done); 3426 bind(&done);
3427 } 3427 }
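
The control flow of TryGetFunctionPrototype, restated as a runnable sketch with stand-in types (not V8's object model; the miss paths return nullptr):

    struct Object { virtual ~Object() {} };
    struct Map : Object {
      bool has_non_instance_prototype = false;
      Object* constructor = nullptr;  // holds the prototype in that case
      Object* prototype = nullptr;
    };
    struct JSFunction : Object {
      Map* map = nullptr;
      Object* prototype_or_initial_map = nullptr;
    };

    Object* TryGetPrototypeSketch(JSFunction* f, Object* the_hole) {
      if (f->map->has_non_instance_prototype) {
        return f->map->constructor;        // non-instance prototype
      }
      Object* p = f->prototype_or_initial_map;
      if (p == the_hole) return nullptr;   // miss: allocated on demand later
      if (Map* m = dynamic_cast<Map*>(p)) {
        return m->prototype;               // initial map -> its prototype
      }
      return p;                            // already the prototype object
    }
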
3428 3428
3429 3429
3430 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { 3430 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
3431 if (FLAG_native_code_counters && counter->Enabled()) { 3431 if (FLAG_native_code_counters && counter->Enabled()) {
3432 Operand counter_operand = ExternalOperand(ExternalReference(counter)); 3432 Operand counter_operand = ExternalOperand(ExternalReference(counter));
3433 movl(counter_operand, Immediate(value)); 3433 movl(counter_operand, Immediate(value));
(...skipping 72 matching lines...)
3506 3506
3507 3507
3508 void MacroAssembler::InvokeFunction(Register function, 3508 void MacroAssembler::InvokeFunction(Register function,
3509 const ParameterCount& actual, 3509 const ParameterCount& actual,
3510 InvokeFlag flag, 3510 InvokeFlag flag,
3511 const CallWrapper& call_wrapper) { 3511 const CallWrapper& call_wrapper) {
3512 // You can't call a function without a valid frame. 3512 // You can't call a function without a valid frame.
3513 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3513 ASSERT(flag == JUMP_FUNCTION || has_frame());
3514 3514
3515 ASSERT(function.is(rdi)); 3515 ASSERT(function.is(rdi));
3516 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3516 movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3517 movq(rsi, FieldOperand(function, JSFunction::kContextOffset)); 3517 movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
3518 movsxlq(rbx, 3518 movsxlq(rbx,
3519 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); 3519 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
3520 // Advances rdx to the end of the Code object header, to the start of 3520 // Advances rdx to the end of the Code object header, to the start of
3521 // the executable code. 3521 // the executable code.
3522 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 3522 movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3523 3523
3524 ParameterCount expected(rbx); 3524 ParameterCount expected(rbx);
3525 InvokeCode(rdx, expected, actual, flag, call_wrapper); 3525 InvokeCode(rdx, expected, actual, flag, call_wrapper);
3526 } 3526 }
3527 3527
3528 3528
3529 void MacroAssembler::InvokeFunction(Register function, 3529 void MacroAssembler::InvokeFunction(Register function,
3530 const ParameterCount& expected, 3530 const ParameterCount& expected,
3531 const ParameterCount& actual, 3531 const ParameterCount& actual,
3532 InvokeFlag flag, 3532 InvokeFlag flag,
3533 const CallWrapper& call_wrapper) { 3533 const CallWrapper& call_wrapper) {
3534 // You can't call a function without a valid frame. 3534 // You can't call a function without a valid frame.
3535 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3535 ASSERT(flag == JUMP_FUNCTION || has_frame());
3536 3536
3537 ASSERT(function.is(rdi)); 3537 ASSERT(function.is(rdi));
3538 movq(rsi, FieldOperand(function, JSFunction::kContextOffset)); 3538 movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
3539 // Advances rdx to the end of the Code object header, to the start of 3539 // Advances rdx to the end of the Code object header, to the start of
3540 // the executable code. 3540 // the executable code.
3541 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 3541 movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3542 3542
3543 InvokeCode(rdx, expected, actual, flag, call_wrapper); 3543 InvokeCode(rdx, expected, actual, flag, call_wrapper);
3544 } 3544 }
3545 3545
3546 3546
3547 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, 3547 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
3548 const ParameterCount& expected, 3548 const ParameterCount& expected,
3549 const ParameterCount& actual, 3549 const ParameterCount& actual,
3550 InvokeFlag flag, 3550 InvokeFlag flag,
3551 const CallWrapper& call_wrapper) { 3551 const CallWrapper& call_wrapper) {
(...skipping 50 matching lines...)
3602 ASSERT(expected.reg().is(rbx)); 3602 ASSERT(expected.reg().is(rbx));
3603 } 3603 }
3604 } 3604 }
3605 3605
3606 if (!definitely_matches) { 3606 if (!definitely_matches) {
3607 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3607 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
3608 if (!code_constant.is_null()) { 3608 if (!code_constant.is_null()) {
3609 Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT); 3609 Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
3610 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); 3610 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3611 } else if (!code_register.is(rdx)) { 3611 } else if (!code_register.is(rdx)) {
3612 movq(rdx, code_register); 3612 movp(rdx, code_register);
3613 } 3613 }
3614 3614
3615 if (flag == CALL_FUNCTION) { 3615 if (flag == CALL_FUNCTION) {
3616 call_wrapper.BeforeCall(CallSize(adaptor)); 3616 call_wrapper.BeforeCall(CallSize(adaptor));
3617 Call(adaptor, RelocInfo::CODE_TARGET); 3617 Call(adaptor, RelocInfo::CODE_TARGET);
3618 call_wrapper.AfterCall(); 3618 call_wrapper.AfterCall();
3619 if (!*definitely_mismatches) { 3619 if (!*definitely_mismatches) {
3620 jmp(done, near_jump); 3620 jmp(done, near_jump);
3621 } 3621 }
3622 } else { 3622 } else {
3623 Jump(adaptor, RelocInfo::CODE_TARGET); 3623 Jump(adaptor, RelocInfo::CODE_TARGET);
3624 } 3624 }
3625 bind(&invoke); 3625 bind(&invoke);
3626 } 3626 }
3627 } 3627 }
3628 3628
3629 3629
3630 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { 3630 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
3631 if (frame_mode == BUILD_STUB_FRAME) { 3631 if (frame_mode == BUILD_STUB_FRAME) {
3632 push(rbp); // Caller's frame pointer. 3632 push(rbp); // Caller's frame pointer.
3633 movq(rbp, rsp); 3633 movp(rbp, rsp);
3634 push(rsi); // Callee's context. 3634 push(rsi); // Callee's context.
3635 Push(Smi::FromInt(StackFrame::STUB)); 3635 Push(Smi::FromInt(StackFrame::STUB));
3636 } else { 3636 } else {
3637 PredictableCodeSizeScope predictible_code_size_scope(this, 3637 PredictableCodeSizeScope predictible_code_size_scope(this,
3638 kNoCodeAgeSequenceLength); 3638 kNoCodeAgeSequenceLength);
3639 if (isolate()->IsCodePreAgingActive()) { 3639 if (isolate()->IsCodePreAgingActive()) {
3640 // Pre-age the code. 3640 // Pre-age the code.
3641 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(), 3641 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
3642 RelocInfo::CODE_AGE_SEQUENCE); 3642 RelocInfo::CODE_AGE_SEQUENCE);
3643 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength); 3643 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
3644 } else { 3644 } else {
3645 push(rbp); // Caller's frame pointer. 3645 push(rbp); // Caller's frame pointer.
3646 movq(rbp, rsp); 3646 movp(rbp, rsp);
3647 push(rsi); // Callee's context. 3647 push(rsi); // Callee's context.
3648 push(rdi); // Callee's JS function. 3648 push(rdi); // Callee's JS function.
3649 } 3649 }
3650 } 3650 }
3651 } 3651 }
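
The slot offsets the non-stub prologue establishes, relative to the new rbp (a sketch assuming 8-byte slots; these correspond to the StandardFrameConstants used elsewhere in this file):

    // rbp-relative offsets after push rbp; mov rbp, rsp; push rsi; push rdi.
    const int kCallerPCOffsetSketch = +8;    // return address (from the call)
    const int kCallerFPOffsetSketch = 0;     // saved rbp
    const int kContextOffsetSketch = -8;     // rsi, the context
    const int kJSFunctionOffsetSketch = -16; // rdi, the JS function
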
3652 3652
3653 3653
3654 void MacroAssembler::EnterFrame(StackFrame::Type type) { 3654 void MacroAssembler::EnterFrame(StackFrame::Type type) {
3655 push(rbp); 3655 push(rbp);
3656 movq(rbp, rsp); 3656 movp(rbp, rsp);
3657 push(rsi); // Context. 3657 push(rsi); // Context.
3658 Push(Smi::FromInt(type)); 3658 Push(Smi::FromInt(type));
3659 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3659 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3660 push(kScratchRegister); 3660 push(kScratchRegister);
3661 if (emit_debug_code()) { 3661 if (emit_debug_code()) {
3662 Move(kScratchRegister, 3662 Move(kScratchRegister,
3663 isolate()->factory()->undefined_value(), 3663 isolate()->factory()->undefined_value(),
3664 RelocInfo::EMBEDDED_OBJECT); 3664 RelocInfo::EMBEDDED_OBJECT);
3665 cmpq(Operand(rsp, 0), kScratchRegister); 3665 cmpq(Operand(rsp, 0), kScratchRegister);
3666 Check(not_equal, kCodeObjectNotProperlyPatched); 3666 Check(not_equal, kCodeObjectNotProperlyPatched);
3667 } 3667 }
3668 } 3668 }
3669 3669
3670 3670
3671 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 3671 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
3672 if (emit_debug_code()) { 3672 if (emit_debug_code()) {
3673 Move(kScratchRegister, Smi::FromInt(type)); 3673 Move(kScratchRegister, Smi::FromInt(type));
3674 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); 3674 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3675 Check(equal, kStackFrameTypesMustMatch); 3675 Check(equal, kStackFrameTypesMustMatch);
3676 } 3676 }
3677 movq(rsp, rbp); 3677 movp(rsp, rbp);
3678 pop(rbp); 3678 pop(rbp);
3679 } 3679 }
3680 3680
3681 3681
3682 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { 3682 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
3683 // Set up the frame structure on the stack. 3683 // Set up the frame structure on the stack.
3684 // All constants are relative to the frame pointer of the exit frame. 3684 // All constants are relative to the frame pointer of the exit frame.
3685 ASSERT(ExitFrameConstants::kCallerSPDisplacement == 3685 ASSERT(ExitFrameConstants::kCallerSPDisplacement ==
3686 kFPOnStackSize + kPCOnStackSize); 3686 kFPOnStackSize + kPCOnStackSize);
3687 ASSERT(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize); 3687 ASSERT(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
3688 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); 3688 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
3689 push(rbp); 3689 push(rbp);
3690 movq(rbp, rsp); 3690 movp(rbp, rsp);
3691 3691
3692 // Reserve room for entry stack pointer and push the code object. 3692 // Reserve room for entry stack pointer and push the code object.
3693 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 3693 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
3694 push(Immediate(0)); // Saved entry sp, patched before call. 3694 push(Immediate(0)); // Saved entry sp, patched before call.
3695 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3695 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3696 push(kScratchRegister); // Accessed from ExitFrame::code_slot. 3696 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
3697 3697
3698 // Save the frame pointer and the context in top. 3698 // Save the frame pointer and the context in top.
3699 if (save_rax) { 3699 if (save_rax) {
3700 movq(r14, rax); // Backup rax in callee-save register. 3700 movp(r14, rax); // Backup rax in callee-save register.
3701 } 3701 }
3702 3702
3703 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp); 3703 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
3704 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi); 3704 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
3705 } 3705 }
3706 3706
3707 3707
3708 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, 3708 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
3709 bool save_doubles) { 3709 bool save_doubles) {
3710 #ifdef _WIN64 3710 #ifdef _WIN64
(...skipping 16 matching lines...)
3727 3727
3728 // Get the required frame alignment for the OS. 3728 // Get the required frame alignment for the OS.
3729 const int kFrameAlignment = OS::ActivationFrameAlignment(); 3729 const int kFrameAlignment = OS::ActivationFrameAlignment();
3730 if (kFrameAlignment > 0) { 3730 if (kFrameAlignment > 0) {
3731 ASSERT(IsPowerOf2(kFrameAlignment)); 3731 ASSERT(IsPowerOf2(kFrameAlignment));
3732 ASSERT(is_int8(kFrameAlignment)); 3732 ASSERT(is_int8(kFrameAlignment));
3733 and_(rsp, Immediate(-kFrameAlignment)); 3733 and_(rsp, Immediate(-kFrameAlignment));
3734 } 3734 }
3735 3735
3736 // Patch the saved entry sp. 3736 // Patch the saved entry sp.
3737 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); 3737 movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
3738 } 3738 }
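
The and_(rsp, Immediate(-kFrameAlignment)) above is the usual round-down-to-a-power-of-two idiom; in C:

    #include <cstdint>

    // alignment must be a power of two (the ASSERT above checks this);
    // -alignment has the same bit pattern as ~(alignment - 1).
    uintptr_t AlignDownSketch(uintptr_t sp, uintptr_t alignment) {
      return sp & ~(alignment - 1);
    }
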
3739 3739
3740 3740
3741 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) { 3741 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
3742 EnterExitFramePrologue(true); 3742 EnterExitFramePrologue(true);
3743 3743
3744 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame, 3744 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
3745 // so it must be retained across the C-call. 3745 // so it must be retained across the C-call.
3746 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; 3746 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
3747 lea(r15, Operand(rbp, r14, times_pointer_size, offset)); 3747 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
(...skipping 12 matching lines...)
3760 // Registers: 3760 // Registers:
3761 // r15 : argv 3761 // r15 : argv
3762 if (save_doubles) { 3762 if (save_doubles) {
3763 int offset = -2 * kPointerSize; 3763 int offset = -2 * kPointerSize;
3764 for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) { 3764 for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
3765 XMMRegister reg = XMMRegister::FromAllocationIndex(i); 3765 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3766 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize))); 3766 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
3767 } 3767 }
3768 } 3768 }
3769 // Get the return address from the stack and restore the frame pointer. 3769 // Get the return address from the stack and restore the frame pointer.
3770 movq(rcx, Operand(rbp, kFPOnStackSize)); 3770 movp(rcx, Operand(rbp, kFPOnStackSize));
3771 movq(rbp, Operand(rbp, 0 * kPointerSize)); 3771 movp(rbp, Operand(rbp, 0 * kPointerSize));
3772 3772
3773 // Drop everything up to and including the arguments and the receiver 3773 // Drop everything up to and including the arguments and the receiver
3774 // from the caller stack. 3774 // from the caller stack.
3775 lea(rsp, Operand(r15, 1 * kPointerSize)); 3775 lea(rsp, Operand(r15, 1 * kPointerSize));
3776 3776
3777 PushReturnAddressFrom(rcx); 3777 PushReturnAddressFrom(rcx);
3778 3778
3779 LeaveExitFrameEpilogue(true); 3779 LeaveExitFrameEpilogue(true);
3780 } 3780 }
3781 3781
3782 3782
3783 void MacroAssembler::LeaveApiExitFrame(bool restore_context) { 3783 void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
3784 movq(rsp, rbp); 3784 movp(rsp, rbp);
3785 pop(rbp); 3785 pop(rbp);
3786 3786
3787 LeaveExitFrameEpilogue(restore_context); 3787 LeaveExitFrameEpilogue(restore_context);
3788 } 3788 }
3789 3789
3790 3790
3791 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) { 3791 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
3792 // Restore current context from top and clear it in debug mode. 3792 // Restore current context from top and clear it in debug mode.
3793 ExternalReference context_address(Isolate::kContextAddress, isolate()); 3793 ExternalReference context_address(Isolate::kContextAddress, isolate());
3794 Operand context_operand = ExternalOperand(context_address); 3794 Operand context_operand = ExternalOperand(context_address);
3795 if (restore_context) { 3795 if (restore_context) {
3796 movq(rsi, context_operand); 3796 movp(rsi, context_operand);
3797 } 3797 }
3798 #ifdef DEBUG 3798 #ifdef DEBUG
3799 movq(context_operand, Immediate(0)); 3799 movp(context_operand, Immediate(0));
3800 #endif 3800 #endif
3801 3801
3802 // Clear the top frame. 3802 // Clear the top frame.
3803 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, 3803 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
3804 isolate()); 3804 isolate());
3805 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address); 3805 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
3806 movq(c_entry_fp_operand, Immediate(0)); 3806 movp(c_entry_fp_operand, Immediate(0));
3807 } 3807 }
3808 3808
3809 3809
3810 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 3810 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
3811 Register scratch, 3811 Register scratch,
3812 Label* miss) { 3812 Label* miss) {
3813 Label same_contexts; 3813 Label same_contexts;
3814 3814
3815 ASSERT(!holder_reg.is(scratch)); 3815 ASSERT(!holder_reg.is(scratch));
3816 ASSERT(!scratch.is(kScratchRegister)); 3816 ASSERT(!scratch.is(kScratchRegister));
3817 // Load current lexical context from the stack frame. 3817 // Load current lexical context from the stack frame.
3818 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); 3818 movp(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3819 3819
3820 // When generating debug code, make sure the lexical context is set. 3820 // When generating debug code, make sure the lexical context is set.
3821 if (emit_debug_code()) { 3821 if (emit_debug_code()) {
3822 cmpq(scratch, Immediate(0)); 3822 cmpq(scratch, Immediate(0));
3823 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext); 3823 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
3824 } 3824 }
3825 // Load the native context of the current context. 3825 // Load the native context of the current context.
3826 int offset = 3826 int offset =
3827 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 3827 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3828 movq(scratch, FieldOperand(scratch, offset)); 3828 movp(scratch, FieldOperand(scratch, offset));
3829 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 3829 movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
3830 3830
3831 // Check the context is a native context. 3831 // Check the context is a native context.
3832 if (emit_debug_code()) { 3832 if (emit_debug_code()) {
3833 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), 3833 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
3834 isolate()->factory()->native_context_map()); 3834 isolate()->factory()->native_context_map());
3835 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); 3835 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3836 } 3836 }
3837 3837
3838 // Check if both contexts are the same. 3838 // Check if both contexts are the same.
3839 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3839 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3840 j(equal, &same_contexts); 3840 j(equal, &same_contexts);
3841 3841
3842 // Compare security tokens. 3842 // Compare security tokens.
3843 // Check that the security token in the calling global object is 3843 // Check that the security token in the calling global object is
3844 // compatible with the security token in the receiving global 3844 // compatible with the security token in the receiving global
3845 // object. 3845 // object.
3846 3846
3847 // Check the context is a native context. 3847 // Check the context is a native context.
3848 if (emit_debug_code()) { 3848 if (emit_debug_code()) {
3849 // Preserve original value of holder_reg. 3849 // Preserve original value of holder_reg.
3850 push(holder_reg); 3850 push(holder_reg);
3851 movq(holder_reg, 3851 movp(holder_reg,
3852 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3852 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3853 CompareRoot(holder_reg, Heap::kNullValueRootIndex); 3853 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3854 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull); 3854 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
3855 3855
3856 // Read the first word and compare to native_context_map(). 3856 // Read the first word and compare to native_context_map().
3857 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); 3857 movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3858 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); 3858 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
3859 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); 3859 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3860 pop(holder_reg); 3860 pop(holder_reg);
3861 } 3861 }
3862 3862
3863 movq(kScratchRegister, 3863 movp(kScratchRegister,
3864 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3864 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3865 int token_offset = 3865 int token_offset =
3866 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; 3866 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
3867 movq(scratch, FieldOperand(scratch, token_offset)); 3867 movp(scratch, FieldOperand(scratch, token_offset));
3868 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); 3868 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3869 j(not_equal, miss); 3869 j(not_equal, miss);
3870 3870
3871 bind(&same_contexts); 3871 bind(&same_contexts);
3872 } 3872 }
3873 3873
3874 3874
3875 // Compute the hash code from the untagged key. This must be kept in sync with 3875 // Compute the hash code from the untagged key. This must be kept in sync with
3876 // ComputeIntegerHash in utils.h and KeyedLoadGenericElementStub in 3876 // ComputeIntegerHash in utils.h and KeyedLoadGenericElementStub in
3877 // code-stubs-hydrogen.cc 3877 // code-stubs-hydrogen.cc
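For context, a sketch of the hash these must stay in sync with, modeled on ComputeIntegerHash in utils.h; treat the exact shifts and constants as assumptions, since utils.h is not part of this diff:

#include <stdint.h>

static uint32_t ComputeIntegerHashSketch(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;   // mix in the heap-specific seed
  hash = ~hash + (hash << 15);  // i.e. (hash << 15) - hash - 1
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;           // i.e. hash + (hash << 3) + (hash << 11)
  hash = hash ^ (hash >> 16);
  return hash;                  // callers mask the result down to 30 bits
}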
(...skipping 66 matching lines...)
3944 GetNumberHash(r0, r1); 3944 GetNumberHash(r0, r1);
3945 3945
3946 // Compute capacity mask. 3946 // Compute capacity mask.
3947 SmiToInteger32(r1, FieldOperand(elements, 3947 SmiToInteger32(r1, FieldOperand(elements,
3948 SeededNumberDictionary::kCapacityOffset)); 3948 SeededNumberDictionary::kCapacityOffset));
3949 decl(r1); 3949 decl(r1);
3950 3950
3951 // Generate an unrolled loop that performs a few probes before giving up. 3951 // Generate an unrolled loop that performs a few probes before giving up.
3952 for (int i = 0; i < kNumberDictionaryProbes; i++) { 3952 for (int i = 0; i < kNumberDictionaryProbes; i++) {
3953 // Use r2 for index calculations and keep the hash intact in r0. 3953 // Use r2 for index calculations and keep the hash intact in r0.
3954 movq(r2, r0); 3954 movp(r2, r0);
3955 // Compute the masked index: (hash + i + i * i) & mask. 3955 // Compute the masked index: (hash + i + i * i) & mask.
3956 if (i > 0) { 3956 if (i > 0) {
3957 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i))); 3957 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
3958 } 3958 }
3959 and_(r2, r1); 3959 and_(r2, r1);
3960 3960
3961 // Scale the index by multiplying by the entry size. 3961 // Scale the index by multiplying by the entry size.
3962 ASSERT(SeededNumberDictionary::kEntrySize == 3); 3962 ASSERT(SeededNumberDictionary::kEntrySize == 3);
3963 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3 3963 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
3964 3964
(...skipping 14 matching lines...)
3979 const int kDetailsOffset = 3979 const int kDetailsOffset =
3980 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize; 3980 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
3981 ASSERT_EQ(NORMAL, 0); 3981 ASSERT_EQ(NORMAL, 0);
3982 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset), 3982 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
3983 Smi::FromInt(PropertyDetails::TypeField::kMask)); 3983 Smi::FromInt(PropertyDetails::TypeField::kMask));
3984 j(not_zero, miss); 3984 j(not_zero, miss);
3985 3985
3986 // Get the value at the masked, scaled index. 3986 // Get the value at the masked, scaled index.
3987 const int kValueOffset = 3987 const int kValueOffset =
3988 SeededNumberDictionary::kElementsStartOffset + kPointerSize; 3988 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
3989 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); 3989 movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
3990 } 3990 }
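In C terms the unrolled probing above behaves like the sketch below; ProbeOffset and the probes parameter stand in for SeededNumberDictionary::GetProbeOffset and kNumberDictionaryProbes, whose definitions are not part of this diff:

#include <stdint.h>

static const int kEntrySize = 3;  // key, value, details (per the ASSERT above)

static uint32_t ProbeOffset(uint32_t i) { return i + i * i; }  // per the comment

// Returns the slot offset of the matching entry, or -1 if every probe misses
// (the generated code jumps to the miss label instead).
static int FindEntrySketch(const uint32_t* elements, uint32_t capacity,
                           uint32_t hash, uint32_t key, int probes) {
  uint32_t mask = capacity - 1;  // capacity is a power of two
  for (int i = 0; i < probes; i++) {
    uint32_t index = (hash + ProbeOffset(i)) & mask;  // masked index
    uint32_t entry = index * kEntrySize;              // scaled by entry size
    if (elements[entry] == key) return (int)entry;    // key slot matches
  }
  return -1;
}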
3991 3991
3992 3992
3993 void MacroAssembler::LoadAllocationTopHelper(Register result, 3993 void MacroAssembler::LoadAllocationTopHelper(Register result,
3994 Register scratch, 3994 Register scratch,
3995 AllocationFlags flags) { 3995 AllocationFlags flags) {
3996 ExternalReference allocation_top = 3996 ExternalReference allocation_top =
3997 AllocationUtils::GetAllocationTopReference(isolate(), flags); 3997 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3998 3998
3999 // Just return if allocation top is already known. 3999 // Just return if allocation top is already known.
4000 if ((flags & RESULT_CONTAINS_TOP) != 0) { 4000 if ((flags & RESULT_CONTAINS_TOP) != 0) {
4001 // No use of scratch if allocation top is provided. 4001 // No use of scratch if allocation top is provided.
4002 ASSERT(!scratch.is_valid()); 4002 ASSERT(!scratch.is_valid());
4003 #ifdef DEBUG 4003 #ifdef DEBUG
4004 // Assert that result actually contains top on entry. 4004 // Assert that result actually contains top on entry.
4005 Operand top_operand = ExternalOperand(allocation_top); 4005 Operand top_operand = ExternalOperand(allocation_top);
4006 cmpq(result, top_operand); 4006 cmpq(result, top_operand);
4007 Check(equal, kUnexpectedAllocationTop); 4007 Check(equal, kUnexpectedAllocationTop);
4008 #endif 4008 #endif
4009 return; 4009 return;
4010 } 4010 }
4011 4011
4012 // Move address of new object to result. Use scratch register if available, 4012 // Move address of new object to result. Use scratch register if available,
4013 // and keep address in scratch until call to UpdateAllocationTopHelper. 4013 // and keep address in scratch until call to UpdateAllocationTopHelper.
4014 if (scratch.is_valid()) { 4014 if (scratch.is_valid()) {
4015 LoadAddress(scratch, allocation_top); 4015 LoadAddress(scratch, allocation_top);
4016 movq(result, Operand(scratch, 0)); 4016 movp(result, Operand(scratch, 0));
4017 } else { 4017 } else {
4018 Load(result, allocation_top); 4018 Load(result, allocation_top);
4019 } 4019 }
4020 } 4020 }
4021 4021
4022 4022
4023 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 4023 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
4024 Register scratch, 4024 Register scratch,
4025 AllocationFlags flags) { 4025 AllocationFlags flags) {
4026 if (emit_debug_code()) { 4026 if (emit_debug_code()) {
4027 testq(result_end, Immediate(kObjectAlignmentMask)); 4027 testq(result_end, Immediate(kObjectAlignmentMask));
4028 Check(zero, kUnalignedAllocationInNewSpace); 4028 Check(zero, kUnalignedAllocationInNewSpace);
4029 } 4029 }
4030 4030
4031 ExternalReference allocation_top = 4031 ExternalReference allocation_top =
4032 AllocationUtils::GetAllocationTopReference(isolate(), flags); 4032 AllocationUtils::GetAllocationTopReference(isolate(), flags);
4033 4033
4034 // Update new top. 4034 // Update new top.
4035 if (scratch.is_valid()) { 4035 if (scratch.is_valid()) {
4036 // Scratch already contains address of allocation top. 4036 // Scratch already contains address of allocation top.
4037 movq(Operand(scratch, 0), result_end); 4037 movp(Operand(scratch, 0), result_end);
4038 } else { 4038 } else {
4039 Store(allocation_top, result_end); 4039 Store(allocation_top, result_end);
4040 } 4040 }
4041 } 4041 }
4042 4042
4043 4043
4044 void MacroAssembler::Allocate(int object_size, 4044 void MacroAssembler::Allocate(int object_size,
4045 Register result, 4045 Register result,
4046 Register result_end, 4046 Register result_end,
4047 Register scratch, 4047 Register scratch,
(...skipping 27 matching lines...)
4075 Check(zero, kAllocationIsNotDoubleAligned); 4075 Check(zero, kAllocationIsNotDoubleAligned);
4076 } 4076 }
4077 4077
4078 // Calculate new top and bail out if new space is exhausted. 4078 // Calculate new top and bail out if new space is exhausted.
4079 ExternalReference allocation_limit = 4079 ExternalReference allocation_limit =
4080 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 4080 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4081 4081
4082 Register top_reg = result_end.is_valid() ? result_end : result; 4082 Register top_reg = result_end.is_valid() ? result_end : result;
4083 4083
4084 if (!top_reg.is(result)) { 4084 if (!top_reg.is(result)) {
4085 movq(top_reg, result); 4085 movp(top_reg, result);
4086 } 4086 }
4087 addq(top_reg, Immediate(object_size)); 4087 addq(top_reg, Immediate(object_size));
4088 j(carry, gc_required); 4088 j(carry, gc_required);
4089 Operand limit_operand = ExternalOperand(allocation_limit); 4089 Operand limit_operand = ExternalOperand(allocation_limit);
4090 cmpq(top_reg, limit_operand); 4090 cmpq(top_reg, limit_operand);
4091 j(above, gc_required); 4091 j(above, gc_required);
4092 4092
4093 // Update allocation top. 4093 // Update allocation top.
4094 UpdateAllocationTopHelper(top_reg, scratch, flags); 4094 UpdateAllocationTopHelper(top_reg, scratch, flags);
4095 4095
(...skipping 55 matching lines...)
4151 // safe in new-space because the limit of the heap is aligned there. 4151 // safe in new-space because the limit of the heap is aligned there.
4152 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 4152 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4153 testq(result, Immediate(kDoubleAlignmentMask)); 4153 testq(result, Immediate(kDoubleAlignmentMask));
4154 Check(zero, kAllocationIsNotDoubleAligned); 4154 Check(zero, kAllocationIsNotDoubleAligned);
4155 } 4155 }
4156 4156
4157 // Calculate new top and bail out if new space is exhausted. 4157 // Calculate new top and bail out if new space is exhausted.
4158 ExternalReference allocation_limit = 4158 ExternalReference allocation_limit =
4159 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 4159 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4160 if (!object_size.is(result_end)) { 4160 if (!object_size.is(result_end)) {
4161 movq(result_end, object_size); 4161 movp(result_end, object_size);
4162 } 4162 }
4163 addq(result_end, result); 4163 addq(result_end, result);
4164 j(carry, gc_required); 4164 j(carry, gc_required);
4165 Operand limit_operand = ExternalOperand(allocation_limit); 4165 Operand limit_operand = ExternalOperand(allocation_limit);
4166 cmpq(result_end, limit_operand); 4166 cmpq(result_end, limit_operand);
4167 j(above, gc_required); 4167 j(above, gc_required);
4168 4168
4169 // Update allocation top. 4169 // Update allocation top.
4170 UpdateAllocationTopHelper(result_end, scratch, flags); 4170 UpdateAllocationTopHelper(result_end, scratch, flags);
4171 4171
4172 // Tag the result if requested. 4172 // Tag the result if requested.
4173 if ((flags & TAG_OBJECT) != 0) { 4173 if ((flags & TAG_OBJECT) != 0) {
4174 addq(result, Immediate(kHeapObjectTag)); 4174 addq(result, Immediate(kHeapObjectTag));
4175 } 4175 }
4176 } 4176 }
4177 4177
4178 4178
4179 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 4179 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
4180 ExternalReference new_space_allocation_top = 4180 ExternalReference new_space_allocation_top =
4181 ExternalReference::new_space_allocation_top_address(isolate()); 4181 ExternalReference::new_space_allocation_top_address(isolate());
4182 4182
4183 // Make sure the object has no tag before resetting top. 4183 // Make sure the object has no tag before resetting top.
4184 and_(object, Immediate(~kHeapObjectTagMask)); 4184 and_(object, Immediate(~kHeapObjectTagMask));
4185 Operand top_operand = ExternalOperand(new_space_allocation_top); 4185 Operand top_operand = ExternalOperand(new_space_allocation_top);
4186 #ifdef DEBUG 4186 #ifdef DEBUG
4187 cmpq(object, top_operand); 4187 cmpq(object, top_operand);
4188 Check(below, kUndoAllocationOfNonAllocatedMemory); 4188 Check(below, kUndoAllocationOfNonAllocatedMemory);
4189 #endif 4189 #endif
4190 movq(top_operand, object); 4190 movp(top_operand, object);
4191 } 4191 }
4192 4192
4193 4193
4194 void MacroAssembler::AllocateHeapNumber(Register result, 4194 void MacroAssembler::AllocateHeapNumber(Register result,
4195 Register scratch, 4195 Register scratch,
4196 Label* gc_required) { 4196 Label* gc_required) {
4197 // Allocate heap number in new space. 4197 // Allocate heap number in new space.
4198 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT); 4198 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
4199 4199
4200 // Set the map. 4200 // Set the map.
4201 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); 4201 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
4202 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4202 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4203 } 4203 }
4204 4204
4205 4205
4206 void MacroAssembler::AllocateTwoByteString(Register result, 4206 void MacroAssembler::AllocateTwoByteString(Register result,
4207 Register length, 4207 Register length,
4208 Register scratch1, 4208 Register scratch1,
4209 Register scratch2, 4209 Register scratch2,
4210 Register scratch3, 4210 Register scratch3,
4211 Label* gc_required) { 4211 Label* gc_required) {
4212 // Calculate the number of bytes needed for the characters in the string while 4212 // Calculate the number of bytes needed for the characters in the string while
(...skipping 14 matching lines...)
4227 times_1, 4227 times_1,
4228 scratch1, 4228 scratch1,
4229 result, 4229 result,
4230 scratch2, 4230 scratch2,
4231 scratch3, 4231 scratch3,
4232 gc_required, 4232 gc_required,
4233 TAG_OBJECT); 4233 TAG_OBJECT);
4234 4234
4235 // Set the map, length and hash field. 4235 // Set the map, length and hash field.
4236 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex); 4236 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
4237 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4237 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4238 Integer32ToSmi(scratch1, length); 4238 Integer32ToSmi(scratch1, length);
4239 movq(FieldOperand(result, String::kLengthOffset), scratch1); 4239 movp(FieldOperand(result, String::kLengthOffset), scratch1);
4240 movq(FieldOperand(result, String::kHashFieldOffset), 4240 movp(FieldOperand(result, String::kHashFieldOffset),
4241 Immediate(String::kEmptyHashField)); 4241 Immediate(String::kEmptyHashField));
4242 } 4242 }
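In both this function and AllocateAsciiString below, the elided size computation presumably rounds the character data up to object alignment before the Allocate call, along these lines (assumed shape of the skipped lines; two bytes per character here, one for ASCII):

// scratch1 = (SeqTwoByteString::kHeaderSize + 2 * length + kObjectAlignmentMask)
//            & ~kObjectAlignmentMask;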
4243 4243
4244 4244
4245 void MacroAssembler::AllocateAsciiString(Register result, 4245 void MacroAssembler::AllocateAsciiString(Register result,
4246 Register length, 4246 Register length,
4247 Register scratch1, 4247 Register scratch1,
4248 Register scratch2, 4248 Register scratch2,
4249 Register scratch3, 4249 Register scratch3,
4250 Label* gc_required) { 4250 Label* gc_required) {
(...skipping 14 matching lines...)
4265 times_1, 4265 times_1,
4266 scratch1, 4266 scratch1,
4267 result, 4267 result,
4268 scratch2, 4268 scratch2,
4269 scratch3, 4269 scratch3,
4270 gc_required, 4270 gc_required,
4271 TAG_OBJECT); 4271 TAG_OBJECT);
4272 4272
4273 // Set the map, length and hash field. 4273 // Set the map, length and hash field.
4274 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex); 4274 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
4275 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4275 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4276 Integer32ToSmi(scratch1, length); 4276 Integer32ToSmi(scratch1, length);
4277 movq(FieldOperand(result, String::kLengthOffset), scratch1); 4277 movp(FieldOperand(result, String::kLengthOffset), scratch1);
4278 movq(FieldOperand(result, String::kHashFieldOffset), 4278 movp(FieldOperand(result, String::kHashFieldOffset),
4279 Immediate(String::kEmptyHashField)); 4279 Immediate(String::kEmptyHashField));
4280 } 4280 }
4281 4281
4282 4282
4283 void MacroAssembler::AllocateTwoByteConsString(Register result, 4283 void MacroAssembler::AllocateTwoByteConsString(Register result,
4284 Register scratch1, 4284 Register scratch1,
4285 Register scratch2, 4285 Register scratch2,
4286 Label* gc_required) { 4286 Label* gc_required) {
4287 // Allocate cons string in new space. 4287 // Allocate cons string in new space.
4288 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, 4288 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
4289 TAG_OBJECT); 4289 TAG_OBJECT);
4290 4290
4291 // Set the map. The other fields are left uninitialized. 4291 // Set the map. The other fields are left uninitialized.
4292 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); 4292 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
4293 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4293 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4294 } 4294 }
4295 4295
4296 4296
4297 void MacroAssembler::AllocateAsciiConsString(Register result, 4297 void MacroAssembler::AllocateAsciiConsString(Register result,
4298 Register scratch1, 4298 Register scratch1,
4299 Register scratch2, 4299 Register scratch2,
4300 Label* gc_required) { 4300 Label* gc_required) {
4301 Label allocate_new_space, install_map; 4301 Label allocate_new_space, install_map;
4302 AllocationFlags flags = TAG_OBJECT; 4302 AllocationFlags flags = TAG_OBJECT;
4303 4303
(...skipping 17 matching lines...)
4321 result, 4321 result,
4322 scratch1, 4322 scratch1,
4323 scratch2, 4323 scratch2,
4324 gc_required, 4324 gc_required,
4325 flags); 4325 flags);
4326 4326
4327 bind(&install_map); 4327 bind(&install_map);
4328 4328
4329 // Set the map. The other fields are left uninitialized. 4329 // Set the map. The other fields are left uninitialized.
4330 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); 4330 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
4331 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4331 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4332 } 4332 }
4333 4333
4334 4334
4335 void MacroAssembler::AllocateTwoByteSlicedString(Register result, 4335 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
4336 Register scratch1, 4336 Register scratch1,
4337 Register scratch2, 4337 Register scratch2,
4338 Label* gc_required) { 4338 Label* gc_required) {
4339 // Allocate sliced string in new space. 4339 // Allocate sliced string in new space.
4340 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, 4340 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
4341 TAG_OBJECT); 4341 TAG_OBJECT);
4342 4342
4343 // Set the map. The other fields are left uninitialized. 4343 // Set the map. The other fields are left uninitialized.
4344 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); 4344 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
4345 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4345 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4346 } 4346 }
4347 4347
4348 4348
4349 void MacroAssembler::AllocateAsciiSlicedString(Register result, 4349 void MacroAssembler::AllocateAsciiSlicedString(Register result,
4350 Register scratch1, 4350 Register scratch1,
4351 Register scratch2, 4351 Register scratch2,
4352 Label* gc_required) { 4352 Label* gc_required) {
4353 // Allocate sliced string in new space. 4353 // Allocate sliced string in new space.
4354 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, 4354 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
4355 TAG_OBJECT); 4355 TAG_OBJECT);
4356 4356
4357 // Set the map. The other fields are left uninitialized. 4357 // Set the map. The other fields are left uninitialized.
4358 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); 4358 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
4359 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4359 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4360 } 4360 }
4361 4361
4362 4362
4363 // Copy memory, byte-by-byte, from source to destination. Not optimized for 4363 // Copy memory, byte-by-byte, from source to destination. Not optimized for
4364 // long or aligned copies. The contents of scratch and length are destroyed. 4364 // long or aligned copies. The contents of scratch and length are destroyed.
4365 // Destination is incremented by length; source, length and scratch are 4365 // Destination is incremented by length; source, length and scratch are
4366 // clobbered. 4366 // clobbered.
4367 // A simpler loop is faster on small copies, but slower on large ones. 4367 // A simpler loop is faster on small copies, but slower on large ones.
4368 // The cld() instruction must have been emitted, to set the direction flag, 4368 // The cld() instruction must have been emitted, to set the direction flag,
4369 // before calling this function. 4369 // before calling this function.
(...skipping 24 matching lines...)
4394 j(below_equal, &len8, Label::kNear); 4394 j(below_equal, &len8, Label::kNear);
4395 cmpl(length, Immediate(3 * kPointerSize)); 4395 cmpl(length, Immediate(3 * kPointerSize));
4396 j(below_equal, &len16, Label::kNear); 4396 j(below_equal, &len16, Label::kNear);
4397 cmpl(length, Immediate(4 * kPointerSize)); 4397 cmpl(length, Immediate(4 * kPointerSize));
4398 j(below_equal, &len24, Label::kNear); 4398 j(below_equal, &len24, Label::kNear);
4399 } 4399 }
4400 4400
4401 // Because source is 8-byte aligned in our uses of this function, 4401 // Because source is 8-byte aligned in our uses of this function,
4402 // we keep source aligned for the rep movs operation by copying the odd bytes 4402 // we keep source aligned for the rep movs operation by copying the odd bytes
4403 // at the end of the ranges. 4403 // at the end of the ranges.
4404 movq(scratch, length); 4404 movp(scratch, length);
4405 shrl(length, Immediate(kPointerSizeLog2)); 4405 shrl(length, Immediate(kPointerSizeLog2));
4406 repmovsq(); 4406 repmovsq();
4407 // Move remaining bytes of length. 4407 // Move remaining bytes of length.
4408 andl(scratch, Immediate(kPointerSize - 1)); 4408 andl(scratch, Immediate(kPointerSize - 1));
4409 movq(length, Operand(source, scratch, times_1, -kPointerSize)); 4409 movp(length, Operand(source, scratch, times_1, -kPointerSize));
4410 movq(Operand(destination, scratch, times_1, -kPointerSize), length); 4410 movp(Operand(destination, scratch, times_1, -kPointerSize), length);
4411 addq(destination, scratch); 4411 addq(destination, scratch);
4412 4412
4413 if (min_length <= kLongStringLimit) { 4413 if (min_length <= kLongStringLimit) {
4414 jmp(&done, Label::kNear); 4414 jmp(&done, Label::kNear);
4415 bind(&len24); 4415 bind(&len24);
4416 movq(scratch, Operand(source, 2 * kPointerSize)); 4416 movp(scratch, Operand(source, 2 * kPointerSize));
4417 movq(Operand(destination, 2 * kPointerSize), scratch); 4417 movp(Operand(destination, 2 * kPointerSize), scratch);
4418 bind(&len16); 4418 bind(&len16);
4419 movq(scratch, Operand(source, kPointerSize)); 4419 movp(scratch, Operand(source, kPointerSize));
4420 movq(Operand(destination, kPointerSize), scratch); 4420 movp(Operand(destination, kPointerSize), scratch);
4421 bind(&len8); 4421 bind(&len8);
4422 movq(scratch, Operand(source, 0)); 4422 movp(scratch, Operand(source, 0));
4423 movq(Operand(destination, 0), scratch); 4423 movp(Operand(destination, 0), scratch);
4424 // Move remaining bytes of length. 4424 // Move remaining bytes of length.
4425 movq(scratch, Operand(source, length, times_1, -kPointerSize)); 4425 movp(scratch, Operand(source, length, times_1, -kPointerSize));
4426 movq(Operand(destination, length, times_1, -kPointerSize), scratch); 4426 movp(Operand(destination, length, times_1, -kPointerSize), scratch);
4427 addq(destination, length); 4427 addq(destination, length);
4428 jmp(&done, Label::kNear); 4428 jmp(&done, Label::kNear);
4429 4429
4430 bind(&short_string); 4430 bind(&short_string);
4431 if (min_length == 0) { 4431 if (min_length == 0) {
4432 testl(length, length); 4432 testl(length, length);
4433 j(zero, &done, Label::kNear); 4433 j(zero, &done, Label::kNear);
4434 } 4434 }
4435 4435
4436 bind(&short_loop); 4436 bind(&short_loop);
4437 movb(scratch, Operand(source, 0)); 4437 movb(scratch, Operand(source, 0));
4438 movb(Operand(destination, 0), scratch); 4438 movb(Operand(destination, 0), scratch);
4439 incq(source); 4439 incq(source);
4440 incq(destination); 4440 incq(destination);
4441 decl(length); 4441 decl(length);
4442 j(not_zero, &short_loop); 4442 j(not_zero, &short_loop);
4443 } 4443 }
4444 4444
4445 bind(&done); 4445 bind(&done);
4446 } 4446 }
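A C-level sketch of the long-copy path above, assuming (as the dispatch code guarantees) that it only runs when length exceeds 4 * kPointerSize, so the trailing word-sized copy stays inside the range:

#include <stdint.h>
#include <string.h>

void CopyBytesSketch(uint8_t* dst, const uint8_t* src, size_t length) {
  size_t words = length >> 3;    // shrl(length, Immediate(kPointerSizeLog2))
  size_t tail = length & 7;      // andl(scratch, Immediate(kPointerSize - 1))
  memcpy(dst, src, words * 8);   // repmovsq()
  src += words * 8;
  dst += words * 8;
  // Copy the final eight bytes of the range in one shot; this may rewrite a
  // few bytes rep movs already copied, which is harmless for a forward copy.
  uint64_t last;
  memcpy(&last, src + tail - 8, sizeof(last));
  memcpy(dst + tail - 8, &last, sizeof(last));
}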
4447 4447
4448 4448
4449 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, 4449 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
4450 Register end_offset, 4450 Register end_offset,
4451 Register filler) { 4451 Register filler) {
4452 Label loop, entry; 4452 Label loop, entry;
4453 jmp(&entry); 4453 jmp(&entry);
4454 bind(&loop); 4454 bind(&loop);
4455 movq(Operand(start_offset, 0), filler); 4455 movp(Operand(start_offset, 0), filler);
4456 addq(start_offset, Immediate(kPointerSize)); 4456 addq(start_offset, Immediate(kPointerSize));
4457 bind(&entry); 4457 bind(&entry);
4458 cmpq(start_offset, end_offset); 4458 cmpq(start_offset, end_offset);
4459 j(less, &loop); 4459 j(less, &loop);
4460 } 4460 }
4461 4461
4462 4462
4463 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { 4463 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4464 if (context_chain_length > 0) { 4464 if (context_chain_length > 0) {
4465 // Move up the chain of contexts to the context containing the slot. 4465 // Move up the chain of contexts to the context containing the slot.
4466 movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX))); 4466 movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4467 for (int i = 1; i < context_chain_length; i++) { 4467 for (int i = 1; i < context_chain_length; i++) {
4468 movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); 4468 movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4469 } 4469 }
4470 } else { 4470 } else {
4471 // Slot is in the current function context. Move it into the 4471 // Slot is in the current function context. Move it into the
4472 // destination register in case we store into it (the write barrier 4472 // destination register in case we store into it (the write barrier
4473 // cannot be allowed to destroy the context in rsi). 4473 // cannot be allowed to destroy the context in rsi).
4474 movq(dst, rsi); 4474 movp(dst, rsi);
4475 } 4475 }
4476 4476
4477 // We should not have found a with context by walking the context 4477 // We should not have found a with context by walking the context
4478 // chain (i.e., the static scope chain and runtime context chain do 4478 // chain (i.e., the static scope chain and runtime context chain do
4479 // not agree). A variable occurring in such a scope should have 4479 // not agree). A variable occurring in such a scope should have
4480 // slot type LOOKUP and not CONTEXT. 4480 // slot type LOOKUP and not CONTEXT.
4481 if (emit_debug_code()) { 4481 if (emit_debug_code()) {
4482 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset), 4482 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4483 Heap::kWithContextMapRootIndex); 4483 Heap::kWithContextMapRootIndex);
4484 Check(not_equal, kVariableResolvedToWithContext); 4484 Check(not_equal, kVariableResolvedToWithContext);
4485 } 4485 }
4486 } 4486 }
4487 4487
4488 4488
4489 void MacroAssembler::LoadTransitionedArrayMapConditional( 4489 void MacroAssembler::LoadTransitionedArrayMapConditional(
4490 ElementsKind expected_kind, 4490 ElementsKind expected_kind,
4491 ElementsKind transitioned_kind, 4491 ElementsKind transitioned_kind,
4492 Register map_in_out, 4492 Register map_in_out,
4493 Register scratch, 4493 Register scratch,
4494 Label* no_map_match) { 4494 Label* no_map_match) {
4495 // Load the global or builtins object from the current context. 4495 // Load the global or builtins object from the current context.
4496 movq(scratch, 4496 movp(scratch,
4497 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 4497 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4498 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 4498 movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
4499 4499
4500 // Check that the function's map is the same as the expected cached map. 4500 // Check that the function's map is the same as the expected cached map.
4501 movq(scratch, Operand(scratch, 4501 movp(scratch, Operand(scratch,
4502 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX))); 4502 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
4503 4503
4504 int offset = expected_kind * kPointerSize + 4504 int offset = expected_kind * kPointerSize +
4505 FixedArrayBase::kHeaderSize; 4505 FixedArrayBase::kHeaderSize;
4506 cmpq(map_in_out, FieldOperand(scratch, offset)); 4506 cmpq(map_in_out, FieldOperand(scratch, offset));
4507 j(not_equal, no_map_match); 4507 j(not_equal, no_map_match);
4508 4508
4509 // Use the transitioned cached map. 4509 // Use the transitioned cached map.
4510 offset = transitioned_kind * kPointerSize + 4510 offset = transitioned_kind * kPointerSize +
4511 FixedArrayBase::kHeaderSize; 4511 FixedArrayBase::kHeaderSize;
4512 movq(map_in_out, FieldOperand(scratch, offset)); 4512 movp(map_in_out, FieldOperand(scratch, offset));
4513 } 4513 }
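In effect (pseudocode; the accessors are hypothetical):

// cached_maps = native_context[JS_ARRAY_MAPS_INDEX];  // FixedArray of maps
// if (map_in_out != cached_maps[expected_kind]) goto no_map_match;
// map_in_out = cached_maps[transitioned_kind];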
4514 4514
4515 4515
4516 void MacroAssembler::LoadInitialArrayMap( 4516 void MacroAssembler::LoadInitialArrayMap(
4517 Register function_in, Register scratch, 4517 Register function_in, Register scratch,
4518 Register map_out, bool can_have_holes) { 4518 Register map_out, bool can_have_holes) {
4519 ASSERT(!function_in.is(map_out)); 4519 ASSERT(!function_in.is(map_out));
4520 Label done; 4520 Label done;
4521 movq(map_out, FieldOperand(function_in, 4521 movp(map_out, FieldOperand(function_in,
4522 JSFunction::kPrototypeOrInitialMapOffset)); 4522 JSFunction::kPrototypeOrInitialMapOffset));
4523 if (!FLAG_smi_only_arrays) { 4523 if (!FLAG_smi_only_arrays) {
4524 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; 4524 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
4525 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 4525 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
4526 kind, 4526 kind,
4527 map_out, 4527 map_out,
4528 scratch, 4528 scratch,
4529 &done); 4529 &done);
4530 } else if (can_have_holes) { 4530 } else if (can_have_holes) {
4531 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 4531 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
4532 FAST_HOLEY_SMI_ELEMENTS, 4532 FAST_HOLEY_SMI_ELEMENTS,
4533 map_out, 4533 map_out,
4534 scratch, 4534 scratch,
4535 &done); 4535 &done);
4536 } 4536 }
4537 bind(&done); 4537 bind(&done);
4538 } 4538 }
4539 4539
4540 #ifdef _WIN64 4540 #ifdef _WIN64
4541 static const int kRegisterPassedArguments = 4; 4541 static const int kRegisterPassedArguments = 4;
4542 #else 4542 #else
4543 static const int kRegisterPassedArguments = 6; 4543 static const int kRegisterPassedArguments = 6;
4544 #endif 4544 #endif
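These constants feed ArgumentStackSlotsForCFunctionCall, used below; its body is not in this diff, but the usual logic is that Win64 always reserves shadow space for the four register arguments, while System V only needs stack slots for arguments beyond the six registers. A hedged sketch:

static int ArgumentStackSlotsSketch(int num_arguments) {
#ifdef _WIN64
  // Shadow space: at least four slots, plus one per additional argument.
  return num_arguments < kRegisterPassedArguments ? kRegisterPassedArguments
                                                  : num_arguments;
#else
  return num_arguments < kRegisterPassedArguments
             ? 0
             : num_arguments - kRegisterPassedArguments;
#endif
}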
4545 4545
4546 void MacroAssembler::LoadGlobalFunction(int index, Register function) { 4546 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4547 // Load the global or builtins object from the current context. 4547 // Load the global or builtins object from the current context.
4548 movq(function, 4548 movp(function,
4549 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 4549 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4550 // Load the native context from the global or builtins object. 4550 // Load the native context from the global or builtins object.
4551 movq(function, FieldOperand(function, GlobalObject::kNativeContextOffset)); 4551 movp(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
4552 // Load the function from the native context. 4552 // Load the function from the native context.
4553 movq(function, Operand(function, Context::SlotOffset(index))); 4553 movp(function, Operand(function, Context::SlotOffset(index)));
4554 } 4554 }
4555 4555
4556 4556
4557 void MacroAssembler::LoadArrayFunction(Register function) { 4557 void MacroAssembler::LoadArrayFunction(Register function) {
4558 movq(function, 4558 movp(function,
4559 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 4559 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4560 movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset)); 4560 movp(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
4561 movq(function, 4561 movp(function,
4562 Operand(function, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); 4562 Operand(function, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
4563 } 4563 }
4564 4564
4565 4565
4566 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 4566 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4567 Register map) { 4567 Register map) {
4568 // Load the initial map. The global functions all have initial maps. 4568 // Load the initial map. The global functions all have initial maps.
4569 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 4569 movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4570 if (emit_debug_code()) { 4570 if (emit_debug_code()) {
4571 Label ok, fail; 4571 Label ok, fail;
4572 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); 4572 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
4573 jmp(&ok); 4573 jmp(&ok);
4574 bind(&fail); 4574 bind(&fail);
4575 Abort(kGlobalFunctionsMustHaveInitialMap); 4575 Abort(kGlobalFunctionsMustHaveInitialMap);
4576 bind(&ok); 4576 bind(&ok);
4577 } 4577 }
4578 } 4578 }
4579 4579
(...skipping 20 matching lines...)
4600 void MacroAssembler::EmitSeqStringSetCharCheck(Register string, 4600 void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
4601 Register index, 4601 Register index,
4602 Register value, 4602 Register value,
4603 uint32_t encoding_mask) { 4603 uint32_t encoding_mask) {
4604 Label is_object; 4604 Label is_object;
4605 JumpIfNotSmi(string, &is_object); 4605 JumpIfNotSmi(string, &is_object);
4606 Throw(kNonObject); 4606 Throw(kNonObject);
4607 bind(&is_object); 4607 bind(&is_object);
4608 4608
4609 push(value); 4609 push(value);
4610 movq(value, FieldOperand(string, HeapObject::kMapOffset)); 4610 movp(value, FieldOperand(string, HeapObject::kMapOffset));
4611 movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset)); 4611 movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset));
4612 4612
4613 andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask)); 4613 andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
4614 cmpq(value, Immediate(encoding_mask)); 4614 cmpq(value, Immediate(encoding_mask));
4615 pop(value); 4615 pop(value);
4616 ThrowIf(not_equal, kUnexpectedStringType); 4616 ThrowIf(not_equal, kUnexpectedStringType);
4617 4617
4618 // The index is assumed to be untagged coming in; tag it to compare with the 4618 // The index is assumed to be untagged coming in; tag it to compare with the
4619 // string length without using a temp register. It is restored at the end of 4619 // string length without using a temp register. It is restored at the end of
4620 // this function. 4620 // this function.
4621 Integer32ToSmi(index, index); 4621 Integer32ToSmi(index, index);
4622 SmiCompare(index, FieldOperand(string, String::kLengthOffset)); 4622 SmiCompare(index, FieldOperand(string, String::kLengthOffset));
4623 ThrowIf(greater_equal, kIndexIsTooLarge); 4623 ThrowIf(greater_equal, kIndexIsTooLarge);
4624 4624
4625 SmiCompare(index, Smi::FromInt(0)); 4625 SmiCompare(index, Smi::FromInt(0));
4626 ThrowIf(less, kIndexIsNegative); 4626 ThrowIf(less, kIndexIsNegative);
4627 4627
4628 // Restore the index. 4628 // Restore the index.
4629 SmiToInteger32(index, index); 4629 SmiToInteger32(index, index);
4630 } 4630 }
4631 4631
4632 4632
4633 void MacroAssembler::PrepareCallCFunction(int num_arguments) { 4633 void MacroAssembler::PrepareCallCFunction(int num_arguments) {
4634 int frame_alignment = OS::ActivationFrameAlignment(); 4634 int frame_alignment = OS::ActivationFrameAlignment();
4635 ASSERT(frame_alignment != 0); 4635 ASSERT(frame_alignment != 0);
4636 ASSERT(num_arguments >= 0); 4636 ASSERT(num_arguments >= 0);
4637 4637
4638 // Make stack end at alignment and allocate space for arguments and old rsp. 4638 // Make stack end at alignment and allocate space for arguments and old rsp.
4639 movq(kScratchRegister, rsp); 4639 movp(kScratchRegister, rsp);
4640 ASSERT(IsPowerOf2(frame_alignment)); 4640 ASSERT(IsPowerOf2(frame_alignment));
4641 int argument_slots_on_stack = 4641 int argument_slots_on_stack =
4642 ArgumentStackSlotsForCFunctionCall(num_arguments); 4642 ArgumentStackSlotsForCFunctionCall(num_arguments);
4643 subq(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize)); 4643 subq(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
4644 and_(rsp, Immediate(-frame_alignment)); 4644 and_(rsp, Immediate(-frame_alignment));
4645 movq(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister); 4645 movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
4646 } 4646 }
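After this prologue the stack looks like the following (N = argument_slots_on_stack); CallCFunction below reloads rsp from the saved slot:

// [rsp + N * kRegisterSize]        : original rsp, restored in CallCFunction
// [rsp .. rsp + N * kRegisterSize) : outgoing stack-argument slots
// rsp itself is aligned to OS::ActivationFrameAlignment()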
4647 4647
4648 4648
4649 void MacroAssembler::CallCFunction(ExternalReference function, 4649 void MacroAssembler::CallCFunction(ExternalReference function,
4650 int num_arguments) { 4650 int num_arguments) {
4651 LoadAddress(rax, function); 4651 LoadAddress(rax, function);
4652 CallCFunction(rax, num_arguments); 4652 CallCFunction(rax, num_arguments);
4653 } 4653 }
4654 4654
4655 4655
4656 void MacroAssembler::CallCFunction(Register function, int num_arguments) { 4656 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
4657 ASSERT(has_frame()); 4657 ASSERT(has_frame());
4658 // Check stack alignment. 4658 // Check stack alignment.
4659 if (emit_debug_code()) { 4659 if (emit_debug_code()) {
4660 CheckStackAlignment(); 4660 CheckStackAlignment();
4661 } 4661 }
4662 4662
4663 call(function); 4663 call(function);
4664 ASSERT(OS::ActivationFrameAlignment() != 0); 4664 ASSERT(OS::ActivationFrameAlignment() != 0);
4665 ASSERT(num_arguments >= 0); 4665 ASSERT(num_arguments >= 0);
4666 int argument_slots_on_stack = 4666 int argument_slots_on_stack =
4667 ArgumentStackSlotsForCFunctionCall(num_arguments); 4667 ArgumentStackSlotsForCFunctionCall(num_arguments);
4668 movq(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize)); 4668 movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
4669 } 4669 }
4670 4670
4671 4671
4672 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { 4672 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
4673 if (r1.is(r2)) return true; 4673 if (r1.is(r2)) return true;
4674 if (r1.is(r3)) return true; 4674 if (r1.is(r3)) return true;
4675 if (r1.is(r4)) return true; 4675 if (r1.is(r4)) return true;
4676 if (r2.is(r3)) return true; 4676 if (r2.is(r3)) return true;
4677 if (r2.is(r4)) return true; 4677 if (r2.is(r4)) return true;
4678 if (r3.is(r4)) return true; 4678 if (r3.is(r4)) return true;
(...skipping 26 matching lines...)
4705 Register object, 4705 Register object,
4706 Register scratch, 4706 Register scratch,
4707 int mask, 4707 int mask,
4708 Condition cc, 4708 Condition cc,
4709 Label* condition_met, 4709 Label* condition_met,
4710 Label::Distance condition_met_distance) { 4710 Label::Distance condition_met_distance) {
4711 ASSERT(cc == zero || cc == not_zero); 4711 ASSERT(cc == zero || cc == not_zero);
4712 if (scratch.is(object)) { 4712 if (scratch.is(object)) {
4713 and_(scratch, Immediate(~Page::kPageAlignmentMask)); 4713 and_(scratch, Immediate(~Page::kPageAlignmentMask));
4714 } else { 4714 } else {
4715 movq(scratch, Immediate(~Page::kPageAlignmentMask)); 4715 movp(scratch, Immediate(~Page::kPageAlignmentMask));
4716 and_(scratch, object); 4716 and_(scratch, object);
4717 } 4717 }
4718 if (mask < (1 << kBitsPerByte)) { 4718 if (mask < (1 << kBitsPerByte)) {
4719 testb(Operand(scratch, MemoryChunk::kFlagsOffset), 4719 testb(Operand(scratch, MemoryChunk::kFlagsOffset),
4720 Immediate(static_cast<uint8_t>(mask))); 4720 Immediate(static_cast<uint8_t>(mask)));
4721 } else { 4721 } else {
4722 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask)); 4722 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
4723 } 4723 }
4724 j(cc, condition_met, condition_met_distance); 4724 j(cc, condition_met, condition_met_distance);
4725 } 4725 }
4726 4726
4727 4727
4728 void MacroAssembler::CheckMapDeprecated(Handle<Map> map, 4728 void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
4729 Register scratch, 4729 Register scratch,
4730 Label* if_deprecated) { 4730 Label* if_deprecated) {
4731 if (map->CanBeDeprecated()) { 4731 if (map->CanBeDeprecated()) {
4732 Move(scratch, map); 4732 Move(scratch, map);
4733 movq(scratch, FieldOperand(scratch, Map::kBitField3Offset)); 4733 movp(scratch, FieldOperand(scratch, Map::kBitField3Offset));
4734 SmiToInteger32(scratch, scratch); 4734 SmiToInteger32(scratch, scratch);
4735 and_(scratch, Immediate(Map::Deprecated::kMask)); 4735 and_(scratch, Immediate(Map::Deprecated::kMask));
4736 j(not_zero, if_deprecated); 4736 j(not_zero, if_deprecated);
4737 } 4737 }
4738 } 4738 }
4739 4739
4740 4740
4741 void MacroAssembler::JumpIfBlack(Register object, 4741 void MacroAssembler::JumpIfBlack(Register object,
4742 Register bitmap_scratch, 4742 Register bitmap_scratch,
4743 Register mask_scratch, 4743 Register mask_scratch,
4744 Label* on_black, 4744 Label* on_black,
4745 Label::Distance on_black_distance) { 4745 Label::Distance on_black_distance) {
4746 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx)); 4746 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
4747 GetMarkBits(object, bitmap_scratch, mask_scratch); 4747 GetMarkBits(object, bitmap_scratch, mask_scratch);
4748 4748
4749 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); 4749 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
4750 // The mask_scratch register contains a 1 at the position of the first bit 4750 // The mask_scratch register contains a 1 at the position of the first bit
4751 // and a 0 at all other positions, including the position of the second bit. 4751 // and a 0 at all other positions, including the position of the second bit.
4752 movq(rcx, mask_scratch); 4752 movp(rcx, mask_scratch);
4753 // Make rcx into a mask that covers both marking bits using the operation 4753 // Make rcx into a mask that covers both marking bits using the operation
4754 // rcx = mask | (mask << 1). 4754 // rcx = mask | (mask << 1).
4755 lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0)); 4755 lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
4756 // Note that we are using a 4-byte aligned 8-byte load. 4756 // Note that we are using a 4-byte aligned 8-byte load.
4757 and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize)); 4757 and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
4758 cmpq(mask_scratch, rcx); 4758 cmpq(mask_scratch, rcx);
4759 j(equal, on_black, on_black_distance); 4759 j(equal, on_black, on_black_distance);
4760 } 4760 }
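Since mask_scratch has exactly one bit set, the lea above yields mask | (mask << 1); in C terms the black test is:

#include <stdint.h>

// Black is the two-bit pattern "10": first mark bit set, second bit clear.
static bool IsBlackSketch(uint32_t cell, uint32_t mask) {
  uint32_t window = mask | (mask << 1);  // lea(rcx, Operand(mask, mask, times_2, 0))
  return (cell & window) == mask;
}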
4761 4761
4762 4762
4763 // Detect some, but not all, common pointer-free objects. This is used by the 4763 // Detect some, but not all, common pointer-free objects. This is used by the
4764 // incremental write barrier which doesn't care about oddballs (they are always 4764 // incremental write barrier which doesn't care about oddballs (they are always
4765 // marked black immediately so this code is not hit). 4765 // marked black immediately so this code is not hit).
4766 void MacroAssembler::JumpIfDataObject( 4766 void MacroAssembler::JumpIfDataObject(
4767 Register value, 4767 Register value,
4768 Register scratch, 4768 Register scratch,
4769 Label* not_data_object, 4769 Label* not_data_object,
4770 Label::Distance not_data_object_distance) { 4770 Label::Distance not_data_object_distance) {
4771 Label is_data_object; 4771 Label is_data_object;
4772 movq(scratch, FieldOperand(value, HeapObject::kMapOffset)); 4772 movp(scratch, FieldOperand(value, HeapObject::kMapOffset));
4773 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); 4773 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
4774 j(equal, &is_data_object, Label::kNear); 4774 j(equal, &is_data_object, Label::kNear);
4775 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); 4775 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
4776 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); 4776 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
4777 // If it's a string and it's not a cons string then it's an object containing 4777 // If it's a string and it's not a cons string then it's an object containing
4778 // no GC pointers. 4778 // no GC pointers.
4779 testb(FieldOperand(scratch, Map::kInstanceTypeOffset), 4779 testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
4780 Immediate(kIsIndirectStringMask | kIsNotStringMask)); 4780 Immediate(kIsIndirectStringMask | kIsNotStringMask));
4781 j(not_zero, not_data_object, not_data_object_distance); 4781 j(not_zero, not_data_object, not_data_object_distance);
4782 bind(&is_data_object); 4782 bind(&is_data_object);
4783 } 4783 }
4784 4784
4785 4785
4786 void MacroAssembler::GetMarkBits(Register addr_reg, 4786 void MacroAssembler::GetMarkBits(Register addr_reg,
4787 Register bitmap_reg, 4787 Register bitmap_reg,
4788 Register mask_reg) { 4788 Register mask_reg) {
4789 ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx)); 4789 ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
4790 movq(bitmap_reg, addr_reg); 4790 movp(bitmap_reg, addr_reg);
4791 // Sign-extended 32-bit immediate. 4791 // Sign-extended 32-bit immediate.
4792 and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask)); 4792 and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
4793 movq(rcx, addr_reg); 4793 movp(rcx, addr_reg);
4794 int shift = 4794 int shift =
4795 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2; 4795 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
4796 shrl(rcx, Immediate(shift)); 4796 shrl(rcx, Immediate(shift));
4797 and_(rcx, 4797 and_(rcx,
4798 Immediate((Page::kPageAlignmentMask >> shift) & 4798 Immediate((Page::kPageAlignmentMask >> shift) &
4799 ~(Bitmap::kBytesPerCell - 1))); 4799 ~(Bitmap::kBytesPerCell - 1)));
4800 4800
4801 addq(bitmap_reg, rcx); 4801 addq(bitmap_reg, rcx);
4802 movq(rcx, addr_reg); 4802 movp(rcx, addr_reg);
4803 shrl(rcx, Immediate(kPointerSizeLog2)); 4803 shrl(rcx, Immediate(kPointerSizeLog2));
4804 and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1)); 4804 and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
4805 movl(mask_reg, Immediate(1)); 4805 movl(mask_reg, Immediate(1));
4806 shl_cl(mask_reg); 4806 shl_cl(mask_reg);
4807 } 4807 }
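The bit twiddling above maps an address to its page's marking-bitmap cell and an in-cell mask; a sketch with assumed constants (page size and cell width are not visible in this diff):

#include <stdint.h>

static const uintptr_t kPageAlignmentMask = ((uintptr_t)1 << 20) - 1;  // assumed 1 MB pages
static const int kPointerSizeLog2 = 3;   // 8-byte words
static const int kBitsPerCellLog2 = 5;   // assumed 32-bit bitmap cells
static const int kBytesPerCellLog2 = 2;

static void GetMarkBitsSketch(uintptr_t addr, uintptr_t* cell, uint32_t* mask) {
  uintptr_t page = addr & ~kPageAlignmentMask;  // start of the page
  int shift = kBitsPerCellLog2 + kPointerSizeLog2 - kBytesPerCellLog2;
  uintptr_t cell_offset =
      (addr >> shift) &
      ((kPageAlignmentMask >> shift) & ~(((uintptr_t)1 << kBytesPerCellLog2) - 1));
  *cell = page + cell_offset;  // callers add MemoryChunk::kHeaderSize on top
  uint32_t bit = (uint32_t)((addr >> kPointerSizeLog2) & ((1u << kBitsPerCellLog2) - 1));
  *mask = 1u << bit;           // movl(mask_reg, Immediate(1)); shl_cl(mask_reg)
}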
4808 4808
4809 4809
4810 void MacroAssembler::EnsureNotWhite( 4810 void MacroAssembler::EnsureNotWhite(
4811 Register value, 4811 Register value,
4812 Register bitmap_scratch, 4812 Register bitmap_scratch,
(...skipping 30 matching lines...)
4843 } 4843 }
4844 4844
4845 // Value is white. We check whether it is data that doesn't need scanning. 4845 // Value is white. We check whether it is data that doesn't need scanning.
4846 // Currently only checks for HeapNumber and non-cons strings. 4846 // Currently only checks for HeapNumber and non-cons strings.
4847 Register map = rcx; // Holds map while checking type. 4847 Register map = rcx; // Holds map while checking type.
4848 Register length = rcx; // Holds length of object after checking type. 4848 Register length = rcx; // Holds length of object after checking type.
4849 Label not_heap_number; 4849 Label not_heap_number;
4850 Label is_data_object; 4850 Label is_data_object;
4851 4851
4852 // Check for heap-number 4852 // Check for heap-number
4853 movq(map, FieldOperand(value, HeapObject::kMapOffset)); 4853 movp(map, FieldOperand(value, HeapObject::kMapOffset));
4854 CompareRoot(map, Heap::kHeapNumberMapRootIndex); 4854 CompareRoot(map, Heap::kHeapNumberMapRootIndex);
4855 j(not_equal, &not_heap_number, Label::kNear); 4855 j(not_equal, &not_heap_number, Label::kNear);
4856 movq(length, Immediate(HeapNumber::kSize)); 4856 movp(length, Immediate(HeapNumber::kSize));
4857 jmp(&is_data_object, Label::kNear); 4857 jmp(&is_data_object, Label::kNear);
4858 4858
4859 bind(&not_heap_number); 4859 bind(&not_heap_number);
4860 // Check for strings. 4860 // Check for strings.
4861 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); 4861 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
4862 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); 4862 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
4863 // If it's a string and it's not a cons string then it's an object containing 4863 // If it's a string and it's not a cons string then it's an object containing
4864 // no GC pointers. 4864 // no GC pointers.
4865 Register instance_type = rcx; 4865 Register instance_type = rcx;
4866 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 4866 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
4867 testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask)); 4867 testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
4868 j(not_zero, value_is_white_and_not_data); 4868 j(not_zero, value_is_white_and_not_data);
4869 // It's a non-indirect (non-cons and non-slice) string. 4869 // It's a non-indirect (non-cons and non-slice) string.
4870 // If it's external, the length is just ExternalString::kSize. 4870 // If it's external, the length is just ExternalString::kSize.
4871 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2). 4871 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
4872 Label not_external; 4872 Label not_external;
4873 // External strings are the only ones with the kExternalStringTag bit 4873 // External strings are the only ones with the kExternalStringTag bit
4874 // set. 4874 // set.
4875 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag); 4875 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
4876 ASSERT_EQ(0, kConsStringTag & kExternalStringTag); 4876 ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
4877 testb(instance_type, Immediate(kExternalStringTag)); 4877 testb(instance_type, Immediate(kExternalStringTag));
4878 j(zero, &not_external, Label::kNear); 4878 j(zero, &not_external, Label::kNear);
4879 movq(length, Immediate(ExternalString::kSize)); 4879 movp(length, Immediate(ExternalString::kSize));
4880 jmp(&is_data_object, Label::kNear); 4880 jmp(&is_data_object, Label::kNear);
4881 4881
4882 bind(&not_external); 4882 bind(&not_external);
4883 // Sequential string, either ASCII or UC16. 4883 // Sequential string, either ASCII or UC16.
4884 ASSERT(kOneByteStringTag == 0x04); 4884 ASSERT(kOneByteStringTag == 0x04);
4885 and_(length, Immediate(kStringEncodingMask)); 4885 and_(length, Immediate(kStringEncodingMask));
4886 xor_(length, Immediate(kStringEncodingMask)); 4886 xor_(length, Immediate(kStringEncodingMask));
4887 addq(length, Immediate(0x04)); 4887 addq(length, Immediate(0x04));
4888 // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2. 4888 // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2.
4889 imul(length, FieldOperand(value, String::kLengthOffset)); 4889 imul(length, FieldOperand(value, String::kLengthOffset));
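The and/xor/add sequence above is a branch-free way to turn the string-encoding bit into a character size shifted left by two: one-byte strings yield 4, two-byte strings yield 8, and the following imul against the smi-tagged length folds both shifts together. A minimal standalone C++ sketch of the trick, assuming kStringEncodingMask == 0x04 (consistent with the ASSERT on kOneByteStringTag above); everything else here is illustrative, not V8 code:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kStringEncodingMask = 0x04;  // assumed, per the ASSERT above
      for (uint32_t bits : {0x04u, 0x00u}) {      // one-byte vs. two-byte case
        uint32_t length = bits & kStringEncodingMask;  // and_(length, mask)
        length ^= kStringEncodingMask;                 // xor_(length, mask)
        length += 0x04;                                // addq(length, Immediate(0x04))
        // Char size << 2: 4 for ASCII, 8 for UC16, with no branches taken.
        assert(length == (bits ? 4u : 8u));
      }
      return 0;
    }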
(...skipping 10 matching lines...)
4900 addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length); 4900 addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);
4901 4901
4902 bind(&done); 4902 bind(&done);
4903 } 4903 }
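Taken together, the routine classifies a white value as a scanless data object and computes its size in bytes for the live-bytes counter. A hedged C++ restatement of that decision tree; only the two masks pinned by the ASSERTs in the diff are taken from the source, and the tag and size constants are hypothetical stand-ins:

    #include <cstddef>
    #include <cstdint>

    constexpr uint8_t kIsNotStringMask = 0x80;       // per the ASSERT in the diff
    constexpr uint8_t kIsIndirectStringMask = 0x01;  // per the ASSERT in the diff
    constexpr uint8_t kExternalStringTag = 0x08;     // assumed stand-in
    constexpr size_t kHeapNumberSize = 16;           // hypothetical
    constexpr size_t kExternalStringSize = 32;       // hypothetical
    constexpr size_t kSeqStringHeaderSize = 24;      // hypothetical

    // Returns the byte size of a data object that needs no scanning, or 0 when
    // the value is white but not data (the value_is_white_and_not_data path).
    size_t DataObjectSize(bool is_heap_number, uint8_t instance_type,
                          size_t char_count, size_t char_size) {
      if (is_heap_number) return kHeapNumberSize;
      // Cons/sliced strings and non-strings may hold GC pointers.
      if (instance_type & (kIsIndirectStringMask | kIsNotStringMask)) return 0;
      if (instance_type & kExternalStringTag) return kExternalStringSize;
      return kSeqStringHeaderSize + char_count * char_size;  // sequential string
    }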
4904 4904
4905 4905
4906 void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) { 4906 void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
4907 Label next, start; 4907 Label next, start;
4908 Register empty_fixed_array_value = r8; 4908 Register empty_fixed_array_value = r8;
4909 LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); 4909 LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
4910 movq(rcx, rax); 4910 movp(rcx, rax);
4911 4911
4912 // Check if the enum length field is properly initialized, indicating that 4912 // Check if the enum length field is properly initialized, indicating that
4913 // there is an enum cache. 4913 // there is an enum cache.
4914 movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset)); 4914 movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
4915 4915
4916 EnumLength(rdx, rbx); 4916 EnumLength(rdx, rbx);
4917 Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel)); 4917 Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
4918 j(equal, call_runtime); 4918 j(equal, call_runtime);
4919 4919
4920 jmp(&start); 4920 jmp(&start);
4921 4921
4922 bind(&next); 4922 bind(&next);
4923 4923
4924 movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset)); 4924 movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
4925 4925
4926 // For all objects but the receiver, check that the cache is empty. 4926 // For all objects but the receiver, check that the cache is empty.
4927 EnumLength(rdx, rbx); 4927 EnumLength(rdx, rbx);
4928 Cmp(rdx, Smi::FromInt(0)); 4928 Cmp(rdx, Smi::FromInt(0));
4929 j(not_equal, call_runtime); 4929 j(not_equal, call_runtime);
4930 4930
4931 bind(&start); 4931 bind(&start);
4932 4932
4933 // Check that there are no elements. Register rcx contains the current JS 4933 // Check that there are no elements. Register rcx contains the current JS
4934 // object we've reached through the prototype chain. 4934 // object we've reached through the prototype chain.
4935 cmpq(empty_fixed_array_value, 4935 cmpq(empty_fixed_array_value,
4936 FieldOperand(rcx, JSObject::kElementsOffset)); 4936 FieldOperand(rcx, JSObject::kElementsOffset));
4937 j(not_equal, call_runtime); 4937 j(not_equal, call_runtime);
4938 4938
4939 movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); 4939 movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
4940 cmpq(rcx, null_value); 4940 cmpq(rcx, null_value);
4941 j(not_equal, &next); 4941 j(not_equal, &next);
4942 } 4942 }
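In prose: the receiver must have a properly initialized enum cache, every object up the prototype chain, receiver included, must have empty elements, and past the receiver every cache must also be empty. A hedged sketch of that loop with a hypothetical object layout (the real walk goes through each object's map, and EnumLength reads a bit field out of it):

    constexpr int kInvalidEnumCacheSentinel = -1;  // stand-in for the real value

    struct Object {
      int enum_length;      // what EnumLength() extracts from the object's map
      bool elements_empty;  // elements == empty_fixed_array
      Object* prototype;    // nullptr stands in for the null value terminator
    };

    // Returns false exactly where the assembler jumps to call_runtime.
    bool CheckEnumCache(const Object* receiver) {
      if (receiver->enum_length == kInvalidEnumCacheSentinel) return false;
      for (const Object* current = receiver; current != nullptr;
           current = current->prototype) {
        // Objects past the receiver must have an empty enum cache.
        if (current != receiver && current->enum_length != 0) return false;
        // Every object on the chain must have no elements at all.
        if (!current->elements_empty) return false;
      }
      return true;
    }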
4943 4943
4944 void MacroAssembler::TestJSArrayForAllocationMemento( 4944 void MacroAssembler::TestJSArrayForAllocationMemento(
4945 Register receiver_reg, 4945 Register receiver_reg,
4946 Register scratch_reg, 4946 Register scratch_reg,
4947 Label* no_memento_found) { 4947 Label* no_memento_found) {
4948 ExternalReference new_space_start = 4948 ExternalReference new_space_start =
4949 ExternalReference::new_space_start(isolate()); 4949 ExternalReference::new_space_start(isolate());
(...skipping 15 matching lines...)
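The collapsed body follows the usual memento pattern: an AllocationMemento, if one exists, sits immediately after the JSArray in new space, so the code bounds-checks that address against the new-space limits before comparing the map word found there. A hedged sketch; every parameter here is a hypothetical stand-in for values the assembler reads from external references and fixed field offsets:

    #include <cstddef>
    #include <cstdint>

    bool HasAllocationMemento(uintptr_t array_addr, size_t array_size,
                              uintptr_t new_space_start, uintptr_t new_space_top,
                              uintptr_t memento_map_addr,
                              uintptr_t (*map_word_at)(uintptr_t)) {
      uintptr_t candidate = array_addr + array_size;  // just past the array
      if (candidate < new_space_start || candidate >= new_space_top)
        return false;  // no_memento_found: candidate lies outside new space
      return map_word_at(candidate) == memento_map_addr;
    }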
4965 void MacroAssembler::JumpIfDictionaryInPrototypeChain( 4965 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
4966 Register object, 4966 Register object,
4967 Register scratch0, 4967 Register scratch0,
4968 Register scratch1, 4968 Register scratch1,
4969 Label* found) { 4969 Label* found) {
4970 ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister))); 4970 ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
4971 ASSERT(!scratch1.is(scratch0)); 4971 ASSERT(!scratch1.is(scratch0));
4972 Register current = scratch0; 4972 Register current = scratch0;
4973 Label loop_again; 4973 Label loop_again;
4974 4974
4975 movq(current, object); 4975 movp(current, object);
4976 4976
4977 // Loop based on the map going up the prototype chain. 4977 // Loop based on the map going up the prototype chain.
4978 bind(&loop_again); 4978 bind(&loop_again);
4979 movq(current, FieldOperand(current, HeapObject::kMapOffset)); 4979 movp(current, FieldOperand(current, HeapObject::kMapOffset));
4980 movq(scratch1, FieldOperand(current, Map::kBitField2Offset)); 4980 movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
4981 and_(scratch1, Immediate(Map::kElementsKindMask)); 4981 and_(scratch1, Immediate(Map::kElementsKindMask));
4982 shr(scratch1, Immediate(Map::kElementsKindShift)); 4982 shr(scratch1, Immediate(Map::kElementsKindShift));
4983 cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS)); 4983 cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS));
4984 j(equal, found); 4984 j(equal, found);
4985 movq(current, FieldOperand(current, Map::kPrototypeOffset)); 4985 movp(current, FieldOperand(current, Map::kPrototypeOffset));
4986 CompareRoot(current, Heap::kNullValueRootIndex); 4986 CompareRoot(current, Heap::kNullValueRootIndex);
4987 j(not_equal, &loop_again); 4987 j(not_equal, &loop_again);
4988 } 4988 }
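Each iteration decodes the elements kind out of the map's second bit field before following the prototype pointer. A hedged sketch of that decode; the mask, shift, and kind value are assumed stand-ins for the constants defined on V8's Map class:

    #include <cstdint>

    constexpr uint32_t kElementsKindMask = 0xF8;   // assumed stand-in
    constexpr uint32_t kElementsKindShift = 3;     // assumed stand-in
    constexpr uint32_t kDictionaryElements = 6;    // assumed stand-in

    // Mirrors and_(scratch1, mask); shr(scratch1, shift); cmpq(..., kind).
    inline bool HasDictionaryElements(uint32_t bit_field2) {
      return ((bit_field2 & kElementsKindMask) >> kElementsKindShift) ==
             kDictionaryElements;
    }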
4989 4989
4990 4990
4991 } } // namespace v8::internal 4991 } } // namespace v8::internal
4992 4992
4993 #endif // V8_TARGET_ARCH_X64 4993 #endif // V8_TARGET_ARCH_X64