| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 673 matching lines...) |
| 684 ASSERT(static_cast<int>(offset) == offset); | 684 ASSERT(static_cast<int>(offset) == offset); |
| 685 return static_cast<int>(offset); | 685 return static_cast<int>(offset); |
| 686 } | 686 } |
| 687 | 687 |
| 688 | 688 |
| 689 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) { | 689 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) { |
| 690 EnterApiExitFrame(arg_stack_space); | 690 EnterApiExitFrame(arg_stack_space); |
| 691 } | 691 } |
| 692 | 692 |
| 693 | 693 |
| 694 void MacroAssembler::CallApiFunctionAndReturn(Address function_address, | 694 void MacroAssembler::CallApiFunctionAndReturn( |
| 695 Address thunk_address, | 695 Address function_address, |
| 696 Register thunk_last_arg, | 696 Address thunk_address, |
| 697 int stack_space, | 697 Register thunk_last_arg, |
| 698 int return_value_offset) { | 698 int stack_space, |
| 699 Operand return_value_operand, |
| 700 Operand* context_restore_operand) { |
| 699 Label prologue; | 701 Label prologue; |
| 700 Label promote_scheduled_exception; | 702 Label promote_scheduled_exception; |
| 703 Label exception_handled; |
| 701 Label delete_allocated_handles; | 704 Label delete_allocated_handles; |
| 702 Label leave_exit_frame; | 705 Label leave_exit_frame; |
| 703 Label write_back; | 706 Label write_back; |
| 704 | 707 |
| 705 Factory* factory = isolate()->factory(); | 708 Factory* factory = isolate()->factory(); |
| 706 ExternalReference next_address = | 709 ExternalReference next_address = |
| 707 ExternalReference::handle_scope_next_address(isolate()); | 710 ExternalReference::handle_scope_next_address(isolate()); |
| 708 const int kNextOffset = 0; | 711 const int kNextOffset = 0; |
| 709 const int kLimitOffset = Offset( | 712 const int kLimitOffset = Offset( |
| 710 ExternalReference::handle_scope_limit_address(isolate()), | 713 ExternalReference::handle_scope_limit_address(isolate()), |
| (...skipping 50 matching lines...) |
| 761 if (FLAG_log_timer_events) { | 764 if (FLAG_log_timer_events) { |
| 762 FrameScope frame(this, StackFrame::MANUAL); | 765 FrameScope frame(this, StackFrame::MANUAL); |
| 763 PushSafepointRegisters(); | 766 PushSafepointRegisters(); |
| 764 PrepareCallCFunction(1); | 767 PrepareCallCFunction(1); |
| 765 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); | 768 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); |
| 766 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); | 769 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); |
| 767 PopSafepointRegisters(); | 770 PopSafepointRegisters(); |
| 768 } | 771 } |
| 769 | 772 |
| 770 // Load the value from ReturnValue | 773 // Load the value from ReturnValue |
| 771 movq(rax, Operand(rbp, return_value_offset * kPointerSize)); | 774 movq(rax, return_value_operand); |
| 772 bind(&prologue); | 775 bind(&prologue); |
| 773 | 776 |
| 774 // No more valid handles (the result handle was the last one). Restore | 777 // No more valid handles (the result handle was the last one). Restore |
| 775 // previous handle scope. | 778 // previous handle scope. |
| 776 subl(Operand(base_reg, kLevelOffset), Immediate(1)); | 779 subl(Operand(base_reg, kLevelOffset), Immediate(1)); |
| 777 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); | 780 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); |
| 778 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 781 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
| 779 j(not_equal, &delete_allocated_handles); | 782 j(not_equal, &delete_allocated_handles); |
| 780 bind(&leave_exit_frame); | 783 bind(&leave_exit_frame); |
| 781 | 784 |
| 782 // Check if the function scheduled an exception. | 785 // Check if the function scheduled an exception. |
| 783 movq(rsi, scheduled_exception_address); | 786 movq(rsi, scheduled_exception_address); |
| 784 Cmp(Operand(rsi, 0), factory->the_hole_value()); | 787 Cmp(Operand(rsi, 0), factory->the_hole_value()); |
| 785 j(not_equal, &promote_scheduled_exception); | 788 j(not_equal, &promote_scheduled_exception); |
| 789 bind(&exception_handled); |
| 786 | 790 |
| 787 #if ENABLE_EXTRA_CHECKS | 791 #if ENABLE_EXTRA_CHECKS |
| 788 // Check if the function returned a valid JavaScript value. | 792 // Check if the function returned a valid JavaScript value. |
| 789 Label ok; | 793 Label ok; |
| 790 Register return_value = rax; | 794 Register return_value = rax; |
| 791 Register map = rcx; | 795 Register map = rcx; |
| 792 | 796 |
| 793 JumpIfSmi(return_value, &ok, Label::kNear); | 797 JumpIfSmi(return_value, &ok, Label::kNear); |
| 794 movq(map, FieldOperand(return_value, HeapObject::kMapOffset)); | 798 movq(map, FieldOperand(return_value, HeapObject::kMapOffset)); |
| 795 | 799 |
| (...skipping 16 matching lines...) |
| 812 j(equal, &ok, Label::kNear); | 816 j(equal, &ok, Label::kNear); |
| 813 | 817 |
| 814 CompareRoot(return_value, Heap::kNullValueRootIndex); | 818 CompareRoot(return_value, Heap::kNullValueRootIndex); |
| 815 j(equal, &ok, Label::kNear); | 819 j(equal, &ok, Label::kNear); |
| 816 | 820 |
| 817 Abort(kAPICallReturnedInvalidObject); | 821 Abort(kAPICallReturnedInvalidObject); |
| 818 | 822 |
| 819 bind(&ok); | 823 bind(&ok); |
| 820 #endif | 824 #endif |
| 821 | 825 |
| 822 LeaveApiExitFrame(); | 826 bool restore_context = context_restore_operand != NULL; |
| 827 if (restore_context) { |
| 828 movq(rsi, *context_restore_operand); |
| 829 } |
| 830 LeaveApiExitFrame(!restore_context); |
| 823 ret(stack_space * kPointerSize); | 831 ret(stack_space * kPointerSize); |
| 824 | 832 |
| 825 bind(&promote_scheduled_exception); | 833 bind(&promote_scheduled_exception); |
| 826 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 834 { |
| 835 FrameScope frame(this, StackFrame::INTERNAL); |
| 836 CallRuntime(Runtime::kPromoteScheduledException, 0); |
| 837 } |
| 838 jmp(&exception_handled); |
| 827 | 839 |
| 828 // HandleScope limit has changed. Delete allocated extensions. | 840 // HandleScope limit has changed. Delete allocated extensions. |
| 829 bind(&delete_allocated_handles); | 841 bind(&delete_allocated_handles); |
| 830 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); | 842 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); |
| 831 movq(prev_limit_reg, rax); | 843 movq(prev_limit_reg, rax); |
| 832 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); | 844 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); |
| 833 LoadAddress(rax, | 845 LoadAddress(rax, |
| 834 ExternalReference::delete_handle_scope_extensions(isolate())); | 846 ExternalReference::delete_handle_scope_extensions(isolate())); |
| 835 call(rax); | 847 call(rax); |
| 836 movq(rax, prev_limit_reg); | 848 movq(rax, prev_limit_reg); |
| (...skipping 1408 matching lines...) |
| 2245 | 2257 |
| 2246 | 2258 |
| 2247 void MacroAssembler::Test(const Operand& src, Smi* source) { | 2259 void MacroAssembler::Test(const Operand& src, Smi* source) { |
| 2248 testl(Operand(src, kIntSize), Immediate(source->value())); | 2260 testl(Operand(src, kIntSize), Immediate(source->value())); |
| 2249 } | 2261 } |
| 2250 | 2262 |
| 2251 | 2263 |
| 2252 // ---------------------------------------------------------------------------- | 2264 // ---------------------------------------------------------------------------- |
| 2253 | 2265 |
| 2254 | 2266 |
| 2267 void MacroAssembler::LookupNumberStringCache(Register object, |
| 2268 Register result, |
| 2269 Register scratch1, |
| 2270 Register scratch2, |
| 2271 Label* not_found) { |
| 2272 // Use of registers. Register result is used as a temporary. |
| 2273 Register number_string_cache = result; |
| 2274 Register mask = scratch1; |
| 2275 Register scratch = scratch2; |
| 2276 |
| 2277 // Load the number string cache. |
| 2278 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); |
| 2279 |
| 2280 // Make the hash mask from the length of the number string cache. It |
| 2281 // contains two elements (number and string) for each cache entry. |
| 2282 SmiToInteger32( |
| 2283 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
| 2284 shrl(mask, Immediate(1)); |
| 2285 subq(mask, Immediate(1)); // Make mask. |
| 2286 |
| 2287 // Calculate the entry in the number string cache. The hash value in the |
| 2288 // number string cache for smis is just the smi value, and the hash for |
| 2289 // doubles is the xor of the upper and lower words. See |
| 2290 // Heap::GetNumberStringCache. |
| 2291 Label is_smi; |
| 2292 Label load_result_from_cache; |
| 2293 JumpIfSmi(object, &is_smi); |
| 2294 CheckMap(object, |
| 2295 isolate()->factory()->heap_number_map(), |
| 2296 not_found, |
| 2297 DONT_DO_SMI_CHECK); |
| 2298 |
| 2299 STATIC_ASSERT(8 == kDoubleSize); |
| 2300 movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); |
| 2301 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); |
| 2302 and_(scratch, mask); |
| 2303 // Each entry in string cache consists of two pointer sized fields, |
| 2304 // but times_twice_pointer_size (multiplication by 16) scale factor |
| 2305 // is not supported by addrmode on x64 platform. |
| 2306 // So we have to premultiply entry index before lookup. |
| 2307 shl(scratch, Immediate(kPointerSizeLog2 + 1)); |
| 2308 |
| 2309 Register index = scratch; |
| 2310 Register probe = mask; |
| 2311 movq(probe, |
| 2312 FieldOperand(number_string_cache, |
| 2313 index, |
| 2314 times_1, |
| 2315 FixedArray::kHeaderSize)); |
| 2316 JumpIfSmi(probe, not_found); |
| 2317 movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); |
| 2318 ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset)); |
| 2319 j(parity_even, not_found); // Bail out if NaN is involved. |
| 2320 j(not_equal, not_found); // The cache did not contain this value. |
| 2321 jmp(&load_result_from_cache); |
| 2322 |
| 2323 bind(&is_smi); |
| 2324 SmiToInteger32(scratch, object); |
| 2325 and_(scratch, mask); |
| 2326 // Each entry in string cache consists of two pointer sized fields, |
| 2327 // but times_twice_pointer_size (multiplication by 16) scale factor |
| 2328 // is not supported by addrmode on x64 platform. |
| 2329 // So we have to premultiply entry index before lookup. |
| 2330 shl(scratch, Immediate(kPointerSizeLog2 + 1)); |
| 2331 |
| 2332 // Check if the entry is the smi we are looking for. |
| 2333 cmpq(object, |
| 2334 FieldOperand(number_string_cache, |
| 2335 index, |
| 2336 times_1, |
| 2337 FixedArray::kHeaderSize)); |
| 2338 j(not_equal, not_found); |
| 2339 |
| 2340 // Get the result from the cache. |
| 2341 bind(&load_result_from_cache); |
| 2342 movq(result, |
| 2343 FieldOperand(number_string_cache, |
| 2344 index, |
| 2345 times_1, |
| 2346 FixedArray::kHeaderSize + kPointerSize)); |
| 2347 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); |
| 2348 } |
| 2349 |
| 2350 |
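
Note on the new LookupNumberStringCache helper above: the cache stores (number, string) pairs in a single FixedArray, so the probe mask is (length / 2) - 1, the hash of a heap number is the xor of its upper and lower 32-bit words, and the resulting entry index is pre-shifted by kPointerSizeLog2 + 1 because x64 addressing modes offer no *16 scale factor. A minimal standalone sketch of that arithmetic (plain C++ with assumed constants, not V8 internals):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

// Hash of a heap number: xor of the upper and lower 32-bit words of the double.
static uint32_t DoubleHash(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  return static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
}

int main() {
  const int kPointerSizeLog2 = 3;                  // 8-byte pointers on x64
  const int cache_length = 128;                    // FixedArray length: numbers + strings
  const uint32_t mask = (cache_length >> 1) - 1;   // shrl(mask, 1); subq(mask, 1)

  // Smi probe: the hash is just the integer value.
  uint32_t smi_offset = (42u & mask) << (kPointerSizeLog2 + 1);
  // Heap-number probe: xor of the two 32-bit halves of the double.
  uint32_t num_offset = (DoubleHash(3.14) & mask) << (kPointerSizeLog2 + 1);

  // Shifting by kPointerSizeLog2 + 1 premultiplies the entry index by
  // 2 * kPointerSize, since the addressing mode cannot scale by 16 itself.
  std::printf("smi entry offset = %u, heap-number entry offset = %u\n",
              smi_offset, num_offset);
}
```
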
| 2255 void MacroAssembler::JumpIfNotString(Register object, | 2351 void MacroAssembler::JumpIfNotString(Register object, |
| 2256 Register object_map, | 2352 Register object_map, |
| 2257 Label* not_string, | 2353 Label* not_string, |
| 2258 Label::Distance near_jump) { | 2354 Label::Distance near_jump) { |
| 2259 Condition is_smi = CheckSmi(object); | 2355 Condition is_smi = CheckSmi(object); |
| 2260 j(is_smi, not_string, near_jump); | 2356 j(is_smi, not_string, near_jump); |
| 2261 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); | 2357 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); |
| 2262 j(above_equal, not_string, near_jump); | 2358 j(above_equal, not_string, near_jump); |
| 2263 } | 2359 } |
| 2264 | 2360 |
| (...skipping 1423 matching lines...) |
| 3688 // Get the return address from the stack and restore the frame pointer. | 3784 // Get the return address from the stack and restore the frame pointer. |
| 3689 movq(rcx, Operand(rbp, 1 * kPointerSize)); | 3785 movq(rcx, Operand(rbp, 1 * kPointerSize)); |
| 3690 movq(rbp, Operand(rbp, 0 * kPointerSize)); | 3786 movq(rbp, Operand(rbp, 0 * kPointerSize)); |
| 3691 | 3787 |
| 3692 // Drop everything up to and including the arguments and the receiver | 3788 // Drop everything up to and including the arguments and the receiver |
| 3693 // from the caller stack. | 3789 // from the caller stack. |
| 3694 lea(rsp, Operand(r15, 1 * kPointerSize)); | 3790 lea(rsp, Operand(r15, 1 * kPointerSize)); |
| 3695 | 3791 |
| 3696 PushReturnAddressFrom(rcx); | 3792 PushReturnAddressFrom(rcx); |
| 3697 | 3793 |
| 3698 LeaveExitFrameEpilogue(); | 3794 LeaveExitFrameEpilogue(true); |
| 3699 } | 3795 } |
| 3700 | 3796 |
| 3701 | 3797 |
| 3702 void MacroAssembler::LeaveApiExitFrame() { | 3798 void MacroAssembler::LeaveApiExitFrame(bool restore_context) { |
| 3703 movq(rsp, rbp); | 3799 movq(rsp, rbp); |
| 3704 pop(rbp); | 3800 pop(rbp); |
| 3705 | 3801 |
| 3706 LeaveExitFrameEpilogue(); | 3802 LeaveExitFrameEpilogue(restore_context); |
| 3707 } | 3803 } |
| 3708 | 3804 |
| 3709 | 3805 |
| 3710 void MacroAssembler::LeaveExitFrameEpilogue() { | 3806 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) { |
| 3711 // Restore current context from top and clear it in debug mode. | 3807 // Restore current context from top and clear it in debug mode. |
| 3712 ExternalReference context_address(Isolate::kContextAddress, isolate()); | 3808 ExternalReference context_address(Isolate::kContextAddress, isolate()); |
| 3713 Operand context_operand = ExternalOperand(context_address); | 3809 Operand context_operand = ExternalOperand(context_address); |
| 3714 movq(rsi, context_operand); | 3810 if (restore_context) { |
| 3811 movq(rsi, context_operand); |
| 3812 } |
| 3715 #ifdef DEBUG | 3813 #ifdef DEBUG |
| 3716 movq(context_operand, Immediate(0)); | 3814 movq(context_operand, Immediate(0)); |
| 3717 #endif | 3815 #endif |
| 3718 | 3816 |
| 3719 // Clear the top frame. | 3817 // Clear the top frame. |
| 3720 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, | 3818 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, |
| 3721 isolate()); | 3819 isolate()); |
| 3722 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address); | 3820 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address); |
| 3723 movq(c_entry_fp_operand, Immediate(0)); | 3821 movq(c_entry_fp_operand, Immediate(0)); |
| 3724 } | 3822 } |
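
Note on the restore_context plumbing that ties the hunks above together: CallApiFunctionAndReturn now takes an optional context-restore operand; when it is supplied, rsi is reloaded from that slot and LeaveApiExitFrame(!restore_context) tells the epilogue not to overwrite it from Isolate::kContextAddress. A rough standalone sketch of that control flow (hypothetical plain-C++ stand-ins, not the generated assembly):

```cpp
#include <cstdio>

// Stand-ins for the machine state touched by the exit-frame epilogue.
struct State {
  long rsi;              // context register
  long isolate_context;  // Isolate::kContextAddress slot
};

// Mirrors LeaveExitFrameEpilogue(bool restore_context): reload the context
// register from the isolate slot only when asked to, then clear the slot.
static void LeaveExitFrameEpilogue(State* s, bool restore_context) {
  if (restore_context) s->rsi = s->isolate_context;
  s->isolate_context = 0;  // cleared (only in debug mode in the real code)
}

// Mirrors the tail of CallApiFunctionAndReturn: if the caller passed a
// context_restore_operand, the context comes from that slot instead, and the
// epilogue is told not to clobber it.
static void CallApiFunctionAndReturn(State* s, const long* context_restore_operand) {
  bool restore_context = context_restore_operand != nullptr;
  if (restore_context) s->rsi = *context_restore_operand;  // movq(rsi, *context_restore_operand)
  LeaveExitFrameEpilogue(s, !restore_context);             // LeaveApiExitFrame(!restore_context)
}

int main() {
  State s{0, 111};
  long saved_context = 222;
  CallApiFunctionAndReturn(&s, &saved_context);
  std::printf("rsi = %ld\n", s.rsi);  // 222: restored from the caller-supplied slot

  s = State{0, 111};
  CallApiFunctionAndReturn(&s, nullptr);
  std::printf("rsi = %ld\n", s.rsi);  // 111: old behaviour, restored from the isolate slot
}
```
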
| (...skipping 1094 matching lines...) |
| 4819 j(greater, &no_memento_available); | 4917 j(greater, &no_memento_available); |
| 4820 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4918 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
| 4821 Heap::kAllocationMementoMapRootIndex); | 4919 Heap::kAllocationMementoMapRootIndex); |
| 4822 bind(&no_memento_available); | 4920 bind(&no_memento_available); |
| 4823 } | 4921 } |
| 4824 | 4922 |
| 4825 | 4923 |
| 4826 } } // namespace v8::internal | 4924 } } // namespace v8::internal |
| 4827 | 4925 |
| 4828 #endif // V8_TARGET_ARCH_X64 | 4926 #endif // V8_TARGET_ARCH_X64 |