OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 632 matching lines...)
643 | 643 |
644 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( | 644 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( |
645 ApiFunction* function, int stack_space) { | 645 ApiFunction* function, int stack_space) { |
646 Label empty_result; | 646 Label empty_result; |
647 Label prologue; | 647 Label prologue; |
648 Label promote_scheduled_exception; | 648 Label promote_scheduled_exception; |
649 Label delete_allocated_handles; | 649 Label delete_allocated_handles; |
650 Label leave_exit_frame; | 650 Label leave_exit_frame; |
651 Label write_back; | 651 Label write_back; |
652 | 652 |
| 653 Factory* factory = isolate()->factory(); |
653 ExternalReference next_address = | 654 ExternalReference next_address = |
654 ExternalReference::handle_scope_next_address(); | 655 ExternalReference::handle_scope_next_address(); |
655 const int kNextOffset = 0; | 656 const int kNextOffset = 0; |
656 const int kLimitOffset = Offset( | 657 const int kLimitOffset = Offset( |
657 ExternalReference::handle_scope_limit_address(), | 658 ExternalReference::handle_scope_limit_address(), |
658 next_address); | 659 next_address); |
659 const int kLevelOffset = Offset( | 660 const int kLevelOffset = Offset( |
660 ExternalReference::handle_scope_level_address(), | 661 ExternalReference::handle_scope_level_address(), |
661 next_address); | 662 next_address); |
662 ExternalReference scheduled_exception_address = | 663 ExternalReference scheduled_exception_address = |
(...skipping 27 matching lines...)
690 // No more valid handles (the result handle was the last one). Restore | 691 // No more valid handles (the result handle was the last one). Restore |
691 // previous handle scope. | 692 // previous handle scope. |
692 subl(Operand(base_reg, kLevelOffset), Immediate(1)); | 693 subl(Operand(base_reg, kLevelOffset), Immediate(1)); |
693 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); | 694 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); |
694 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 695 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
695 j(not_equal, &delete_allocated_handles); | 696 j(not_equal, &delete_allocated_handles); |
696 bind(&leave_exit_frame); | 697 bind(&leave_exit_frame); |
697 | 698 |
698 // Check if the function scheduled an exception. | 699 // Check if the function scheduled an exception. |
699 movq(rsi, scheduled_exception_address); | 700 movq(rsi, scheduled_exception_address); |
700 Cmp(Operand(rsi, 0), FACTORY->the_hole_value()); | 701 Cmp(Operand(rsi, 0), factory->the_hole_value()); |
701 j(not_equal, &promote_scheduled_exception); | 702 j(not_equal, &promote_scheduled_exception); |
702 | 703 |
703 LeaveApiExitFrame(); | 704 LeaveApiExitFrame(); |
704 ret(stack_space * kPointerSize); | 705 ret(stack_space * kPointerSize); |
705 | 706 |
706 bind(&promote_scheduled_exception); | 707 bind(&promote_scheduled_exception); |
707 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, | 708 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, |
708 0, 1); | 709 0, 1); |
709 if (result->IsFailure()) { | 710 if (result->IsFailure()) { |
710 return result; | 711 return result; |
711 } | 712 } |
712 | 713 |
713 bind(&empty_result); | 714 bind(&empty_result); |
714 // It was zero; the result is undefined. | 715 // It was zero; the result is undefined. |
715 Move(rax, FACTORY->undefined_value()); | 716 Move(rax, factory->undefined_value()); |
716 jmp(&prologue); | 717 jmp(&prologue); |
717 | 718 |
718 // HandleScope limit has changed. Delete allocated extensions. | 719 // HandleScope limit has changed. Delete allocated extensions. |
719 bind(&delete_allocated_handles); | 720 bind(&delete_allocated_handles); |
720 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); | 721 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); |
721 movq(prev_limit_reg, rax); | 722 movq(prev_limit_reg, rax); |
722 #ifdef _WIN64 | 723 #ifdef _WIN64 |
723 LoadAddress(rcx, ExternalReference::isolate_address()); | 724 LoadAddress(rcx, ExternalReference::isolate_address()); |
724 #else | 725 #else |
725 LoadAddress(rdi, ExternalReference::isolate_address()); | 726 LoadAddress(rdi, ExternalReference::isolate_address()); |
(...skipping 1160 matching lines...)
1886 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); | 1887 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); |
1887 j(not_equal, fail); | 1888 j(not_equal, fail); |
1888 } | 1889 } |
1889 | 1890 |
1890 | 1891 |
1891 void MacroAssembler::AbortIfNotNumber(Register object) { | 1892 void MacroAssembler::AbortIfNotNumber(Register object) { |
1892 NearLabel ok; | 1893 NearLabel ok; |
1893 Condition is_smi = CheckSmi(object); | 1894 Condition is_smi = CheckSmi(object); |
1894 j(is_smi, &ok); | 1895 j(is_smi, &ok); |
1895 Cmp(FieldOperand(object, HeapObject::kMapOffset), | 1896 Cmp(FieldOperand(object, HeapObject::kMapOffset), |
1896 FACTORY->heap_number_map()); | 1897 isolate()->factory()->heap_number_map()); |
1897 Assert(equal, "Operand not a number"); | 1898 Assert(equal, "Operand not a number"); |
1898 bind(&ok); | 1899 bind(&ok); |
1899 } | 1900 } |
1900 | 1901 |
1901 | 1902 |
1902 void MacroAssembler::AbortIfSmi(Register object) { | 1903 void MacroAssembler::AbortIfSmi(Register object) { |
1903 NearLabel ok; | 1904 NearLabel ok; |
1904 Condition is_smi = CheckSmi(object); | 1905 Condition is_smi = CheckSmi(object); |
1905 Assert(NegateCondition(is_smi), "Operand is a smi"); | 1906 Assert(NegateCondition(is_smi), "Operand is a smi"); |
1906 } | 1907 } |
(...skipping 236 matching lines...)
2143 | 2144 |
2144 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 2145 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
2145 push(rbp); | 2146 push(rbp); |
2146 movq(rbp, rsp); | 2147 movq(rbp, rsp); |
2147 push(rsi); // Context. | 2148 push(rsi); // Context. |
2148 Push(Smi::FromInt(type)); | 2149 Push(Smi::FromInt(type)); |
2149 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 2150 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
2150 push(kScratchRegister); | 2151 push(kScratchRegister); |
2151 if (emit_debug_code()) { | 2152 if (emit_debug_code()) { |
2152 movq(kScratchRegister, | 2153 movq(kScratchRegister, |
2153 FACTORY->undefined_value(), | 2154 isolate()->factory()->undefined_value(), |
2154 RelocInfo::EMBEDDED_OBJECT); | 2155 RelocInfo::EMBEDDED_OBJECT); |
2155 cmpq(Operand(rsp, 0), kScratchRegister); | 2156 cmpq(Operand(rsp, 0), kScratchRegister); |
2156 Check(not_equal, "code object not properly patched"); | 2157 Check(not_equal, "code object not properly patched"); |
2157 } | 2158 } |
2158 } | 2159 } |
2159 | 2160 |
2160 | 2161 |
2161 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 2162 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
2162 if (emit_debug_code()) { | 2163 if (emit_debug_code()) { |
2163 Move(kScratchRegister, Smi::FromInt(type)); | 2164 Move(kScratchRegister, Smi::FromInt(type)); |
(...skipping 148 matching lines...)
2312 Check(not_equal, "we should not have an empty lexical context"); | 2313 Check(not_equal, "we should not have an empty lexical context"); |
2313 } | 2314 } |
2314 // Load the global context of the current context. | 2315 // Load the global context of the current context. |
2315 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 2316 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
2316 movq(scratch, FieldOperand(scratch, offset)); | 2317 movq(scratch, FieldOperand(scratch, offset)); |
2317 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); | 2318 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); |
2318 | 2319 |
2319 // Check the context is a global context. | 2320 // Check the context is a global context. |
2320 if (emit_debug_code()) { | 2321 if (emit_debug_code()) { |
2321 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), | 2322 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), |
2322 FACTORY->global_context_map()); | 2323 isolate()->factory()->global_context_map()); |
2323 Check(equal, "JSGlobalObject::global_context should be a global context."); | 2324 Check(equal, "JSGlobalObject::global_context should be a global context."); |
2324 } | 2325 } |
2325 | 2326 |
2326 // Check if both contexts are the same. | 2327 // Check if both contexts are the same. |
2327 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 2328 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
2328 j(equal, &same_contexts); | 2329 j(equal, &same_contexts); |
2329 | 2330 |
2330 // Compare security tokens. | 2331 // Compare security tokens. |
2331 // Check that the security token in the calling global object is | 2332 // Check that the security token in the calling global object is |
2332 // compatible with the security token in the receiving global | 2333 // compatible with the security token in the receiving global |
(...skipping 481 matching lines...)
2814 movq(function, Operand(function, Context::SlotOffset(index))); | 2815 movq(function, Operand(function, Context::SlotOffset(index))); |
2815 } | 2816 } |
2816 | 2817 |
2817 | 2818 |
2818 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2819 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
2819 Register map) { | 2820 Register map) { |
2820 // Load the initial map. The global functions all have initial maps. | 2821 // Load the initial map. The global functions all have initial maps. |
2821 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2822 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
2822 if (emit_debug_code()) { | 2823 if (emit_debug_code()) { |
2823 Label ok, fail; | 2824 Label ok, fail; |
2824 CheckMap(map, FACTORY->meta_map(), &fail, false); | 2825 CheckMap(map, isolate()->factory()->meta_map(), &fail, false); |
2825 jmp(&ok); | 2826 jmp(&ok); |
2826 bind(&fail); | 2827 bind(&fail); |
2827 Abort("Global functions must have initial map"); | 2828 Abort("Global functions must have initial map"); |
2828 bind(&ok); | 2829 bind(&ok); |
2829 } | 2830 } |
2830 } | 2831 } |
2831 | 2832 |
2832 | 2833 |
2833 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { | 2834 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { |
2834 // On Windows 64 stack slots are reserved by the caller for all arguments | 2835 // On Windows 64 stack slots are reserved by the caller for all arguments |
(...skipping 68 matching lines...)
2903 CPU::FlushICache(address_, size_); | 2904 CPU::FlushICache(address_, size_); |
2904 | 2905 |
2905 // Check that the code was patched as expected. | 2906 // Check that the code was patched as expected. |
2906 ASSERT(masm_.pc_ == address_ + size_); | 2907 ASSERT(masm_.pc_ == address_ + size_); |
2907 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2908 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2908 } | 2909 } |
2909 | 2910 |
2910 } } // namespace v8::internal | 2911 } } // namespace v8::internal |
2911 | 2912 |
2912 #endif // V8_TARGET_ARCH_X64 | 2913 #endif // V8_TARGET_ARCH_X64 |