OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 61 matching lines...)
72 | 72 |
73 Operand MacroAssembler::ExternalOperand(ExternalReference target, | 73 Operand MacroAssembler::ExternalOperand(ExternalReference target, |
74 Register scratch) { | 74 Register scratch) { |
75 if (root_array_available_ && !Serializer::enabled()) { | 75 if (root_array_available_ && !Serializer::enabled()) { |
76 intptr_t delta = RootRegisterDelta(target); | 76 intptr_t delta = RootRegisterDelta(target); |
77 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { | 77 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { |
78 Serializer::TooLateToEnableNow(); | 78 Serializer::TooLateToEnableNow(); |
79 return Operand(kRootRegister, static_cast<int32_t>(delta)); | 79 return Operand(kRootRegister, static_cast<int32_t>(delta)); |
80 } | 80 } |
81 } | 81 } |
82 movq(scratch, target); | 82 Move(scratch, target); |
83 return Operand(scratch, 0); | 83 return Operand(scratch, 0); |
84 } | 84 } |
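ExternalOperand is the pattern the rest of this change leans on: when the isolate's root array is reachable through kRootRegister and the serializer is off, an external reference within int32 range of the root register becomes a plain base-plus-displacement operand, and only the slow path materializes the full 64-bit address in a scratch register. A hedged usage sketch, modeled on the cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)) call that appears later in this file; the register, label, and condition below are illustrative, not taken from the patch:

    // Inside a MacroAssembler member, like the surrounding code: compare a
    // register against the value stored at an external address.
    // kScratchRegister is only clobbered when the address is not reachable
    // from kRootRegister.
    Label beyond_top;
    ExternalReference top =
        ExternalReference::new_space_allocation_top_address(isolate());
    cmpq(rax, ExternalOperand(top, kScratchRegister));
    j(above_equal, &beyond_top);
    // ... in-range case ...
    bind(&beyond_top);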
85 | 85 |
86 | 86 |
87 void MacroAssembler::Load(Register destination, ExternalReference source) { | 87 void MacroAssembler::Load(Register destination, ExternalReference source) { |
88 if (root_array_available_ && !Serializer::enabled()) { | 88 if (root_array_available_ && !Serializer::enabled()) { |
89 intptr_t delta = RootRegisterDelta(source); | 89 intptr_t delta = RootRegisterDelta(source); |
90 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { | 90 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { |
91 Serializer::TooLateToEnableNow(); | 91 Serializer::TooLateToEnableNow(); |
92 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); | 92 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); |
93 return; | 93 return; |
94 } | 94 } |
95 } | 95 } |
96 // Safe code. | 96 // Safe code. |
97 if (destination.is(rax)) { | 97 if (destination.is(rax)) { |
98 load_rax(source); | 98 load_rax(source); |
99 } else { | 99 } else { |
100 movq(kScratchRegister, source); | 100 Move(kScratchRegister, source); |
101 movq(destination, Operand(kScratchRegister, 0)); | 101 movq(destination, Operand(kScratchRegister, 0)); |
102 } | 102 } |
103 } | 103 } |
104 | 104 |
105 | 105 |
106 void MacroAssembler::Store(ExternalReference destination, Register source) { | 106 void MacroAssembler::Store(ExternalReference destination, Register source) { |
107 if (root_array_available_ && !Serializer::enabled()) { | 107 if (root_array_available_ && !Serializer::enabled()) { |
108 intptr_t delta = RootRegisterDelta(destination); | 108 intptr_t delta = RootRegisterDelta(destination); |
109 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { | 109 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { |
110 Serializer::TooLateToEnableNow(); | 110 Serializer::TooLateToEnableNow(); |
111 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source); | 111 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source); |
112 return; | 112 return; |
113 } | 113 } |
114 } | 114 } |
115 // Safe code. | 115 // Safe code. |
116 if (source.is(rax)) { | 116 if (source.is(rax)) { |
117 store_rax(destination); | 117 store_rax(destination); |
118 } else { | 118 } else { |
119 movq(kScratchRegister, destination); | 119 Move(kScratchRegister, destination); |
120 movq(Operand(kScratchRegister, 0), source); | 120 movq(Operand(kScratchRegister, 0), source); |
121 } | 121 } |
122 } | 122 } |
123 | 123 |
124 | 124 |
125 void MacroAssembler::LoadAddress(Register destination, | 125 void MacroAssembler::LoadAddress(Register destination, |
126 ExternalReference source) { | 126 ExternalReference source) { |
127 if (root_array_available_ && !Serializer::enabled()) { | 127 if (root_array_available_ && !Serializer::enabled()) { |
128 intptr_t delta = RootRegisterDelta(source); | 128 intptr_t delta = RootRegisterDelta(source); |
129 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { | 129 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { |
130 Serializer::TooLateToEnableNow(); | 130 Serializer::TooLateToEnableNow(); |
131 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); | 131 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); |
132 return; | 132 return; |
133 } | 133 } |
134 } | 134 } |
135 // Safe code. | 135 // Safe code. |
136 movq(destination, source); | 136 Move(destination, source); |
137 } | 137 } |
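Every movq that became Move in this hunk loads the raw address of an ExternalReference into a register. Only the call sites of that Move(Register, ExternalReference) overload are visible here; a plausible shape for it, offered as an assumption rather than a quote of the real helper, is a one-liner that emits the 64-bit address tagged with the same RelocInfo::EXTERNAL_REFERENCE mode used near the end of this file, so the serializer can still rewrite it:

    // Assumed sketch only; the actual V8 implementation may differ. Load the
    // referenced address as a relocatable 64-bit immediate.
    void MacroAssembler::Move(Register dst, ExternalReference ext) {
      movq(dst, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
    }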
138 | 138 |
139 | 139 |
140 int MacroAssembler::LoadAddressSize(ExternalReference source) { | 140 int MacroAssembler::LoadAddressSize(ExternalReference source) { |
141 if (root_array_available_ && !Serializer::enabled()) { | 141 if (root_array_available_ && !Serializer::enabled()) { |
142 // This calculation depends on the internals of LoadAddress. | 142 // This calculation depends on the internals of LoadAddress. |
143 // Its correctness is ensured by the asserts in the Call | 143 // Its correctness is ensured by the asserts in the Call |
144 // instruction below. | 144 // instruction below. |
145 intptr_t delta = RootRegisterDelta(source); | 145 intptr_t delta = RootRegisterDelta(source); |
146 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { | 146 if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { |
(...skipping 121 matching lines...)
268 Register scratch, | 268 Register scratch, |
269 Condition cc, | 269 Condition cc, |
270 Label* branch, | 270 Label* branch, |
271 Label::Distance distance) { | 271 Label::Distance distance) { |
272 if (Serializer::enabled()) { | 272 if (Serializer::enabled()) { |
273 // Can't do arithmetic on external references if it might get serialized. | 273 // Can't do arithmetic on external references if it might get serialized. |
274 // The mask isn't really an address. We load it as an external reference in | 274 // The mask isn't really an address. We load it as an external reference in |
275 // case the size of the new space is different between the snapshot maker | 275 // case the size of the new space is different between the snapshot maker |
276 // and the running system. | 276 // and the running system. |
277 if (scratch.is(object)) { | 277 if (scratch.is(object)) { |
278 movq(kScratchRegister, ExternalReference::new_space_mask(isolate())); | 278 Move(kScratchRegister, ExternalReference::new_space_mask(isolate())); |
279 and_(scratch, kScratchRegister); | 279 and_(scratch, kScratchRegister); |
280 } else { | 280 } else { |
281 movq(scratch, ExternalReference::new_space_mask(isolate())); | 281 Move(scratch, ExternalReference::new_space_mask(isolate())); |
282 and_(scratch, object); | 282 and_(scratch, object); |
283 } | 283 } |
284 movq(kScratchRegister, ExternalReference::new_space_start(isolate())); | 284 Move(kScratchRegister, ExternalReference::new_space_start(isolate())); |
285 cmpq(scratch, kScratchRegister); | 285 cmpq(scratch, kScratchRegister); |
286 j(cc, branch, distance); | 286 j(cc, branch, distance); |
287 } else { | 287 } else { |
288 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))); | 288 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))); |
289 intptr_t new_space_start = | 289 intptr_t new_space_start = |
290 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); | 290 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); |
291 movq(kScratchRegister, -new_space_start, RelocInfo::NONE64); | 291 movq(kScratchRegister, -new_space_start, RelocInfo::NONE64); |
292 if (scratch.is(object)) { | 292 if (scratch.is(object)) { |
293 addq(scratch, kScratchRegister); | 293 addq(scratch, kScratchRegister); |
294 } else { | 294 } else { |
(...skipping 407 matching lines...)
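The comment in the hunk above captures the constraint behind these conversions: while the serializer may run, generated code must not bake in arithmetic on external addresses, so the new-space mask and start are loaded as relocatable external references and combined at run time. Reduced to plain C++ for illustration (names are descriptive only, and this mirrors the serializer-enabled branch shown above), the predicate being emitted is:

    #include <cstdint>

    // Sketch: an address lies in new space when masking it with the
    // new-space mask yields the start of new space.
    bool InNewSpaceSketch(uintptr_t addr, uintptr_t new_space_mask,
                          uintptr_t new_space_start) {
      return (addr & new_space_mask) == new_space_start;
    }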
702 const int kLevelOffset = Offset( | 702 const int kLevelOffset = Offset( |
703 ExternalReference::handle_scope_level_address(isolate()), | 703 ExternalReference::handle_scope_level_address(isolate()), |
704 next_address); | 704 next_address); |
705 ExternalReference scheduled_exception_address = | 705 ExternalReference scheduled_exception_address = |
706 ExternalReference::scheduled_exception_address(isolate()); | 706 ExternalReference::scheduled_exception_address(isolate()); |
707 | 707 |
708 // Allocate HandleScope in callee-save registers. | 708 // Allocate HandleScope in callee-save registers. |
709 Register prev_next_address_reg = r14; | 709 Register prev_next_address_reg = r14; |
710 Register prev_limit_reg = rbx; | 710 Register prev_limit_reg = rbx; |
711 Register base_reg = r15; | 711 Register base_reg = r15; |
712 movq(base_reg, next_address); | 712 Move(base_reg, next_address); |
713 movq(prev_next_address_reg, Operand(base_reg, kNextOffset)); | 713 movq(prev_next_address_reg, Operand(base_reg, kNextOffset)); |
714 movq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 714 movq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
715 addl(Operand(base_reg, kLevelOffset), Immediate(1)); | 715 addl(Operand(base_reg, kLevelOffset), Immediate(1)); |
716 | 716 |
717 if (FLAG_log_timer_events) { | 717 if (FLAG_log_timer_events) { |
718 FrameScope frame(this, StackFrame::MANUAL); | 718 FrameScope frame(this, StackFrame::MANUAL); |
719 PushSafepointRegisters(); | 719 PushSafepointRegisters(); |
720 PrepareCallCFunction(1); | 720 PrepareCallCFunction(1); |
721 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); | 721 LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate())); |
722 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); | 722 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); |
(...skipping 40 matching lines...)
763 | 763 |
764 // No more valid handles (the result handle was the last one). Restore | 764 // No more valid handles (the result handle was the last one). Restore |
765 // previous handle scope. | 765 // previous handle scope. |
766 subl(Operand(base_reg, kLevelOffset), Immediate(1)); | 766 subl(Operand(base_reg, kLevelOffset), Immediate(1)); |
767 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); | 767 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); |
768 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); | 768 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); |
769 j(not_equal, &delete_allocated_handles); | 769 j(not_equal, &delete_allocated_handles); |
770 bind(&leave_exit_frame); | 770 bind(&leave_exit_frame); |
771 | 771 |
772 // Check if the function scheduled an exception. | 772 // Check if the function scheduled an exception. |
773 movq(rsi, scheduled_exception_address); | 773 Move(rsi, scheduled_exception_address); |
774 Cmp(Operand(rsi, 0), factory->the_hole_value()); | 774 Cmp(Operand(rsi, 0), factory->the_hole_value()); |
775 j(not_equal, &promote_scheduled_exception); | 775 j(not_equal, &promote_scheduled_exception); |
776 bind(&exception_handled); | 776 bind(&exception_handled); |
777 | 777 |
778 #if ENABLE_EXTRA_CHECKS | 778 #if ENABLE_EXTRA_CHECKS |
779 // Check if the function returned a valid JavaScript value. | 779 // Check if the function returned a valid JavaScript value. |
780 Label ok; | 780 Label ok; |
781 Register return_value = rax; | 781 Register return_value = rax; |
782 Register map = rcx; | 782 Register map = rcx; |
783 | 783 |
(...skipping 4135 matching lines...)
4919 Register receiver_reg, | 4919 Register receiver_reg, |
4920 Register scratch_reg, | 4920 Register scratch_reg, |
4921 Label* no_memento_found) { | 4921 Label* no_memento_found) { |
4922 ExternalReference new_space_start = | 4922 ExternalReference new_space_start = |
4923 ExternalReference::new_space_start(isolate()); | 4923 ExternalReference::new_space_start(isolate()); |
4924 ExternalReference new_space_allocation_top = | 4924 ExternalReference new_space_allocation_top = |
4925 ExternalReference::new_space_allocation_top_address(isolate()); | 4925 ExternalReference::new_space_allocation_top_address(isolate()); |
4926 | 4926 |
4927 lea(scratch_reg, Operand(receiver_reg, | 4927 lea(scratch_reg, Operand(receiver_reg, |
4928 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); | 4928 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); |
4929 movq(kScratchRegister, new_space_start); | 4929 Move(kScratchRegister, new_space_start); |
4930 cmpq(scratch_reg, kScratchRegister); | 4930 cmpq(scratch_reg, kScratchRegister); |
4931 j(less, no_memento_found); | 4931 j(less, no_memento_found); |
4932 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); | 4932 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); |
4933 j(greater, no_memento_found); | 4933 j(greater, no_memento_found); |
4934 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4934 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4935 Heap::kAllocationMementoMapRootIndex); | 4935 Heap::kAllocationMementoMapRootIndex); |
4936 } | 4936 } |
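The hunk above uses lea to form the address just past where an AllocationMemento would sit if it immediately followed the JSArray (subtracting kHeapObjectTag untags the receiver), then rejects candidates whose memento end falls outside the used part of new space before comparing the would-be memento's map. The bounds test, restated as a plain C++ sketch with descriptive (non-V8) names:

    #include <cstdint>

    // 'object' is the untagged receiver address; the size arguments stand in
    // for JSArray::kSize and AllocationMemento::kSize.
    bool MementoInBoundsSketch(uintptr_t object, uintptr_t array_size,
                               uintptr_t memento_size,
                               uintptr_t new_space_start,
                               uintptr_t new_space_top) {
      uintptr_t memento_end = object + array_size + memento_size;
      // Below new-space start: the receiver cannot be a new-space object with
      // a trailing memento. Above the allocation top: the memento slot was
      // never allocated, so its map word would be garbage.
      return memento_end >= new_space_start && memento_end <= new_space_top;
    }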
4937 | 4937 |
4938 | 4938 |
4939 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, | 4939 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, |
(...skipping 24 matching lines...)
4964 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | 4964 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
4965 CallCFunction( | 4965 CallCFunction( |
4966 ExternalReference::record_object_allocation_function(isolate), 3); | 4966 ExternalReference::record_object_allocation_function(isolate), 3); |
4967 PopSafepointRegisters(); | 4967 PopSafepointRegisters(); |
4968 } | 4968 } |
4969 | 4969 |
4970 | 4970 |
4971 } } // namespace v8::internal | 4971 } } // namespace v8::internal |
4972 | 4972 |
4973 #endif // V8_TARGET_ARCH_X64 | 4973 #endif // V8_TARGET_ARCH_X64 |