OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 25 matching lines...) |
36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
37 #include "isolate-inl.h" | 37 #include "isolate-inl.h" |
38 #include "parser.h" | 38 #include "parser.h" |
39 #include "scopes.h" | 39 #include "scopes.h" |
40 #include "stub-cache.h" | 40 #include "stub-cache.h" |
41 | 41 |
42 namespace v8 { | 42 namespace v8 { |
43 namespace internal { | 43 namespace internal { |
44 | 44 |
45 #define __ ACCESS_MASM(masm_) | 45 #define __ ACCESS_MASM(masm_) |
| 46 #define __k __ |
| 47 #define __n __ |
46 | 48 |
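Illustrative note on the new `__k` / `__n` aliases (not part of the patch): here they are defined as plain synonyms of `__`, so this x64 build is unchanged; presumably the x32 port redefines them elsewhere so that width-sensitive emits (the `__k pop(rdx)`, `__k neg(index)`, `__k incq(index)` and `__n movq(arg_reg_2, ...)` sites below) can keep 64-bit or native-width semantics where the 4-byte x32 kPointerSize would be wrong. A purely hypothetical sketch of such a redefinition, with invented wrapper names:

    #ifdef V8_TARGET_ARCH_X32
    // Hypothetical: force 64-bit ("quad") or natural-width instruction forms.
    #define __k ACCESS_MASM_QUAD(masm_)
    #define __n ACCESS_MASM_NATIVE(masm_)
    #else
    #define __k __
    #define __n __
    #endif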
47 | 49 |
48 class JumpPatchSite BASE_EMBEDDED { | 50 class JumpPatchSite BASE_EMBEDDED { |
49 public: | 51 public: |
50 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
51 #ifdef DEBUG | 53 #ifdef DEBUG |
52 info_emitted_ = false; | 54 info_emitted_ = false; |
53 #endif | 55 #endif |
54 } | 56 } |
55 | 57 |
(...skipping 77 matching lines...) |
133 #endif | 135 #endif |
134 | 136 |
135 // Strict mode functions and builtins need to replace the receiver | 137 // Strict mode functions and builtins need to replace the receiver |
136 // with undefined when called as functions (without an explicit | 138 // with undefined when called as functions (without an explicit |
137 // receiver object). rcx is zero for method calls and non-zero for | 139 // receiver object). rcx is zero for method calls and non-zero for |
138 // function calls. | 140 // function calls. |
139 if (!info->is_classic_mode() || info->is_native()) { | 141 if (!info->is_classic_mode() || info->is_native()) { |
140 Label ok; | 142 Label ok; |
141 __ testq(rcx, rcx); | 143 __ testq(rcx, rcx); |
142 __ j(zero, &ok, Label::kNear); | 144 __ j(zero, &ok, Label::kNear); |
| 145 #ifndef V8_TARGET_ARCH_X32 |
143 // +1 for return address. | 146 // +1 for return address. |
144 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize; | 147 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize; |
| 148 #else |
| 149 int receiver_offset = 1 * kHWRegSize + |
| 150 info->scope()->num_parameters() * kPointerSize; |
| 151 #endif |
145 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); | 152 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); |
146 __ movq(Operand(rsp, receiver_offset), kScratchRegister); | 153 __ movq(Operand(rsp, receiver_offset), kScratchRegister); |
147 __ bind(&ok); | 154 __ bind(&ok); |
148 } | 155 } |
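Illustrative note (not part of the patch): both receiver_offset branches measure the distance from rsp to the receiver slot before the frame is built, past the return address and the declared parameters. A minimal sketch of the arithmetic, assuming kPointerSize is 8 on x64 and 4 on x32, and kHWRegSize (8 bytes) is the width of the return-address slot pushed by CALL:

    // Offset from rsp to the implicit receiver at function entry:
    // one return-address slot plus num_parameters parameter slots.
    int ReceiverOffsetFromRsp(int num_parameters) {
    #ifndef V8_TARGET_ARCH_X32
      // x64: return address and parameters are all 8-byte slots.
      return (num_parameters + 1) * kPointerSize;
    #else
      // x32: the return address still fills an 8-byte hardware slot,
      // but parameter slots are 4-byte pointers.
      return 1 * kHWRegSize + num_parameters * kPointerSize;
    #endif
    }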
149 | 156 |
150 // Open a frame scope to indicate that there is a frame on the stack. The | 157 // Open a frame scope to indicate that there is a frame on the stack. The |
151 // MANUAL indicates that the scope shouldn't actually generate code to set up | 158 // MANUAL indicates that the scope shouldn't actually generate code to set up |
152 // the frame (that is done below). | 159 // the frame (that is done below). |
153 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 160 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
154 | 161 |
(...skipping 158 matching lines...) |
313 } | 320 } |
314 | 321 |
315 | 322 |
316 void FullCodeGenerator::EmitProfilingCounterReset() { | 323 void FullCodeGenerator::EmitProfilingCounterReset() { |
317 int reset_value = FLAG_interrupt_budget; | 324 int reset_value = FLAG_interrupt_budget; |
318 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | 325 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { |
319 // Self-optimization is a one-off thing; if it fails, don't try again. | 326 // Self-optimization is a one-off thing; if it fails, don't try again. |
320 reset_value = Smi::kMaxValue; | 327 reset_value = Smi::kMaxValue; |
321 } | 328 } |
322 __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); | 329 __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); |
| 330 #ifndef V8_TARGET_ARCH_X32 |
323 __ movq(kScratchRegister, | 331 __ movq(kScratchRegister, |
324 reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)), | 332 reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)), |
325 RelocInfo::NONE64); | 333 RelocInfo::NONE64); |
| 334 #else |
| 335 __ movl(kScratchRegister, |
| 336 reinterpret_cast<uint32_t>(Smi::FromInt(reset_value)), |
| 337 RelocInfo::NONE32); |
| 338 #endif |
326 __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister); | 339 __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister); |
327 } | 340 } |
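Illustrative note (not part of the patch): the split here follows the Smi representation width, assuming the usual layouts of the period: x64 Smis carry the payload in the upper 32 bits of a 64-bit word (hence the 64-bit immediate and RelocInfo::NONE64), while the x32 port uses 31-bit Smis in a 32-bit word, so a movl with a 32-bit immediate suffices. A sketch of the assumed encodings:

    #include <cstdint>
    // Assumed tagging schemes, for illustration only.
    inline uint64_t SmiEncodeX64(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }
    inline uint32_t SmiEncodeX32(int32_t value) {
      return static_cast<uint32_t>(value) << 1;  // low tag bit is 0
    }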
328 | 341 |
329 | 342 |
330 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 343 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
331 Label* back_edge_target) { | 344 Label* back_edge_target) { |
332 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 345 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
333 Label ok; | 346 Label ok; |
334 | 347 |
335 int weight = 1; | 348 int weight = 1; |
(...skipping 72 matching lines...) |
408 __ pop(rbp); | 421 __ pop(rbp); |
409 int no_frame_start = masm_->pc_offset(); | 422 int no_frame_start = masm_->pc_offset(); |
410 | 423 |
411 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; | 424 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; |
412 __ Ret(arguments_bytes, rcx); | 425 __ Ret(arguments_bytes, rcx); |
413 | 426 |
414 #ifdef ENABLE_DEBUGGER_SUPPORT | 427 #ifdef ENABLE_DEBUGGER_SUPPORT |
415 // Add padding that will be overwritten by a debugger breakpoint. We | 428 // Add padding that will be overwritten by a debugger breakpoint. We |
416 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" | 429 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" |
417 // (3 + 1 + 3). | 430 // (3 + 1 + 3). |
| 431 #ifndef V8_TARGET_ARCH_X32 |
418 const int kPadding = Assembler::kJSReturnSequenceLength - 7; | 432 const int kPadding = Assembler::kJSReturnSequenceLength - 7; |
| 433 #else |
| 434 const int kPadding = Assembler::kJSReturnSequenceLength - 6; |
| 435 #endif |
419 for (int i = 0; i < kPadding; ++i) { | 436 for (int i = 0; i < kPadding; ++i) { |
420 masm_->int3(); | 437 masm_->int3(); |
421 } | 438 } |
422 // Check that the size of the code used for returning is large enough | 439 // Check that the size of the code used for returning is large enough |
423 // for the debugger's requirements. | 440 // for the debugger's requirements. |
424 ASSERT(Assembler::kJSReturnSequenceLength <= | 441 ASSERT(Assembler::kJSReturnSequenceLength <= |
425 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); | 442 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
426 #endif | 443 #endif |
427 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 444 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
428 } | 445 } |
(...skipping 244 matching lines...) |
673 } | 690 } |
674 } | 691 } |
675 | 692 |
676 | 693 |
677 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | 694 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
678 ASSERT(var->IsStackAllocated()); | 695 ASSERT(var->IsStackAllocated()); |
679 // Offset is negative because higher indexes are at lower addresses. | 696 // Offset is negative because higher indexes are at lower addresses. |
680 int offset = -var->index() * kPointerSize; | 697 int offset = -var->index() * kPointerSize; |
681 // Adjust by a (parameter or local) base offset. | 698 // Adjust by a (parameter or local) base offset. |
682 if (var->IsParameter()) { | 699 if (var->IsParameter()) { |
| 700 #ifndef V8_TARGET_ARCH_X32 |
683 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; | 701 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
| 702 #else |
| 703 offset += 2 * kHWRegSize + |
| 704 (info_->scope()->num_parameters() - 1) * kPointerSize; |
| 705 #endif |
684 } else { | 706 } else { |
685 offset += JavaScriptFrameConstants::kLocal0Offset; | 707 offset += JavaScriptFrameConstants::kLocal0Offset; |
686 } | 708 } |
687 return Operand(rbp, offset); | 709 return Operand(rbp, offset); |
688 } | 710 } |
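Illustrative note (not part of the patch): the two parameter-offset branches describe the same layout above rbp (saved rbp, then the return address, then the parameter slots); on x64 the hardware slots and pointer slots coincide, so the expression collapses. A worked comparison for n parameters, assuming kHWRegSize == 8 and kPointerSize == 8 (x64) / 4 (x32):

    // x64:  (n + 1) * kPointerSize
    //     == 2 * 8 + (n - 1) * 8   // two 8-byte frame slots + (n - 1) param slots
    // x32:  2 * kHWRegSize + (n - 1) * kPointerSize
    //     == 2 * 8 + (n - 1) * 4   // same shape, 4-byte parameter slots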
689 | 711 |
690 | 712 |
691 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { | 713 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
692 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 714 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
693 if (var->IsContextSlot()) { | 715 if (var->IsContextSlot()) { |
(...skipping 1433 matching lines...) |
2127 | 2149 |
2128 // Load suspended function and context. | 2150 // Load suspended function and context. |
2129 __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); | 2151 __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); |
2130 __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); | 2152 __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); |
2131 | 2153 |
2132 // Push receiver. | 2154 // Push receiver. |
2133 __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); | 2155 __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); |
2134 | 2156 |
2135 // Push holes for arguments to generator function. | 2157 // Push holes for arguments to generator function. |
2136 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 2158 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 2159 #ifndef V8_TARGET_ARCH_X32 |
2137 __ movsxlq(rdx, | 2160 __ movsxlq(rdx, |
2138 FieldOperand(rdx, | 2161 FieldOperand(rdx, |
2139 SharedFunctionInfo::kFormalParameterCountOffset)); | 2162 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 2163 #else |
| 2164 __ movl(rdx, |
| 2165 FieldOperand(rdx, |
| 2166 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 2167 __ SmiToInteger32(rdx, rdx); |
| 2168 #endif |
2140 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); | 2169 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); |
2141 Label push_argument_holes, push_frame; | 2170 Label push_argument_holes, push_frame; |
2142 __ bind(&push_argument_holes); | 2171 __ bind(&push_argument_holes); |
2143 __ subq(rdx, Immediate(1)); | 2172 __ subq(rdx, Immediate(1)); |
2144 __ j(carry, &push_frame); | 2173 __ j(carry, &push_frame); |
2145 __ push(rcx); | 2174 __ push(rcx); |
2146 __ jmp(&push_argument_holes); | 2175 __ jmp(&push_argument_holes); |
2147 | 2176 |
2148 // Enter a new JavaScript frame, and initialize its slots as they were when | 2177 // Enter a new JavaScript frame, and initialize its slots as they were when |
2149 // the generator was suspended. | 2178 // the generator was suspended. |
(...skipping 129 matching lines...) |
2279 expr->BinaryOperationFeedbackId()); | 2308 expr->BinaryOperationFeedbackId()); |
2280 patch_site.EmitPatchInfo(); | 2309 patch_site.EmitPatchInfo(); |
2281 __ jmp(&done, Label::kNear); | 2310 __ jmp(&done, Label::kNear); |
2282 | 2311 |
2283 __ bind(&smi_case); | 2312 __ bind(&smi_case); |
2284 switch (op) { | 2313 switch (op) { |
2285 case Token::SAR: | 2314 case Token::SAR: |
2286 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 2315 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
2287 break; | 2316 break; |
2288 case Token::SHL: | 2317 case Token::SHL: |
| 2318 #ifndef V8_TARGET_ARCH_X32 |
2289 __ SmiShiftLeft(rax, rdx, rcx); | 2319 __ SmiShiftLeft(rax, rdx, rcx); |
| 2320 #else |
| 2321 __ SmiShiftLeft(rax, rdx, rcx, &stub_call); |
| 2322 #endif |
2290 break; | 2323 break; |
2291 case Token::SHR: | 2324 case Token::SHR: |
2292 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call); | 2325 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call); |
2293 break; | 2326 break; |
2294 case Token::ADD: | 2327 case Token::ADD: |
2295 __ SmiAdd(rax, rdx, rcx, &stub_call); | 2328 __ SmiAdd(rax, rdx, rcx, &stub_call); |
2296 break; | 2329 break; |
2297 case Token::SUB: | 2330 case Token::SUB: |
2298 __ SmiSub(rax, rdx, rcx, &stub_call); | 2331 __ SmiSub(rax, rdx, rcx, &stub_call); |
2299 break; | 2332 break; |
(...skipping 333 matching lines...) |
2633 | 2666 |
2634 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2667 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
2635 // Push copy of the first argument or undefined if it doesn't exist. | 2668 // Push copy of the first argument or undefined if it doesn't exist. |
2636 if (arg_count > 0) { | 2669 if (arg_count > 0) { |
2637 __ push(Operand(rsp, arg_count * kPointerSize)); | 2670 __ push(Operand(rsp, arg_count * kPointerSize)); |
2638 } else { | 2671 } else { |
2639 __ PushRoot(Heap::kUndefinedValueRootIndex); | 2672 __ PushRoot(Heap::kUndefinedValueRootIndex); |
2640 } | 2673 } |
2641 | 2674 |
2642 // Push the receiver of the enclosing function and do runtime call. | 2675 // Push the receiver of the enclosing function and do runtime call. |
| 2676 #ifndef V8_TARGET_ARCH_X32 |
2643 __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize)); | 2677 __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize)); |
| 2678 #else |
| 2679 __ Push(Operand(rbp, 2 * kHWRegSize + |
| 2680 info_->scope()->num_parameters() * kPointerSize)); |
| 2681 #endif |
2644 | 2682 |
2645 // Push the language mode. | 2683 // Push the language mode. |
2646 __ Push(Smi::FromInt(language_mode())); | 2684 __ Push(Smi::FromInt(language_mode())); |
2647 | 2685 |
2648 // Push the start position of the scope the call resides in. | 2686 // Push the start position of the scope the call resides in. |
2649 __ Push(Smi::FromInt(scope()->start_position())); | 2687 __ Push(Smi::FromInt(scope()->start_position())); |
2650 | 2688 |
2651 // Do the runtime call. | 2689 // Do the runtime call. |
2652 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | 2690 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); |
2653 } | 2691 } |
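Illustrative note (not part of the patch): the enclosing function's receiver sits above the saved frame pointer, the return address, and the parameter slots; the x32 branch spells the first two out as 8-byte hardware slots, and the expression reduces to the original one when kPointerSize == kHWRegSize. A sketch of the rbp-relative offset, under the same size assumptions as above:

    int EnclosingReceiverOffsetFromRbp(int num_parameters) {
    #ifndef V8_TARGET_ARCH_X32
      return (2 + num_parameters) * kPointerSize;             // 8-byte slots throughout
    #else
      return 2 * kHWRegSize + num_parameters * kPointerSize;  // 8-byte frame slots, 4-byte params
    #endif
    }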
(...skipping 719 matching lines...) |
3373 Operand stamp_operand = __ ExternalOperand(stamp); | 3411 Operand stamp_operand = __ ExternalOperand(stamp); |
3374 __ movq(scratch, stamp_operand); | 3412 __ movq(scratch, stamp_operand); |
3375 __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); | 3413 __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); |
3376 __ j(not_equal, &runtime, Label::kNear); | 3414 __ j(not_equal, &runtime, Label::kNear); |
3377 __ movq(result, FieldOperand(object, JSDate::kValueOffset + | 3415 __ movq(result, FieldOperand(object, JSDate::kValueOffset + |
3378 kPointerSize * index->value())); | 3416 kPointerSize * index->value())); |
3379 __ jmp(&done); | 3417 __ jmp(&done); |
3380 } | 3418 } |
3381 __ bind(&runtime); | 3419 __ bind(&runtime); |
3382 __ PrepareCallCFunction(2); | 3420 __ PrepareCallCFunction(2); |
3383 __ movq(arg_reg_1, object); | 3421 __ movq(arg_reg_1, object); |
3384 __ movq(arg_reg_2, index, RelocInfo::NONE64); | 3422 __n movq(arg_reg_2, index, RelocInfo::NONE64); |
3385 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 3423 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
3386 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 3424 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
3387 __ jmp(&done); | 3425 __ jmp(&done); |
3388 } | 3426 } |
3389 | 3427 |
3390 __ bind(¬_date_object); | 3428 __ bind(¬_date_object); |
3391 __ CallRuntime(Runtime::kThrowNotDateError, 0); | 3429 __ CallRuntime(Runtime::kThrowNotDateError, 0); |
3392 __ bind(&done); | 3430 __ bind(&done); |
3393 context()->Plug(rax); | 3431 context()->Plug(rax); |
3394 } | 3432 } |
(...skipping 734 matching lines...) |
4129 | 4167 |
4130 // Long separator case (separator is more than one character). | 4168 // Long separator case (separator is more than one character). |
4131 __ bind(&long_separator); | 4169 __ bind(&long_separator); |
4132 | 4170 |
4133 // Make elements point to end of elements array, and index | 4171 // Make elements point to end of elements array, and index |
4134 // count from -array_length to zero, so we don't need to maintain | 4172 // count from -array_length to zero, so we don't need to maintain |
4135 // a loop limit. | 4173 // a loop limit. |
4136 __ movl(index, array_length_operand); | 4174 __ movl(index, array_length_operand); |
4137 __ lea(elements, FieldOperand(elements, index, times_pointer_size, | 4175 __ lea(elements, FieldOperand(elements, index, times_pointer_size, |
4138 FixedArray::kHeaderSize)); | 4176 FixedArray::kHeaderSize)); |
4139 __ neg(index); | 4177 __k neg(index); |
4140 | 4178 |
4141 // Replace separator string with pointer to its first character, and | 4179 // Replace separator string with pointer to its first character, and |
4142 // make scratch be its length. | 4180 // make scratch be its length. |
4143 __ movq(string, separator_operand); | 4181 __ movq(string, separator_operand); |
4144 __ SmiToInteger32(scratch, | 4182 __ SmiToInteger32(scratch, |
4145 FieldOperand(string, String::kLengthOffset)); | 4183 FieldOperand(string, String::kLengthOffset)); |
4146 __ lea(string, | 4184 __ lea(string, |
4147 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4185 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4148 __ movq(separator_operand, string); | 4186 __ movq(separator_operand, string); |
4149 | 4187 |
(...skipping 15 matching lines...) |
4165 __ CopyBytes(result_pos, string, string_length, 2); | 4203 __ CopyBytes(result_pos, string, string_length, 2); |
4166 | 4204 |
4167 __ bind(&loop_3_entry); | 4205 __ bind(&loop_3_entry); |
4168 // Get string = array[index]. | 4206 // Get string = array[index]. |
4169 __ movq(string, Operand(elements, index, times_pointer_size, 0)); | 4207 __ movq(string, Operand(elements, index, times_pointer_size, 0)); |
4170 __ SmiToInteger32(string_length, | 4208 __ SmiToInteger32(string_length, |
4171 FieldOperand(string, String::kLengthOffset)); | 4209 FieldOperand(string, String::kLengthOffset)); |
4172 __ lea(string, | 4210 __ lea(string, |
4173 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4211 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4174 __ CopyBytes(result_pos, string, string_length); | 4212 __ CopyBytes(result_pos, string, string_length); |
4175 __ incq(index); | 4213 __k incq(index); |
4176 __ j(not_equal, &loop_3); // Loop while (index < 0). | 4214 __ j(not_equal, &loop_3); // Loop while (index < 0). |
4177 | 4215 |
4178 __ bind(&done); | 4216 __ bind(&done); |
4179 __ movq(rax, result_operand); | 4217 __ movq(rax, result_operand); |
4180 | 4218 |
4181 __ bind(&return_result); | 4219 __ bind(&return_result); |
4182 // Drop temp values from the stack, and restore context register. | 4220 // Drop temp values from the stack, and restore context register. |
4183 __ addq(rsp, Immediate(3 * kPointerSize)); | 4221 __ addq(rsp, Immediate(3 * kPointerSize)); |
4184 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4222 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
4185 context()->Plug(rax); | 4223 context()->Plug(rax); |
(...skipping 627 matching lines...) |
4813 | 4851 |
4814 | 4852 |
4815 // ---------------------------------------------------------------------------- | 4853 // ---------------------------------------------------------------------------- |
4816 // Non-local control flow support. | 4854 // Non-local control flow support. |
4817 | 4855 |
4818 | 4856 |
4819 void FullCodeGenerator::EnterFinallyBlock() { | 4857 void FullCodeGenerator::EnterFinallyBlock() { |
4820 ASSERT(!result_register().is(rdx)); | 4858 ASSERT(!result_register().is(rdx)); |
4821 ASSERT(!result_register().is(rcx)); | 4859 ASSERT(!result_register().is(rcx)); |
4822 // Cook return address on top of stack (smi encoded Code* delta) | 4860 // Cook return address on top of stack (smi encoded Code* delta) |
4823 __ pop(rdx); | 4861 __k pop(rdx); |
4824 __ Move(rcx, masm_->CodeObject()); | 4862 __ Move(rcx, masm_->CodeObject()); |
4825 __ subq(rdx, rcx); | 4863 __ subq(rdx, rcx); |
4826 __ Integer32ToSmi(rdx, rdx); | 4864 __ Integer32ToSmi(rdx, rdx); |
4827 __ push(rdx); | 4865 __ push(rdx); |
4828 | 4866 |
4829 // Store result register while executing finally block. | 4867 // Store result register while executing finally block. |
4830 __ push(result_register()); | 4868 __ push(result_register()); |
4831 | 4869 |
4832 // Store pending message while executing finally block. | 4870 // Store pending message while executing finally block. |
4833 ExternalReference pending_message_obj = | 4871 ExternalReference pending_message_obj = |
(...skipping 67 matching lines...) |
4901 } | 4939 } |
4902 __ PopTryHandler(); | 4940 __ PopTryHandler(); |
4903 __ call(finally_entry_); | 4941 __ call(finally_entry_); |
4904 | 4942 |
4905 *stack_depth = 0; | 4943 *stack_depth = 0; |
4906 *context_length = 0; | 4944 *context_length = 0; |
4907 return previous_; | 4945 return previous_; |
4908 } | 4946 } |
4909 | 4947 |
4910 | 4948 |
| 4949 #undef __n |
| 4950 #undef __k |
4911 #undef __ | 4951 #undef __ |
4912 | 4952 |
4913 } } // namespace v8::internal | 4953 } } // namespace v8::internal |
4914 | 4954 |
4915 #endif // V8_TARGET_ARCH_X64 | 4955 #endif // V8_TARGET_ARCH_X64 |