Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 41507d158d19a3011036da42fdeeb94fa1b0b771..ed4548de815c0993e6b3bb4082512a040435dd97 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -39,6 +39,17 @@ namespace v8 {
 namespace internal {
 
 
+void FastNewClosureStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rbx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
+}
+
+
 void ToNumberStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -295,140 +306,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }
 
 
-void FastNewClosureStub::Generate(MacroAssembler* masm) {
-  // Create a new closure from the given function info in new
-  // space. Set the context to the current context in rsi.
-  Counters* counters = masm->isolate()->counters();
-
-  Label gc;
-  __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
-
-  __ IncrementCounter(counters->fast_new_closure_total(), 1);
-
-  // Get the function info from the stack.
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize));
-
-  int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
-
-  // Compute the function map in the current native context and set that
-  // as the map of the allocated object.
-  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
-  __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
-  __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
-
-  // Initialize the rest of the function. We don't have to update the
-  // write barrier because the allocated object is in new space.
-  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
-  __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
-  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
-  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
-  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
-  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), r8);
-  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
-  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
-  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
-
-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  Label check_optimized;
-  Label install_unoptimized;
-  if (FLAG_cache_optimized_code) {
-    __ movq(rbx,
-            FieldOperand(rdx, SharedFunctionInfo::kOptimizedCodeMapOffset));
-    __ testq(rbx, rbx);
-    __ j(not_zero, &check_optimized, Label::kNear);
-  }
-  __ bind(&install_unoptimized);
-  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset),
-          rdi);  // Initialize with undefined.
-  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
-  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
-  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
-
-  // Return and remove the on-stack parameter.
-  __ ret(1 * kPointerSize);
-
-  __ bind(&check_optimized);
-
-  __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
-
-  // rcx holds native context, rbx points to fixed array of 3-element entries
-  // (native context, optimized code, literals).
-  // The optimized code map must never be empty, so check the first elements.
-  Label install_optimized;
-  // Speculatively move code object into edx.
-  __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot));
-  __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot));
-  __ j(equal, &install_optimized);
-
-  // Iterate through the rest of map backwards. rdx holds an index.
-  Label loop;
-  Label restore;
-  __ movq(rdx, FieldOperand(rbx, FixedArray::kLengthOffset));
-  __ SmiToInteger32(rdx, rdx);
-  __ bind(&loop);
-  // Do not double check first entry.
-  __ cmpq(rdx, Immediate(SharedFunctionInfo::kSecondEntryIndex));
-  __ j(equal, &restore);
-  __ subq(rdx, Immediate(SharedFunctionInfo::kEntryLength));
-  __ cmpq(rcx, FieldOperand(rbx,
-                            rdx,
-                            times_pointer_size,
-                            FixedArray::kHeaderSize));
-  __ j(not_equal, &loop, Label::kNear);
-  // Hit: fetch the optimized code.
-  __ movq(rdx, FieldOperand(rbx,
-                            rdx,
-                            times_pointer_size,
-                            FixedArray::kHeaderSize + 1 * kPointerSize));
-
-  __ bind(&install_optimized);
-  __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
-
-  // TODO(fschneider): Idea: store proper code pointers in the map and either
-  // unmangle them on marking or do nothing as the whole map is discarded on
-  // major GC anyway.
-  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
-  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
-
-  // Now link a function into a list of optimized functions.
-  __ movq(rdx, ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST));
-
-  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdx);
-  // No need for write barrier as JSFunction (rax) is in the new space.
-
-  __ movq(ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST), rax);
-  // Store JSFunction (rax) into rdx before issuing write barrier as
-  // it clobbers all the registers passed.
-  __ movq(rdx, rax);
-  __ RecordWriteContextSlot(
-      rcx,
-      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
-      rdx,
-      rbx,
-      kDontSaveFPRegs);
-
-  // Return and remove the on-stack parameter.
-  __ ret(1 * kPointerSize);
-
-  __ bind(&restore);
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize));
-  __ jmp(&install_unoptimized);
-
-  // Create a new closure through the slower runtime call.
-  __ bind(&gc);
-  __ PopReturnAddressTo(rcx);
-  __ pop(rdx);
-  __ push(rsi);
-  __ push(rdx);
-  __ PushRoot(Heap::kFalseValueRootIndex);
-  __ PushReturnAddressFrom(rcx);
-  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
-}
-
-
 void FastNewContextStub::Generate(MacroAssembler* masm) {
   // Try to allocate the context in new space.
   Label gc;