| Index: src/ia32/code-stubs-ia32.cc
|
| diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
|
| index 78daf7cf420c69afe33b54e8d231798802d1bc10..ece5a9b95cad4f71ee634f8217c31e63221eb5c6 100644
|
| --- a/src/ia32/code-stubs-ia32.cc
|
| +++ b/src/ia32/code-stubs-ia32.cc
|
| @@ -65,9 +65,13 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
|
| void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| // Create a new closure from the given function info in new
|
| // space. Set the context to the current context in esi.
|
| + Counters* counters = masm->isolate()->counters();
|
| +
|
| Label gc;
|
| __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
|
|
|
| + __ IncrementCounter(counters->fast_new_closure_total(), 1);
|
| +
|
| // Get the function info from the stack.
|
| __ mov(edx, Operand(esp, 1 * kPointerSize));
|
|
|
| @@ -79,8 +83,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| // as the map of the allocated object.
|
| __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
|
| __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
|
| - __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
|
| - __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
|
| + __ mov(ebx, Operand(ecx, Context::SlotOffset(map_index)));
|
| + __ mov(FieldOperand(eax, JSObject::kMapOffset), ebx);
|
|
|
| // Initialize the rest of the function. We don't have to update the
|
| // write barrier because the allocated object is in new space.
|
| @@ -93,11 +97,20 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
|
| __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
|
| __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
|
| - __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
|
| - Immediate(factory->undefined_value()));
|
|
|
| // Initialize the code pointer in the function to be the one
|
| // found in the shared function info object.
|
| + // But first check if there is an optimized version for our context.
|
| + NearLabel check_optimized;
|
| + Label install_unoptimized;
|
| + if (FLAG_cache_optimized_code) {
|
| + __ mov(ebx, FieldOperand(edx, SharedFunctionInfo::kOptimizedCodeMapOffset));
|
| + __ test(ebx, Operand(ebx));
|
| + __ j(not_zero, &check_optimized);
|
| + }
|
| + __ bind(&install_unoptimized);
|
| + __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
|
| + Immediate(factory->undefined_value()));
|
| __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
|
| __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
|
| __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
|
| @@ -105,6 +118,68 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| // Return and remove the on-stack parameter.
|
| __ ret(1 * kPointerSize);
|
|
|
| + __ bind(&check_optimized);
|
| +
|
| + __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
|
| +
|
| + // ecx holds global context, ebx points to fixed array of pairs
|
| + // (global context, optimized code).
|
| + // Map must never be empty, so check the first entry.
|
| + Label install_optimized;
|
| + // Speculatively move code object into edx.
|
| + __ mov(edx, FieldOperand(ebx, FixedArray::kHeaderSize + kPointerSize));
|
| + __ cmp(ecx, FieldOperand(ebx, FixedArray::kHeaderSize));
|
| + __ j(equal, &install_optimized);
|
| +
|
| + // Iterate through the rest of the map backwards. edx holds an index as a Smi.
|
| + NearLabel loop;
|
| + Label restore;
|
| + __ mov(edx, FieldOperand(ebx, FixedArray::kLengthOffset));
|
| + __ bind(&loop);
|
| + __ cmp(edx, 4); // Do not double check first entry.
|
| + __ j(equal, &restore);
|
| + __ sub(Operand(edx), Immediate(4)); // Skip a pair.
|
| + __ cmp(ecx, FixedArrayElementOperand(ebx, edx, 0));
|
| + __ j(not_equal, &loop);
|
| + // Hit: fetch the optimized code.
|
| + __ mov(edx, FixedArrayElementOperand(ebx, edx, 1));
|
| +
|
| + __ bind(&install_optimized);
|
| +
|
| + __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
|
| +
|
| + // Idea: store proper code pointers in the map and either unmangle them
|
| + // on marking or do nothing as the whole map is discarded on major GC anyway.
|
| + __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
|
| + __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
|
| +
|
| + // Now link a function into a list of optimized functions.
|
| + __ mov(edx, ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST));
|
| +
|
| + __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), edx);
|
| + // No write barrier needed, as the JSFunction (eax) is in new space.
|
| + if (FLAG_debug_code) {
|
| + NearLabel ok;
|
| + __ InNewSpace(eax, ebx, equal, &ok);
|
| + __ Abort("New closure has been allocated in old space!");
|
| + __ bind(&ok);
|
| + }
|
| +
|
| + __ mov(ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST), eax);
|
| + // Store JSFunction (eax) into edx before issuing the write barrier, as
|
| + // it clobbers all the registers passed.
|
| + __ mov(edx, eax);
|
| + __ RecordWrite(ecx, Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
|
| + edx, ebx);
|
| +
|
| + // Return and remove the on-stack parameter.
|
| + __ ret(1 * kPointerSize);
|
| +
|
| + __ bind(&restore);
|
| + // Restore SharedFunctionInfo into edx.
|
| + __ mov(edx, Operand(esp, 1 * kPointerSize));
|
| + __ jmp(&install_unoptimized);
|
| +
|
| // Create a new closure through the slower runtime call.
|
| __ bind(&gc);
|
| __ pop(ecx); // Temporarily remove return address.
|
|
|