Index: src/mips/builtins-mips.cc
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index b26616a78dd3af0bde01ca733e6d60765ad631bb..081c1af6722225a134982fcd2cc34a93ec2facbc 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -1226,154 +1226,6 @@
 
 
 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- a0 : argument count (preserved for callee)
-  //  -- a3 : new target (preserved for callee)
-  //  -- a1 : target function (preserved for callee)
-  // -----------------------------------
-  // First lookup code, maybe we don't need to compile!
-  Label gotta_call_runtime, gotta_call_runtime_no_stack;
-  Label maybe_call_runtime;
-  Label try_shared;
-  Label loop_top, loop_bottom;
-
-  Register argument_count = a0;
-  Register closure = a1;
-  Register new_target = a3;
-  __ push(argument_count);
-  __ push(new_target);
-  __ push(closure);
-
-  Register map = a0;
-  Register index = a2;
-  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
-  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
-  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
-  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
-
-  // Find literals.
-  // a3  : native context
-  // a2  : length / index
-  // a0  : optimized code map
-  // stack[0] : new target
-  // stack[4] : closure
-  Register native_context = a3;
-  __ lw(native_context, NativeContextMemOperand());
-
-  __ bind(&loop_top);
-  Register temp = a1;
-  Register array_pointer = t1;
-
-  // Does the native context match?
-  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(array_pointer, map, Operand(at));
-  __ lw(temp, FieldMemOperand(array_pointer,
-                              SharedFunctionInfo::kOffsetToPreviousContext));
-  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
-  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
-  // OSR id set to none?
-  __ lw(temp, FieldMemOperand(array_pointer,
-                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
-  const int bailout_id = BailoutId::None().ToInt();
-  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
-  // Literals available?
-  __ lw(temp, FieldMemOperand(array_pointer,
-                              SharedFunctionInfo::kOffsetToPreviousLiterals));
-  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
-  __ JumpIfSmi(temp, &gotta_call_runtime);
-
-  // Save the literals in the closure.
-  __ lw(t0, MemOperand(sp, 0));
-  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
-  __ push(index);
-  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
-                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                      OMIT_SMI_CHECK);
-  __ pop(index);
-
-  // Code available?
-  Register entry = t0;
-  __ lw(entry,
-        FieldMemOperand(array_pointer,
-                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
-  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
-  __ JumpIfSmi(entry, &maybe_call_runtime);
-
-  // Found literals and code. Get them into the closure and return.
-  __ pop(closure);
-  // Store code entry in the closure.
-  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
-
-  Label install_optimized_code_and_tailcall;
-  __ bind(&install_optimized_code_and_tailcall);
-  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
-  __ RecordWriteCodeEntryField(closure, entry, t1);
-
-  // Link the closure into the optimized function list.
-  // t0 : code entry
-  // a3 : native context
-  // a1 : closure
-  __ lw(t1,
-        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
-  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
-  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
-                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                      OMIT_SMI_CHECK);
-  const int function_list_offset =
-      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
-  __ sw(closure,
-        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
-  // Save closure before the write barrier.
-  __ mov(t1, closure);
-  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
-                            kRAHasNotBeenSaved, kDontSaveFPRegs);
-  __ mov(closure, t1);
-  __ pop(new_target);
-  __ pop(argument_count);
-  __ Jump(entry);
-
-  __ bind(&loop_bottom);
-  __ Subu(index, index,
-          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
-  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
-
-  // We found neither literals nor code.
-  __ jmp(&gotta_call_runtime);
-
-  __ bind(&maybe_call_runtime);
-  __ pop(closure);
-
-  // Last possibility. Check the context free optimized code map entry.
-  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
-                                        SharedFunctionInfo::kSharedCodeIndex));
-  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
-  __ JumpIfSmi(entry, &try_shared);
-
-  // Store code entry in the closure.
-  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ jmp(&install_optimized_code_and_tailcall);
-
-  __ bind(&try_shared);
-  __ pop(new_target);
-  __ pop(argument_count);
-  // Is the full code valid?
-  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
-  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
-  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
-  __ And(t1, t1, Operand(Code::KindField::kMask));
-  __ srl(t1, t1, Code::KindField::kShift);
-  __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
-  // Yes, install the full code.
-  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
-  __ RecordWriteCodeEntryField(closure, entry, t1);
-  __ Jump(entry);
-
-  __ bind(&gotta_call_runtime);
-  __ pop(closure);
-  __ pop(new_target);
-  __ pop(argument_count);
-  __ bind(&gotta_call_runtime_no_stack);
   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
 }
 