Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index a935e12b6ca6f18d659ec99ef84a3edfd47efb5c..f81d237e030f5858ec2ab92524aaecc1ac9c6ac3 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -465,75 +465,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }


-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [esp + (1 * kPointerSize)]: function
-  // [esp + (2 * kPointerSize)]: serialized scope info
-
-  // Try to allocate the context in new space.
-  Label gc;
-  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
-  __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);
-
-  // Get the function or sentinel from the stack.
-  __ mov(ecx, Operand(esp, 1 * kPointerSize));
-
-  // Get the serialized scope info from the stack.
-  __ mov(ebx, Operand(esp, 2 * kPointerSize));
-
-  // Set up the object header.
-  Factory* factory = masm->isolate()->factory();
-  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
-         factory->block_context_map());
-  __ mov(FieldOperand(eax, Context::kLengthOffset),
-         Immediate(Smi::FromInt(length)));
-
-  // If this block context is nested in the native context we get a smi
-  // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
-  // we still have to look up.
-  Label after_sentinel;
-  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
-  if (FLAG_debug_code) {
-    __ cmp(ecx, 0);
-    __ Assert(equal, kExpected0AsASmiSentinel);
-  }
-  __ mov(ecx, GlobalObjectOperand());
-  __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
-  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
-  __ bind(&after_sentinel);
-
-  // Set up the fixed slots.
-  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
-  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
-  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
-
-  // Copy the global object from the previous context.
-  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
-  __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
-
-  // Initialize the rest of the slots to the hole value.
-  if (slots_ == 1) {
-    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
-           factory->the_hole_value());
-  } else {
-    __ mov(ebx, factory->the_hole_value());
-    for (int i = 0; i < slots_; i++) {
-      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
-    }
-  }
-
-  // Return and remove the on-stack parameters.
-  __ mov(esi, eax);
-  __ ret(2 * kPointerSize);
-
-  // Need to collect. Call into runtime system.
-  __ bind(&gc);
-  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   // We don't allow a GC during a store buffer overflow so there is no need to
   // store the registers in any particular way, but we do have to store and