| Index: src/full-codegen/mips/full-codegen-mips.cc
|
| diff --git a/src/full-codegen/mips/full-codegen-mips.cc b/src/full-codegen/mips/full-codegen-mips.cc
|
| index 0ac568ff0a5b7475c7cb690f335b121c08bcdce7..b57903a49f39459a57172ca7fba190b06f0aea60 100644
|
| --- a/src/full-codegen/mips/full-codegen-mips.cc
|
| +++ b/src/full-codegen/mips/full-codegen-mips.cc
|
| @@ -217,7 +217,7 @@
|
| // Load parameter from stack.
|
| __ lw(a0, MemOperand(fp, parameter_offset));
|
| // Store it in the context.
|
| - MemOperand target = ContextMemOperand(cp, var->index());
|
| + MemOperand target = ContextOperand(cp, var->index());
|
| __ sw(a0, target);
|
|
|
| // Update the write barrier.
|
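Note: every hunk in this patch moves from the ContextMemOperand helper to the ContextOperand spelling, which appears to be the pre-rename name of the same accessor. For reference, ContextOperand is assumed to be defined roughly as below in src/mips/macro-assembler-mips.h; treat the exact header location and the SlotOffset arithmetic as assumptions, not a quotation of the tree this patch targets.

    // Sketch only: assumed shape of the helper this patch switches back to.
    // Context::SlotOffset(index) is taken to resolve to
    //   FixedArray::kHeaderSize + index * kPointerSize - kHeapObjectTag,
    // i.e. the byte offset of slot `index` inside the tagged context object.
    inline MemOperand ContextOperand(Register context, int index) {
      return MemOperand(context, Context::SlotOffset(index));
    }
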
| @@ -714,7 +714,7 @@
|
| if (var->IsContextSlot()) {
|
| int context_chain_length = scope()->ContextChainLength(var->scope());
|
| __ LoadContext(scratch, context_chain_length);
|
| - return ContextMemOperand(scratch, var->index());
|
| + return ContextOperand(scratch, var->index());
|
| } else {
|
| return StackOperand(var);
|
| }
|
| @@ -820,7 +820,7 @@
|
| Comment cmnt(masm_, "[ VariableDeclaration");
|
| EmitDebugCheckDeclarationContext(variable);
|
| __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
|
| - __ sw(at, ContextMemOperand(cp, variable->index()));
|
| + __ sw(at, ContextOperand(cp, variable->index()));
|
| // No write barrier since the_hole_value is in old space.
|
| PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
|
| }
|
| @@ -878,7 +878,7 @@
|
| Comment cmnt(masm_, "[ FunctionDeclaration");
|
| EmitDebugCheckDeclarationContext(variable);
|
| VisitForAccumulatorValue(declaration->fun());
|
| - __ sw(result_register(), ContextMemOperand(cp, variable->index()));
|
| + __ sw(result_register(), ContextOperand(cp, variable->index()));
|
| int offset = Context::SlotOffset(variable->index());
|
| // We know that we have written a function, which is not a smi.
|
| __ RecordWriteContextSlot(cp,
|
| @@ -1269,11 +1269,11 @@
|
| if (s->num_heap_slots() > 0) {
|
| if (s->calls_sloppy_eval()) {
|
| // Check that extension is NULL.
|
| - __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
|
| + __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
|
| __ Branch(slow, ne, temp, Operand(zero_reg));
|
| }
|
| // Load next context in chain.
|
| - __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
|
| + __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
|
| // Walk the rest of the chain without clobbering cp.
|
| current = next;
|
| }
|
| @@ -1294,10 +1294,10 @@
|
| __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
|
| __ Branch(&fast, eq, temp, Operand(t0));
|
| // Check that extension is NULL.
|
| - __ lw(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
|
| + __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
|
| __ Branch(slow, ne, temp, Operand(zero_reg));
|
| // Load next context in chain.
|
| - __ lw(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
|
| + __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
|
| __ Branch(&loop);
|
| __ bind(&fast);
|
| }
|
| @@ -1319,22 +1319,22 @@
|
| if (s->num_heap_slots() > 0) {
|
| if (s->calls_sloppy_eval()) {
|
| // Check that extension is NULL.
|
| - __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
|
| + __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| __ Branch(slow, ne, temp, Operand(zero_reg));
|
| }
|
| - __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
|
| + __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
|
| // Walk the rest of the chain without clobbering cp.
|
| context = next;
|
| }
|
| }
|
| // Check that last extension is NULL.
|
| - __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
|
| + __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| __ Branch(slow, ne, temp, Operand(zero_reg));
|
|
|
| // This function is used only for loads, not stores, so it's safe to
|
| // return a cp-based operand (the write barrier cannot be allowed to
|
| // destroy the cp register).
|
| - return ContextMemOperand(context, var->index());
|
| + return ContextOperand(context, var->index());
|
| }
|
|
|
|
|
| @@ -1377,7 +1377,7 @@
|
| Variable* var = proxy->var();
|
| DCHECK(var->IsUnallocatedOrGlobalSlot() ||
|
| (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
|
| - __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
|
| + __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
|
| __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
|
| __ li(LoadDescriptor::SlotRegister(),
|
| Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
|
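Besides the ContextOperand rename, this hunk (and several below) replaces the __ LoadGlobalObject(reg) macro instruction with an explicit load through GlobalObjectOperand(). That operand is assumed to address the global-object slot of the context currently held in cp, roughly as sketched here; the definition follows the historical MIPS macro-assembler header and is an assumption, not part of this patch.

    // Sketch only: assumed definition of the operand used by the restored code,
    // which makes __ lw(dst, GlobalObjectOperand()) the hand-written equivalent
    // of the removed __ LoadGlobalObject(dst).
    inline MemOperand GlobalObjectOperand() {
      return ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX);
    }
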
| @@ -2184,7 +2184,9 @@
|
| __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
|
|
|
| __ bind(&done_allocate);
|
| - __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
|
| + __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| + __ lw(a1, FieldMemOperand(a1, JSGlobalObject::kNativeContextOffset));
|
| + __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
|
| __ pop(a2);
|
| __ LoadRoot(a3,
|
| done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
|
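Where the newer code used the single LoadNativeContextSlot macro instruction, the restored code spells out three dependent loads: the global object out of the current context in cp, the native context out of the global object, and finally the requested slot out of the native context. The same sequence recurs in later hunks of this patch. A hypothetical wrapper performing the identical loads is sketched below purely for illustration; the helper name is invented and is not introduced by this patch.

    // Hypothetical wrapper, for illustration only: the same three loads the
    // hunk above emits inline, with offsets and slot indices taken from the diff.
    static void LoadNativeContextSlotOldStyle(MacroAssembler* masm,
                                              Register dst, int slot_index) {
      // global object of the current context (cp)
      masm->lw(dst, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
      // native context stored in the global object
      masm->lw(dst, FieldMemOperand(dst, JSGlobalObject::kNativeContextOffset));
      // the requested native-context slot
      masm->lw(dst, ContextOperand(dst, slot_index));
    }
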
| @@ -2515,7 +2517,7 @@
|
| // Global var, const, or let.
|
| __ mov(StoreDescriptor::ValueRegister(), result_register());
|
| __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
|
| - __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
|
| + __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
|
| EmitLoadStoreICSlot(slot);
|
| CallStoreIC();
|
|
|
| @@ -4136,7 +4138,9 @@
|
| Label runtime, done;
|
|
|
| __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
|
| - __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
|
| + __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| + __ lw(a1, FieldMemOperand(a1, JSGlobalObject::kNativeContextOffset));
|
| + __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
|
| __ Pop(a2, a3);
|
| __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
|
| __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
|
| @@ -4160,7 +4164,9 @@
|
| __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
|
| __ push(v0);
|
|
|
| - __ LoadNativeContextSlot(expr->context_index(), v0);
|
| + __ lw(v0, GlobalObjectOperand());
|
| + __ lw(v0, FieldMemOperand(v0, JSGlobalObject::kNativeContextOffset));
|
| + __ lw(v0, ContextOperand(v0, expr->context_index()));
|
| }
|
|
|
|
|
| @@ -4251,7 +4257,7 @@
|
| bool is_this = var->HasThisName(isolate());
|
| DCHECK(is_sloppy(language_mode()) || is_this);
|
| if (var->IsUnallocatedOrGlobalSlot()) {
|
| - __ LoadGlobalObject(a2);
|
| + __ lw(a2, GlobalObjectOperand());
|
| __ li(a1, Operand(var->name()));
|
| __ Push(a2, a1);
|
| __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
|
| @@ -4802,7 +4808,7 @@
|
|
|
|
|
| void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
|
| - __ lw(dst, ContextMemOperand(cp, context_index));
|
| + __ lw(dst, ContextOperand(cp, context_index));
|
| }
|
|
|
|
|
| @@ -4813,12 +4819,14 @@
|
| // Contexts nested in the native context have a canonical empty function
|
| // as their closure, not the anonymous closure containing the global
|
| // code.
|
| - __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
|
| + __ lw(at, GlobalObjectOperand());
|
| + __ lw(at, FieldMemOperand(at, JSGlobalObject::kNativeContextOffset));
|
| + __ lw(at, ContextOperand(at, Context::CLOSURE_INDEX));
|
| } else if (closure_scope->is_eval_scope()) {
|
| // Contexts created by a call to eval have the same closure as the
|
| // context calling eval, not the anonymous closure containing the eval
|
| // code. Fetch it from the context.
|
| - __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
|
| + __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
|
| } else {
|
| DCHECK(closure_scope->is_function_scope());
|
| __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
|
|