Chromium Code Reviews

Unified diff: src/full-codegen/mips64/full-codegen-mips64.cc

Issue 1480003002: [runtime] Replace global object link with native context link in all contexts. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Add patch from Orion for interpreter cementation test. Disable obsolete/invalid tests. Created 5 years ago
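
For orientation: every mips64 hunk below follows the same pattern. Code that used to reach the native context indirectly, by loading the global object out of the current context and then following JSGlobalObject::kNativeContextOffset, now goes through the context's native context link via new macro-assembler helpers. Both access paths, taken verbatim from the hunks below (register a1 and the iterator-result-map slot are just the example from EmitCreateIteratorResult):

  // Old path: context -> global object -> native context -> slot (three loads).
  __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ld(a1, FieldMemOperand(a1, JSGlobalObject::kNativeContextOffset));
  __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));

  // New path: one helper call that resolves the slot through the
  // context's native context link.
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);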
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS64

 // Note on Mips implementation:
 //
 // The result_register() for mips is the 'v0' register, which is defined
 // by the ABI to contain function return values. However, the first
(...skipping 198 matching lines...)
     int num_parameters = info->scope()->num_parameters();
     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
     for (int i = first_parameter; i < num_parameters; i++) {
       Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
       if (var->IsContextSlot()) {
         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                (num_parameters - 1 - i) * kPointerSize;
         // Load parameter from stack.
         __ ld(a0, MemOperand(fp, parameter_offset));
         // Store it in the context.
-        MemOperand target = ContextOperand(cp, var->index());
+        MemOperand target = ContextMemOperand(cp, var->index());
         __ sd(a0, target);

         // Update the write barrier.
         if (need_write_barrier) {
           __ RecordWriteContextSlot(
               cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
         } else if (FLAG_debug_code) {
           Label done;
           __ JumpIfInNewSpace(cp, a0, &done);
           __ Abort(kExpectedNewSpaceObject);
(...skipping 477 matching lines...)
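
Most of this file's churn is the rename of ContextOperand to ContextMemOperand, with the call sites otherwise untouched; the semantics of the operand do not appear to change. A minimal sketch of what the renamed helper presumably still computes (the body shown is an assumption for illustration, not part of this CL):

  // Presumed shape of the renamed helper: a MemOperand addressing slot
  // `index` of the context held in `context` (assumption, not from this CL).
  inline MemOperand ContextMemOperand(Register context, int index) {
    return MemOperand(context, Context::SlotOffset(index));
  }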
   }
   return MemOperand(fp, offset);
 }


 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
   if (var->IsContextSlot()) {
     int context_chain_length = scope()->ContextChainLength(var->scope());
     __ LoadContext(scratch, context_chain_length);
-    return ContextOperand(scratch, var->index());
+    return ContextMemOperand(scratch, var->index());
   } else {
     return StackOperand(var);
   }
 }


 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
   // Use destination as scratch.
   MemOperand location = VarOperand(var, dest);
   __ ld(dest, location);
(...skipping 85 matching lines...)
         __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
         __ sd(a4, StackOperand(variable));
       }
       break;

     case VariableLocation::CONTEXT:
       if (hole_init) {
         Comment cmnt(masm_, "[ VariableDeclaration");
         EmitDebugCheckDeclarationContext(variable);
         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-        __ sd(at, ContextOperand(cp, variable->index()));
+        __ sd(at, ContextMemOperand(cp, variable->index()));
         // No write barrier since the_hole_value is in old space.
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       }
       break;

     case VariableLocation::LOOKUP: {
       Comment cmnt(masm_, "[ VariableDeclaration");
       __ li(a2, Operand(variable->name()));
       // Declaration nodes are always introduced in one of four modes.
       DCHECK(IsDeclaredVariableMode(mode));
(...skipping 37 matching lines...)
       Comment cmnt(masm_, "[ FunctionDeclaration");
       VisitForAccumulatorValue(declaration->fun());
       __ sd(result_register(), StackOperand(variable));
       break;
     }

     case VariableLocation::CONTEXT: {
       Comment cmnt(masm_, "[ FunctionDeclaration");
       EmitDebugCheckDeclarationContext(variable);
       VisitForAccumulatorValue(declaration->fun());
-      __ sd(result_register(), ContextOperand(cp, variable->index()));
+      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
       int offset = Context::SlotOffset(variable->index());
       // We know that we have written a function, which is not a smi.
       __ RecordWriteContextSlot(cp,
                                 offset,
                                 result_register(),
                                 a2,
                                 kRAHasBeenSaved,
                                 kDontSaveFPRegs,
                                 EMIT_REMEMBERED_SET,
                                 OMIT_SMI_CHECK);
(...skipping 370 matching lines...)
                                                       Label* slow) {
   Register current = cp;
   Register next = a1;
   Register temp = a2;

   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is NULL.
-        __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
         __ Branch(slow, ne, temp, Operand(zero_reg));
       }
       // Load next context in chain.
-      __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
+      __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       current = next;
     }
     // If no outer scope calls eval, we do not need to check more
     // context extensions.
     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }

   if (s->is_eval_scope()) {
     Label loop, fast;
     if (!current.is(next)) {
       __ Move(next, current);
     }
     __ bind(&loop);
     // Terminate at native context.
     __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
     __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
     __ Branch(&fast, eq, temp, Operand(a4));
     // Check that extension is NULL.
-    __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+    __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
     __ Branch(slow, ne, temp, Operand(zero_reg));
     // Load next context in chain.
-    __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
+    __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
     __ Branch(&loop);
     __ bind(&fast);
   }

   // All extension objects were empty and it is safe to use a normal global
   // load machinery.
   EmitGlobalVariableLoad(proxy, typeof_mode);
 }


 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                 Label* slow) {
   DCHECK(var->IsContextSlot());
   Register context = cp;
   Register next = a3;
   Register temp = a4;

   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is NULL.
-        __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
+        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
         __ Branch(slow, ne, temp, Operand(zero_reg));
       }
-      __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
+      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       context = next;
     }
   }
   // Check that last extension is NULL.
-  __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
+  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
   __ Branch(slow, ne, temp, Operand(zero_reg));

   // This function is used only for loads, not stores, so it's safe to
   // return an cp-based operand (the write barrier cannot be allowed to
   // destroy the cp register).
-  return ContextOperand(context, var->index());
+  return ContextMemOperand(context, var->index());
 }


 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                   TypeofMode typeof_mode,
                                                   Label* slow, Label* done) {
   // Generate fast-case code for variables that might be shadowed by
   // eval-introduced variables. Eval is used a lot without
   // introducing variables. In those cases, we do not want to
   // perform a runtime call for all variables in the scope
(...skipping 22 matching lines...)
     __ Branch(done);
   }
 }


 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                                TypeofMode typeof_mode) {
   Variable* var = proxy->var();
   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-  __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
+  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
   __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
   __ li(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
   CallLoadIC(typeof_mode);
 }


 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
(...skipping 786 matching lines...)
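
Global loads (and, further down, global stores and deletes) now materialize the receiver with LoadGlobalObject instead of reading GlobalObjectOperand() out of the current context. Only the call site appears in this CL; presumably the helper resolves the global object through the native context link, roughly along these lines (both the body and the slot index are assumptions for illustration):

  // Hypothetical expansion (assumption): the global object is reached via
  // the native context rather than via a per-context global object slot.
  void MacroAssembler::LoadGlobalObject(Register dst) {
    LoadNativeContextSlot(Context::EXTENSION_INDEX, dst);  // slot index assumed
  }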
   Label allocate, done_allocate;

   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
   __ jmp(&done_allocate);

   __ bind(&allocate);
   __ Push(Smi::FromInt(JSIteratorResult::kSize));
   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

   __ bind(&done_allocate);
-  __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
-  __ ld(a1, FieldMemOperand(a1, JSGlobalObject::kNativeContextOffset));
-  __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
+  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   __ pop(a2);
   __ LoadRoot(a3,
               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
(...skipping 311 matching lines...)
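
The hunk above is the clearest instance of the change: three dependent loads (current context, then the global object, then its native context slot) collapse into one LoadNativeContextSlot call. A sketch of what that macro plausibly expands to on mips64, assuming contexts now carry a direct native context link as the CL title says (the expansion and the NATIVE_CONTEXT_INDEX name are assumptions; only the call itself is in this CL):

  // Hypothetical expansion (assumption): two loads instead of three, because
  // every context now links straight to its native context.
  void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
    ld(dst, ContextMemOperand(cp, Context::NATIVE_CONTEXT_INDEX));
    ld(dst, ContextMemOperand(dst, index));
  }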
   }
 }


 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
     __ mov(StoreDescriptor::ValueRegister(), result_register());
     __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
-    __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
+    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
     EmitLoadStoreICSlot(slot);
     CallStoreIC();

   } else if (var->mode() == LET && op != Token::INIT) {
     // Non-initializing assignment to let variable needs a write barrier.
     DCHECK(!var->IsLookupSlot());
     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
     Label assign;
     MemOperand location = VarOperand(var, a1);
     __ ld(a3, location);
(...skipping 1604 matching lines...)

 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK_EQ(2, args->length());
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));

   Label runtime, done;

   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
-  __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
-  __ ld(a1, FieldMemOperand(a1, JSGlobalObject::kNativeContextOffset));
-  __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
+  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   __ Pop(a2, a3);
   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   __ jmp(&done);

   __ bind(&runtime);
   __ CallRuntime(Runtime::kCreateIterResultObject, 2);

   __ bind(&done);
   context()->Plug(v0);
 }


 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
   // Push undefined as the receiver.
   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   __ push(v0);

-  __ ld(v0, GlobalObjectOperand());
-  __ ld(v0, FieldMemOperand(v0, JSGlobalObject::kNativeContextOffset));
-  __ ld(v0, ContextOperand(v0, expr->context_index()));
+  __ LoadNativeContextSlot(expr->context_index(), v0);
 }


 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();

   SetCallPosition(expr, arg_count);
   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   __ li(a0, Operand(arg_count));
(...skipping 69 matching lines...)
                          : Runtime::kDeleteProperty_Sloppy,
                      2);
       context()->Plug(v0);
     } else if (proxy != NULL) {
       Variable* var = proxy->var();
       // Delete of an unqualified identifier is disallowed in strict mode but
       // "delete this" is allowed.
       bool is_this = var->HasThisName(isolate());
       DCHECK(is_sloppy(language_mode()) || is_this);
       if (var->IsUnallocatedOrGlobalSlot()) {
-        __ ld(a2, GlobalObjectOperand());
+        __ LoadGlobalObject(a2);
         __ li(a1, Operand(var->name()));
         __ Push(a2, a1);
         __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2);
         context()->Plug(v0);
       } else if (var->IsStackAllocated() || var->IsContextSlot()) {
         // Result of deleting non-global, non-dynamic variables is false.
         // The subexpression does not have side effects.
         context()->Plug(is_this);
       } else {
         // Non-global variable. Call the runtime to try to delete from the
(...skipping 532 matching lines...)

 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   DCHECK(IsAligned(frame_offset, kPointerSize));
   // __ sw(value, MemOperand(fp, frame_offset));
   __ sd(value, MemOperand(fp, frame_offset));
 }


 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ ld(dst, ContextOperand(cp, context_index));
+  __ ld(dst, ContextMemOperand(cp, context_index));
 }


 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   Scope* closure_scope = scope()->ClosureScope();
   if (closure_scope->is_script_scope() ||
       closure_scope->is_module_scope()) {
     // Contexts nested in the native context have a canonical empty function
     // as their closure, not the anonymous closure containing the global
     // code.
-    __ ld(at, GlobalObjectOperand());
-    __ ld(at, FieldMemOperand(at, JSGlobalObject::kNativeContextOffset));
-    __ ld(at, ContextOperand(at, Context::CLOSURE_INDEX));
+    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
   } else if (closure_scope->is_eval_scope()) {
     // Contexts created by a call to eval have the same closure as the
     // context calling eval, not the anonymous closure containing the eval
     // code. Fetch it from the context.
-    __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
+    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
   } else {
     DCHECK(closure_scope->is_function_scope());
     __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   }
   __ push(at);
 }


 // ----------------------------------------------------------------------------
 // Non-local control flow support.
(...skipping 139 matching lines...)
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OsrAfterStackCheck()->entry()));
   return OSR_AFTER_STACK_CHECK;
 }


 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_MIPS64