OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 // Note on Mips implementation: | 7 // Note on Mips implementation: |
8 // | 8 // |
9 // The result_register() for mips is the 'v0' register, which is defined | 9 // The result_register() for mips is the 'v0' register, which is defined |
10 // by the ABI to contain function return values. However, the first | 10 // by the ABI to contain function return values. However, the first |
(...skipping 1250 matching lines...)
1261 TypeofMode typeof_mode, | 1261 TypeofMode typeof_mode, |
1262 Label* slow) { | 1262 Label* slow) { |
1263 Register current = cp; | 1263 Register current = cp; |
1264 Register next = a1; | 1264 Register next = a1; |
1265 Register temp = a2; | 1265 Register temp = a2; |
1266 | 1266 |
1267 Scope* s = scope(); | 1267 Scope* s = scope(); |
1268 while (s != NULL) { | 1268 while (s != NULL) { |
1269 if (s->num_heap_slots() > 0) { | 1269 if (s->num_heap_slots() > 0) { |
1270 if (s->calls_sloppy_eval()) { | 1270 if (s->calls_sloppy_eval()) { |
1271 // Check that extension is NULL. | 1271 // Check that extension is "the hole". |
1272 __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); | 1272 __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); |
1273 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1273 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1274 } | 1274 } |
1275 // Load next context in chain. | 1275 // Load next context in chain. |
1276 __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); | 1276 __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); |
1277 // Walk the rest of the chain without clobbering cp. | 1277 // Walk the rest of the chain without clobbering cp. |
1278 current = next; | 1278 current = next; |
1279 } | 1279 } |
1280 // If no outer scope calls eval, we do not need to check more | 1280 // If no outer scope calls eval, we do not need to check more |
1281 // context extensions. | 1281 // context extensions. |
1282 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | 1282 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
1283 s = s->outer_scope(); | 1283 s = s->outer_scope(); |
1284 } | 1284 } |
1285 | 1285 |
1286 if (s->is_eval_scope()) { | 1286 if (s->is_eval_scope()) { |
1287 Label loop, fast; | 1287 Label loop, fast; |
1288 if (!current.is(next)) { | 1288 if (!current.is(next)) { |
1289 __ Move(next, current); | 1289 __ Move(next, current); |
1290 } | 1290 } |
1291 __ bind(&loop); | 1291 __ bind(&loop); |
1292 // Terminate at native context. | 1292 // Terminate at native context. |
1293 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1293 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
1294 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex); | 1294 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex); |
1295 __ Branch(&fast, eq, temp, Operand(a4)); | 1295 __ Branch(&fast, eq, temp, Operand(a4)); |
1296 // Check that extension is NULL. | 1296 // Check that extension is "the hole". |
1297 __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); | 1297 __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); |
1298 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1298 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1299 // Load next context in chain. | 1299 // Load next context in chain. |
1300 __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); | 1300 __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); |
1301 __ Branch(&loop); | 1301 __ Branch(&loop); |
1302 __ bind(&fast); | 1302 __ bind(&fast); |
1303 } | 1303 } |
1304 | 1304 |
1305 // All extension objects were empty and it is safe to use the normal global | 1305 // All extension objects were empty and it is safe to use the normal global |
1306 // load machinery. | 1306 // load machinery. |
1307 EmitGlobalVariableLoad(proxy, typeof_mode); | 1307 EmitGlobalVariableLoad(proxy, typeof_mode); |
1308 } | 1308 } |
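The right-hand side replaces the explicit comparison against zero_reg with JumpIfNotRoot. As a minimal sketch (assuming the helper simply composes LoadRoot and Branch; the actual definition lives in the mips64 MacroAssembler), the new check expands to roughly:

    // Sketch of what the mips64 JumpIfNotRoot helper is assumed to do:
    // load the requested root into the assembler scratch register 'at'
    // and branch to the target when the compared register differs.
    void MacroAssembler::JumpIfNotRoot(Register with,
                                       Heap::RootListIndex index,
                                       Label* if_not_equal) {
      LoadRoot(at, index);
      Branch(if_not_equal, ne, with, Operand(at));
    }

Either way, the slow path is now taken whenever the extension slot holds anything other than the the-hole sentinel, rather than anything non-NULL.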
1309 | 1309 |
1310 | 1310 |
1311 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1311 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
1312 Label* slow) { | 1312 Label* slow) { |
1313 DCHECK(var->IsContextSlot()); | 1313 DCHECK(var->IsContextSlot()); |
1314 Register context = cp; | 1314 Register context = cp; |
1315 Register next = a3; | 1315 Register next = a3; |
1316 Register temp = a4; | 1316 Register temp = a4; |
1317 | 1317 |
1318 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1318 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
1319 if (s->num_heap_slots() > 0) { | 1319 if (s->num_heap_slots() > 0) { |
1320 if (s->calls_sloppy_eval()) { | 1320 if (s->calls_sloppy_eval()) { |
1321 // Check that extension is NULL. | 1321 // Check that extension is "the hole". |
1322 __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1322 __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
1323 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1323 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1324 } | 1324 } |
1325 __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 1325 __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
1326 // Walk the rest of the chain without clobbering cp. | 1326 // Walk the rest of the chain without clobbering cp. |
1327 context = next; | 1327 context = next; |
1328 } | 1328 } |
1329 } | 1329 } |
1330 // Check that last extension is NULL. | 1330 // Check that last extension is "the hole". |
1331 __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1331 __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
1332 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1332 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1333 | 1333 |
1334 // This function is used only for loads, not stores, so it's safe to | 1334 // This function is used only for loads, not stores, so it's safe to |
1335 // return a cp-based operand (the write barrier cannot be allowed to | 1335 // return a cp-based operand (the write barrier cannot be allowed to |
1336 // destroy the cp register). | 1336 // destroy the cp register). |
1337 return ContextMemOperand(context, var->index()); | 1337 return ContextMemOperand(context, var->index()); |
1338 } | 1338 } |
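Both hunks address the extension and previous-context slots through ContextMemOperand. A minimal sketch, assuming the standard V8 context layout (a fixed header followed by tagged slots, with the heap-object tag folded into the offset); the real definitions live in src/contexts.h and the mips64 assembler headers:

    // Sketch only: computes the byte offset of context slot 'index'
    // relative to the (tagged) context pointer held in 'context'.
    inline MemOperand ContextMemOperand(Register context, int index) {
      return MemOperand(context, Context::SlotOffset(index));
    }
    // with Context::SlotOffset(index) assumed to be
    //   Context::kHeaderSize + index * kPointerSize - kHeapObjectTag

This is why the loads above can use a plain ld straight off the context register without untagging it first.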
1339 | 1339 |
1340 | 1340 |
1341 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | 1341 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, |
1342 TypeofMode typeof_mode, | 1342 TypeofMode typeof_mode, |
(...skipping 3634 matching lines...)
4977 reinterpret_cast<uint64_t>( | 4977 reinterpret_cast<uint64_t>( |
4978 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4978 isolate->builtins()->OsrAfterStackCheck()->entry())); |
4979 return OSR_AFTER_STACK_CHECK; | 4979 return OSR_AFTER_STACK_CHECK; |
4980 } | 4980 } |
4981 | 4981 |
4982 | 4982 |
4983 } // namespace internal | 4983 } // namespace internal |
4984 } // namespace v8 | 4984 } // namespace v8 |
4985 | 4985 |
4986 #endif // V8_TARGET_ARCH_MIPS64 | 4986 #endif // V8_TARGET_ARCH_MIPS64 |