| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 415 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 426 void MacroAssembler::StoreRoot(Register source, | 426 void MacroAssembler::StoreRoot(Register source, |
| 427 Heap::RootListIndex index, | 427 Heap::RootListIndex index, |
| 428 Condition cond) { | 428 Condition cond) { |
| 429 str(source, MemOperand(roots, index << kPointerSizeLog2), cond); | 429 str(source, MemOperand(roots, index << kPointerSizeLog2), cond); |
| 430 } | 430 } |
| 431 | 431 |
| 432 | 432 |
| 433 void MacroAssembler::RecordWriteHelper(Register object, | 433 void MacroAssembler::RecordWriteHelper(Register object, |
| 434 Register address, | 434 Register address, |
| 435 Register scratch) { | 435 Register scratch) { |
| 436 if (FLAG_debug_code) { | 436 if (emit_debug_code()) { |
| 437 // Check that the object is not in new space. | 437 // Check that the object is not in new space. |
| 438 Label not_in_new_space; | 438 Label not_in_new_space; |
| 439 InNewSpace(object, scratch, ne, &not_in_new_space); | 439 InNewSpace(object, scratch, ne, &not_in_new_space); |
| 440 Abort("new-space object passed to RecordWriteHelper"); | 440 Abort("new-space object passed to RecordWriteHelper"); |
| 441 bind(&not_in_new_space); | 441 bind(&not_in_new_space); |
| 442 } | 442 } |
| 443 | 443 |
| 444 // Calculate page address. | 444 // Calculate page address. |
| 445 Bfc(object, 0, kPageSizeBits); | 445 Bfc(object, 0, kPageSizeBits); |
| 446 | 446 |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 488 // Add offset into the object. | 488 // Add offset into the object. |
| 489 add(scratch0, object, offset); | 489 add(scratch0, object, offset); |
| 490 | 490 |
| 491 // Record the actual write. | 491 // Record the actual write. |
| 492 RecordWriteHelper(object, scratch0, scratch1); | 492 RecordWriteHelper(object, scratch0, scratch1); |
| 493 | 493 |
| 494 bind(&done); | 494 bind(&done); |
| 495 | 495 |
| 496 // Clobber all input registers when running with the debug-code flag | 496 // Clobber all input registers when running with the debug-code flag |
| 497 // turned on to provoke errors. | 497 // turned on to provoke errors. |
| 498 if (FLAG_debug_code) { | 498 if (emit_debug_code()) { |
| 499 mov(object, Operand(BitCast<int32_t>(kZapValue))); | 499 mov(object, Operand(BitCast<int32_t>(kZapValue))); |
| 500 mov(scratch0, Operand(BitCast<int32_t>(kZapValue))); | 500 mov(scratch0, Operand(BitCast<int32_t>(kZapValue))); |
| 501 mov(scratch1, Operand(BitCast<int32_t>(kZapValue))); | 501 mov(scratch1, Operand(BitCast<int32_t>(kZapValue))); |
| 502 } | 502 } |
| 503 } | 503 } |
| 504 | 504 |
| 505 | 505 |
| 506 // Will clobber 4 registers: object, address, scratch, ip. The | 506 // Will clobber 4 registers: object, address, scratch, ip. The |
| 507 // register 'object' contains a heap object pointer. The heap object | 507 // register 'object' contains a heap object pointer. The heap object |
| 508 // tag is shifted away. | 508 // tag is shifted away. |
| (...skipping 11 matching lines...) Expand all Loading... |
| 520 // region marks for new space pages. | 520 // region marks for new space pages. |
| 521 InNewSpace(object, scratch, eq, &done); | 521 InNewSpace(object, scratch, eq, &done); |
| 522 | 522 |
| 523 // Record the actual write. | 523 // Record the actual write. |
| 524 RecordWriteHelper(object, address, scratch); | 524 RecordWriteHelper(object, address, scratch); |
| 525 | 525 |
| 526 bind(&done); | 526 bind(&done); |
| 527 | 527 |
| 528 // Clobber all input registers when running with the debug-code flag | 528 // Clobber all input registers when running with the debug-code flag |
| 529 // turned on to provoke errors. | 529 // turned on to provoke errors. |
| 530 if (FLAG_debug_code) { | 530 if (emit_debug_code()) { |
| 531 mov(object, Operand(BitCast<int32_t>(kZapValue))); | 531 mov(object, Operand(BitCast<int32_t>(kZapValue))); |
| 532 mov(address, Operand(BitCast<int32_t>(kZapValue))); | 532 mov(address, Operand(BitCast<int32_t>(kZapValue))); |
| 533 mov(scratch, Operand(BitCast<int32_t>(kZapValue))); | 533 mov(scratch, Operand(BitCast<int32_t>(kZapValue))); |
| 534 } | 534 } |
| 535 } | 535 } |
| 536 | 536 |
| 537 | 537 |
| 538 // Push and pop all registers that can hold pointers. | 538 // Push and pop all registers that can hold pointers. |
| 539 void MacroAssembler::PushSafepointRegisters() { | 539 void MacroAssembler::PushSafepointRegisters() { |
| 540 // Safepoints expect a block of contiguous register values starting with r0: | 540 // Safepoints expect a block of contiguous register values starting with r0: |
| (...skipping 183 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 724 | 724 |
| 725 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { | 725 void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { |
| 726 // Setup the frame structure on the stack. | 726 // Setup the frame structure on the stack. |
| 727 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); | 727 ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement); |
| 728 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); | 728 ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset); |
| 729 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); | 729 ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset); |
| 730 Push(lr, fp); | 730 Push(lr, fp); |
| 731 mov(fp, Operand(sp)); // Setup new frame pointer. | 731 mov(fp, Operand(sp)); // Setup new frame pointer. |
| 732 // Reserve room for saved entry sp and code object. | 732 // Reserve room for saved entry sp and code object. |
| 733 sub(sp, sp, Operand(2 * kPointerSize)); | 733 sub(sp, sp, Operand(2 * kPointerSize)); |
| 734 if (FLAG_debug_code) { | 734 if (emit_debug_code()) { |
| 735 mov(ip, Operand(0)); | 735 mov(ip, Operand(0)); |
| 736 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 736 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
| 737 } | 737 } |
| 738 mov(ip, Operand(CodeObject())); | 738 mov(ip, Operand(CodeObject())); |
| 739 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 739 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
| 740 | 740 |
| 741 // Save the frame pointer and the context in top. | 741 // Save the frame pointer and the context in top. |
| 742 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | 742 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); |
| 743 str(fp, MemOperand(ip)); | 743 str(fp, MemOperand(ip)); |
| 744 mov(ip, Operand(ExternalReference(Top::k_context_address))); | 744 mov(ip, Operand(ExternalReference(Top::k_context_address))); |
| (...skipping 385 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1130 | 1130 |
| 1131 // Before returning we restore the context from the frame pointer if | 1131 // Before returning we restore the context from the frame pointer if |
| 1132 // not NULL. The frame pointer is NULL in the exception handler of a | 1132 // not NULL. The frame pointer is NULL in the exception handler of a |
| 1133 // JS entry frame. | 1133 // JS entry frame. |
| 1134 cmp(fp, Operand(0, RelocInfo::NONE)); | 1134 cmp(fp, Operand(0, RelocInfo::NONE)); |
| 1135 // Set cp to NULL if fp is NULL. | 1135 // Set cp to NULL if fp is NULL. |
| 1136 mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); | 1136 mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); |
| 1137 // Restore cp otherwise. | 1137 // Restore cp otherwise. |
| 1138 ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); | 1138 ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); |
| 1139 #ifdef DEBUG | 1139 #ifdef DEBUG |
| 1140 if (FLAG_debug_code) { | 1140 if (emit_debug_code()) { |
| 1141 mov(lr, Operand(pc)); | 1141 mov(lr, Operand(pc)); |
| 1142 } | 1142 } |
| 1143 #endif | 1143 #endif |
| 1144 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); | 1144 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); |
| 1145 pop(pc); | 1145 pop(pc); |
| 1146 } | 1146 } |
| 1147 | 1147 |
| 1148 | 1148 |
| 1149 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, | 1149 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, |
| 1150 Register value) { | 1150 Register value) { |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1203 ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state. | 1203 ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state. |
| 1204 // Before returning we restore the context from the frame pointer if | 1204 // Before returning we restore the context from the frame pointer if |
| 1205 // not NULL. The frame pointer is NULL in the exception handler of a | 1205 // not NULL. The frame pointer is NULL in the exception handler of a |
| 1206 // JS entry frame. | 1206 // JS entry frame. |
| 1207 cmp(fp, Operand(0, RelocInfo::NONE)); | 1207 cmp(fp, Operand(0, RelocInfo::NONE)); |
| 1208 // Set cp to NULL if fp is NULL. | 1208 // Set cp to NULL if fp is NULL. |
| 1209 mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); | 1209 mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq); |
| 1210 // Restore cp otherwise. | 1210 // Restore cp otherwise. |
| 1211 ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); | 1211 ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); |
| 1212 #ifdef DEBUG | 1212 #ifdef DEBUG |
| 1213 if (FLAG_debug_code) { | 1213 if (emit_debug_code()) { |
| 1214 mov(lr, Operand(pc)); | 1214 mov(lr, Operand(pc)); |
| 1215 } | 1215 } |
| 1216 #endif | 1216 #endif |
| 1217 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); | 1217 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); |
| 1218 pop(pc); | 1218 pop(pc); |
| 1219 } | 1219 } |
| 1220 | 1220 |
| 1221 | 1221 |
| 1222 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | 1222 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, |
| 1223 Register scratch, | 1223 Register scratch, |
| (...skipping 11 matching lines...) Expand all Loading... |
| 1235 cmp(scratch, Operand(0, RelocInfo::NONE)); | 1235 cmp(scratch, Operand(0, RelocInfo::NONE)); |
| 1236 Check(ne, "we should not have an empty lexical context"); | 1236 Check(ne, "we should not have an empty lexical context"); |
| 1237 #endif | 1237 #endif |
| 1238 | 1238 |
| 1239 // Load the global context of the current context. | 1239 // Load the global context of the current context. |
| 1240 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1240 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
| 1241 ldr(scratch, FieldMemOperand(scratch, offset)); | 1241 ldr(scratch, FieldMemOperand(scratch, offset)); |
| 1242 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); | 1242 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); |
| 1243 | 1243 |
| 1244 // Check the context is a global context. | 1244 // Check the context is a global context. |
| 1245 if (FLAG_debug_code) { | 1245 if (emit_debug_code()) { |
| 1246 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 1246 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
| 1247 // Cannot use ip as a temporary in this verification code. Due to the fact | 1247 // Cannot use ip as a temporary in this verification code. Due to the fact |
| 1248 // that ip is clobbered as part of cmp with an object Operand. | 1248 // that ip is clobbered as part of cmp with an object Operand. |
| 1249 push(holder_reg); // Temporarily save holder on the stack. | 1249 push(holder_reg); // Temporarily save holder on the stack. |
| 1250 // Read the first word and compare to the global_context_map. | 1250 // Read the first word and compare to the global_context_map. |
| 1251 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 1251 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 1252 LoadRoot(ip, Heap::kGlobalContextMapRootIndex); | 1252 LoadRoot(ip, Heap::kGlobalContextMapRootIndex); |
| 1253 cmp(holder_reg, ip); | 1253 cmp(holder_reg, ip); |
| 1254 Check(eq, "JSGlobalObject::global_context should be a global context."); | 1254 Check(eq, "JSGlobalObject::global_context should be a global context."); |
| 1255 pop(holder_reg); // Restore holder. | 1255 pop(holder_reg); // Restore holder. |
| 1256 } | 1256 } |
| 1257 | 1257 |
| 1258 // Check if both contexts are the same. | 1258 // Check if both contexts are the same. |
| 1259 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 1259 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
| 1260 cmp(scratch, Operand(ip)); | 1260 cmp(scratch, Operand(ip)); |
| 1261 b(eq, &same_contexts); | 1261 b(eq, &same_contexts); |
| 1262 | 1262 |
| 1263 // Check the context is a global context. | 1263 // Check the context is a global context. |
| 1264 if (FLAG_debug_code) { | 1264 if (emit_debug_code()) { |
| 1265 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 1265 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
| 1266 // Cannot use ip as a temporary in this verification code. Due to the fact | 1266 // Cannot use ip as a temporary in this verification code. Due to the fact |
| 1267 // that ip is clobbered as part of cmp with an object Operand. | 1267 // that ip is clobbered as part of cmp with an object Operand. |
| 1268 push(holder_reg); // Temporarily save holder on the stack. | 1268 push(holder_reg); // Temporarily save holder on the stack. |
| 1269 mov(holder_reg, ip); // Move ip to its holding place. | 1269 mov(holder_reg, ip); // Move ip to its holding place. |
| 1270 LoadRoot(ip, Heap::kNullValueRootIndex); | 1270 LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1271 cmp(holder_reg, ip); | 1271 cmp(holder_reg, ip); |
| 1272 Check(ne, "JSGlobalProxy::context() should not be null."); | 1272 Check(ne, "JSGlobalProxy::context() should not be null."); |
| 1273 | 1273 |
| 1274 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); | 1274 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 1296 } | 1296 } |
| 1297 | 1297 |
| 1298 | 1298 |
| 1299 void MacroAssembler::AllocateInNewSpace(int object_size, | 1299 void MacroAssembler::AllocateInNewSpace(int object_size, |
| 1300 Register result, | 1300 Register result, |
| 1301 Register scratch1, | 1301 Register scratch1, |
| 1302 Register scratch2, | 1302 Register scratch2, |
| 1303 Label* gc_required, | 1303 Label* gc_required, |
| 1304 AllocationFlags flags) { | 1304 AllocationFlags flags) { |
| 1305 if (!FLAG_inline_new) { | 1305 if (!FLAG_inline_new) { |
| 1306 if (FLAG_debug_code) { | 1306 if (emit_debug_code()) { |
| 1307 // Trash the registers to simulate an allocation failure. | 1307 // Trash the registers to simulate an allocation failure. |
| 1308 mov(result, Operand(0x7091)); | 1308 mov(result, Operand(0x7091)); |
| 1309 mov(scratch1, Operand(0x7191)); | 1309 mov(scratch1, Operand(0x7191)); |
| 1310 mov(scratch2, Operand(0x7291)); | 1310 mov(scratch2, Operand(0x7291)); |
| 1311 } | 1311 } |
| 1312 jmp(gc_required); | 1312 jmp(gc_required); |
| 1313 return; | 1313 return; |
| 1314 } | 1314 } |
| 1315 | 1315 |
| 1316 ASSERT(!result.is(scratch1)); | 1316 ASSERT(!result.is(scratch1)); |
| (...skipping 28 matching lines...) Expand all Loading... |
| 1345 Register obj_size_reg = scratch2; | 1345 Register obj_size_reg = scratch2; |
| 1346 mov(topaddr, Operand(new_space_allocation_top)); | 1346 mov(topaddr, Operand(new_space_allocation_top)); |
| 1347 mov(obj_size_reg, Operand(object_size)); | 1347 mov(obj_size_reg, Operand(object_size)); |
| 1348 | 1348 |
| 1349 // This code stores a temporary value in ip. This is OK, as the code below | 1349 // This code stores a temporary value in ip. This is OK, as the code below |
| 1350 // does not need ip for implicit literal generation. | 1350 // does not need ip for implicit literal generation. |
| 1351 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 1351 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
| 1352 // Load allocation top into result and allocation limit into ip. | 1352 // Load allocation top into result and allocation limit into ip. |
| 1353 ldm(ia, topaddr, result.bit() | ip.bit()); | 1353 ldm(ia, topaddr, result.bit() | ip.bit()); |
| 1354 } else { | 1354 } else { |
| 1355 if (FLAG_debug_code) { | 1355 if (emit_debug_code()) { |
| 1356 // Assert that result actually contains top on entry. ip is used | 1356 // Assert that result actually contains top on entry. ip is used |
| 1357 // immediately below so this use of ip does not cause difference with | 1357 // immediately below so this use of ip does not cause difference with |
| 1358 // respect to register content between debug and release mode. | 1358 // respect to register content between debug and release mode. |
| 1359 ldr(ip, MemOperand(topaddr)); | 1359 ldr(ip, MemOperand(topaddr)); |
| 1360 cmp(result, ip); | 1360 cmp(result, ip); |
| 1361 Check(eq, "Unexpected allocation top"); | 1361 Check(eq, "Unexpected allocation top"); |
| 1362 } | 1362 } |
| 1363 // Load allocation limit into ip. Result already contains allocation top. | 1363 // Load allocation limit into ip. Result already contains allocation top. |
| 1364 ldr(ip, MemOperand(topaddr, limit - top)); | 1364 ldr(ip, MemOperand(topaddr, limit - top)); |
| 1365 } | 1365 } |
| (...skipping 13 matching lines...) Expand all Loading... |
| 1379 } | 1379 } |
| 1380 | 1380 |
| 1381 | 1381 |
| 1382 void MacroAssembler::AllocateInNewSpace(Register object_size, | 1382 void MacroAssembler::AllocateInNewSpace(Register object_size, |
| 1383 Register result, | 1383 Register result, |
| 1384 Register scratch1, | 1384 Register scratch1, |
| 1385 Register scratch2, | 1385 Register scratch2, |
| 1386 Label* gc_required, | 1386 Label* gc_required, |
| 1387 AllocationFlags flags) { | 1387 AllocationFlags flags) { |
| 1388 if (!FLAG_inline_new) { | 1388 if (!FLAG_inline_new) { |
| 1389 if (FLAG_debug_code) { | 1389 if (emit_debug_code()) { |
| 1390 // Trash the registers to simulate an allocation failure. | 1390 // Trash the registers to simulate an allocation failure. |
| 1391 mov(result, Operand(0x7091)); | 1391 mov(result, Operand(0x7091)); |
| 1392 mov(scratch1, Operand(0x7191)); | 1392 mov(scratch1, Operand(0x7191)); |
| 1393 mov(scratch2, Operand(0x7291)); | 1393 mov(scratch2, Operand(0x7291)); |
| 1394 } | 1394 } |
| 1395 jmp(gc_required); | 1395 jmp(gc_required); |
| 1396 return; | 1396 return; |
| 1397 } | 1397 } |
| 1398 | 1398 |
| 1399 // Assert that the register arguments are different and that none of | 1399 // Assert that the register arguments are different and that none of |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1423 // Set up allocation top address. | 1423 // Set up allocation top address. |
| 1424 Register topaddr = scratch1; | 1424 Register topaddr = scratch1; |
| 1425 mov(topaddr, Operand(new_space_allocation_top)); | 1425 mov(topaddr, Operand(new_space_allocation_top)); |
| 1426 | 1426 |
| 1427 // This code stores a temporary value in ip. This is OK, as the code below | 1427 // This code stores a temporary value in ip. This is OK, as the code below |
| 1428 // does not need ip for implicit literal generation. | 1428 // does not need ip for implicit literal generation. |
| 1429 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 1429 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
| 1430 // Load allocation top into result and allocation limit into ip. | 1430 // Load allocation top into result and allocation limit into ip. |
| 1431 ldm(ia, topaddr, result.bit() | ip.bit()); | 1431 ldm(ia, topaddr, result.bit() | ip.bit()); |
| 1432 } else { | 1432 } else { |
| 1433 if (FLAG_debug_code) { | 1433 if (emit_debug_code()) { |
| 1434 // Assert that result actually contains top on entry. ip is used | 1434 // Assert that result actually contains top on entry. ip is used |
| 1435 // immediately below so this use of ip does not cause difference with | 1435 // immediately below so this use of ip does not cause difference with |
| 1436 // respect to register content between debug and release mode. | 1436 // respect to register content between debug and release mode. |
| 1437 ldr(ip, MemOperand(topaddr)); | 1437 ldr(ip, MemOperand(topaddr)); |
| 1438 cmp(result, ip); | 1438 cmp(result, ip); |
| 1439 Check(eq, "Unexpected allocation top"); | 1439 Check(eq, "Unexpected allocation top"); |
| 1440 } | 1440 } |
| 1441 // Load allocation limit into ip. Result already contains allocation top. | 1441 // Load allocation limit into ip. Result already contains allocation top. |
| 1442 ldr(ip, MemOperand(topaddr, limit - top)); | 1442 ldr(ip, MemOperand(topaddr, limit - top)); |
| 1443 } | 1443 } |
| 1444 | 1444 |
| 1445 // Calculate new top and bail out if new space is exhausted. Use result | 1445 // Calculate new top and bail out if new space is exhausted. Use result |
| 1446 // to calculate the new top. Object size may be in words so a shift is | 1446 // to calculate the new top. Object size may be in words so a shift is |
| 1447 // required to get the number of bytes. | 1447 // required to get the number of bytes. |
| 1448 if ((flags & SIZE_IN_WORDS) != 0) { | 1448 if ((flags & SIZE_IN_WORDS) != 0) { |
| 1449 add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC); | 1449 add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC); |
| 1450 } else { | 1450 } else { |
| 1451 add(scratch2, result, Operand(object_size), SetCC); | 1451 add(scratch2, result, Operand(object_size), SetCC); |
| 1452 } | 1452 } |
| 1453 b(cs, gc_required); | 1453 b(cs, gc_required); |
| 1454 cmp(scratch2, Operand(ip)); | 1454 cmp(scratch2, Operand(ip)); |
| 1455 b(hi, gc_required); | 1455 b(hi, gc_required); |
| 1456 | 1456 |
| 1457 // Update allocation top. result temporarily holds the new top. | 1457 // Update allocation top. result temporarily holds the new top. |
| 1458 if (FLAG_debug_code) { | 1458 if (emit_debug_code()) { |
| 1459 tst(scratch2, Operand(kObjectAlignmentMask)); | 1459 tst(scratch2, Operand(kObjectAlignmentMask)); |
| 1460 Check(eq, "Unaligned allocation in new space"); | 1460 Check(eq, "Unaligned allocation in new space"); |
| 1461 } | 1461 } |
| 1462 str(scratch2, MemOperand(topaddr)); | 1462 str(scratch2, MemOperand(topaddr)); |
| 1463 | 1463 |
| 1464 // Tag object if requested. | 1464 // Tag object if requested. |
| 1465 if ((flags & TAG_OBJECT) != 0) { | 1465 if ((flags & TAG_OBJECT) != 0) { |
| 1466 add(result, result, Operand(kHeapObjectTag)); | 1466 add(result, result, Operand(kHeapObjectTag)); |
| 1467 } | 1467 } |
| 1468 } | 1468 } |
| (...skipping 283 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1752 | 1752 |
| 1753 // If result is non-zero, dereference to get the result value | 1753 // If result is non-zero, dereference to get the result value |
| 1754 // otherwise set it to undefined. | 1754 // otherwise set it to undefined. |
| 1755 cmp(r0, Operand(0)); | 1755 cmp(r0, Operand(0)); |
| 1756 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 1756 LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
| 1757 ldr(r0, MemOperand(r0), ne); | 1757 ldr(r0, MemOperand(r0), ne); |
| 1758 | 1758 |
| 1759 // No more valid handles (the result handle was the last one). Restore | 1759 // No more valid handles (the result handle was the last one). Restore |
| 1760 // previous handle scope. | 1760 // previous handle scope. |
| 1761 str(r4, MemOperand(r7, kNextOffset)); | 1761 str(r4, MemOperand(r7, kNextOffset)); |
| 1762 if (FLAG_debug_code) { | 1762 if (emit_debug_code()) { |
| 1763 ldr(r1, MemOperand(r7, kLevelOffset)); | 1763 ldr(r1, MemOperand(r7, kLevelOffset)); |
| 1764 cmp(r1, r6); | 1764 cmp(r1, r6); |
| 1765 Check(eq, "Unexpected level after return from api call"); | 1765 Check(eq, "Unexpected level after return from api call"); |
| 1766 } | 1766 } |
| 1767 sub(r6, r6, Operand(1)); | 1767 sub(r6, r6, Operand(1)); |
| 1768 str(r6, MemOperand(r7, kLevelOffset)); | 1768 str(r6, MemOperand(r7, kLevelOffset)); |
| 1769 ldr(ip, MemOperand(r7, kLimitOffset)); | 1769 ldr(ip, MemOperand(r7, kLimitOffset)); |
| 1770 cmp(r5, ip); | 1770 cmp(r5, ip); |
| 1771 b(ne, &delete_allocated_handles); | 1771 b(ne, &delete_allocated_handles); |
| 1772 | 1772 |
| (...skipping 555 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2328 if (FLAG_native_code_counters && counter->Enabled()) { | 2328 if (FLAG_native_code_counters && counter->Enabled()) { |
| 2329 mov(scratch2, Operand(ExternalReference(counter))); | 2329 mov(scratch2, Operand(ExternalReference(counter))); |
| 2330 ldr(scratch1, MemOperand(scratch2)); | 2330 ldr(scratch1, MemOperand(scratch2)); |
| 2331 sub(scratch1, scratch1, Operand(value)); | 2331 sub(scratch1, scratch1, Operand(value)); |
| 2332 str(scratch1, MemOperand(scratch2)); | 2332 str(scratch1, MemOperand(scratch2)); |
| 2333 } | 2333 } |
| 2334 } | 2334 } |
| 2335 | 2335 |
| 2336 | 2336 |
| 2337 void MacroAssembler::Assert(Condition cond, const char* msg) { | 2337 void MacroAssembler::Assert(Condition cond, const char* msg) { |
| 2338 if (FLAG_debug_code) | 2338 if (emit_debug_code()) |
| 2339 Check(cond, msg); | 2339 Check(cond, msg); |
| 2340 } | 2340 } |
| 2341 | 2341 |
| 2342 | 2342 |
| 2343 void MacroAssembler::AssertRegisterIsRoot(Register reg, | 2343 void MacroAssembler::AssertRegisterIsRoot(Register reg, |
| 2344 Heap::RootListIndex index) { | 2344 Heap::RootListIndex index) { |
| 2345 if (FLAG_debug_code) { | 2345 if (emit_debug_code()) { |
| 2346 LoadRoot(ip, index); | 2346 LoadRoot(ip, index); |
| 2347 cmp(reg, ip); | 2347 cmp(reg, ip); |
| 2348 Check(eq, "Register did not match expected root"); | 2348 Check(eq, "Register did not match expected root"); |
| 2349 } | 2349 } |
| 2350 } | 2350 } |
| 2351 | 2351 |
| 2352 | 2352 |
| 2353 void MacroAssembler::AssertFastElements(Register elements) { | 2353 void MacroAssembler::AssertFastElements(Register elements) { |
| 2354 if (FLAG_debug_code) { | 2354 if (emit_debug_code()) { |
| 2355 ASSERT(!elements.is(ip)); | 2355 ASSERT(!elements.is(ip)); |
| 2356 Label ok; | 2356 Label ok; |
| 2357 push(elements); | 2357 push(elements); |
| 2358 ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset)); | 2358 ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset)); |
| 2359 LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | 2359 LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
| 2360 cmp(elements, ip); | 2360 cmp(elements, ip); |
| 2361 b(eq, &ok); | 2361 b(eq, &ok); |
| 2362 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); | 2362 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); |
| 2363 cmp(elements, ip); | 2363 cmp(elements, ip); |
| 2364 b(eq, &ok); | 2364 b(eq, &ok); |
| (...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2432 // Slot is in the current function context. Move it into the | 2432 // Slot is in the current function context. Move it into the |
| 2433 // destination register in case we store into it (the write barrier | 2433 // destination register in case we store into it (the write barrier |
| 2434 // cannot be allowed to destroy the context in esi). | 2434 // cannot be allowed to destroy the context in esi). |
| 2435 mov(dst, cp); | 2435 mov(dst, cp); |
| 2436 } | 2436 } |
| 2437 | 2437 |
| 2438 // We should not have found a 'with' context by walking the context chain | 2438 // We should not have found a 'with' context by walking the context chain |
| 2439 // (i.e., the static scope chain and runtime context chain do not agree). | 2439 // (i.e., the static scope chain and runtime context chain do not agree). |
| 2440 // A variable occurring in such a scope should have slot type LOOKUP and | 2440 // A variable occurring in such a scope should have slot type LOOKUP and |
| 2441 // not CONTEXT. | 2441 // not CONTEXT. |
| 2442 if (FLAG_debug_code) { | 2442 if (emit_debug_code()) { |
| 2443 ldr(ip, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 2443 ldr(ip, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
| 2444 cmp(dst, ip); | 2444 cmp(dst, ip); |
| 2445 Check(eq, "Yo dawg, I heard you liked function contexts " | 2445 Check(eq, "Yo dawg, I heard you liked function contexts " |
| 2446 "so I put function contexts in all your contexts"); | 2446 "so I put function contexts in all your contexts"); |
| 2447 } | 2447 } |
| 2448 } | 2448 } |
| 2449 | 2449 |
| 2450 | 2450 |
| 2451 void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 2451 void MacroAssembler::LoadGlobalFunction(int index, Register function) { |
| 2452 // Load the global or builtins object from the current context. | 2452 // Load the global or builtins object from the current context. |
| 2453 ldr(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2453 ldr(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 2454 // Load the global context from the global or builtins object. | 2454 // Load the global context from the global or builtins object. |
| 2455 ldr(function, FieldMemOperand(function, | 2455 ldr(function, FieldMemOperand(function, |
| 2456 GlobalObject::kGlobalContextOffset)); | 2456 GlobalObject::kGlobalContextOffset)); |
| 2457 // Load the function from the global context. | 2457 // Load the function from the global context. |
| 2458 ldr(function, MemOperand(function, Context::SlotOffset(index))); | 2458 ldr(function, MemOperand(function, Context::SlotOffset(index))); |
| 2459 } | 2459 } |
| 2460 | 2460 |
| 2461 | 2461 |
| 2462 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2462 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
| 2463 Register map, | 2463 Register map, |
| 2464 Register scratch) { | 2464 Register scratch) { |
| 2465 // Load the initial map. The global functions all have initial maps. | 2465 // Load the initial map. The global functions all have initial maps. |
| 2466 ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2466 ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 2467 if (FLAG_debug_code) { | 2467 if (emit_debug_code()) { |
| 2468 Label ok, fail; | 2468 Label ok, fail; |
| 2469 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, false); | 2469 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, false); |
| 2470 b(&ok); | 2470 b(&ok); |
| 2471 bind(&fail); | 2471 bind(&fail); |
| 2472 Abort("Global functions must have initial map"); | 2472 Abort("Global functions must have initial map"); |
| 2473 bind(&ok); | 2473 bind(&ok); |
| 2474 } | 2474 } |
| 2475 } | 2475 } |
| 2476 | 2476 |
| 2477 | 2477 |
| (...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2665 bind(&align_loop_1); | 2665 bind(&align_loop_1); |
| 2666 tst(src, Operand(kPointerSize - 1)); | 2666 tst(src, Operand(kPointerSize - 1)); |
| 2667 b(eq, &word_loop); | 2667 b(eq, &word_loop); |
| 2668 ldrb(scratch, MemOperand(src, 1, PostIndex)); | 2668 ldrb(scratch, MemOperand(src, 1, PostIndex)); |
| 2669 strb(scratch, MemOperand(dst, 1, PostIndex)); | 2669 strb(scratch, MemOperand(dst, 1, PostIndex)); |
| 2670 sub(length, length, Operand(1), SetCC); | 2670 sub(length, length, Operand(1), SetCC); |
| 2671 b(ne, &byte_loop_1); | 2671 b(ne, &byte_loop_1); |
| 2672 | 2672 |
| 2673 // Copy bytes in word size chunks. | 2673 // Copy bytes in word size chunks. |
| 2674 bind(&word_loop); | 2674 bind(&word_loop); |
| 2675 if (FLAG_debug_code) { | 2675 if (emit_debug_code()) { |
| 2676 tst(src, Operand(kPointerSize - 1)); | 2676 tst(src, Operand(kPointerSize - 1)); |
| 2677 Assert(eq, "Expecting alignment for CopyBytes"); | 2677 Assert(eq, "Expecting alignment for CopyBytes"); |
| 2678 } | 2678 } |
| 2679 cmp(length, Operand(kPointerSize)); | 2679 cmp(length, Operand(kPointerSize)); |
| 2680 b(lt, &byte_loop); | 2680 b(lt, &byte_loop); |
| 2681 ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); | 2681 ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); |
| 2682 #if CAN_USE_UNALIGNED_ACCESSES | 2682 #if CAN_USE_UNALIGNED_ACCESSES |
| 2683 str(scratch, MemOperand(dst, kPointerSize, PostIndex)); | 2683 str(scratch, MemOperand(dst, kPointerSize, PostIndex)); |
| 2684 #else | 2684 #else |
| 2685 strb(scratch, MemOperand(dst, 1, PostIndex)); | 2685 strb(scratch, MemOperand(dst, 1, PostIndex)); |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2795 mov(ip, Operand(function)); | 2795 mov(ip, Operand(function)); |
| 2796 CallCFunction(ip, num_arguments); | 2796 CallCFunction(ip, num_arguments); |
| 2797 } | 2797 } |
| 2798 | 2798 |
| 2799 | 2799 |
| 2800 void MacroAssembler::CallCFunction(Register function, int num_arguments) { | 2800 void MacroAssembler::CallCFunction(Register function, int num_arguments) { |
| 2801 // Make sure that the stack is aligned before calling a C function unless | 2801 // Make sure that the stack is aligned before calling a C function unless |
| 2802 // running in the simulator. The simulator has its own alignment check which | 2802 // running in the simulator. The simulator has its own alignment check which |
| 2803 // provides more information. | 2803 // provides more information. |
| 2804 #if defined(V8_HOST_ARCH_ARM) | 2804 #if defined(V8_HOST_ARCH_ARM) |
| 2805 if (FLAG_debug_code) { | 2805 if (emit_debug_code()) { |
| 2806 int frame_alignment = OS::ActivationFrameAlignment(); | 2806 int frame_alignment = OS::ActivationFrameAlignment(); |
| 2807 int frame_alignment_mask = frame_alignment - 1; | 2807 int frame_alignment_mask = frame_alignment - 1; |
| 2808 if (frame_alignment > kPointerSize) { | 2808 if (frame_alignment > kPointerSize) { |
| 2809 ASSERT(IsPowerOf2(frame_alignment)); | 2809 ASSERT(IsPowerOf2(frame_alignment)); |
| 2810 Label alignment_as_expected; | 2810 Label alignment_as_expected; |
| 2811 tst(sp, Operand(frame_alignment_mask)); | 2811 tst(sp, Operand(frame_alignment_mask)); |
| 2812 b(eq, &alignment_as_expected); | 2812 b(eq, &alignment_as_expected); |
| 2813 // Don't use Check here, as it will call Runtime_Abort possibly | 2813 // Don't use Check here, as it will call Runtime_Abort possibly |
| 2814 // re-entering here. | 2814 // re-entering here. |
| 2815 stop("Unexpected alignment"); | 2815 stop("Unexpected alignment"); |
| (...skipping 13 matching lines...) Expand all Loading... |
| 2829 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 2829 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
| 2830 } | 2830 } |
| 2831 } | 2831 } |
| 2832 | 2832 |
| 2833 | 2833 |
| 2834 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 2834 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
| 2835 Register result) { | 2835 Register result) { |
| 2836 const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 2836 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| 2837 const int32_t kPCRegOffset = 2 * kPointerSize; | 2837 const int32_t kPCRegOffset = 2 * kPointerSize; |
| 2838 ldr(result, MemOperand(ldr_location)); | 2838 ldr(result, MemOperand(ldr_location)); |
| 2839 if (FLAG_debug_code) { | 2839 if (emit_debug_code()) { |
| 2840 // Check that the instruction is a ldr reg, [pc + offset] . | 2840 // Check that the instruction is a ldr reg, [pc + offset] . |
| 2841 and_(result, result, Operand(kLdrPCPattern)); | 2841 and_(result, result, Operand(kLdrPCPattern)); |
| 2842 cmp(result, Operand(kLdrPCPattern)); | 2842 cmp(result, Operand(kLdrPCPattern)); |
| 2843 Check(eq, "The instruction to patch should be a load from pc."); | 2843 Check(eq, "The instruction to patch should be a load from pc."); |
| 2844 // Result was clobbered. Restore it. | 2844 // Result was clobbered. Restore it. |
| 2845 ldr(result, MemOperand(ldr_location)); | 2845 ldr(result, MemOperand(ldr_location)); |
| 2846 } | 2846 } |
| 2847 // Get the address of the constant. | 2847 // Get the address of the constant. |
| 2848 and_(result, result, Operand(kLdrOffsetMask)); | 2848 and_(result, result, Operand(kLdrOffsetMask)); |
| 2849 add(result, ldr_location, Operand(result)); | 2849 add(result, ldr_location, Operand(result)); |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2886 void CodePatcher::EmitCondition(Condition cond) { | 2886 void CodePatcher::EmitCondition(Condition cond) { |
| 2887 Instr instr = Assembler::instr_at(masm_.pc_); | 2887 Instr instr = Assembler::instr_at(masm_.pc_); |
| 2888 instr = (instr & ~kCondMask) | cond; | 2888 instr = (instr & ~kCondMask) | cond; |
| 2889 masm_.emit(instr); | 2889 masm_.emit(instr); |
| 2890 } | 2890 } |
| 2891 | 2891 |
| 2892 | 2892 |
| 2893 } } // namespace v8::internal | 2893 } } // namespace v8::internal |
| 2894 | 2894 |
| 2895 #endif // V8_TARGET_ARCH_ARM | 2895 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |