| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 353 matching lines...) | |
| 364 Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize)); | 364 Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize)); |
| 365 __ j(not_zero, miss); | 365 __ j(not_zero, miss); |
| 366 | 366 |
| 367 // Get the value at the masked, scaled index. | 367 // Get the value at the masked, scaled index. |
| 368 const int kValueOffset = | 368 const int kValueOffset = |
| 369 NumberDictionary::kElementsStartOffset + kPointerSize; | 369 NumberDictionary::kElementsStartOffset + kPointerSize; |
| 370 __ mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); | 370 __ mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); |
| 371 } | 371 } |
| 372 | 372 |
| 373 | 373 |
| 374 // The offset from the inlined patch site to the start of the | |
| 375 // inlined load instruction. It is 7 bytes (test eax, imm) plus | |
| 376 // 6 bytes (jne slow_label). | |
| 377 const int LoadIC::kOffsetToLoadInstruction = 13; | |
| 378 | |
| 379 | |
| 380 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { | 374 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { |
| 381 // ----------- S t a t e ------------- | 375 // ----------- S t a t e ------------- |
| 382 // -- eax : receiver | 376 // -- eax : receiver |
| 383 // -- ecx : name | 377 // -- ecx : name |
| 384 // -- esp[0] : return address | 378 // -- esp[0] : return address |
| 385 // ----------------------------------- | 379 // ----------------------------------- |
| 386 Label miss; | 380 Label miss; |
| 387 | 381 |
| 388 StubCompiler::GenerateLoadArrayLength(masm, eax, edx, &miss); | 382 StubCompiler::GenerateLoadArrayLength(masm, eax, edx, &miss); |
| 389 __ bind(&miss); | 383 __ bind(&miss); |
| (...skipping 876 matching lines...) | |
| 1266 __ push(ecx); // name | 1260 __ push(ecx); // name |
| 1267 __ push(ebx); // return address | 1261 __ push(ebx); // return address |
| 1268 | 1262 |
| 1269 // Perform tail call to the entry. | 1263 // Perform tail call to the entry. |
| 1270 ExternalReference ref = | 1264 ExternalReference ref = |
| 1271 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); | 1265 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); |
| 1272 __ TailCallExternalReference(ref, 2, 1); | 1266 __ TailCallExternalReference(ref, 2, 1); |
| 1273 } | 1267 } |
| 1274 | 1268 |
| 1275 | 1269 |
| 1276 bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) { | |
| 1277 if (V8::UseCrankshaft()) return false; | |
| 1278 | |
| 1279 // The address of the instruction following the call. | |
| 1280 Address test_instruction_address = | |
| 1281 address + Assembler::kCallTargetAddressOffset; | |
| 1282 // If the instruction following the call is not a test eax, nothing | |
| 1283 // was inlined. | |
| 1284 if (*test_instruction_address != Assembler::kTestEaxByte) return false; | |
| 1285 | |
| 1286 Address delta_address = test_instruction_address + 1; | |
| 1287 // The delta to the start of the map check instruction. | |
| 1288 int delta = *reinterpret_cast<int*>(delta_address); | |
| 1289 | |
| 1290 // The map address is the last 4 bytes of the 7-byte | |
| 1291 // operand-immediate compare instruction, so we add 3 to get the | |
| 1292 // offset to the last 4 bytes. | |
| 1293 Address map_address = test_instruction_address + delta + 3; | |
| 1294 *(reinterpret_cast<Object**>(map_address)) = map; | |
| 1295 | |
| 1296 // The offset is in the last 4 bytes of a six byte | |
| 1297 // memory-to-register move instruction, so we add 2 to get the | |
| 1298 // offset to the last 4 bytes. | |
| 1299 Address offset_address = | |
| 1300 test_instruction_address + delta + kOffsetToLoadInstruction + 2; | |
| 1301 *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag; | |
| 1302 return true; | |
| 1303 } | |
| 1304 | |
| 1305 | |
| 1306 // One byte opcode for mov ecx,0xXXXXXXXX. | |
| 1307 // Marks inlined contextual loads using all kinds of cells. Generated | |
| 1308 // code has the hole check: | |
| 1309 // mov reg, <cell> | |
| 1310 // mov reg, (<cell>, value offset) | |
| 1311 // cmp reg, <the hole> | |
| 1312 // je slow | |
| 1313 // ;; use reg | |
| 1314 static const byte kMovEcxByte = 0xB9; | |
| 1315 | |
| 1316 // One byte opcode for mov edx,0xXXXXXXXX. | |
| 1317 // Marks inlined contextual loads using only "don't delete" | |
| 1318 // cells. Generated code doesn't have the hole check: | |
| 1319 // mov reg, <cell> | |
| 1320 // mov reg, (<cell>, value offset) | |
| 1321 // ;; use reg | |
| 1322 static const byte kMovEdxByte = 0xBA; | |
| 1323 | |
| 1324 bool LoadIC::PatchInlinedContextualLoad(Address address, | |
| 1325 Object* map, | |
| 1326 Object* cell, | |
| 1327 bool is_dont_delete) { | |
| 1328 if (V8::UseCrankshaft()) return false; | |
| 1329 | |
| 1330 // The address of the instruction following the call. | |
| 1331 Address mov_instruction_address = | |
| 1332 address + Assembler::kCallTargetAddressOffset; | |
| 1333 // If the instruction following the call is not a mov ecx/edx, | |
| 1334 // nothing was inlined. | |
| 1335 byte b = *mov_instruction_address; | |
| 1336 if (b != kMovEcxByte && b != kMovEdxByte) return false; | |
| 1337 // If we don't have the hole check generated, we can only support | |
| 1338 // "don't delete" cells. | |
| 1339 if (b == kMovEdxByte && !is_dont_delete) return false; | |
| 1340 | |
| 1341 Address delta_address = mov_instruction_address + 1; | |
| 1342 // The delta to the start of the map check instruction. | |
| 1343 int delta = *reinterpret_cast<int*>(delta_address); | |
| 1344 | |
| 1345 // The map address is the last 4 bytes of the 7-byte | |
| 1346 // operand-immediate compare instruction, so we add 3 to get the | |
| 1347 // offset to the last 4 bytes. | |
| 1348 Address map_address = mov_instruction_address + delta + 3; | |
| 1349 *(reinterpret_cast<Object**>(map_address)) = map; | |
| 1350 | |
| 1351 // The cell is in the last 4 bytes of a five byte mov reg, imm32 | |
| 1352 // instruction, so we add 1 to get the offset to the last 4 bytes. | |
| 1353 Address offset_address = | |
| 1354 mov_instruction_address + delta + kOffsetToLoadInstruction + 1; | |
| 1355 *reinterpret_cast<Object**>(offset_address) = cell; | |
| 1356 return true; | |
| 1357 } | |
| 1358 | |
| 1359 | |
| 1360 bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) { | |
| 1361 if (V8::UseCrankshaft()) return false; | |
| 1362 | |
| 1363 // The address of the instruction following the call. | |
| 1364 Address test_instruction_address = | |
| 1365 address + Assembler::kCallTargetAddressOffset; | |
| 1366 | |
| 1367 // If the instruction following the call is not a test eax, nothing | |
| 1368 // was inlined. | |
| 1369 if (*test_instruction_address != Assembler::kTestEaxByte) return false; | |
| 1370 | |
| 1371 // Extract the encoded deltas from the test eax instruction. | |
| 1372 Address encoded_offsets_address = test_instruction_address + 1; | |
| 1373 int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address); | |
| 1374 int delta_to_map_check = -(encoded_offsets & 0xFFFF); | |
| 1375 int delta_to_record_write = encoded_offsets >> 16; | |
| 1376 | |
| 1377 // Patch the map to check. The map address is the last 4 bytes of | |
| 1378 // the 7-byte operand-immediate compare instruction. | |
| 1379 Address map_check_address = test_instruction_address + delta_to_map_check; | |
| 1380 Address map_address = map_check_address + 3; | |
| 1381 *(reinterpret_cast<Object**>(map_address)) = map; | |
| 1382 | |
| 1383 // Patch the offset in the store instruction. The offset is in the | |
| 1384 // last 4 bytes of a six byte register-to-memory move instruction. | |
| 1385 Address offset_address = | |
| 1386 map_check_address + StoreIC::kOffsetToStoreInstruction + 2; | |
| 1387 // The offset should have initial value (kMaxInt - 1), cleared value | |
| 1388 // (-1) or we should be clearing the inlined version. | |
| 1389 ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 || | |
| 1390 *reinterpret_cast<int*>(offset_address) == -1 || | |
| 1391 (offset == 0 && map == HEAP->null_value())); | |
| 1392 *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag; | |
| 1393 | |
| 1394 // Patch the offset in the write-barrier code. The offset is the | |
| 1395 // last 4 bytes of a six byte lea instruction. | |
| 1396 offset_address = map_check_address + delta_to_record_write + 2; | |
| 1397 // The offset should have initial value (kMaxInt), cleared value | |
| 1398 // (-1) or we should be clearing the inlined version. | |
| 1399 ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt || | |
| 1400 *reinterpret_cast<int*>(offset_address) == -1 || | |
| 1401 (offset == 0 && map == HEAP->null_value())); | |
| 1402 *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag; | |
| 1403 | |
| 1404 return true; | |
| 1405 } | |
| 1406 | |
| 1407 | |
| 1408 static bool PatchInlinedMapCheck(Address address, Object* map) { | |
| 1409 if (V8::UseCrankshaft()) return false; | |
| 1410 | |
| 1411 Address test_instruction_address = | |
| 1412 address + Assembler::kCallTargetAddressOffset; | |
| 1413 // The keyed load has a fast inlined case if the IC call instruction | |
| 1414 // is immediately followed by a test instruction. | |
| 1415 if (*test_instruction_address != Assembler::kTestEaxByte) return false; | |
| 1416 | |
| 1417 // Fetch the offset from the test instruction to the map cmp | |
| 1418 // instruction. This offset is stored in the last 4 bytes of the 5 | |
| 1419 // byte test instruction. | |
| 1420 Address delta_address = test_instruction_address + 1; | |
| 1421 int delta = *reinterpret_cast<int*>(delta_address); | |
| 1422 // Compute the map address. The map address is in the last 4 bytes | |
| 1423 // of the 7-byte operand-immediate compare instruction, so we add 3 | |
| 1424 // to the offset to get the map address. | |
| 1425 Address map_address = test_instruction_address + delta + 3; | |
| 1426 // Patch the map check. | |
| 1427 *(reinterpret_cast<Object**>(map_address)) = map; | |
| 1428 return true; | |
| 1429 } | |
| 1430 | |
| 1431 | |
| 1432 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) { | |
| 1433 return PatchInlinedMapCheck(address, map); | |
| 1434 } | |
| 1435 | |
| 1436 | |
| 1437 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) { | |
| 1438 return PatchInlinedMapCheck(address, map); | |
| 1439 } | |
| 1440 | |
| 1441 | |
| 1442 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 1270 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 1443 // ----------- S t a t e ------------- | 1271 // ----------- S t a t e ------------- |
| 1444 // -- eax : key | 1272 // -- eax : key |
| 1445 // -- edx : receiver | 1273 // -- edx : receiver |
| 1446 // -- esp[0] : return address | 1274 // -- esp[0] : return address |
| 1447 // ----------------------------------- | 1275 // ----------------------------------- |
| 1448 | 1276 |
| 1449 __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1); | 1277 __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1); |
| 1450 | 1278 |
| 1451 __ pop(ebx); | 1279 __ pop(ebx); |
| (...skipping 60 matching lines...) | |
| 1512 __ push(eax); | 1340 __ push(eax); |
| 1513 __ push(ebx); | 1341 __ push(ebx); |
| 1514 | 1342 |
| 1515 // Perform tail call to the entry. | 1343 // Perform tail call to the entry. |
| 1516 ExternalReference ref = | 1344 ExternalReference ref = |
| 1517 ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); | 1345 ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); |
| 1518 __ TailCallExternalReference(ref, 3, 1); | 1346 __ TailCallExternalReference(ref, 3, 1); |
| 1519 } | 1347 } |
| 1520 | 1348 |
| 1521 | 1349 |
| 1522 // The offset from the inlined patch site to the start of the inlined | |
| 1523 // store instruction. It is 7 bytes (test reg, imm) plus 6 bytes (jne | |
| 1524 // slow_label). | |
| 1525 const int StoreIC::kOffsetToStoreInstruction = 13; | |
| 1526 | |
| 1527 | |
| 1528 void StoreIC::GenerateArrayLength(MacroAssembler* masm) { | 1350 void StoreIC::GenerateArrayLength(MacroAssembler* masm) { |
| 1529 // ----------- S t a t e ------------- | 1351 // ----------- S t a t e ------------- |
| 1530 // -- eax : value | 1352 // -- eax : value |
| 1531 // -- ecx : name | 1353 // -- ecx : name |
| 1532 // -- edx : receiver | 1354 // -- edx : receiver |
| 1533 // -- esp[0] : return address | 1355 // -- esp[0] : return address |
| 1534 // ----------------------------------- | 1356 // ----------------------------------- |
| 1535 // | 1357 // |
| 1536 // This accepts as a receiver anything JSObject::SetElementsLength accepts | 1358 // This accepts as a receiver anything JSObject::SetElementsLength accepts |
| 1537 // (currently anything except for external arrays which means anything with | 1359 // (currently anything except for external arrays which means anything with |
| (...skipping 232 matching lines...) | |
| 1770 Condition cc = *jmp_address == Assembler::kJncShortOpcode | 1592 Condition cc = *jmp_address == Assembler::kJncShortOpcode |
| 1771 ? not_zero | 1593 ? not_zero |
| 1772 : zero; | 1594 : zero; |
| 1773 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 1595 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
| 1774 } | 1596 } |
| 1775 | 1597 |
| 1776 | 1598 |
| 1777 } } // namespace v8::internal | 1599 } } // namespace v8::internal |
| 1778 | 1600 |
| 1779 #endif // V8_TARGET_ARCH_IA32 | 1601 #endif // V8_TARGET_ARCH_IA32 |
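
Note on the removed inline-cache patching code: LoadIC::PatchInlinedLoad and
LoadIC::PatchInlinedContextualLoad locate the fields to patch purely by address
arithmetic. The 32-bit immediate of the marker instruction that follows the IC
call (test eax for ordinary loads, mov ecx / mov edx for contextual loads) holds
a backward delta to the map-check cmp; the map immediate is then the last 4
bytes of that 7-byte cmp (+3), the property offset is the last 4 bytes of the
6-byte load that starts kOffsetToLoadInstruction bytes past the cmp (+2), and
the cell is the last 4 bytes of the 5-byte mov reg, imm32 (+1). The sketch below
redoes that arithmetic on a plain byte buffer so the constants are easier to
check; the function name, buffer layout and the 0xA9 test-eax opcode value are
illustrative assumptions, not code from the V8 tree.

// Standalone sketch (not V8 code): mirrors the address arithmetic of the
// removed LoadIC::PatchInlinedLoad on a byte buffer.
#include <cstdint>
#include <cstdio>
#include <cstring>

static const uint8_t kTestEaxByte = 0xA9;        // assumed test eax, imm32 opcode
static const int kOffsetToLoadInstruction = 13;  // value of the removed constant

// Returns the index of the 4-byte field holding the inlined property offset,
// or -1 if the marker is missing (nothing was inlined at this site).
static int FindInlinedLoadOffsetField(const uint8_t* code, int call_end) {
  if (code[call_end] != kTestEaxByte) return -1;
  int32_t delta;  // backward delta to the start of the map-check cmp
  std::memcpy(&delta, code + call_end + 1, sizeof(delta));
  int map_check = call_end + delta;
  int map_field = map_check + 3;  // last 4 bytes of the 7-byte cmp-with-immediate
  int offset_field =              // last 4 bytes of the 6-byte load instruction
      map_check + kOffsetToLoadInstruction + 2;
  std::printf("map field at %d, offset field at %d\n", map_field, offset_field);
  return offset_field;
}

int main() {
  uint8_t code[64] = {0};
  code[20] = kTestEaxByte;  // toy layout: map-check cmp at 0, marker at 20
  int32_t delta = -20;
  std::memcpy(code + 21, &delta, sizeof(delta));
  return FindInlinedLoadOffsetField(code, 20) < 0;
}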
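
The removed StoreIC::PatchInlinedStore packs two deltas into the immediate of
its marker test eax instruction: the low 16 bits hold the negated distance back
to the map-check cmp, and the high 16 bits hold the distance forward from that
cmp to the write-barrier lea. A minimal encode/decode sketch of that packing
follows, with invented names (EncodeOffsets, StorePatchDeltas); it illustrates
the scheme, it is not the V8 implementation.

// Standalone sketch (not V8 code): the 16/16-bit delta packing decoded by the
// removed StoreIC::PatchInlinedStore.
#include <cassert>
#include <cstdint>

struct StorePatchDeltas {
  int delta_to_map_check;     // negative: the cmp precedes the marker
  int delta_to_record_write;  // forward distance from the map check
};

static int32_t EncodeOffsets(int to_map_check, int to_record_write) {
  // Magnitude of the backward delta in the low 16 bits, forward delta above.
  return (to_record_write << 16) | (-to_map_check & 0xFFFF);
}

static StorePatchDeltas DecodeOffsets(int32_t encoded) {
  StorePatchDeltas d;
  d.delta_to_map_check = -(encoded & 0xFFFF);  // as in the removed code
  d.delta_to_record_write = encoded >> 16;
  return d;
}

int main() {
  StorePatchDeltas d = DecodeOffsets(EncodeOffsets(-0x30, 0x20));
  assert(d.delta_to_map_check == -0x30);
  assert(d.delta_to_record_write == 0x20);
  return 0;
}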