| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
| 6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
| 7 #include "src/frames-inl.h" |
| 8 #include "src/frames.h" |
| 9 #include "src/ic/stub-cache.h" |
| 7 | 10 |
| 8 namespace v8 { | 11 namespace v8 { |
| 9 namespace internal { | 12 namespace internal { |
| 10 | 13 |
| 11 using compiler::Node; | 14 using compiler::Node; |
| 12 | 15 |
| 13 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, | 16 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, |
| 14 const CallInterfaceDescriptor& descriptor, | 17 const CallInterfaceDescriptor& descriptor, |
| 15 Code::Flags flags, const char* name, | 18 Code::Flags flags, const char* name, |
| 16 size_t result_size) | 19 size_t result_size) |
| (...skipping 446 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 463 } | 466 } |
| 464 | 467 |
// Produces a tagged pointer {offset} bytes into a previously allocated
// object {previous}; no new allocation is performed.
Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
  return BitcastWordToTagged(IntPtrAdd(previous, offset));
}
| 468 | 471 |
// Convenience overload of InnerAllocate taking a compile-time byte offset.
Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return InnerAllocate(previous, IntPtrConstant(offset));
}
| 472 | 475 |
| 476 compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) { |
| 477 Node* frame_pointer = LoadFramePointer(); |
| 478 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
| 479 } |
| 480 |
| 481 compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset, |
| 482 MachineType rep) { |
| 483 Node* frame_pointer = LoadParentFramePointer(); |
| 484 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
| 485 } |
| 486 |
// Loads a value of machine type {rep} at byte {offset} from an untagged
// {buffer} pointer; no heap-object tag adjustment is applied (contrast
// LoadObjectField below).
Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return Load(rep, buffer, IntPtrConstant(offset));
}
| 477 | 491 |
// Loads a field of machine type {rep} from a tagged heap object. {offset} is
// relative to the object's start, so the heap-object tag is subtracted to
// form the untagged address offset.
Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}
| 482 | 496 |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 549 } | 563 } |
| 550 | 564 |
// Loads the length field of a String {object}.
Node* CodeStubAssembler::LoadStringLength(Node* object) {
  return LoadObjectField(object, String::kLengthOffset);
}
| 554 | 568 |
// Loads the wrapped primitive value of a JSValue wrapper {object}.
Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
  return LoadObjectField(object, JSValue::kValueOffset);
}
| 558 | 572 |
| 573 Node* CodeStubAssembler::LoadWeakCellValue(Node* map) { |
| 574 return LoadObjectField(map, WeakCell::kValueOffset); |
| 575 } |
| 576 |
| 559 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { | 577 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { |
| 560 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); | 578 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); |
| 561 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); | 579 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); |
| 562 Node* total_size = IntPtrAdd(data_size, header_size); | 580 Node* total_size = IntPtrAdd(data_size, header_size); |
| 563 | 581 |
| 564 Node* result = Allocate(total_size, kNone); | 582 Node* result = Allocate(total_size, kNone); |
| 565 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); | 583 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); |
| 566 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, | 584 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, |
| 567 SmiTag(length)); | 585 SmiTag(length)); |
| 568 | 586 |
| (...skipping 850 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1419 Bind(&if_done); | 1437 Bind(&if_done); |
| 1420 return var_result.value(); | 1438 return var_result.value(); |
| 1421 } | 1439 } |
| 1422 | 1440 |
// Decodes a bit field from {word32}: applies {mask} (which must be positioned
// at the field's original bit location) and then shifts right by {shift}.
Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return Word32Shr(Word32And(word32, Int32Constant(mask)),
                   Int32Constant(shift));
}
| 1428 | 1446 |
// Stores {value} into the given stats counter, if native code counters are
// enabled and this particular counter is active. The store is emitted without
// a write barrier since the counter cell is raw (non-heap) memory.
void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address = ExternalConstant(ExternalReference(counter));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
                        Int32Constant(value));
  }
}
| 1454 |
| 1455 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) { |
| 1456 DCHECK(delta > 0); |
| 1457 if (FLAG_native_code_counters && counter->Enabled()) { |
| 1458 Node* counter_address = ExternalConstant(ExternalReference(counter)); |
| 1459 Node* value = Load(MachineType::Int32(), counter_address); |
| 1460 value = Int32Add(value, Int32Constant(delta)); |
| 1461 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value); |
| 1462 } |
| 1463 } |
| 1464 |
| 1465 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) { |
| 1466 DCHECK(delta > 0); |
| 1467 if (FLAG_native_code_counters && counter->Enabled()) { |
| 1468 Node* counter_address = ExternalConstant(ExternalReference(counter)); |
| 1469 Node* value = Load(MachineType::Int32(), counter_address); |
| 1470 value = Int32Sub(value, Int32Constant(delta)); |
| 1471 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value); |
| 1472 } |
| 1473 } |
| 1474 |
| 1429 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, | 1475 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, |
| 1430 Variable* var_index, Label* if_keyisunique, | 1476 Variable* var_index, Label* if_keyisunique, |
| 1431 Label* if_bailout) { | 1477 Label* if_bailout) { |
| 1432 DCHECK_EQ(MachineRepresentation::kWord32, var_index->rep()); | 1478 DCHECK_EQ(MachineRepresentation::kWord32, var_index->rep()); |
| 1433 | 1479 |
| 1434 Label if_keyissmi(this), if_keyisnotsmi(this); | 1480 Label if_keyissmi(this), if_keyisnotsmi(this); |
| 1435 Branch(WordIsSmi(key), &if_keyissmi, &if_keyisnotsmi); | 1481 Branch(WordIsSmi(key), &if_keyissmi, &if_keyisnotsmi); |
| 1436 Bind(&if_keyissmi); | 1482 Bind(&if_keyissmi); |
| 1437 { | 1483 { |
| 1438 // Negative smi keys are named properties. Handle in the runtime. | 1484 // Negative smi keys are named properties. Handle in the runtime. |
| (...skipping 26 matching lines...) Expand all Loading... |
| 1465 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique); | 1511 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique); |
| 1466 // Key is an index. Check if it is small enough to be encoded in the | 1512 // Key is an index. Check if it is small enough to be encoded in the |
| 1467 // hash_field. Handle too big array index in runtime. | 1513 // hash_field. Handle too big array index in runtime. |
| 1468 bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask)); | 1514 bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask)); |
| 1469 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout); | 1515 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout); |
| 1470 var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash)); | 1516 var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash)); |
| 1471 Goto(if_keyisindex); | 1517 Goto(if_keyisindex); |
| 1472 } | 1518 } |
| 1473 | 1519 |
// Looks up {unique_name} in {dictionary} using the open-addressing probe
// sequence of the given Dictionary type. The first {inlined_probes} probes
// are fully unrolled; the remainder run in an emitted loop. On success binds
// {var_entry} to the entry index and jumps to {if_found}; when an undefined
// slot is reached, jumps to {if_not_found}. {var_entry} must be a word32
// variable.
template <typename Dictionary>
void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
                                             Node* unique_name, Label* if_found,
                                             Variable* var_entry,
                                             Label* if_not_found,
                                             int inlined_probes) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_entry->rep());

  const int kElementsStartOffset =
      Dictionary::kElementsStartIndex * kPointerSize;

  // Capacity is always a power of two, so (capacity - 1) is a valid mask.
  Node* capacity = SmiToWord32(LoadFixedArrayElement(
      dictionary, Int32Constant(Dictionary::kCapacityIndex)));
  Node* mask = Int32Sub(capacity, Int32Constant(1));
  Node* hash = LoadNameHash(unique_name);

  // See Dictionary::FirstProbe().
  Node* count = Int32Constant(0);
  Node* entry = Word32And(hash, mask);

  // Unrolled probes: no undefined check here, matching the expectation that
  // the first few probes usually hit; a miss falls through to the loop below.
  for (int i = 0; i < inlined_probes; i++) {
    // See Dictionary::EntryToIndex()
    Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize));
    Node* current =
        LoadFixedArrayElement(dictionary, index, kElementsStartOffset);
    var_entry->Bind(entry);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = Int32Constant(i + 1);
    entry = Word32And(Int32Add(entry, count), mask);
  }

  Node* undefined = UndefinedConstant();

  // Generic probe loop; {var_count} and {var_entry} are the loop variables.
  Variable var_count(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  var_count.Bind(count);
  var_entry->Bind(entry);
  Goto(&loop);
  Bind(&loop);
  {
    Node* count = var_count.value();
    Node* entry = var_entry->value();

    // See Dictionary::EntryToIndex()
    Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize));
    Node* current =
        LoadFixedArrayElement(dictionary, index, kElementsStartOffset);
    // An undefined key terminates the probe sequence: the name is absent.
    GotoIf(WordEqual(current, undefined), if_not_found);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = Int32Add(count, Int32Constant(1));
    entry = Word32And(Int32Add(entry, count), mask);

    var_count.Bind(count);
    var_entry->Bind(entry);
    Goto(&loop);
  }
}

// Instantiate template methods to workaround GCC compilation issue.
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int);
| 1547 | 1588 |
| 1548 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { | 1589 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { |
| 1549 // See v8::internal::ComputeIntegerHash() | 1590 // See v8::internal::ComputeIntegerHash() |
| (...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1976 ? WordShl(index_node, IntPtrConstant(element_size_shift)) | 2017 ? WordShl(index_node, IntPtrConstant(element_size_shift)) |
| 1977 : WordShr(index_node, IntPtrConstant(-element_size_shift)); | 2018 : WordShr(index_node, IntPtrConstant(-element_size_shift)); |
| 1978 } | 2019 } |
| 1979 return IntPtrAdd( | 2020 return IntPtrAdd( |
| 1980 IntPtrConstant(base_size), | 2021 IntPtrConstant(base_size), |
| 1981 (element_size_shift >= 0) | 2022 (element_size_shift >= 0) |
| 1982 ? WordShl(index_node, IntPtrConstant(element_size_shift)) | 2023 ? WordShl(index_node, IntPtrConstant(element_size_shift)) |
| 1983 : WordShr(index_node, IntPtrConstant(-element_size_shift))); | 2024 : WordShr(index_node, IntPtrConstant(-element_size_shift))); |
| 1984 } | 2025 } |
| 1985 | 2026 |
// Loads the type feedback vector for the stub's caller: reads the JSFunction
// from the parent JavaScript frame, then its literals array, then the
// feedback vector stored in the literals.
compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
  Node* function =
      LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
  Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
  return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
}
| 2033 |
// Returns the map of {receiver}. Smis carry no map pointer, so the
// heap-number map is substituted for them (presumably so the IC machinery
// can dispatch on Smis like on heap numbers — confirm against callers).
compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
  Variable var_receiver_map(this, MachineRepresentation::kTagged);
  // TODO(ishell): defer blocks when it works.
  Label load_smi_map(this /*, Label::kDeferred*/), load_receiver_map(this),
      if_result(this);

  Branch(WordIsSmi(receiver), &load_smi_map, &load_receiver_map);
  Bind(&load_smi_map);
  {
    var_receiver_map.Bind(LoadRoot(Heap::kHeapNumberMapRootIndex));
    Goto(&if_result);
  }
  Bind(&load_receiver_map);
  {
    var_receiver_map.Bind(LoadMap(receiver));
    Goto(&if_result);
  }
  Bind(&if_result);
  return var_receiver_map.value();
}
| 2054 |
// Fast path for a monomorphic IC: loads the feedback slot and, if it is a
// weak cell holding {receiver_map}, loads the handler from the next slot,
// binds {var_handler} and jumps to {if_handler}; otherwise jumps to
// {if_miss}. Returns the raw feedback value so callers can continue
// dispatching on it (polymorphic / megamorphic cases).
compiler::Node* CodeStubAssembler::TryMonomorphicCase(
    const LoadICParameters* p, compiler::Node* receiver_map, Label* if_handler,
    Variable* var_handler, Label* if_miss) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());

  // TODO(ishell): add helper class that hides offset computations for a series
  // of loads.
  int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag;
  // {p->slot} is a Smi index into the feedback vector.
  Node* offset = ElementOffsetFromIndex(p->slot, FAST_HOLEY_ELEMENTS,
                                        SMI_PARAMETERS, header_size);
  Node* feedback = Load(MachineType::AnyTagged(), p->vector, offset);

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  GotoUnless(WordEqual(receiver_map, LoadWeakCellValue(feedback)), if_miss);

  // The handler lives in the slot immediately after the weak cell.
  Node* handler = Load(MachineType::AnyTagged(), p->vector,
                       IntPtrAdd(offset, IntPtrConstant(kPointerSize)));

  var_handler->Bind(handler);
  Goto(if_handler);
  return feedback;
}
| 2079 |
// Searches the polymorphic {feedback} array for {receiver_map}. Entries are
// (weak-cell-of-map, handler) pairs. The first {unroll_count} entries are
// checked with unrolled code; the rest in an emitted loop. On a hit binds
// {var_handler} and jumps to {if_handler}; if the array is exhausted, jumps
// to {if_miss}.
void CodeStubAssembler::HandlePolymorphicCase(
    const LoadICParameters* p, compiler::Node* receiver_map,
    compiler::Node* feedback, Label* if_handler, Variable* var_handler,
    Label* if_miss, int unroll_count) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());

  // Iterate {feedback} array.
  const int kEntrySize = 2;

  for (int i = 0; i < unroll_count; i++) {
    Label next_entry(this);
    Node* cached_map = LoadWeakCellValue(
        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize)));
    GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);

    // Found, now call handler.
    Node* handler =
        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize + 1));
    var_handler->Bind(handler);
    Goto(if_handler);

    Bind(&next_entry);
  }
  Node* length = SmiToWord32(LoadFixedArrayBaseLength(feedback));

  // Loop from {unroll_count}*kEntrySize to {length}.
  Variable var_index(this, MachineRepresentation::kWord32);
  Label loop(this, &var_index);
  var_index.Bind(Int32Constant(unroll_count * kEntrySize));
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    GotoIf(Int32GreaterThanOrEqual(index, length), if_miss);

    Node* cached_map =
        LoadWeakCellValue(LoadFixedArrayElement(feedback, index));

    Label next_entry(this);
    GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);

    // Found, now call handler.
    Node* handler = LoadFixedArrayElement(feedback, index, kPointerSize);
    var_handler->Bind(handler);
    Goto(if_handler);

    Bind(&next_entry);
    var_index.Bind(Int32Add(index, Int32Constant(kEntrySize)));
    Goto(&loop);
  }
}
| 2131 |
| 2132 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name, |
| 2133 Code::Flags flags, |
| 2134 compiler::Node* map) { |
| 2135 // See v8::internal::StubCache::PrimaryOffset(). |
| 2136 STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift); |
| 2137 // Compute the hash of the name (use entire hash field). |
| 2138 Node* hash_field = LoadNameHashField(name); |
| 2139 Assert(WordEqual( |
| 2140 Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)), |
| 2141 Int32Constant(0))); |
| 2142 |
| 2143 // Using only the low bits in 64-bit mode is unlikely to increase the |
| 2144 // risk of collision even if the heap is spread over an area larger than |
| 2145 // 4Gb (and not at all if it isn't). |
| 2146 Node* hash = Int32Add(hash_field, map); |
| 2147 // We always set the in_loop bit to zero when generating the lookup code |
| 2148 // so do it here too so the hash codes match. |
| 2149 uint32_t iflags = |
| 2150 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); |
| 2151 // Base the offset on a simple combination of name, flags, and map. |
| 2152 hash = Word32Xor(hash, Int32Constant(iflags)); |
| 2153 uint32_t mask = (StubCache::kPrimaryTableSize - 1) |
| 2154 << StubCache::kCacheIndexShift; |
| 2155 return Word32And(hash, Int32Constant(mask)); |
| 2156 } |
| 2157 |
| 2158 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset( |
| 2159 compiler::Node* name, Code::Flags flags, compiler::Node* seed) { |
| 2160 // See v8::internal::StubCache::SecondaryOffset(). |
| 2161 |
| 2162 // Use the seed from the primary cache in the secondary cache. |
| 2163 Node* hash = Int32Sub(seed, name); |
| 2164 // We always set the in_loop bit to zero when generating the lookup code |
| 2165 // so do it here too so the hash codes match. |
| 2166 uint32_t iflags = |
| 2167 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); |
| 2168 hash = Int32Add(hash, Int32Constant(iflags)); |
| 2169 int32_t mask = (StubCache::kSecondaryTableSize - 1) |
| 2170 << StubCache::kCacheIndexShift; |
| 2171 return Word32And(hash, Int32Constant(mask)); |
| 2172 } |
| 2173 |
// Local mirror of StubCache::Table; values are converted back via static_cast
// in TryProbeStubCacheTable(), so the two enums must stay in sync.
enum CodeStubAssembler::StubCacheTable : int {
  kPrimary = static_cast<int>(StubCache::kPrimary),
  kSecondary = static_cast<int>(StubCache::kSecondary)
};
| 2178 |
| 2179 void CodeStubAssembler::TryProbeStubCacheTable( |
| 2180 StubCache* stub_cache, StubCacheTable table_id, |
| 2181 compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags, |
| 2182 compiler::Node* map, Label* if_handler, Variable* var_handler, |
| 2183 Label* if_miss) { |
| 2184 StubCache::Table table = static_cast<StubCache::Table>(table_id); |
| 2185 #ifdef DEBUG |
| 2186 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
| 2187 Goto(if_miss); |
| 2188 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
| 2189 Goto(if_miss); |
| 2190 } |
| 2191 #endif |
| 2192 // The {table_offset} holds the entry offset times four (due to masking |
| 2193 // and shifting optimizations). |
| 2194 const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift; |
| 2195 entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier)); |
| 2196 |
| 2197 // Check that the key in the entry matches the name. |
| 2198 Node* key_base = |
| 2199 ExternalConstant(ExternalReference(stub_cache->key_reference(table))); |
| 2200 Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset); |
| 2201 GotoIf(WordNotEqual(name, entry_key), if_miss); |
| 2202 |
| 2203 // Get the map entry from the cache. |
| 2204 DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() - |
| 2205 stub_cache->key_reference(table).address()); |
| 2206 Node* entry_map = |
| 2207 Load(MachineType::Pointer(), key_base, |
| 2208 Int32Add(entry_offset, Int32Constant(kPointerSize * 2))); |
| 2209 GotoIf(WordNotEqual(map, entry_map), if_miss); |
| 2210 |
| 2211 // Check that the flags match what we're looking for. |
| 2212 DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() - |
| 2213 stub_cache->key_reference(table).address()); |
| 2214 Node* code = Load(MachineType::Pointer(), key_base, |
| 2215 Int32Add(entry_offset, Int32Constant(kPointerSize))); |
| 2216 |
| 2217 Node* code_flags = |
| 2218 LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32()); |
| 2219 GotoIf(Word32NotEqual(Int32Constant(flags), |
| 2220 Word32And(code_flags, |
| 2221 Int32Constant(~Code::kFlagsNotUsedInLookup))), |
| 2222 if_miss); |
| 2223 |
| 2224 // We found the handler. |
| 2225 var_handler->Bind(code); |
| 2226 Goto(if_handler); |
| 2227 } |
| 2228 |
// Probes the megamorphic stub cache for a handler matching {receiver}'s map,
// {name} and {flags}: primary table first, then secondary. Binds
// {var_handler} and jumps to {if_handler} on a hit; otherwise bumps the miss
// counter and jumps to {if_miss}. Smi receivers always miss here.
void CodeStubAssembler::TryProbeStubCache(
    StubCache* stub_cache, Code::Flags flags, compiler::Node* receiver,
    compiler::Node* name, Label* if_handler, Variable* var_handler,
    Label* if_miss) {
  Label try_secondary(this), miss(this);

  Counters* counters = isolate()->counters();
  IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the {receiver} isn't a smi.
  GotoIf(WordIsSmi(receiver), if_miss);

  Node* receiver_map = LoadMap(receiver);

  // Probe the primary table.
  Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map);
  TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags,
                         receiver_map, if_handler, var_handler, &try_secondary);

  Bind(&try_secondary);
  {
    // Probe the secondary table, seeded by the primary offset.
    Node* secondary_offset =
        StubCacheSecondaryOffset(name, flags, primary_offset);
    TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
                           flags, receiver_map, if_handler, var_handler, &miss);
  }

  Bind(&miss);
  {
    IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
    Goto(if_miss);
  }
}
| 2263 |
// Dispatches a LoadIC using the feedback vector in {p}: tries the
// monomorphic case, then the polymorphic feedback array, then the
// megamorphic stub cache. On success tail-calls the found handler stub;
// on any failure jumps to {if_miss} (typically the runtime miss path).
void CodeStubAssembler::LoadIC(const LoadICParameters* p, Label* if_miss) {
  Variable var_handler(this, MachineRepresentation::kTagged);
  // TODO(ishell): defer blocks when it works.
  Label if_handler(this, &var_handler), try_polymorphic(this),
      try_megamorphic(this /*, Label::kDeferred*/);

  Node* receiver_map = LoadReceiverMap(p->receiver);

  // Check monomorphic case.
  Node* feedback = TryMonomorphicCase(p, receiver_map, &if_handler,
                                      &var_handler, &try_polymorphic);
  Bind(&if_handler);
  {
    // Tail-call the handler found by whichever probe succeeded.
    LoadWithVectorDescriptor descriptor(isolate());
    TailCallStub(descriptor, var_handler.value(), p->context, p->receiver,
                 p->name, p->slot, p->vector);
  }

  Bind(&try_polymorphic);
  {
    // Check polymorphic case: polymorphic feedback is a FixedArray of
    // (weak-cell, handler) pairs.
    GotoUnless(
        WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
        &try_megamorphic);
    HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
                          if_miss, 2);
  }

  Bind(&try_megamorphic);
  {
    // Check megamorphic case: the megamorphic sentinel symbol means "consult
    // the stub cache".
    GotoUnless(
        WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
        if_miss);

    Code::Flags code_flags =
        Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));

    TryProbeStubCache(isolate()->stub_cache(), code_flags, p->receiver, p->name,
                      &if_handler, &var_handler, if_miss);
  }
}
| 2306 |
| 1986 } // namespace internal | 2307 } // namespace internal |
| 1987 } // namespace v8 | 2308 } // namespace v8 |
| OLD | NEW |