OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
| 7 #include "src/frames-inl.h" |
| 8 #include "src/frames.h" |
| 9 #include "src/ic/stub-cache.h" |
7 | 10 |
8 namespace v8 { | 11 namespace v8 { |
9 namespace internal { | 12 namespace internal { |
10 | 13 |
11 using compiler::Node; | 14 using compiler::Node; |
12 | 15 |
13 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, | 16 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone, |
14 const CallInterfaceDescriptor& descriptor, | 17 const CallInterfaceDescriptor& descriptor, |
15 Code::Flags flags, const char* name, | 18 Code::Flags flags, const char* name, |
16 size_t result_size) | 19 size_t result_size) |
(...skipping 446 matching lines...)
463 } | 466 } |
464 | 467 |
465 Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) { | 468 Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) { |
466 return BitcastWordToTagged(IntPtrAdd(previous, offset)); | 469 return BitcastWordToTagged(IntPtrAdd(previous, offset)); |
467 } | 470 } |
468 | 471 |
469 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { | 472 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { |
470 return InnerAllocate(previous, IntPtrConstant(offset)); | 473 return InnerAllocate(previous, IntPtrConstant(offset)); |
471 } | 474 } |
472 | 475 |
| 476 compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) { |
| 477 Node* frame_pointer = LoadFramePointer(); |
| 478 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
| 479 } |
| 480 |
| 481 compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset, |
| 482 MachineType rep) { |
| 483 Node* frame_pointer = LoadParentFramePointer(); |
| 484 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
| 485 } |
| 486 |
473 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, | 487 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, |
474 MachineType rep) { | 488 MachineType rep) { |
475 return Load(rep, buffer, IntPtrConstant(offset)); | 489 return Load(rep, buffer, IntPtrConstant(offset)); |
476 } | 490 } |
477 | 491 |
478 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, | 492 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, |
479 MachineType rep) { | 493 MachineType rep) { |
480 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); | 494 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); |
481 } | 495 } |
482 | 496 |
(...skipping 66 matching lines...)
549 } | 563 } |
550 | 564 |
551 Node* CodeStubAssembler::LoadStringLength(Node* object) { | 565 Node* CodeStubAssembler::LoadStringLength(Node* object) { |
552 return LoadObjectField(object, String::kLengthOffset); | 566 return LoadObjectField(object, String::kLengthOffset); |
553 } | 567 } |
554 | 568 |
555 Node* CodeStubAssembler::LoadJSValueValue(Node* object) { | 569 Node* CodeStubAssembler::LoadJSValueValue(Node* object) { |
556 return LoadObjectField(object, JSValue::kValueOffset); | 570 return LoadObjectField(object, JSValue::kValueOffset); |
557 } | 571 } |
558 | 572 |
| 573 Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell) { |
| 574 return LoadObjectField(weak_cell, WeakCell::kValueOffset); |
| 575 } |
| 576 |
559 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { | 577 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { |
560 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); | 578 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); |
561 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); | 579 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); |
562 Node* total_size = IntPtrAdd(data_size, header_size); | 580 Node* total_size = IntPtrAdd(data_size, header_size); |
563 | 581 |
564 Node* result = Allocate(total_size, kNone); | 582 Node* result = Allocate(total_size, kNone); |
565 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); | 583 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); |
566 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, | 584 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, |
567 SmiTag(length)); | 585 SmiTag(length)); |
568 | 586 |
(...skipping 896 matching lines...)
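
For reference, the size computation in AllocateUninitializedFixedArray above (its tail falls inside the elision) is the standard FixedArray sizing: a fixed header plus one pointer-sized slot per element, with WordShl standing in for a multiply by kPointerSize. A minimal constexpr sketch, assuming a 64-bit build and a two-word (map + length) header; both constants are assumptions, not taken from this CL:

#include <cstddef>

// FixedArray sizing as computed above: header + length * pointer size.
// The shift by kPointerSizeLog2 is the strength-reduced multiply.
constexpr size_t kPointerSize = 8;                          // assumption: 64-bit build
constexpr size_t kPointerSizeLog2 = 3;
constexpr size_t kFixedArrayHeaderSize = 2 * kPointerSize;  // assumption: map + length words

constexpr size_t FixedArraySize(size_t length) {
  return kFixedArrayHeaderSize + (length << kPointerSizeLog2);
}

static_assert(FixedArraySize(4) == 48, "16-byte header + 4 * 8-byte slots");
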
1465 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique); | 1483 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique); |
1466 // Key is an index. Check if it is small enough to be encoded in the | 1484 // Key is an index. Check if it is small enough to be encoded in the |
1467 // hash_field. Handle too big array index in runtime. | 1485 // hash_field. Handle too big array index in runtime. |
1468 bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask)); | 1486 bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask)); |
1469 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout); | 1487 GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout); |
1470 var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash)); | 1488 var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash)); |
1471 Goto(if_keyisindex); | 1489 Goto(if_keyisindex); |
1472 } | 1490 } |
1473 | 1491 |
1474 template <typename Dictionary> | 1492 template <typename Dictionary> |
1475 void CodeStubAssembler::NameDictionaryLookup( | 1493 void CodeStubAssembler::NameDictionaryLookup(Node* dictionary, |
1476 Node* dictionary, Node* unique_name, Label* if_found_, Variable* var_entry, | 1494 Node* unique_name, Label* if_found, |
1477 Label* if_not_found, int inlined_probes) { | 1495 Variable* var_entry, |
| 1496 Label* if_not_found, |
| 1497 int inlined_probes) { |
1478 DCHECK_EQ(MachineRepresentation::kWord32, var_entry->rep()); | 1498 DCHECK_EQ(MachineRepresentation::kWord32, var_entry->rep()); |
1479 | 1499 |
1480 // TODO(ishell): Remove this trampoline block once crbug/615621 is fixed. | |
1481 // This trampoline block is currently necessary here to generate a correct | |
1482 // phi for |var_entry|. | |
1483 Label if_found(this, var_entry); | |
1484 | |
1485 const int kElementsStartOffset = | 1500 const int kElementsStartOffset = |
1486 Dictionary::kElementsStartIndex * kPointerSize; | 1501 Dictionary::kElementsStartIndex * kPointerSize; |
1487 | 1502 |
1488 Node* capacity = SmiToWord32(LoadFixedArrayElement( | 1503 Node* capacity = SmiToWord32(LoadFixedArrayElement( |
1489 dictionary, Int32Constant(Dictionary::kCapacityIndex))); | 1504 dictionary, Int32Constant(Dictionary::kCapacityIndex))); |
1490 Node* mask = Int32Sub(capacity, Int32Constant(1)); | 1505 Node* mask = Int32Sub(capacity, Int32Constant(1)); |
1491 Node* hash = LoadNameHash(unique_name); | 1506 Node* hash = LoadNameHash(unique_name); |
1492 | 1507 |
1493 // See Dictionary::FirstProbe(). | 1508 // See Dictionary::FirstProbe(). |
1494 Node* count = Int32Constant(0); | 1509 Node* count = Int32Constant(0); |
1495 Node* entry = Word32And(hash, mask); | 1510 Node* entry = Word32And(hash, mask); |
1496 | 1511 |
1497 for (int i = 0; i < inlined_probes; i++) { | 1512 for (int i = 0; i < inlined_probes; i++) { |
1498 // See Dictionary::EntryToIndex() | 1513 // See Dictionary::EntryToIndex() |
1499 Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize)); | 1514 Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize)); |
1500 Node* current = | 1515 Node* current = |
1501 LoadFixedArrayElement(dictionary, index, kElementsStartOffset); | 1516 LoadFixedArrayElement(dictionary, index, kElementsStartOffset); |
1502 var_entry->Bind(entry); | 1517 var_entry->Bind(entry); |
1503 GotoIf(WordEqual(current, unique_name), &if_found); | 1518 GotoIf(WordEqual(current, unique_name), if_found); |
1504 | 1519 |
1505 // See Dictionary::NextProbe(). | 1520 // See Dictionary::NextProbe(). |
1506 count = Int32Constant(i + 1); | 1521 count = Int32Constant(i + 1); |
1507 entry = Word32And(Int32Add(entry, count), mask); | 1522 entry = Word32And(Int32Add(entry, count), mask); |
1508 } | 1523 } |
1509 | 1524 |
1510 Node* undefined = UndefinedConstant(); | 1525 Node* undefined = UndefinedConstant(); |
1511 | 1526 |
1512 Variable var_count(this, MachineRepresentation::kWord32); | 1527 Variable var_count(this, MachineRepresentation::kWord32); |
1513 Variable* loop_vars[] = {&var_count, var_entry}; | 1528 Variable* loop_vars[] = {&var_count, var_entry}; |
1514 Label loop(this, 2, loop_vars); | 1529 Label loop(this, 2, loop_vars); |
1515 var_count.Bind(count); | 1530 var_count.Bind(count); |
1516 var_entry->Bind(entry); | 1531 var_entry->Bind(entry); |
1517 Goto(&loop); | 1532 Goto(&loop); |
1518 Bind(&loop); | 1533 Bind(&loop); |
1519 { | 1534 { |
1520 Node* count = var_count.value(); | 1535 Node* count = var_count.value(); |
1521 Node* entry = var_entry->value(); | 1536 Node* entry = var_entry->value(); |
1522 | 1537 |
1523 // See Dictionary::EntryToIndex() | 1538 // See Dictionary::EntryToIndex() |
1524 Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize)); | 1539 Node* index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize)); |
1525 Node* current = | 1540 Node* current = |
1526 LoadFixedArrayElement(dictionary, index, kElementsStartOffset); | 1541 LoadFixedArrayElement(dictionary, index, kElementsStartOffset); |
1527 GotoIf(WordEqual(current, undefined), if_not_found); | 1542 GotoIf(WordEqual(current, undefined), if_not_found); |
1528 GotoIf(WordEqual(current, unique_name), &if_found); | 1543 GotoIf(WordEqual(current, unique_name), if_found); |
1529 | 1544 |
1530 // See Dictionary::NextProbe(). | 1545 // See Dictionary::NextProbe(). |
1531 count = Int32Add(count, Int32Constant(1)); | 1546 count = Int32Add(count, Int32Constant(1)); |
1532 entry = Word32And(Int32Add(entry, count), mask); | 1547 entry = Word32And(Int32Add(entry, count), mask); |
1533 | 1548 |
1534 var_count.Bind(count); | 1549 var_count.Bind(count); |
1535 var_entry->Bind(entry); | 1550 var_entry->Bind(entry); |
1536 Goto(&loop); | 1551 Goto(&loop); |
1537 } | 1552 } |
1538 Bind(&if_found); | |
1539 Goto(if_found_); | |
1540 } | 1553 } |
1541 | 1554 |
1542 // Instantiate template methods to workaround GCC compilation issue. | 1555 // Instantiate template methods to workaround GCC compilation issue. |
1543 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>( | 1556 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>( |
1544 Node*, Node*, Label*, Variable*, Label*, int); | 1557 Node*, Node*, Label*, Variable*, Label*, int); |
1545 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>( | 1558 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>( |
1546 Node*, Node*, Label*, Variable*, Label*, int); | 1559 Node*, Node*, Label*, Variable*, Label*, int); |
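
For reference, the probe sequence in NameDictionaryLookup above mirrors Dictionary::FirstProbe() and Dictionary::NextProbe(): start at hash & mask, then add an incrementing count on each step. For a power-of-two capacity, this triangular-number sequence visits every slot exactly once before repeating. A standalone sketch of just the probe order (plain C++ with ordinary integers, not the CodeStubAssembler API):

#include <cstdint>
#include <cstdio>

// entry_0 = hash & mask; entry_{i+1} = (entry_i + i + 1) & mask.
// With a power-of-two capacity, the first `capacity` probes hit every
// slot exactly once, so the search always terminates.
uint32_t Probe(uint32_t hash, uint32_t capacity, int probe_count) {
  uint32_t mask = capacity - 1;      // capacity must be a power of two
  uint32_t entry = hash & mask;      // Dictionary::FirstProbe()
  for (int count = 1; count <= probe_count; count++) {
    entry = (entry + count) & mask;  // Dictionary::NextProbe()
  }
  return entry;
}

int main() {
  for (int i = 0; i < 8; i++) printf("%u ", Probe(0x1234, 8, i));
  printf("\n");  // each of the 8 slots appears exactly once
}

The inlined_probes parameter above simply peels the first few steps of this sequence into straight-line code before falling back to the loop.
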
1547 | 1560 |
1548 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { | 1561 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { |
1549 // See v8::internal::ComputeIntegerHash() | 1562 // See v8::internal::ComputeIntegerHash() |
(...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1976 ? WordShl(index_node, IntPtrConstant(element_size_shift)) | 1989 ? WordShl(index_node, IntPtrConstant(element_size_shift)) |
1977 : WordShr(index_node, IntPtrConstant(-element_size_shift)); | 1990 : WordShr(index_node, IntPtrConstant(-element_size_shift)); |
1978 } | 1991 } |
1979 return IntPtrAdd( | 1992 return IntPtrAdd( |
1980 IntPtrConstant(base_size), | 1993 IntPtrConstant(base_size), |
1981 (element_size_shift >= 0) | 1994 (element_size_shift >= 0) |
1982 ? WordShl(index_node, IntPtrConstant(element_size_shift)) | 1995 ? WordShl(index_node, IntPtrConstant(element_size_shift)) |
1983 : WordShr(index_node, IntPtrConstant(-element_size_shift))); | 1996 : WordShr(index_node, IntPtrConstant(-element_size_shift))); |
1984 } | 1997 } |
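
The tail of ElementOffsetFromIndex above encodes a signed shift amount: a non-negative element_size_shift scales the index up by the element size, while a negative one (which arises when the index is Smi-tagged and the tag shift exceeds the element size shift) scales the pre-tagged value back down. A hedged arithmetic model, assuming non-negative indices:

#include <cstdint>

// element_size_shift trick from above: shift left to scale an untagged
// index, shift right when the index is already over-scaled by its tag.
intptr_t ElementOffset(intptr_t index, int element_size_shift,
                       intptr_t base_size) {
  intptr_t scaled = (element_size_shift >= 0)
                        ? index << element_size_shift
                        : index >> -element_size_shift;
  return base_size + scaled;
}
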
1985 | 1998 |
| 1999 compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() { |
| 2000 Node* function = |
| 2001 LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset); |
| 2002 Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset); |
| 2003 return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset); |
| 2004 } |
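
LoadTypeFeedbackVectorForStub chains three dependent loads: a stub has no feedback vector of its own, so it reads the calling JSFunction off the parent frame, then that function's literals array, then the feedback vector stored inside it. A minimal pointer-chasing sketch; the struct layouts are stand-ins, not the real heap layout:

// Hypothetical model of the three loads above.
struct FeedbackVector;
struct LiteralsArray { FeedbackVector* feedback_vector; };  // kFeedbackVectorOffset
struct JSFunction { LiteralsArray* literals; };             // kLiteralsOffset
struct Frame { JSFunction* function; };                     // kFunctionOffset

FeedbackVector* LoadTypeFeedbackVectorForStub(const Frame* parent_frame) {
  JSFunction* function = parent_frame->function;  // LoadFromParentFrame
  return function->literals->feedback_vector;     // two LoadObjectFields
}
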
| 2005 |
| 2006 compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) { |
| 2007 Variable var_receiver_map(this, MachineRepresentation::kTagged); |
| 2008 // TODO(ishell): defer blocks when it works. |
| 2009 Label load_smi_map(this /*, Label::kDeferred*/), load_receiver_map(this), |
| 2010 if_result(this); |
| 2011 |
| 2012 Branch(WordIsSmi(receiver), &load_smi_map, &load_receiver_map); |
| 2013 Bind(&load_smi_map); |
| 2014 { |
| 2015 var_receiver_map.Bind(LoadRoot(Heap::kHeapNumberMapRootIndex)); |
| 2016 Goto(&if_result); |
| 2017 } |
| 2018 Bind(&load_receiver_map); |
| 2019 { |
| 2020 var_receiver_map.Bind(LoadMap(receiver)); |
| 2021 Goto(&if_result); |
| 2022 } |
| 2023 Bind(&if_result); |
| 2024 return var_receiver_map.value(); |
| 2025 } |
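
LoadReceiverMap papers over the fact that Smis carry no map pointer: a Smi receiver is reported as having the HeapNumber map, so downstream map-based dispatch treats Smi and HeapNumber receivers uniformly. A minimal sketch with stand-in types:

struct Map {};
static Map heap_number_map;  // stand-in for Heap::kHeapNumberMapRootIndex

struct HeapObject { Map* map; };

Map* LoadReceiverMap(HeapObject* receiver, bool is_smi) {
  // Smis have no map word; pretending they are HeapNumbers lets numeric
  // handlers cover Smi and HeapNumber receivers with one map check.
  if (is_smi) return &heap_number_map;  // the load_smi_map branch
  return receiver->map;                 // the load_receiver_map branch
}
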
| 2026 |
| 2027 compiler::Node* CodeStubAssembler::TryMonomorphicCase( |
| 2028 const LoadICParameters* p, compiler::Node* receiver_map, Label* if_handler, |
| 2029 Variable* var_handler, Label* if_miss) { |
| 2030 DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep()); |
| 2031 |
| 2032 // TODO(ishell): add helper class that hides offset computations for a series |
| 2033 // of loads. |
| 2034 int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag; |
| 2035 Node* offset = ElementOffsetFromIndex(p->slot, FAST_HOLEY_ELEMENTS, |
| 2036 SMI_PARAMETERS, header_size); |
| 2037 Node* feedback = Load(MachineType::AnyTagged(), p->vector, offset); |
| 2038 |
| 2039 // Try to quickly handle the monomorphic case without knowing for sure |
| 2040 // if we have a weak cell in feedback. We do know it's safe to look |
| 2041 // at WeakCell::kValueOffset. |
| 2042 GotoUnless(WordEqual(receiver_map, LoadWeakCellValue(feedback)), if_miss); |
| 2043 |
| 2044 Node* handler = Load(MachineType::AnyTagged(), p->vector, |
| 2045 IntPtrAdd(offset, IntPtrConstant(kPointerSize))); |
| 2046 |
| 2047 var_handler->Bind(handler); |
| 2048 Goto(if_handler); |
| 2049 return feedback; |
| 2050 } |
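
TryMonomorphicCase depends on the feedback slot layout: the slot holds a WeakCell whose value is the expected receiver map, and the very next slot holds the handler to tail-call. As the comment notes, reading WeakCell::kValueOffset is safe even before proving the slot holds a WeakCell, because no other value at that offset can equal a live map. A simplified model with hypothetical types:

// Hypothetical model of one monomorphic feedback slot.
struct WeakCell { const void* value; };  // cleared cells never equal a map
struct FeedbackSlot {
  WeakCell cached_map;  // slot:     WeakCell holding the expected map
  const void* handler;  // slot + 1: handler code object
};

const void* TryMonomorphic(const FeedbackSlot& slot,
                           const void* receiver_map) {
  if (slot.cached_map.value != receiver_map) return nullptr;  // if_miss
  return slot.handler;                                        // if_handler
}
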
| 2051 |
| 2052 void CodeStubAssembler::HandlePolymorphicCase( |
| 2053 const LoadICParameters* p, compiler::Node* receiver_map, |
| 2054 compiler::Node* feedback, Label* if_handler, Variable* var_handler, |
| 2055 Label* if_miss, int unroll_count) { |
| 2056 DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep()); |
| 2057 |
| 2058 // Iterate {feedback} array. |
| 2059 const int kEntrySize = 2; |
| 2060 |
| 2061 for (int i = 0; i < unroll_count; i++) { |
| 2062 Label next_entry(this); |
| 2063 Node* cached_map = LoadWeakCellValue( |
| 2064 LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize))); |
| 2065 GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry); |
| 2066 |
| 2067 // Found, now call handler. |
| 2068 Node* handler = |
| 2069 LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize + 1)); |
| 2070 var_handler->Bind(handler); |
| 2071 Goto(if_handler); |
| 2072 |
| 2073 Bind(&next_entry); |
| 2074 } |
| 2075 Node* length = SmiToWord32(LoadFixedArrayBaseLength(feedback)); |
| 2076 |
| 2077 // Loop from {unroll_count}*kEntrySize to {length}. |
| 2078 Variable var_index(this, MachineRepresentation::kWord32); |
| 2079 Label loop(this, &var_index); |
| 2080 var_index.Bind(Int32Constant(unroll_count * kEntrySize)); |
| 2081 Goto(&loop); |
| 2082 Bind(&loop); |
| 2083 { |
| 2084 Node* index = var_index.value(); |
| 2085 GotoIf(Int32GreaterThanOrEqual(index, length), if_miss); |
| 2086 |
| 2087 Node* cached_map = |
| 2088 LoadWeakCellValue(LoadFixedArrayElement(feedback, index)); |
| 2089 |
| 2090 Label next_entry(this); |
| 2091 GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry); |
| 2092 |
| 2093 // Found, now call handler. |
| 2094 Node* handler = LoadFixedArrayElement(feedback, index, kPointerSize); |
| 2095 var_handler->Bind(handler); |
| 2096 Goto(if_handler); |
| 2097 |
| 2098 Bind(&next_entry); |
| 2099 var_index.Bind(Int32Add(index, Int32Constant(kEntrySize))); |
| 2100 Goto(&loop); |
| 2101 } |
| 2102 } |
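
HandlePolymorphicCase searches a FixedArray of (weak map, handler) pairs, peeling the first unroll_count comparisons into straight-line code and handling the remainder in a loop. Collapsed into one plain loop, the search amounts to:

#include <cstddef>

// Model of the polymorphic search above; the kEntrySize = 2 layout means
// the map's WeakCell sits at index 2*i and the handler at 2*i + 1.
struct PolymorphicEntry {
  const void* map;      // value of the WeakCell at index 2*i
  const void* handler;  // handler at index 2*i + 1
};

const void* FindHandler(const PolymorphicEntry* entries, size_t count,
                        const void* receiver_map) {
  for (size_t i = 0; i < count; i++) {
    if (entries[i].map == receiver_map) return entries[i].handler;  // if_handler
  }
  return nullptr;  // corresponds to the jump to if_miss
}
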
| 2103 |
| 2104 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name, |
| 2105 Code::Flags flags, |
| 2106 compiler::Node* map) { |
| 2107 // See v8::internal::StubCache::PrimaryOffset(). |
| 2108 STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift); |
| 2109 // Compute the hash of the name (use entire hash field). |
| 2110 Node* hash_field = LoadNameHashField(name); |
| 2111 Assert(WordEqual( |
| 2112 Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)), |
| 2113 Int32Constant(0))); |
| 2114 |
| 2115 // Using only the low bits in 64-bit mode is unlikely to increase the |
| 2116 // risk of collision even if the heap is spread over an area larger than |
| 2117 // 4Gb (and not at all if it isn't). |
| 2118 Node* hash = Int32Add(hash_field, map); |
| 2119 // We always set the in_loop bit to zero when generating the lookup code |
| 2120 // so do it here too so the hash codes match. |
| 2121 uint32_t iflags = |
| 2122 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); |
| 2123 // Base the offset on a simple combination of name, flags, and map. |
| 2124 hash = Word32Xor(hash, Int32Constant(iflags)); |
| 2125 uint32_t mask = (StubCache::kPrimaryTableSize - 1) |
| 2126 << StubCache::kCacheIndexShift; |
| 2127 return Word32And(hash, Int32Constant(mask)); |
| 2128 } |
| 2129 |
| 2130 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset( |
| 2131 compiler::Node* name, Code::Flags flags, compiler::Node* seed) { |
| 2132 // See v8::internal::StubCache::SecondaryOffset(). |
| 2133 |
| 2134 // Use the seed from the primary cache in the secondary cache. |
| 2135 Node* hash = Int32Sub(seed, name); |
| 2136 // We always set the in_loop bit to zero when generating the lookup code |
| 2137 // so do it here too so the hash codes match. |
| 2138 uint32_t iflags = |
| 2139 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup); |
| 2140 hash = Int32Add(hash, Int32Constant(iflags)); |
| 2141 int32_t mask = (StubCache::kSecondaryTableSize - 1) |
| 2142 << StubCache::kCacheIndexShift; |
| 2143 return Word32And(hash, Int32Constant(mask)); |
| 2144 } |
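
The two offsets mirror StubCache::PrimaryOffset() and StubCache::SecondaryOffset(): the primary hash mixes the name's hash field, the map, and the lookup-relevant code flags, while the secondary hash reseeds from the primary offset minus the name. A hedged integer model, with plain uint32_t values standing in for Nodes and all table parameters passed explicitly (the packaging is hypothetical):

#include <cstdint>

// flags_unused_mask plays the role of Code::kFlagsNotUsedInLookup; the
// final mask keeps the offset inside a power-of-two table, pre-shifted
// by the cache index shift.
uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t flags,
                       uint32_t map_bits, uint32_t table_size,
                       int index_shift, uint32_t flags_unused_mask) {
  uint32_t hash = name_hash_field + map_bits;  // combine name hash and map
  hash ^= flags & ~flags_unused_mask;          // fold in the code flags
  return hash & ((table_size - 1) << index_shift);
}

uint32_t SecondaryOffset(uint32_t name_bits, uint32_t flags,
                         uint32_t primary_offset, uint32_t table_size,
                         int index_shift, uint32_t flags_unused_mask) {
  uint32_t hash = primary_offset - name_bits;  // reuse the primary seed
  hash += flags & ~flags_unused_mask;
  return hash & ((table_size - 1) << index_shift);
}
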
| 2145 |
| 2146 enum CodeStubAssembler::StubCacheTable : int { |
| 2147 kPrimary = static_cast<int>(StubCache::kPrimary), |
| 2148 kSecondary = static_cast<int>(StubCache::kSecondary) |
| 2149 }; |
| 2150 |
| 2151 void CodeStubAssembler::TryProbeStubCacheTable( |
| 2152 StubCache* stub_cache, StubCacheTable table_id, |
| 2153 compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags, |
| 2154 compiler::Node* map, Label* if_handler, Variable* var_handler, |
| 2155 Label* if_miss) { |
| 2156 StubCache::Table table = static_cast<StubCache::Table>(table_id); |
| 2157 #ifdef DEBUG |
| 2158 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
| 2159 Goto(if_miss); |
| 2160 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
| 2161 Goto(if_miss); |
| 2162 } |
| 2163 #endif |
| 2164 // The {entry_offset} holds the entry offset times four (due to masking |
| 2165 // and shifting optimizations). |
| 2166 const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift; |
| 2167 entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier)); |
| 2168 |
| 2169 // Check that the key in the entry matches the name. |
| 2170 Node* key_base = |
| 2171 ExternalConstant(ExternalReference(stub_cache->key_reference(table))); |
| 2172 Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset); |
| 2173 GotoIf(WordNotEqual(name, entry_key), if_miss); |
| 2174 |
| 2175 // Get the map entry from the cache. |
| 2176 DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() - |
| 2177 stub_cache->key_reference(table).address()); |
| 2178 Node* entry_map = |
| 2179 Load(MachineType::Pointer(), key_base, |
| 2180 Int32Add(entry_offset, Int32Constant(kPointerSize * 2))); |
| 2181 GotoIf(WordNotEqual(map, entry_map), if_miss); |
| 2182 |
| 2183 // Check that the flags match what we're looking for. |
| 2184 DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() - |
| 2185 stub_cache->key_reference(table).address()); |
| 2186 Node* code = Load(MachineType::Pointer(), key_base, |
| 2187 Int32Add(entry_offset, Int32Constant(kPointerSize))); |
| 2188 |
| 2189 Node* code_flags = |
| 2190 LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32()); |
| 2191 GotoIf(Word32NotEqual(Int32Constant(flags), |
| 2192 Word32And(code_flags, |
| 2193 Int32Constant(~Code::kFlagsNotUsedInLookup))), |
| 2194 if_miss); |
| 2195 |
| 2196 // We found the handler. |
| 2197 var_handler->Bind(code); |
| 2198 Goto(if_handler); |
| 2199 } |
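
TryProbeStubCacheTable leans on the entry layout that the two DCHECKs pin down: the key at offset 0, the value (handler code) at +kPointerSize, and the map at +2*kPointerSize, with entry_offset pre-scaled by kMultiplier. A minimal model of one probe, leaving the final flags comparison as a comment:

#include <cstdint>

// Hypothetical model of one stub cache entry and a single table probe.
struct StubCacheEntry {
  const void* key;    // +0:              the name
  const void* value;  // +kPointerSize:   the handler code
  const void* map;    // +2*kPointerSize: the receiver map
};

const void* ProbeTable(const StubCacheEntry* table, uint32_t entry_index,
                       const void* name, const void* map) {
  const StubCacheEntry& e = table[entry_index];
  if (e.key != name || e.map != map) return nullptr;  // if_miss
  // The real code additionally checks e.value's flags against the
  // lookup flags before declaring a hit.
  return e.value;  // if_handler
}
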
| 2200 |
| 2201 void CodeStubAssembler::TryProbeStubCache( |
| 2202 StubCache* stub_cache, Code::Flags flags, compiler::Node* receiver, |
| 2203 compiler::Node* name, Label* if_handler, Variable* var_handler, |
| 2204 Label* if_miss) { |
| 2205 Label try_secondary(this); |
| 2206 |
| 2207 // Check that the {receiver} isn't a smi. |
| 2208 GotoIf(WordIsSmi(receiver), if_miss); |
| 2209 |
| 2210 Node* receiver_map = LoadMap(receiver); |
| 2211 |
| 2212 // Probe the primary table. |
| 2213 Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map); |
| 2214 TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags, |
| 2215 receiver_map, if_handler, var_handler, &try_secondary); |
| 2216 |
| 2217 Bind(&try_secondary); |
| 2218 { |
| 2219 // Probe the secondary table. |
| 2220 Node* secondary_offset = |
| 2221 StubCacheSecondaryOffset(name, flags, primary_offset); |
| 2222 TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name, |
| 2223 flags, receiver_map, if_handler, var_handler, |
| 2224 if_miss); |
| 2225 } |
| 2226 } |
| 2227 |
| 2228 void CodeStubAssembler::LoadIC(const LoadICParameters* p, Label* if_miss) { |
| 2229 Variable var_handler(this, MachineRepresentation::kTagged); |
| 2230 // TODO(ishell): defer blocks when it works. |
| 2231 Label if_handler(this, &var_handler), try_polymorphic(this), |
| 2232 try_megamorphic(this /*, Label::kDeferred*/); |
| 2233 |
| 2234 Node* receiver_map = LoadReceiverMap(p->receiver); |
| 2235 |
| 2236 // Check monomorphic case. |
| 2237 Node* feedback = TryMonomorphicCase(p, receiver_map, &if_handler, |
| 2238 &var_handler, &try_polymorphic); |
| 2239 Bind(&if_handler); |
| 2240 { |
| 2241 LoadWithVectorDescriptor descriptor(isolate()); |
| 2242 TailCallStub(descriptor, var_handler.value(), p->context, p->receiver, |
| 2243 p->name, p->slot, p->vector); |
| 2244 } |
| 2245 |
| 2246 Bind(&try_polymorphic); |
| 2247 { |
| 2248 // Check polymorphic case. |
| 2249 GotoUnless( |
| 2250 WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)), |
| 2251 &try_megamorphic); |
| 2252 HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler, |
| 2253 if_miss, 2); |
| 2254 } |
| 2255 |
| 2256 Bind(&try_megamorphic); |
| 2257 { |
| 2258 // Check megamorphic case. |
| 2259 GotoUnless( |
| 2260 WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)), |
| 2261 if_miss); |
| 2262 |
| 2263 Code::Flags code_flags = |
| 2264 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC)); |
| 2265 |
| 2266 TryProbeStubCache(isolate()->stub_cache(), code_flags, p->receiver, p->name, |
| 2267 &if_handler, &var_handler, if_miss); |
| 2268 } |
| 2269 } |
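
Taken together, LoadIC dispatches on the shape of the feedback: a WeakCell value matching the receiver map means monomorphic, a FixedArray means polymorphic, the megamorphic sentinel symbol routes to the stub cache, and anything else (including a monomorphic map mismatch, which falls through both later checks) ends at the miss label. A control-flow model with a hypothetical tag enum standing in for the map checks:

#include <cstdio>

// Net effect of the LoadIC dispatch above; the real code discriminates
// by inspecting the feedback object's map rather than a tag.
enum class Feedback { kWeakCell, kFixedArray, kMegamorphicSentinel, kOther };

const char* Dispatch(Feedback feedback, bool map_matches) {
  switch (feedback) {
    case Feedback::kWeakCell:             // monomorphic: one cached map
      return map_matches ? "tail-call cached handler" : "miss";
    case Feedback::kFixedArray:           // polymorphic: (map, handler) pairs
      return "search feedback array; tail-call on match, else miss";
    case Feedback::kMegamorphicSentinel:  // megamorphic
      return "probe primary/secondary stub cache";
    default:
      return "miss";                      // uninitialized or unexpected
  }
}

int main() { printf("%s\n", Dispatch(Feedback::kWeakCell, true)); }
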
| 2270 |
1986 } // namespace internal | 2271 } // namespace internal |
1987 } // namespace v8 | 2272 } // namespace v8 |