| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 #include "src/code-stub-assembler.h" | 4 #include "src/code-stub-assembler.h" |
| 5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
| 6 #include "src/frames-inl.h" | 6 #include "src/frames-inl.h" |
| 7 #include "src/frames.h" | 7 #include "src/frames.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 1318 matching lines...) | |
| 1329 if (Heap::RootIsImmortalImmovable(root_index)) { | 1329 if (Heap::RootIsImmortalImmovable(root_index)) { |
| 1330 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); | 1330 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); |
| 1331 } else { | 1331 } else { |
| 1332 return StoreObjectField(object, offset, LoadRoot(root_index)); | 1332 return StoreObjectField(object, offset, LoadRoot(root_index)); |
| 1333 } | 1333 } |
| 1334 } | 1334 } |
| 1335 | 1335 |
| 1336 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, | 1336 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, |
| 1337 Node* value, | 1337 Node* value, |
| 1338 WriteBarrierMode barrier_mode, | 1338 WriteBarrierMode barrier_mode, |
| 1339 int additional_offset, | |
| 1340 ParameterMode parameter_mode) { | 1339 ParameterMode parameter_mode) { |
| 1341 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || | 1340 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || |
| 1342 barrier_mode == UPDATE_WRITE_BARRIER); | 1341 barrier_mode == UPDATE_WRITE_BARRIER); |
| 1343 int header_size = | 1342 Node* offset = |
| 1344 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1343 ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, parameter_mode, |
| 1345 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1344 FixedArray::kHeaderSize - kHeapObjectTag); |
| 1346 parameter_mode, header_size); | |
| 1347 MachineRepresentation rep = MachineRepresentation::kTagged; | 1345 MachineRepresentation rep = MachineRepresentation::kTagged; |
| 1348 if (barrier_mode == SKIP_WRITE_BARRIER) { | 1346 if (barrier_mode == SKIP_WRITE_BARRIER) { |
| 1349 return StoreNoWriteBarrier(rep, object, offset, value); | 1347 return StoreNoWriteBarrier(rep, object, offset, value); |
| 1350 } else { | 1348 } else { |
| 1351 return Store(rep, object, offset, value); | 1349 return Store(rep, object, offset, value); |
| 1352 } | 1350 } |
| 1353 } | 1351 } |
| 1354 | 1352 |
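The hunk above drops the additional_offset parameter from StoreFixedArrayElement, so the element offset is now derived only from the index plus the FixedArray header. A minimal sketch of what that offset computation boils down to for an untagged (INTPTR_PARAMETERS) index, assuming the usual 64-bit layout constants (kPointerSize == 8, kHeapObjectTag == 1, header == map + length); these constants are assumptions, not taken from the CL:

    #include <cstdint>

    constexpr intptr_t kPointerSize = 8;
    constexpr intptr_t kHeapObjectTag = 1;
    constexpr intptr_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

    // Byte offset of element `index`, relative to the tagged object pointer,
    // hence the -kHeapObjectTag correction.
    intptr_t FixedArrayElementOffset(intptr_t index) {
      return index * kPointerSize + kFixedArrayHeaderSize - kHeapObjectTag;
    }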
| 1355 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( | 1353 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( |
| 1356 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { | 1354 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { |
| (...skipping 2674 matching lines...) | |
| 4031 Node* capacity = IntPtrRoundUpToPowerOfTwo32( | 4029 Node* capacity = IntPtrRoundUpToPowerOfTwo32( |
| 4032 WordShl(at_least_space_for, IntPtrConstant(1))); | 4030 WordShl(at_least_space_for, IntPtrConstant(1))); |
| 4033 return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity)); | 4031 return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity)); |
| 4034 } | 4032 } |
| 4035 | 4033 |
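The capacity computation ending above doubles the requested space, rounds up to a power of two, and clamps to HashTableBase::kMinCapacity. A scalar sketch of the same arithmetic (the function name and the min_capacity parameter are placeholders for this sketch only):

    #include <algorithm>
    #include <cstdint>

    uintptr_t ComputeHashTableCapacity(uintptr_t at_least_space_for,
                                       uintptr_t min_capacity) {
      uintptr_t doubled = at_least_space_for << 1;  // keep roughly 50% slack
      uintptr_t capacity = 1;
      while (capacity < doubled) capacity <<= 1;    // IntPtrRoundUpToPowerOfTwo32
      return std::max(capacity, min_capacity);      // IntPtrMax(..., kMinCapacity)
    }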
| 4036 Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) { | 4034 Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) { |
| 4037 return Select(IntPtrGreaterThanOrEqual(left, right), left, right, | 4035 return Select(IntPtrGreaterThanOrEqual(left, right), left, right, |
| 4038 MachineType::PointerRepresentation()); | 4036 MachineType::PointerRepresentation()); |
| 4039 } | 4037 } |
| 4040 | 4038 |
| 4041 template <class Dictionary> | |
| 4042 Node* CodeStubAssembler::GetNumberOfElements(Node* dictionary) { | |
| 4043 return LoadFixedArrayElement( | |
| 4044 dictionary, IntPtrConstant(Dictionary::kNumberOfElementsIndex), 0, | |
| 4045 INTPTR_PARAMETERS); | |
| 4046 } | |
| 4047 | |
| 4048 template <class Dictionary> | |
| 4049 void CodeStubAssembler::SetNumberOfElements(Node* dictionary, | |
| 4050 Node* num_elements_smi) { | |
| 4051 StoreFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex, | |
| 4052 num_elements_smi, SKIP_WRITE_BARRIER); | |
| 4053 } | |
| 4054 | |
| 4055 template <class Dictionary> | |
| 4056 Node* CodeStubAssembler::GetCapacity(Node* dictionary) { | |
| 4057 return LoadFixedArrayElement(dictionary, | |
| 4058 IntPtrConstant(Dictionary::kCapacityIndex), 0, | |
| 4059 INTPTR_PARAMETERS); | |
| 4060 } | |
| 4061 | |
| 4062 template <class Dictionary> | |
| 4063 Node* CodeStubAssembler::GetNextEnumerationIndex(Node* dictionary) { | |
| 4064 return LoadFixedArrayElement( | |
| 4065 dictionary, IntPtrConstant(Dictionary::kNextEnumerationIndexIndex), 0, | |
| 4066 INTPTR_PARAMETERS); | |
| 4067 } | |
| 4068 | |
| 4069 template <class Dictionary> | |
| 4070 void CodeStubAssembler::SetNextEnumerationIndex(Node* dictionary, | |
| 4071 Node* next_enum_index_smi) { | |
| 4072 StoreFixedArrayElement(dictionary, Dictionary::kNextEnumerationIndexIndex, | |
| 4073 next_enum_index_smi, SKIP_WRITE_BARRIER); | |
| 4074 } | |
| 4075 | |
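The removed accessors above (GetNumberOfElements, GetCapacity, and friends) were thin wrappers that read dictionary header fields as ordinary FixedArray slots; the lookup code below now issues those LoadFixedArrayElement calls directly. For orientation, a sketch of how an entry number maps to a slot index, roughly what EntryToIndex<Dictionary>() computes; the concrete kElementsStartIndex and kEntrySize values come from the Dictionary type and are assumptions here:

    #include <cstdint>

    // Header fields (number of elements, capacity, ...) occupy the first
    // elements_start_index slots; each entry then takes entry_size consecutive
    // slots, beginning with the key.
    intptr_t EntryToIndex(intptr_t entry, intptr_t elements_start_index,
                          intptr_t entry_size) {
      return elements_start_index + entry * entry_size;
    }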
| 4076 template <typename Dictionary> | 4039 template <typename Dictionary> |
| 4077 void CodeStubAssembler::NameDictionaryLookup(Node* dictionary, | 4040 void CodeStubAssembler::NameDictionaryLookup(Node* dictionary, |
| 4078 Node* unique_name, Label* if_found, | 4041 Node* unique_name, Label* if_found, |
| 4079 Variable* var_name_index, | 4042 Variable* var_name_index, |
| 4080 Label* if_not_found, | 4043 Label* if_not_found, |
| 4081 int inlined_probes, | 4044 int inlined_probes) { |
| 4082 LookupMode mode) { | |
| 4083 CSA_ASSERT(this, IsDictionary(dictionary)); | 4045 CSA_ASSERT(this, IsDictionary(dictionary)); |
| 4084 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); | 4046 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); |
| 4085 DCHECK_IMPLIES(mode == kFindInsertionIndex, | |
| 4086 inlined_probes == 0 && if_found == nullptr); | |
| 4087 Comment("NameDictionaryLookup"); | 4047 Comment("NameDictionaryLookup"); |
| 4088 | 4048 |
| 4089 Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary)); | 4049 Node* capacity = SmiUntag(LoadFixedArrayElement( |
| 4050 dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0, |
| 4051 INTPTR_PARAMETERS)); |
| 4090 Node* mask = IntPtrSub(capacity, IntPtrConstant(1)); | 4052 Node* mask = IntPtrSub(capacity, IntPtrConstant(1)); |
| 4091 Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name)); | 4053 Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name)); |
| 4092 | 4054 |
| 4093 // See Dictionary::FirstProbe(). | 4055 // See Dictionary::FirstProbe(). |
| 4094 Node* count = IntPtrConstant(0); | 4056 Node* count = IntPtrConstant(0); |
| 4095 Node* entry = WordAnd(hash, mask); | 4057 Node* entry = WordAnd(hash, mask); |
| 4096 | 4058 |
| 4097 for (int i = 0; i < inlined_probes; i++) { | 4059 for (int i = 0; i < inlined_probes; i++) { |
| 4098 Node* index = EntryToIndex<Dictionary>(entry); | 4060 Node* index = EntryToIndex<Dictionary>(entry); |
| 4099 var_name_index->Bind(index); | 4061 var_name_index->Bind(index); |
| 4100 | 4062 |
| 4101 Node* current = | 4063 Node* current = |
| 4102 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); | 4064 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); |
| 4103 GotoIf(WordEqual(current, unique_name), if_found); | 4065 GotoIf(WordEqual(current, unique_name), if_found); |
| 4104 | 4066 |
| 4105 // See Dictionary::NextProbe(). | 4067 // See Dictionary::NextProbe(). |
| 4106 count = IntPtrConstant(i + 1); | 4068 count = IntPtrConstant(i + 1); |
| 4107 entry = WordAnd(IntPtrAdd(entry, count), mask); | 4069 entry = WordAnd(IntPtrAdd(entry, count), mask); |
| 4108 } | 4070 } |
| 4109 if (mode == kFindInsertionIndex) { | |
| 4110 // Appease the variable merging algorithm for "Goto(&loop)" below. | |
| 4111 var_name_index->Bind(IntPtrConstant(0)); | |
| 4112 } | |
| 4113 | 4071 |
| 4114 Node* undefined = UndefinedConstant(); | 4072 Node* undefined = UndefinedConstant(); |
| 4115 Node* the_hole = mode == kFindExisting ? nullptr : TheHoleConstant(); | |
| 4116 | 4073 |
| 4117 Variable var_count(this, MachineType::PointerRepresentation()); | 4074 Variable var_count(this, MachineType::PointerRepresentation()); |
| 4118 Variable var_entry(this, MachineType::PointerRepresentation()); | 4075 Variable var_entry(this, MachineType::PointerRepresentation()); |
| 4119 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index}; | 4076 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index}; |
| 4120 Label loop(this, 3, loop_vars); | 4077 Label loop(this, 3, loop_vars); |
| 4121 var_count.Bind(count); | 4078 var_count.Bind(count); |
| 4122 var_entry.Bind(entry); | 4079 var_entry.Bind(entry); |
| 4123 Goto(&loop); | 4080 Goto(&loop); |
| 4124 Bind(&loop); | 4081 Bind(&loop); |
| 4125 { | 4082 { |
| 4126 Node* count = var_count.value(); | 4083 Node* count = var_count.value(); |
| 4127 Node* entry = var_entry.value(); | 4084 Node* entry = var_entry.value(); |
| 4128 | 4085 |
| 4129 Node* index = EntryToIndex<Dictionary>(entry); | 4086 Node* index = EntryToIndex<Dictionary>(entry); |
| 4130 var_name_index->Bind(index); | 4087 var_name_index->Bind(index); |
| 4131 | 4088 |
| 4132 Node* current = | 4089 Node* current = |
| 4133 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); | 4090 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); |
| 4134 GotoIf(WordEqual(current, undefined), if_not_found); | 4091 GotoIf(WordEqual(current, undefined), if_not_found); |
| 4135 if (mode == kFindExisting) { | 4092 GotoIf(WordEqual(current, unique_name), if_found); |
| 4136 GotoIf(WordEqual(current, unique_name), if_found); | |
| 4137 } else { | |
| 4138 DCHECK_EQ(kFindInsertionIndex, mode); | |
| 4139 GotoIf(WordEqual(current, the_hole), if_not_found); | |
| 4140 } | |
| 4141 | 4093 |
| 4142 // See Dictionary::NextProbe(). | 4094 // See Dictionary::NextProbe(). |
| 4143 count = IntPtrAdd(count, IntPtrConstant(1)); | 4095 count = IntPtrAdd(count, IntPtrConstant(1)); |
| 4144 entry = WordAnd(IntPtrAdd(entry, count), mask); | 4096 entry = WordAnd(IntPtrAdd(entry, count), mask); |
| 4145 | 4097 |
| 4146 var_count.Bind(count); | 4098 var_count.Bind(count); |
| 4147 var_entry.Bind(entry); | 4099 var_entry.Bind(entry); |
| 4148 Goto(&loop); | 4100 Goto(&loop); |
| 4149 } | 4101 } |
| 4150 } | 4102 } |
| 4151 | 4103 |
| 4152 // Instantiate template methods to work around a GCC compilation issue. | 4104 // Instantiate template methods to work around a GCC compilation issue. |
| 4153 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>( | 4105 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>( |
| 4154 Node*, Node*, Label*, Variable*, Label*, int, LookupMode); | 4106 Node*, Node*, Label*, Variable*, Label*, int); |
| 4155 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>( | 4107 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>( |
| 4156 Node*, Node*, Label*, Variable*, Label*, int, LookupMode); | 4108 Node*, Node*, Label*, Variable*, Label*, int); |
| 4157 | 4109 |
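NameDictionaryLookup above first unrolls inlined_probes probes and then falls back to the loop; both follow the same open-addressing sequence from Dictionary::FirstProbe()/NextProbe(), starting at hash & mask and advancing by an increasing step. A standalone sketch of that sequence over a bare key array, with nullptr standing in for the undefined sentinel (the helper name and types are illustrative only):

    #include <cstddef>
    #include <vector>

    // Returns the entry holding `key`, or -1 once an empty slot is reached.
    ptrdiff_t NameDictionaryProbe(const std::vector<const void*>& keys,
                                  size_t hash, const void* key) {
      size_t mask = keys.size() - 1;        // capacity is a power of two
      size_t count = 0;
      size_t entry = hash & mask;           // Dictionary::FirstProbe()
      for (;;) {
        const void* current = keys[entry];
        if (current == nullptr) return -1;  // undefined slot: not found
        if (current == key) return static_cast<ptrdiff_t>(entry);
        count += 1;                         // Dictionary::NextProbe()
        entry = (entry + count) & mask;
      }
    }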
| 4158 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { | 4110 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) { |
| 4159 // See v8::internal::ComputeIntegerHash() | 4111 // See v8::internal::ComputeIntegerHash() |
| 4160 Node* hash = key; | 4112 Node* hash = key; |
| 4161 hash = Word32Xor(hash, seed); | 4113 hash = Word32Xor(hash, seed); |
| 4162 hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)), | 4114 hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)), |
| 4163 Word32Shl(hash, Int32Constant(15))); | 4115 Word32Shl(hash, Int32Constant(15))); |
| 4164 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12))); | 4116 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12))); |
| 4165 hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2))); | 4117 hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2))); |
| 4166 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4))); | 4118 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4))); |
| 4167 hash = Int32Mul(hash, Int32Constant(2057)); | 4119 hash = Int32Mul(hash, Int32Constant(2057)); |
| 4168 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16))); | 4120 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16))); |
| 4169 return Word32And(hash, Int32Constant(0x3fffffff)); | 4121 return Word32And(hash, Int32Constant(0x3fffffff)); |
| 4170 } | 4122 } |
| 4171 | 4123 |
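ComputeIntegerHash above is the node-graph form of v8::internal::ComputeIntegerHash(), per the comment in the code. A plain C++ transcription of the exact operations emitted (note that xor with 0xffffffff is just bitwise not):

    #include <cstdint>

    uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {
      uint32_t hash = key;
      hash ^= seed;
      hash = ~hash + (hash << 15);  // Xor(hash, 0xffffffff) + Shl(hash, 15)
      hash ^= hash >> 12;
      hash += hash << 2;
      hash ^= hash >> 4;
      hash *= 2057;
      hash ^= hash >> 16;
      return hash & 0x3fffffff;     // mask to 30 bits
    }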
| 4172 template <typename Dictionary> | 4124 template <typename Dictionary> |
| 4173 void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary, | 4125 void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary, |
| 4174 Node* intptr_index, | 4126 Node* intptr_index, |
| 4175 Label* if_found, | 4127 Label* if_found, |
| 4176 Variable* var_entry, | 4128 Variable* var_entry, |
| 4177 Label* if_not_found) { | 4129 Label* if_not_found) { |
| 4178 CSA_ASSERT(this, IsDictionary(dictionary)); | 4130 CSA_ASSERT(this, IsDictionary(dictionary)); |
| 4179 DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep()); | 4131 DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep()); |
| 4180 Comment("NumberDictionaryLookup"); | 4132 Comment("NumberDictionaryLookup"); |
| 4181 | 4133 |
| 4182 Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary)); | 4134 Node* capacity = SmiUntag(LoadFixedArrayElement( |
| 4135 dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0, |
| 4136 INTPTR_PARAMETERS)); |
| 4183 Node* mask = IntPtrSub(capacity, IntPtrConstant(1)); | 4137 Node* mask = IntPtrSub(capacity, IntPtrConstant(1)); |
| 4184 | 4138 |
| 4185 Node* int32_seed; | 4139 Node* int32_seed; |
| 4186 if (Dictionary::ShapeT::UsesSeed) { | 4140 if (Dictionary::ShapeT::UsesSeed) { |
| 4187 int32_seed = HashSeed(); | 4141 int32_seed = HashSeed(); |
| 4188 } else { | 4142 } else { |
| 4189 int32_seed = Int32Constant(kZeroHashSeed); | 4143 int32_seed = Int32Constant(kZeroHashSeed); |
| 4190 } | 4144 } |
| 4191 Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed)); | 4145 Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed)); |
| 4192 Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index); | 4146 Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index); |
| (...skipping 43 matching lines...) | |
| 4236 // See Dictionary::NextProbe(). | 4190 // See Dictionary::NextProbe(). |
| 4237 count = IntPtrAdd(count, IntPtrConstant(1)); | 4191 count = IntPtrAdd(count, IntPtrConstant(1)); |
| 4238 entry = WordAnd(IntPtrAdd(entry, count), mask); | 4192 entry = WordAnd(IntPtrAdd(entry, count), mask); |
| 4239 | 4193 |
| 4240 var_count.Bind(count); | 4194 var_count.Bind(count); |
| 4241 var_entry->Bind(entry); | 4195 var_entry->Bind(entry); |
| 4242 Goto(&loop); | 4196 Goto(&loop); |
| 4243 } | 4197 } |
| 4244 } | 4198 } |
| 4245 | 4199 |
| 4246 template <class Dictionary> | |
| 4247 void CodeStubAssembler::FindInsertionEntry(Node* dictionary, Node* key, | |
| 4248 Variable* var_key_index) { | |
| 4249 UNREACHABLE(); | |
| 4250 } | |
| 4251 | |
| 4252 template <> | |
| 4253 void CodeStubAssembler::FindInsertionEntry<NameDictionary>( | |
| 4254 Node* dictionary, Node* key, Variable* var_key_index) { | |
| 4255 Label done(this); | |
| 4256 NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index, | |
| 4257 &done, 0, kFindInsertionIndex); | |
| 4258 Bind(&done); | |
| 4259 } | |
| 4260 | |
| 4261 template <class Dictionary> | |
| 4262 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, | |
| 4263 Node* index, Node* enum_index) { | |
| 4264 // This implementation works for dictionaries with details. | |
| 4265 STATIC_ASSERT(Dictionary::kEntrySize == 3); | |
| 4266 | |
| 4267 StoreFixedArrayElement(dictionary, index, key, UPDATE_WRITE_BARRIER, 0, | |
| 4268 INTPTR_PARAMETERS); | |
| 4269 const int kNameToValueOffset = | |
| 4270 (Dictionary::kEntryValueIndex - Dictionary::kEntryKeyIndex) * | |
| 4271 kPointerSize; | |
| 4272 StoreFixedArrayElement(dictionary, index, value, UPDATE_WRITE_BARRIER, | |
| 4273 kNameToValueOffset, INTPTR_PARAMETERS); | |
| 4274 const int kInitialIndex = 0; | |
| 4275 PropertyDetails d(NONE, DATA, kInitialIndex, PropertyCellType::kNoCell); | |
| 4276 Node* details = SmiConstant(d.AsSmi()); | |
| 4277 if (Dictionary::kIsEnumerable) { | |
| 4278 enum_index = | |
| 4279 WordShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); | |
| 4280 STATIC_ASSERT(kInitialIndex == 0); | |
| 4281 details = WordOr(details, enum_index); | |
| 4282 } | |
| 4283 const int kNameToDetailsOffset = | |
| 4284 (Dictionary::kEntryDetailsIndex - Dictionary::kEntryKeyIndex) * | |
| 4285 kPointerSize; | |
| 4286 StoreFixedArrayElement(dictionary, index, details, SKIP_WRITE_BARRIER, | |
| 4287 kNameToDetailsOffset, INTPTR_PARAMETERS); | |
| 4288 } | |
| 4289 | |
| 4290 template <> | |
| 4291 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, | |
| 4292 Node* key, Node* value, | |
| 4293 Node* index, | |
| 4294 Node* enum_index) { | |
| 4295 UNIMPLEMENTED(); | |
| 4296 } | |
| 4297 | |
| 4298 template <class Dictionary> | |
| 4299 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, | |
| 4300 Label* bailout) { | |
| 4301 Node* capacity = GetCapacity<Dictionary>(dictionary); | |
| 4302 Node* nof = GetNumberOfElements<Dictionary>(dictionary); | |
| 4303 Node* new_nof = SmiAdd(nof, SmiConstant(1)); | |
| 4304 // Require 33% to still be free after adding additional_elements. | |
| 4305 // This is a simplification of the C++ implementation's behavior, which | |
| 4306 // also rehashes the dictionary when there are too many deleted elements. | |
| 4307 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! | |
| 4308 // But that's OK here because it's only used for a comparison. | |
| 4309 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, WordShr(new_nof, 1)); | |
| 4310 GotoIf(UintPtrLessThan(capacity, required_capacity_pseudo_smi), bailout); | |
| 4311 Node* enum_index = nullptr; | |
| 4312 if (Dictionary::kIsEnumerable) { | |
| 4313 enum_index = GetNextEnumerationIndex<Dictionary>(dictionary); | |
| 4314 Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1)); | |
| 4315 Node* max_enum_index = | |
| 4316 SmiConstant(PropertyDetails::DictionaryStorageField::kMax); | |
| 4317 GotoIf(UintPtrGreaterThan(new_enum_index, max_enum_index), bailout); | |
| 4318 | |
| 4319 // No more bailouts after this point. | |
| 4320 // Operations from here on can have side effects. | |
| 4321 | |
| 4322 SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index); | |
| 4323 } else { | |
| 4324 USE(enum_index); | |
| 4325 } | |
| 4326 SetNumberOfElements<Dictionary>(dictionary, new_nof); | |
| 4327 | |
| 4328 Variable var_key_index(this, MachineType::PointerRepresentation()); | |
| 4329 FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index); | |
| 4330 InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(), | |
| 4331 enum_index); | |
| 4332 } | |
| 4333 | |
| 4334 template void CodeStubAssembler::Add<NameDictionary>(Node*, Node*, Node*, | |
| 4335 Label*); | |
| 4336 | |
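The removed Add<Dictionary> above bails out to the caller unless roughly a third of the table stays free after the insertion, mirroring the C++ implementation it references. The check in plain integer arithmetic (here nof and capacity are plain counts; the CSA version does the same arithmetic on Smi-tagged words, which its own comment notes is fine because the result is only used in a comparison):

    #include <cstdint>

    // True if one more element fits while keeping ~33% of the capacity free;
    // otherwise the caller must grow the dictionary (the bailout above).
    bool HasRoomForOneMore(uintptr_t capacity, uintptr_t nof) {
      uintptr_t new_nof = nof + 1;
      uintptr_t required = new_nof + (new_nof >> 1);  // new_nof * 1.5
      return capacity >= required;
    }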
| 4337 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, | 4200 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, |
| 4338 Node* descriptors, Node* nof, | 4201 Node* descriptors, Node* nof, |
| 4339 Label* if_found, | 4202 Label* if_found, |
| 4340 Variable* var_name_index, | 4203 Variable* var_name_index, |
| 4341 Label* if_not_found) { | 4204 Label* if_not_found) { |
| 4342 Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0)); | 4205 Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0)); |
| 4343 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize); | 4206 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize); |
| 4344 Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor)); | 4207 Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor)); |
| 4345 | 4208 |
| 4346 BuildFastLoop( | 4209 BuildFastLoop( |
| (...skipping 977 matching lines...) | |
| 5324 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); | 5187 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); |
| 5325 DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag, | 5188 DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag, |
| 5326 FixedArray::OffsetOfElementAt(0)); | 5189 FixedArray::OffsetOfElementAt(0)); |
| 5327 if (is_load) { | 5190 if (is_load) { |
| 5328 Node* result = LoadFixedArrayElement(the_context, mapped_index, 0, | 5191 Node* result = LoadFixedArrayElement(the_context, mapped_index, 0, |
| 5329 INTPTR_PARAMETERS); | 5192 INTPTR_PARAMETERS); |
| 5330 CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant())); | 5193 CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant())); |
| 5331 var_result.Bind(result); | 5194 var_result.Bind(result); |
| 5332 } else { | 5195 } else { |
| 5333 StoreFixedArrayElement(the_context, mapped_index, value, | 5196 StoreFixedArrayElement(the_context, mapped_index, value, |
| 5334 UPDATE_WRITE_BARRIER, 0, INTPTR_PARAMETERS); | 5197 UPDATE_WRITE_BARRIER, INTPTR_PARAMETERS); |
| 5335 } | 5198 } |
| 5336 Goto(&end); | 5199 Goto(&end); |
| 5337 } | 5200 } |
| 5338 | 5201 |
| 5339 Bind(&if_unmapped); | 5202 Bind(&if_unmapped); |
| 5340 { | 5203 { |
| 5341 Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0, | 5204 Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0, |
| 5342 INTPTR_PARAMETERS); | 5205 INTPTR_PARAMETERS); |
| 5343 GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()), | 5206 GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()), |
| 5344 bailout); | 5207 bailout); |
| 5345 | 5208 |
| 5346 Node* backing_store_length = | 5209 Node* backing_store_length = |
| 5347 LoadAndUntagFixedArrayBaseLength(backing_store); | 5210 LoadAndUntagFixedArrayBaseLength(backing_store); |
| 5348 GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout); | 5211 GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout); |
| 5349 | 5212 |
| 5350 // The key falls into unmapped range. | 5213 // The key falls into unmapped range. |
| 5351 if (is_load) { | 5214 if (is_load) { |
| 5352 Node* result = | 5215 Node* result = |
| 5353 LoadFixedArrayElement(backing_store, key, 0, INTPTR_PARAMETERS); | 5216 LoadFixedArrayElement(backing_store, key, 0, INTPTR_PARAMETERS); |
| 5354 GotoIf(WordEqual(result, TheHoleConstant()), bailout); | 5217 GotoIf(WordEqual(result, TheHoleConstant()), bailout); |
| 5355 var_result.Bind(result); | 5218 var_result.Bind(result); |
| 5356 } else { | 5219 } else { |
| 5357 StoreFixedArrayElement(backing_store, key, value, UPDATE_WRITE_BARRIER, 0, | 5220 StoreFixedArrayElement(backing_store, key, value, UPDATE_WRITE_BARRIER, |
| 5358 INTPTR_PARAMETERS); | 5221 INTPTR_PARAMETERS); |
| 5359 } | 5222 } |
| 5360 Goto(&end); | 5223 Goto(&end); |
| 5361 } | 5224 } |
| 5362 | 5225 |
| 5363 Bind(&end); | 5226 Bind(&end); |
| 5364 return var_result.value(); | 5227 return var_result.value(); |
| 5365 } | 5228 } |
| 5366 | 5229 |
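In the unmapped branch above, the backing FixedArray sits at elements slot 1, and the access bails out when the key is out of range or the slot holds the hole. A minimal sketch of that bounds-plus-hole check for the load case (the hole is modeled by a caller-supplied sentinel pointer, and returning false stands in for Goto(bailout)):

    #include <cstddef>
    #include <vector>

    bool LoadUnmappedArgument(const std::vector<const void*>& backing_store,
                              size_t key, const void* the_hole,
                              const void** result) {
      if (key >= backing_store.size()) return false;  // bailout: out of bounds
      const void* value = backing_store[key];
      if (value == the_hole) return false;            // bailout: hole
      *result = value;
      return true;
    }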
| 5367 Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) { | 5230 Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) { |
| (...skipping 62 matching lines...) | |
| 5430 return; | 5293 return; |
| 5431 } | 5294 } |
| 5432 | 5295 |
| 5433 WriteBarrierMode barrier_mode = | 5296 WriteBarrierMode barrier_mode = |
| 5434 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; | 5297 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; |
| 5435 if (IsFastDoubleElementsKind(kind)) { | 5298 if (IsFastDoubleElementsKind(kind)) { |
| 5436 // Make sure we do not store signalling NaNs into double arrays. | 5299 // Make sure we do not store signalling NaNs into double arrays. |
| 5437 value = Float64SilenceNaN(value); | 5300 value = Float64SilenceNaN(value); |
| 5438 StoreFixedDoubleArrayElement(elements, index, value, mode); | 5301 StoreFixedDoubleArrayElement(elements, index, value, mode); |
| 5439 } else { | 5302 } else { |
| 5440 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); | 5303 StoreFixedArrayElement(elements, index, value, barrier_mode, mode); |
| 5441 } | 5304 } |
| 5442 } | 5305 } |
| 5443 | 5306 |
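Double-element stores above first pass the value through Float64SilenceNaN so that no signalling NaN bit pattern lands in a FixedDoubleArray, where one reserved NaN pattern encodes the hole. As an illustration only, and not necessarily how Float64SilenceNaN itself is implemented, silencing a NaN amounts to setting the IEEE-754 quiet bit:

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    double SilenceNaN(double value) {
      if (!std::isnan(value)) return value;
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      bits |= uint64_t{1} << 51;  // top mantissa bit: the quiet bit
      std::memcpy(&value, &bits, sizeof(bits));
      return value;
    }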
| 5444 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value, | 5307 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value, |
| 5445 bool is_jsarray, | 5308 bool is_jsarray, |
| 5446 ElementsKind elements_kind, | 5309 ElementsKind elements_kind, |
| 5447 KeyedAccessStoreMode store_mode, | 5310 KeyedAccessStoreMode store_mode, |
| 5448 Label* bailout) { | 5311 Label* bailout) { |
| 5449 Node* elements = LoadElements(object); | 5312 Node* elements = LoadElements(object); |
| 5450 if (IsFastSmiOrObjectElementsKind(elements_kind) && | 5313 if (IsFastSmiOrObjectElementsKind(elements_kind) && |
| (...skipping 371 matching lines...) | |
| 5822 Node* next_site = LoadBufferObject(site_list, 0); | 5685 Node* next_site = LoadBufferObject(site_list, 0); |
| 5823 | 5686 |
| 5824 // TODO(mvstanton): This is a store to a weak pointer, which we may want to | 5687 // TODO(mvstanton): This is a store to a weak pointer, which we may want to |
| 5825 // mark as such in order to skip the write barrier, once we have a unified | 5688 // mark as such in order to skip the write barrier, once we have a unified |
| 5826 // system for weakness. For now we decided to keep it like this because having | 5689 // system for weakness. For now we decided to keep it like this because having |
| 5827 // an initial write barrier backed store makes this pointer strong until the | 5690 // an initial write barrier backed store makes this pointer strong until the |
| 5828 // next GC, and allocation sites are designed to survive several GCs anyway. | 5691 // next GC, and allocation sites are designed to survive several GCs anyway. |
| 5829 StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site); | 5692 StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site); |
| 5830 StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site); | 5693 StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site); |
| 5831 | 5694 |
| 5832 StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0, | 5695 StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, |
| 5833 CodeStubAssembler::SMI_PARAMETERS); | 5696 CodeStubAssembler::SMI_PARAMETERS); |
| 5834 return site; | 5697 return site; |
| 5835 } | 5698 } |
| 5836 | 5699 |
| 5837 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector, | 5700 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector, |
| 5838 Node* slot, | 5701 Node* slot, |
| 5839 Node* value) { | 5702 Node* value) { |
| 5840 Node* size = IntPtrConstant(WeakCell::kSize); | 5703 Node* size = IntPtrConstant(WeakCell::kSize); |
| 5841 Node* cell = Allocate(size, CodeStubAssembler::kPretenured); | 5704 Node* cell = Allocate(size, CodeStubAssembler::kPretenured); |
| 5842 | 5705 |
| 5843 // Initialize the WeakCell. | 5706 // Initialize the WeakCell. |
| 5844 StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex); | 5707 StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex); |
| 5845 StoreObjectField(cell, WeakCell::kValueOffset, value); | 5708 StoreObjectField(cell, WeakCell::kValueOffset, value); |
| 5846 StoreObjectFieldRoot(cell, WeakCell::kNextOffset, | 5709 StoreObjectFieldRoot(cell, WeakCell::kNextOffset, |
| 5847 Heap::kTheHoleValueRootIndex); | 5710 Heap::kTheHoleValueRootIndex); |
| 5848 | 5711 |
| 5849 // Store the WeakCell in the feedback vector. | 5712 // Store the WeakCell in the feedback vector. |
| 5850 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0, | 5713 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, |
| 5851 CodeStubAssembler::SMI_PARAMETERS); | 5714 CodeStubAssembler::SMI_PARAMETERS); |
| 5852 return cell; | 5715 return cell; |
| 5853 } | 5716 } |
| 5854 | 5717 |
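CreateWeakCellInFeedbackVector above allocates a pretenured WeakCell, fills its three fields, and writes it into the vector slot. The layout it initializes, assuming the fields sit at consecutive pointer-sized offsets in kMapOffset/kValueOffset/kNextOffset order:

    struct WeakCellLayout {
      void* map;    // the weak-cell map (Heap::kWeakCellMapRootIndex)
      void* value;  // the wrapped value; cleared by the GC when it dies
      void* next;   // the-hole here; used when cells are chained into a list
    };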
| 5855 void CodeStubAssembler::BuildFastLoop( | 5718 void CodeStubAssembler::BuildFastLoop( |
| 5856 const CodeStubAssembler::VariableList& vars, | 5719 const CodeStubAssembler::VariableList& vars, |
| 5857 MachineRepresentation index_rep, Node* start_index, Node* end_index, | 5720 MachineRepresentation index_rep, Node* start_index, Node* end_index, |
| 5858 std::function<void(CodeStubAssembler* assembler, Node* index)> body, | 5721 std::function<void(CodeStubAssembler* assembler, Node* index)> body, |
| 5859 int increment, IndexAdvanceMode mode) { | 5722 int increment, IndexAdvanceMode mode) { |
| 5860 Variable var(this, index_rep); | 5723 Variable var(this, index_rep); |
| (...skipping 1987 matching lines...) | |
| 7848 | 7711 |
| 7849 compiler::Node* CodeStubAssembler::IsDebugActive() { | 7712 compiler::Node* CodeStubAssembler::IsDebugActive() { |
| 7850 Node* is_debug_active = Load( | 7713 Node* is_debug_active = Load( |
| 7851 MachineType::Uint8(), | 7714 MachineType::Uint8(), |
| 7852 ExternalConstant(ExternalReference::debug_is_active_address(isolate()))); | 7715 ExternalConstant(ExternalReference::debug_is_active_address(isolate()))); |
| 7853 return WordNotEqual(is_debug_active, Int32Constant(0)); | 7716 return WordNotEqual(is_debug_active, Int32Constant(0)); |
| 7854 } | 7717 } |
| 7855 | 7718 |
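IsDebugActive above reads the isolate's one-byte debug_is_active flag through an external reference and tests it against zero; in ordinary C++ terms (the parameter name is illustrative):

    #include <cstdint>

    bool IsDebugActive(const volatile uint8_t* debug_is_active_flag) {
      return *debug_is_active_flag != 0;
    }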
| 7856 } // namespace internal | 7719 } // namespace internal |
| 7857 } // namespace v8 | 7720 } // namespace v8 |