// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ic/keyed-store-generic.h"

#include "src/compiler/code-assembler.h"
#include "src/contexts.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

using compiler::Node;

class KeyedStoreGenericAssembler : public CodeStubAssembler {
 public:
  void KeyedStoreGeneric(const StoreICParameters* p,
                         LanguageMode language_mode);

 private:
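  // Describes how a store changes the receiver's JSArray length:
  // - kDontChangeLength: the index is below the current length.
  // - kIncrementLengthByOne: the index is equal to the current length.
  // - kBumpLengthWithGap: the index is past the current length, so the
  //   elements in between become holes.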
  enum UpdateLength {
    kDontChangeLength,
    kIncrementLengthByOne,
    kBumpLengthWithGap
  };

  void EmitGenericElementStore(Node* receiver, Node* receiver_map,
                               Node* instance_type, Node* intptr_index,
                               Node* value, Node* context, Label* slow);

  void EmitGenericPropertyStore(Node* receiver, Node* receiver_map,
                                const StoreICParameters* p, Label* slow);

  void BranchIfPrototypesHaveNonFastElements(Node* receiver_map,
                                             Label* non_fast_elements,
                                             Label* only_fast_elements);

  void TryRewriteElements(Node* receiver, Node* receiver_map, Node* elements,
                          Node* native_context, ElementsKind from_kind,
                          ElementsKind to_kind, Label* bailout);

  void StoreElementWithCapacity(Node* receiver, Node* receiver_map,
                                Node* elements, Node* elements_kind,
                                Node* intptr_index, Node* value, Node* context,
                                Label* slow, UpdateLength update_length);

  void MaybeUpdateLengthAndReturn(Node* receiver, Node* index, Node* value,
                                  UpdateLength update_length);

  void TryChangeToHoleyMapHelper(Node* receiver, Node* receiver_map,
                                 Node* native_context, ElementsKind packed_kind,
                                 ElementsKind holey_kind, Label* done,
                                 Label* map_mismatch, Label* bailout);
  void TryChangeToHoleyMap(Node* receiver, Node* receiver_map,
                           Node* current_elements_kind, Node* context,
                           ElementsKind packed_kind, Label* bailout);
  void TryChangeToHoleyMapMulti(Node* receiver, Node* receiver_map,
                                Node* current_elements_kind, Node* context,
                                ElementsKind packed_kind,
                                ElementsKind packed_kind_2, Label* bailout);

  // Do not add fields, so that this is safe to reinterpret_cast to CSA.
};

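// Generate() is handed a plain CodeStubAssembler by its callers; the
// reinterpret_cast below is only safe because KeyedStoreGenericAssembler
// adds no data members, which the STATIC_ASSERT checks.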
void KeyedStoreGenericGenerator::Generate(
    CodeStubAssembler* assembler, const CodeStubAssembler::StoreICParameters* p,
    LanguageMode language_mode) {
  STATIC_ASSERT(sizeof(CodeStubAssembler) ==
                sizeof(KeyedStoreGenericAssembler));
  auto assm = reinterpret_cast<KeyedStoreGenericAssembler*>(assembler);
  // Review note (hans, 2016/11/14 22:16:01): This looks like a hack, and the
  // reason for the UBS...
  assm->KeyedStoreGeneric(p, language_mode);
}

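// Walks the prototype chain of |receiver_map| and branches to
// |only_fast_elements| if every prototype up to null has fast (or no)
// elements, i.e. overwriting a hole cannot be observed through an accessor
// on the chain; otherwise branches to |non_fast_elements|.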
void KeyedStoreGenericAssembler::BranchIfPrototypesHaveNonFastElements(
    Node* receiver_map, Label* non_fast_elements, Label* only_fast_elements) {
  Variable var_map(this, MachineRepresentation::kTagged);
  var_map.Bind(receiver_map);
  Label loop_body(this, &var_map);
  Goto(&loop_body);

  Bind(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    GotoIf(WordEqual(prototype, NullConstant()), only_fast_elements);
    Node* prototype_map = LoadMap(prototype);
    var_map.Bind(prototype_map);
    Node* instance_type = LoadMapInstanceType(prototype_map);
    STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
    STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
    GotoIf(Int32LessThanOrEqual(instance_type,
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           non_fast_elements);
    Node* elements_kind = LoadMapElementsKind(prototype_map);
    STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
    GotoIf(Int32LessThanOrEqual(elements_kind,
                                Int32Constant(LAST_FAST_ELEMENTS_KIND)),
           &loop_body);
    GotoIf(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)), &loop_body);
    Goto(non_fast_elements);
  }
}

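// Transitions |receiver| from |from_kind| to |to_kind|. If the receiver has
// the default holey |from_kind| map instead, it is transitioned to the holey
// variant of |to_kind|. When the transition crosses the tagged/double
// representation boundary, the elements backing store is reallocated at the
// same capacity; any other receiver map bails out to |bailout|.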
void KeyedStoreGenericAssembler::TryRewriteElements(
    Node* receiver, Node* receiver_map, Node* elements, Node* native_context,
    ElementsKind from_kind, ElementsKind to_kind, Label* bailout) {
  DCHECK(IsFastPackedElementsKind(from_kind));
  ElementsKind holey_from_kind = GetHoleyElementsKind(from_kind);
  ElementsKind holey_to_kind = GetHoleyElementsKind(to_kind);
  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    TrapAllocationMemento(receiver, bailout);
  }
  Label perform_transition(this), check_holey_map(this);
  Variable var_target_map(this, MachineType::PointerRepresentation());
  // Check if the receiver has the default |from_kind| map.
  {
    Node* packed_map =
        LoadContextElement(native_context, Context::ArrayMapIndex(from_kind));
    GotoIf(WordNotEqual(receiver_map, packed_map), &check_holey_map);
    var_target_map.Bind(
        LoadContextElement(native_context, Context::ArrayMapIndex(to_kind)));
    Goto(&perform_transition);
  }

  // Check if the receiver has the default |holey_from_kind| map.
  Bind(&check_holey_map);
  {
    Node* holey_map = LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_from_kind));
    GotoIf(WordNotEqual(receiver_map, holey_map), bailout);
    var_target_map.Bind(LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_to_kind)));
    Goto(&perform_transition);
  }

  // Found a supported transition target map, perform the transition!
  Bind(&perform_transition);
  {
    if (IsFastDoubleElementsKind(from_kind) !=
        IsFastDoubleElementsKind(to_kind)) {
      Node* capacity = LoadFixedArrayBaseLength(elements);
      GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
                           capacity, INTPTR_PARAMETERS, bailout);
    }
    StoreObjectField(receiver, JSObject::kMapOffset, var_target_map.value());
  }
}

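// Helper for the TryChangeToHoleyMap* functions below: if |receiver_map| is
// the default map for |packed_kind|, installs the corresponding default
// |holey_kind| map on the receiver and jumps to |done|; otherwise jumps to
// |map_mismatch|.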
void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
    Node* receiver, Node* receiver_map, Node* native_context,
    ElementsKind packed_kind, ElementsKind holey_kind, Label* done,
    Label* map_mismatch, Label* bailout) {
  Node* packed_map =
      LoadContextElement(native_context, Context::ArrayMapIndex(packed_kind));
  GotoIf(WordNotEqual(receiver_map, packed_map), map_mismatch);
  if (AllocationSite::GetMode(packed_kind, holey_kind) ==
      TRACK_ALLOCATION_SITE) {
    TrapAllocationMemento(receiver, bailout);
  }
  Node* holey_map =
      LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
  StoreObjectField(receiver, JSObject::kMapOffset, holey_map);
  Goto(done);
}

void KeyedStoreGenericAssembler::TryChangeToHoleyMap(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  Label already_holey(this);

  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  Node* native_context = LoadNativeContext(context);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, bailout, bailout);
  Bind(&already_holey);
}

void KeyedStoreGenericAssembler::TryChangeToHoleyMapMulti(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, ElementsKind packed_kind_2,
    Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  ElementsKind holey_kind_2 = GetHoleyElementsKind(packed_kind_2);
  Label already_holey(this), check_other_kind(this);

  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind_2)),
         &already_holey);

  Node* native_context = LoadNativeContext(context);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, &check_other_kind,
                            bailout);
  Bind(&check_other_kind);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context,
                            packed_kind_2, holey_kind_2, &already_holey,
                            bailout, bailout);
  Bind(&already_holey);
}

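// For length-changing stores, writes |index| + 1 as the receiver's new
// JSArray length; the new length is a Smi, so no write barrier is needed.
// Always returns |value| from the stub.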
void KeyedStoreGenericAssembler::MaybeUpdateLengthAndReturn(
    Node* receiver, Node* index, Node* value, UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    Node* new_length = SmiTag(IntPtrAdd(index, IntPtrConstant(1)));
    StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, new_length,
                                   MachineRepresentation::kTagged);
  }
  Return(value);
}

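// Stores |value| at |intptr_index|, which is known to be within the capacity
// of the backing store. Dispatches on the elements map: FixedArray backing
// stores hold Smi/object elements, FixedDoubleArray backing stores hold
// unboxed doubles, and anything else (e.g. copy-on-write arrays) goes to
// |slow|.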
void KeyedStoreGenericAssembler::StoreElementWithCapacity(
    Node* receiver, Node* receiver_map, Node* elements, Node* elements_kind,
    Node* intptr_index, Node* value, Node* context, Label* slow,
    UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    CSA_ASSERT(this, Word32Equal(LoadMapInstanceType(receiver_map),
                                 Int32Constant(JS_ARRAY_TYPE)));
  }
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int kHeaderSize = FixedArray::kHeaderSize - kHeapObjectTag;

  Label check_double_elements(this), check_cow_elements(this);
  Node* elements_map = LoadMap(elements);
  GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
         &check_double_elements);

  // FixedArray backing store -> Smi or object elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    // If we know that we're storing beyond the previous array length, we
    // can skip the hole check (and always assume the hole).
    {
      Label hole_check_passed(this);
      if (update_length == kDontChangeLength) {
        Node* element = Load(MachineType::AnyTagged(), elements, offset);
        GotoIf(WordNotEqual(element, TheHoleConstant()), &hole_check_passed);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Check if the value we're storing matches the elements_kind. Smis
    // can always be stored.
    {
      Label non_smi_value(this);
      GotoUnless(TaggedIsSmi(value), &non_smi_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMapMulti(receiver, receiver_map, elements_kind, context,
                                 FAST_SMI_ELEMENTS, FAST_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                          value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_smi_value);
    }

    // Check if we already have object elements; just do the store if so.
    {
      Label must_transition(this);
      STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
      STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
      GotoIf(Int32LessThanOrEqual(elements_kind,
                                  Int32Constant(FAST_HOLEY_SMI_ELEMENTS)),
             &must_transition);
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_ELEMENTS, slow);
      }
      Store(MachineRepresentation::kTagged, elements, offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&must_transition);
    }

    // Transition to the required ElementsKind.
    {
      Label transition_to_double(this), transition_to_object(this);
      Node* native_context = LoadNativeContext(context);
      Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
             &transition_to_double, &transition_to_object);
      Bind(&transition_to_double);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_DOUBLE_ELEMENTS
                                       : FAST_DOUBLE_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        // Reload migrated elements.
        Node* double_elements = LoadElements(receiver);
        Node* double_offset = ElementOffsetFromIndex(
            intptr_index, FAST_DOUBLE_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
        // Make sure we do not store signalling NaNs into double arrays.
        Node* double_value = Float64SilenceNaN(LoadHeapNumberValue(value));
        StoreNoWriteBarrier(MachineRepresentation::kFloat64, double_elements,
                            double_offset, double_value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }

      Bind(&transition_to_object);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_ELEMENTS
                                       : FAST_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        // The elements backing store didn't change, no reload necessary.
        CSA_ASSERT(this, WordEqual(elements, LoadElements(receiver)));
        Store(MachineRepresentation::kTagged, elements, offset, value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }
    }
  }

  Bind(&check_double_elements);
  Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
  GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
         &check_cow_elements);
  // FixedDoubleArray backing store -> double elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_DOUBLE_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    {
      Label hole_check_passed(this);
      // If we know that we're storing beyond the previous array length, we
      // can skip the hole check (and always assume the hole).
      if (update_length == kDontChangeLength) {
        Label found_hole(this);
        LoadDoubleWithHoleCheck(elements, offset, &found_hole,
                                MachineType::None());
        Goto(&hole_check_passed);
        Bind(&found_hole);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Try to store the value as a double.
    {
      Label non_number_value(this);
      Node* double_value = PrepareValueForWrite(value, Representation::Double(),
                                                &non_number_value);
      // Make sure we do not store signalling NaNs into double arrays.
      double_value = Float64SilenceNaN(double_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_DOUBLE_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, elements, offset,
                          double_value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_number_value);
    }

    // Transition to object elements.
    {
      Node* native_context = LoadNativeContext(context);
      ElementsKind target_kind = update_length == kBumpLengthWithGap
                                     ? FAST_HOLEY_ELEMENTS
                                     : FAST_ELEMENTS;
      TryRewriteElements(receiver, receiver_map, elements, native_context,
                         FAST_DOUBLE_ELEMENTS, target_kind, slow);
      // Reload migrated elements.
      Node* fast_elements = LoadElements(receiver);
      Node* fast_offset = ElementOffsetFromIndex(
          intptr_index, FAST_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
      Store(MachineRepresentation::kTagged, fast_elements, fast_offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
    }
  }

  Bind(&check_cow_elements);
  {
    // TODO(jkummerow): Use GrowElementsCapacity instead of bailing out.
    Goto(slow);
  }
}

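// Classifies an element store by bounds: in-bounds stores, stores that append
// at index == length, and stores that leave a gap are handled inline while
// the backing store has enough capacity; growing the backing store,
// dictionary elements, and typed arrays currently bail out to |slow|.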
void KeyedStoreGenericAssembler::EmitGenericElementStore(
    Node* receiver, Node* receiver_map, Node* instance_type, Node* intptr_index,
    Node* value, Node* context, Label* slow) {
  Label if_in_bounds(this), if_increment_length_by_one(this),
      if_bump_length_with_gap(this), if_grow(this), if_nonfast(this),
      if_typed_array(this), if_dictionary(this);
  Node* elements = LoadElements(receiver);
  Node* elements_kind = LoadMapElementsKind(receiver_map);
  GotoIf(
      Int32GreaterThan(elements_kind, Int32Constant(LAST_FAST_ELEMENTS_KIND)),
      &if_nonfast);

  Label if_array(this);
  GotoIf(Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)), &if_array);
  {
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    Branch(UintPtrLessThan(intptr_index, capacity), &if_in_bounds, &if_grow);
  }
  Bind(&if_array);
  {
    Node* length = SmiUntag(LoadJSArrayLength(receiver));
    GotoIf(UintPtrLessThan(intptr_index, length), &if_in_bounds);
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    GotoIf(UintPtrGreaterThanOrEqual(intptr_index, capacity), &if_grow);
    Branch(WordEqual(intptr_index, length), &if_increment_length_by_one,
           &if_bump_length_with_gap);
  }

  Bind(&if_in_bounds);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kDontChangeLength);
  }

  Bind(&if_increment_length_by_one);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kIncrementLengthByOne);
  }

  Bind(&if_bump_length_with_gap);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kBumpLengthWithGap);
  }

  // Out-of-capacity accesses (index >= capacity) jump here. Additionally,
  // an ElementsKind transition might be necessary.
  Bind(&if_grow);
  {
    Comment("Grow backing store");
    // TODO(jkummerow): Support inline backing store growth.
    Goto(slow);
  }

  // Any ElementsKind > LAST_FAST_ELEMENTS_KIND jumps here for further dispatch.
  Bind(&if_nonfast);
  {
    STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    GotoIf(Int32GreaterThanOrEqual(
               elements_kind,
               Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
           &if_typed_array);
    GotoIf(Word32Equal(elements_kind, Int32Constant(DICTIONARY_ELEMENTS)),
           &if_dictionary);
    Goto(slow);
  }

  Bind(&if_dictionary);
  {
    Comment("Dictionary");
    // TODO(jkummerow): Support storing to dictionary elements.
    Goto(slow);
  }

  Bind(&if_typed_array);
  {
    Comment("Typed array");
    // TODO(jkummerow): Support typed arrays.
    Goto(slow);
  }
}

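// Stores to a property keyed by a unique name: probes the store stub cache
// for a cached handler, and on a miss tail-calls the KeyedStoreIC_Miss
// runtime function.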
void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
    Node* receiver, Node* receiver_map, const StoreICParameters* p,
    Label* slow) {
  Comment("stub cache probe");
  // TODO(jkummerow): Don't rely on the stub cache as much.
  // - existing properties can be overwritten inline (unless readonly).
  // - for dictionary mode receivers, we can even add properties inline
  //   (unless the prototype chain prevents it).
  Variable var_handler(this, MachineRepresentation::kTagged);
  Label found_handler(this, &var_handler), stub_cache_miss(this);
  TryProbeStubCache(isolate()->store_stub_cache(), receiver, p->name,
                    &found_handler, &var_handler, &stub_cache_miss);
  Bind(&found_handler);
  {
    Comment("KeyedStoreGeneric found handler");
    HandleStoreICHandlerCase(p, var_handler.value(), slow);
  }
  Bind(&stub_cache_miss);
  {
    Comment("KeyedStoreGeneric_miss");
    TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value, p->slot,
                    p->vector, p->receiver, p->name);
  }
}

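// Entry point. Smi receivers and receivers that need non-standard element
// handling are sent to the runtime; otherwise the key is classified as
// either an integer index or a unique name and dispatched accordingly.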
void KeyedStoreGenericAssembler::KeyedStoreGeneric(const StoreICParameters* p,
                                                   LanguageMode language_mode) {
  Variable var_index(this, MachineType::PointerRepresentation());
  Label if_index(this), if_unique_name(this), slow(this);

  Node* receiver = p->receiver;
  GotoIf(TaggedIsSmi(receiver), &slow);
  Node* receiver_map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(receiver_map);
  // Receivers requiring non-standard element accesses (interceptors, access
  // checks, strings and string wrappers, proxies) are handled in the runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
         &slow);

  TryToName(p->name, &if_index, &var_index, &if_unique_name, &slow);

  Bind(&if_index);
  {
    Comment("integer index");
    EmitGenericElementStore(receiver, receiver_map, instance_type,
                            var_index.value(), p->value, p->context, &slow);
  }

  Bind(&if_unique_name);
  {
    Comment("key is unique name");
    EmitGenericPropertyStore(receiver, receiver_map, p, &slow);
  }

  Bind(&slow);
  {
    Comment("KeyedStoreGeneric_slow");
    TailCallRuntime(Runtime::kSetProperty, p->context, p->receiver, p->name,
                    p->value, SmiConstant(language_mode));
  }
}

}  // namespace internal
}  // namespace v8