// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ic/keyed-store-generic.h"

#include "src/compiler/code-assembler.h"
#include "src/contexts.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

using compiler::Node;

class KeyedStoreGenericAssembler : public CodeStubAssembler {
 public:
  void KeyedStoreGeneric(const StoreICParameters* p,
                         LanguageMode language_mode);

 private:
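  // kDontChangeLength: the store overwrites an existing element (or the
  // receiver is not a JSArray), so the length stays unchanged.
  // kIncrementLengthByOne: the store appends exactly at index == length.
  // kBumpLengthWithGap: the store lands past the current length but within
  // capacity, e.g. "var a = [1, 2]; a[5] = 3;", which bumps length to 6 and
  // requires a holey elements kind.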
  enum UpdateLength {
    kDontChangeLength,
    kIncrementLengthByOne,
    kBumpLengthWithGap
  };

  void EmitGenericElementStore(Node* receiver, Node* receiver_map,
                               Node* instance_type, Node* intptr_index,
                               Node* value, Node* context, Label* slow);

  void EmitGenericPropertyStore(Node* receiver, Node* receiver_map,
                                const StoreICParameters* p, Label* slow);

  void BranchIfPrototypesHaveNonFastElements(Node* receiver_map,
                                             Label* non_fast_elements,
                                             Label* only_fast_elements);

  void TryRewriteElements(Node* receiver, Node* receiver_map, Node* elements,
                          Node* native_context, ElementsKind from_kind,
                          ElementsKind to_kind, Label* bailout);

  void StoreElementWithCapacity(Node* receiver, Node* receiver_map,
                                Node* elements, Node* elements_kind,
                                Node* intptr_index, Node* value, Node* context,
                                Label* slow, UpdateLength update_length);

  void MaybeUpdateLengthAndReturn(Node* receiver, Node* index, Node* value,
                                  UpdateLength update_length);

  void TryChangeToHoleyMapHelper(Node* receiver, Node* receiver_map,
                                 Node* native_context, ElementsKind packed_kind,
                                 ElementsKind holey_kind, Label* done,
                                 Label* map_mismatch, Label* bailout);
  void TryChangeToHoleyMap(Node* receiver, Node* receiver_map,
                           Node* current_elements_kind, Node* context,
                           ElementsKind packed_kind, Label* bailout);
  void TryChangeToHoleyMapMulti(Node* receiver, Node* receiver_map,
                                Node* current_elements_kind, Node* context,
                                ElementsKind packed_kind,
                                ElementsKind packed_kind_2, Label* bailout);
};

// Do not add fields, so that this is safe to reinterpret_cast to CSA.
STATIC_ASSERT(sizeof(CodeStubAssembler) == sizeof(KeyedStoreGenericAssembler));
Igor Sheludko (2016/11/07 14:43:39): Maybe put it near the reinterpret cast.
Jakob Kummerow (2016/11/07 16:55:25): Done.

void KeyedStoreGenericGenerator::Generate(
    CodeStubAssembler* assembler, const CodeStubAssembler::StoreICParameters* p,
    LanguageMode language_mode) {
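  // The cast below is safe: KeyedStoreGenericAssembler adds no fields
  // (see the STATIC_ASSERT above), so both types share the same layout.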
  auto assm = reinterpret_cast<KeyedStoreGenericAssembler*>(assembler);
  assm->KeyedStoreGeneric(p, language_mode);
}

void KeyedStoreGenericAssembler::BranchIfPrototypesHaveNonFastElements(
    Node* receiver_map, Label* non_fast_elements, Label* only_fast_elements) {
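  // Walk the prototype chain of |receiver_map|. Jump to |only_fast_elements|
  // if the chain ends at null with every prototype holding fast (or no)
  // elements; jump to |non_fast_elements| as soon as a prototype requires
  // custom element handling or has non-fast elements.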
  Variable var_map(this, MachineRepresentation::kTagged);
  var_map.Bind(receiver_map);
  Label loop_body(this, &var_map);
  Goto(&loop_body);

  Bind(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    GotoIf(WordEqual(prototype, NullConstant()), only_fast_elements);
    Node* prototype_map = LoadMap(prototype);
    var_map.Bind(prototype_map);
    Node* instance_type = LoadMapInstanceType(prototype_map);
    STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
    STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
    GotoIf(Int32LessThanOrEqual(instance_type,
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           non_fast_elements);
    Node* elements_kind = LoadMapElementsKind(prototype_map);
    STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
    GotoIf(Int32LessThanOrEqual(elements_kind,
                                Int32Constant(LAST_FAST_ELEMENTS_KIND)),
           &loop_body);
    GotoIf(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)), &loop_body);
    Goto(non_fast_elements);
  }
}

void KeyedStoreGenericAssembler::TryRewriteElements(
    Node* receiver, Node* receiver_map, Node* elements, Node* native_context,
    ElementsKind from_kind, ElementsKind to_kind, Label* bailout) {
  DCHECK(IsFastPackedElementsKind(from_kind));
  ElementsKind holey_from_kind = GetHoleyElementsKind(from_kind);
  ElementsKind holey_to_kind = GetHoleyElementsKind(to_kind);
  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    TrapAllocationMemento(receiver, bailout);
  }
  Label perform_transition(this), check_holey_map(this);
  Variable var_target_map(this, MachineType::PointerRepresentation());
  // Check if the receiver has the default |from_kind| map.
  {
    Node* packed_map =
        LoadContextElement(native_context, Context::ArrayMapIndex(from_kind));
    GotoIf(WordNotEqual(receiver_map, packed_map), &check_holey_map);
    var_target_map.Bind(
        LoadContextElement(native_context, Context::ArrayMapIndex(to_kind)));
    Goto(&perform_transition);
  }

  // Check if the receiver has the default |holey_from_kind| map.
  Bind(&check_holey_map);
  {
    Node* holey_map = LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_from_kind));
    GotoIf(WordNotEqual(receiver_map, holey_map), bailout);
    var_target_map.Bind(LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_to_kind)));
    Goto(&perform_transition);
  }

  // Found a supported transition target map, perform the transition!
  Bind(&perform_transition);
  {
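    // Growing to the same capacity reallocates the backing store in the
    // target kind and copies the elements over, converting their
    // representation as needed.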
    Node* capacity = LoadFixedArrayBaseLength(elements);
    GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
                         capacity, INTPTR_PARAMETERS, bailout);
    StoreObjectField(receiver, JSObject::kMapOffset, var_target_map.value());
  }
}

void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
    Node* receiver, Node* receiver_map, Node* native_context,
    ElementsKind packed_kind, ElementsKind holey_kind, Label* done,
    Label* map_mismatch, Label* bailout) {
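  // Try the packed -> holey map change: if |receiver_map| is not the default
  // map for |packed_kind|, jump to |map_mismatch| so the caller can try
  // another kind.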
  Node* packed_map =
      LoadContextElement(native_context, Context::ArrayMapIndex(packed_kind));
  GotoIf(WordNotEqual(receiver_map, packed_map), map_mismatch);
  if (AllocationSite::GetMode(packed_kind, holey_kind) ==
      TRACK_ALLOCATION_SITE) {
    TrapAllocationMemento(receiver, bailout);
  }
  Node* holey_map =
      LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
  StoreObjectFieldNoWriteBarrier(receiver, JSObject::kMapOffset, holey_map);
Igor Sheludko (2016/11/07 14:43:39): I don't think we can skip write barrier here.
Jakob Kummerow (2016/11/07 16:55:25): Done.
  Goto(done);
}

void KeyedStoreGenericAssembler::TryChangeToHoleyMap(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  Label already_holey(this);

  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  Node* native_context = LoadNativeContext(context);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, bailout, bailout);
  Bind(&already_holey);
}

void KeyedStoreGenericAssembler::TryChangeToHoleyMapMulti(
    Node* receiver, Node* receiver_map, Node* current_elements_kind,
    Node* context, ElementsKind packed_kind, ElementsKind packed_kind_2,
    Label* bailout) {
  ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
  ElementsKind holey_kind_2 = GetHoleyElementsKind(packed_kind_2);
  Label already_holey(this), check_other_kind(this);

  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
         &already_holey);
  GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind_2)),
         &already_holey);

  Node* native_context = LoadNativeContext(context);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
                            holey_kind, &already_holey, &check_other_kind,
                            bailout);
  Bind(&check_other_kind);
  TryChangeToHoleyMapHelper(receiver, receiver_map, native_context,
                            packed_kind_2, holey_kind_2, &already_holey,
                            bailout, bailout);
  Bind(&already_holey);
}

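// Bumps the JSArray length to index + 1 if requested and returns |value|.
// The new length is always a Smi, so skipping the write barrier is safe here.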
void KeyedStoreGenericAssembler::MaybeUpdateLengthAndReturn(
    Node* receiver, Node* index, Node* value, UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    Node* new_length = SmiTag(IntPtrAdd(index, IntPtrConstant(1)));
    StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, new_length,
                                   MachineRepresentation::kTagged);
  }
  Return(value);
}

void KeyedStoreGenericAssembler::StoreElementWithCapacity(
    Node* receiver, Node* receiver_map, Node* elements, Node* elements_kind,
    Node* intptr_index, Node* value, Node* context, Label* slow,
    UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    CSA_ASSERT(Word32Equal(LoadMapInstanceType(receiver_map),
                           Int32Constant(JS_ARRAY_TYPE)));
  }
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
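  // kHeaderSize is untagged: subtracting kHeapObjectTag lets the element
  // offsets computed below be used directly by the raw Load/Store helpers.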
  const int kHeaderSize = FixedArray::kHeaderSize - kHeapObjectTag;

  Label check_double_elements(this), check_cow_elements(this);
  Node* elements_map = LoadMap(elements);
  GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
         &check_double_elements);

  // FixedArray backing store -> Smi or object elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    // If we know that we're storing beyond the previous array length, we
    // can skip the hole check (and always assume the hole).
    {
      Label hole_check_passed(this);
      if (update_length == kDontChangeLength) {
        Node* element = Load(MachineType::AnyTagged(), elements, offset);
        GotoIf(WordNotEqual(element, TheHoleConstant()), &hole_check_passed);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Check if the value we're storing matches the elements_kind. Smis
    // can always be stored.
    {
      Label non_smi_value(this);
      GotoUnless(TaggedIsSmi(value), &non_smi_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMapMulti(receiver, receiver_map, elements_kind, context,
                                 FAST_SMI_ELEMENTS, FAST_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                          value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_smi_value);
    }

    // Check if we already have object elements; just do the store if so.
    {
      Label must_transition(this);
      STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
      STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
      GotoIf(Int32LessThanOrEqual(elements_kind,
                                  Int32Constant(FAST_HOLEY_SMI_ELEMENTS)),
             &must_transition);
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_ELEMENTS, slow);
      }
      Store(MachineRepresentation::kTagged, elements, offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&must_transition);
    }

    // Transition to the required ElementsKind.
    {
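      // Reaching this point means the elements are (packed or holey) SMI
      // elements and |value| is a non-Smi HeapObject: transition to double
      // elements for heap numbers, to object elements for everything else.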
      Label transition_to_double(this), transition_to_object(this);
      Node* native_context = LoadNativeContext(context);
      Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
             &transition_to_double, &transition_to_object);
      Bind(&transition_to_double);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_DOUBLE_ELEMENTS
                                       : FAST_DOUBLE_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        // Reload migrated elements.
        Node* double_elements = LoadElements(receiver);
        Node* double_offset = ElementOffsetFromIndex(
            intptr_index, FAST_DOUBLE_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
        // Make sure we do not store signalling NaNs into double arrays.
        Node* double_value = Float64SilenceNaN(LoadHeapNumberValue(value));
        StoreNoWriteBarrier(MachineRepresentation::kFloat64, double_elements,
                            double_offset, double_value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }

      Bind(&transition_to_object);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_ELEMENTS
                                       : FAST_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        Store(MachineRepresentation::kTagged, elements, offset, value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }
    }
  }

  Bind(&check_double_elements);
  Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
  GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
         &check_cow_elements);
  // FixedDoubleArray backing store -> double elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_DOUBLE_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    {
      Label hole_check_passed(this);
      // If we know that we're storing beyond the previous array length, we
      // can skip the hole check (and always assume the hole).
      if (update_length == kDontChangeLength) {
        Label found_hole(this);
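        // Passing MachineType::None() performs only the hole check; the
        // double value itself is not loaded.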
        LoadDoubleWithHoleCheck(elements, offset, &found_hole,
                                MachineType::None());
        Goto(&hole_check_passed);
        Bind(&found_hole);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Try to store the value as a double.
    {
      Label non_number_value(this);
      Node* double_value = PrepareValueForWrite(value, Representation::Double(),
                                                &non_number_value);
      // Make sure we do not store signalling NaNs into double arrays.
      double_value = Float64SilenceNaN(double_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_DOUBLE_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, elements, offset,
                          double_value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_number_value);
    }

    // Transition to object elements.
    {
      Node* native_context = LoadNativeContext(context);
      ElementsKind target_kind = update_length == kBumpLengthWithGap
                                     ? FAST_HOLEY_ELEMENTS
                                     : FAST_ELEMENTS;
      TryRewriteElements(receiver, receiver_map, elements, native_context,
                         FAST_DOUBLE_ELEMENTS, target_kind, slow);
      // Reload migrated elements.
      Node* fast_elements = LoadElements(receiver);
      Node* fast_offset = ElementOffsetFromIndex(
          intptr_index, FAST_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
      Store(MachineRepresentation::kTagged, fast_elements, fast_offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
    }
  }

  Bind(&check_cow_elements);
  {
    // TODO(jkummerow): Use GrowElementsCapacity instead of bailing out.
    Goto(slow);
  }
}

void KeyedStoreGenericAssembler::EmitGenericElementStore(
    Node* receiver, Node* receiver_map, Node* instance_type, Node* intptr_index,
    Node* value, Node* context, Label* slow) {
  Label if_in_bounds(this), if_increment_length_by_one(this),
      if_bump_length_with_gap(this), if_grow(this), if_nonfast(this),
      if_typed_array(this), if_dictionary(this);
  Node* elements = LoadElements(receiver);
  Node* elements_kind = LoadMapElementsKind(receiver_map);
  GotoIf(
      IntPtrGreaterThan(elements_kind, IntPtrConstant(LAST_FAST_ELEMENTS_KIND)),
Igor Sheludko (2016/11/07 14:43:39): s/IntPtr/Int32/ as elements_kind is a 32-bit value.
Jakob Kummerow (2016/11/07 16:55:25): Done.
      &if_nonfast);

  Label if_array(this);
  GotoIf(Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)), &if_array);
  {
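    // Non-JSArray receivers have no length to maintain, so the store is in
    // bounds whenever the index is below the backing store's capacity.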
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    Branch(UintPtrLessThan(intptr_index, capacity), &if_in_bounds, &if_grow);
  }
  Bind(&if_array);
  {
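    // For JSArrays there are four cases: index < length stores in place;
    // index >= capacity must grow the backing store; index == length appends
    // and increments the length; length < index < capacity creates a gap of
    // holes.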
    Node* length = SmiUntag(LoadJSArrayLength(receiver));
    GotoIf(UintPtrLessThan(intptr_index, length), &if_in_bounds);
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    GotoIf(UintPtrGreaterThanOrEqual(intptr_index, capacity), &if_grow);
    Branch(WordEqual(intptr_index, length), &if_increment_length_by_one,
           &if_bump_length_with_gap);
  }

  Bind(&if_in_bounds);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kDontChangeLength);
  }

  Bind(&if_increment_length_by_one);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kIncrementLengthByOne);
  }

  Bind(&if_bump_length_with_gap);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kBumpLengthWithGap);
  }

  // Out-of-capacity accesses (index >= capacity) jump here. Additionally,
  // an ElementsKind transition might be necessary.
  Bind(&if_grow);
  {
    Comment("Grow backing store");
    // TODO(jkummerow): Support inline backing store growth.
    Goto(slow);
  }

  // Any ElementsKind > LAST_FAST_ELEMENTS_KIND jumps here for further dispatch.
  Bind(&if_nonfast);
  {
    STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    GotoIf(IntPtrGreaterThanOrEqual(
Igor Sheludko (2016/11/07 14:43:39): Same here and below.
Jakob Kummerow (2016/11/07 16:55:25): Done.
               elements_kind,
               IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
           &if_typed_array);
    GotoIf(IntPtrEqual(elements_kind, IntPtrConstant(DICTIONARY_ELEMENTS)),
           &if_dictionary);
    Goto(slow);
  }

  Bind(&if_dictionary);
  {
    Comment("Dictionary");
    // TODO(jkummerow): Support storing to dictionary elements.
    Goto(slow);
  }

  Bind(&if_typed_array);
  {
    Comment("Typed array");
    // TODO(jkummerow): Support typed arrays.
    Goto(slow);
  }
}

void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
    Node* receiver, Node* receiver_map, const StoreICParameters* p,
    Label* slow) {
  Comment("stub cache probe");
  // TODO(jkummerow): Don't rely on the stub cache as much.
  // - existing properties can be overwritten inline (unless readonly).
  // - for dictionary mode receivers, we can even add properties inline
  //   (unless the prototype chain prevents it).
  Variable var_handler(this, MachineRepresentation::kTagged);
  Label found_handler(this, &var_handler), stub_cache_miss(this);
  TryProbeStubCache(isolate()->store_stub_cache(), receiver, p->name,
                    &found_handler, &var_handler, &stub_cache_miss);
  Bind(&found_handler);
  {
    Comment("KeyedStoreGeneric found handler");
    HandleStoreICHandlerCase(p, var_handler.value(), slow);
  }
  Bind(&stub_cache_miss);
  {
    Comment("KeyedStoreGeneric_miss");
    TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value, p->slot,
                    p->vector, p->receiver, p->name);
  }
}

void KeyedStoreGenericAssembler::KeyedStoreGeneric(const StoreICParameters* p,
                                                   LanguageMode language_mode) {
  Variable var_index(this, MachineType::PointerRepresentation());
  Label if_index(this), if_unique_name(this), slow(this);

  Node* receiver = p->receiver;
  GotoIf(TaggedIsSmi(receiver), &slow);
  Node* receiver_map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(receiver_map);
  // Receivers requiring non-standard element accesses (interceptors, access
  // checks, strings and string wrappers, proxies) are handled in the runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
         &slow);

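  // TryToName dispatches on the key: array indices go to |if_index| with the
  // untagged index in |var_index|, unique names go to |if_unique_name|, and
  // anything else (e.g. non-internalized strings) goes to |slow|.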
  TryToName(p->name, &if_index, &var_index, &if_unique_name, &slow);

  Bind(&if_index);
  {
    Comment("integer index");
    EmitGenericElementStore(receiver, receiver_map, instance_type,
                            var_index.value(), p->value, p->context, &slow);
  }

  Bind(&if_unique_name);
  {
    Comment("key is unique name");
    EmitGenericPropertyStore(receiver, receiver_map, p, &slow);
  }

  Bind(&slow);
  {
    Comment("KeyedStoreGeneric_slow");
    TailCallRuntime(Runtime::kSetProperty, p->context, p->receiver, p->name,
                    p->value, SmiConstant(language_mode));
  }
}

}  // namespace internal
}  // namespace v8