OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 | 14 |
15 // ---------------------------------------------------------------------------- | 15 // ---------------------------------------------------------------------------- |
16 // Static IC stub generators. | 16 // Static IC stub generators. |
17 // | 17 // |
18 | 18 |
19 #define __ ACCESS_MASM(masm) | 19 #define __ ACCESS_MASM(masm) |
20 | 20 |
21 | |
// Jumps to |global_object| if |type| is JS_GLOBAL_OBJECT_TYPE or
// JS_GLOBAL_PROXY_TYPE; falls through for every other instance type.
// Used to keep global receivers out of the inline dictionary probe
// (their properties need cell-aware lookup).
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry; only compared,
  //         never written, so the caller can keep using it.
  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
  __ j(equal, global_object);
}
31 | |
32 | |
33 // Helper function used to load a property from a dictionary backing | 21 // Helper function used to load a property from a dictionary backing |
34 // storage. This function may fail to load a property even though it is | 22 // storage. This function may fail to load a property even though it is |
35 // in the dictionary, so code at miss_label must always call a backup | 23 // in the dictionary, so code at miss_label must always call a backup |
36 // property load that is complete. This function is safe to call if | 24 // property load that is complete. This function is safe to call if |
37 // name is not internalized, and will jump to the miss_label in that | 25 // name is not internalized, and will jump to the miss_label in that |
38 // case. The generated code assumes that the receiver has slow | 26 // case. The generated code assumes that the receiver has slow |
39 // properties, is not a global object and does not have interceptors. | 27 // properties, is not a global object and does not have interceptors. |
40 static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label, | 28 static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label, |
41 Register elements, Register name, | 29 Register elements, Register name, |
42 Register r0, Register r1, Register result) { | 30 Register r0, Register r1, Register result) { |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
125 // Store the value at the masked, scaled index. | 113 // Store the value at the masked, scaled index. |
126 const int kValueOffset = kElementsStartOffset + kPointerSize; | 114 const int kValueOffset = kElementsStartOffset + kPointerSize; |
127 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); | 115 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); |
128 __ mov(Operand(r0, 0), value); | 116 __ mov(Operand(r0, 0), value); |
129 | 117 |
130 // Update write barrier. Make sure not to clobber the value. | 118 // Update write barrier. Make sure not to clobber the value. |
131 __ mov(r1, value); | 119 __ mov(r1, value); |
132 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs); | 120 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs); |
133 } | 121 } |
134 | 122 |
135 | |
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object. Jumps to |slow| for smis, objects
// needing access checks, objects with the given interceptor bit set, and
// anything below JS_OBJECT_TYPE (including JSValue wrappers).
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  //     receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //     map - used to hold the map of the receiver.
  // interceptor_bit is Map::kHasIndexedInterceptor or
  // Map::kHasNamedInterceptor depending on the kind of key being handled.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Get the map of the receiver.
  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));

  // Check bit field: bail out if an access check is needed or the relevant
  // interceptor is installed — both require the runtime/stub path.
  __ test_b(
      FieldOperand(map, Map::kBitFieldOffset),
      Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ j(not_zero, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);

  // Single unsigned compare covers the whole "below JS_OBJECT_TYPE" range.
  __ CmpInstanceType(map, JS_OBJECT_TYPE);
  __ j(below, slow);
}
165 | |
166 | |
// Loads an indexed element from a fast case array.
// On success falls through with the value in |result|; jumps to |slow| for
// negative keys, non-fast prototypes, or any case needing the runtime.
// Out-of-bounds (non-negative) keys and the-hole loads walk the prototype
// chain and yield 'undefined' when every prototype is an empty fast object.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register scratch,
                                  Register scratch2, Register result,
                                  Label* slow) {
  // Register use:
  //     receiver - holds the receiver and is unchanged.
  //     key - holds the key and is unchanged (must be a smi).
  // Scratch registers:
  //     scratch - used to hold elements of the receiver and the loaded value.
  //     scratch2 - holds maps and prototypes during prototype chain check.
  //     result - holds the result on exit if the load succeeds and
  //              we fall through.
  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(scratch);

  // Check that the key (index) is within bounds.
  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
  __ j(below, &in_bounds);
  // Out-of-bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ cmp(key, 0);
  __ j(less, slow);  // Negative keys can't take the fast OOB path.
  __ bind(&check_prototypes);
  __ mov(scratch2, FieldOperand(receiver, HeapObject::kMapOffset));
  __ bind(&check_next_prototype);
  __ mov(scratch2, FieldOperand(scratch2, Map::kPrototypeOffset));
  // scratch2: current prototype
  // Reaching null terminates the walk: the element is truly absent.
  __ cmp(scratch2, masm->isolate()->factory()->null_value());
  __ j(equal, &absent);
  __ mov(scratch, FieldOperand(scratch2, JSObject::kElementsOffset));
  __ mov(scratch2, FieldOperand(scratch2, HeapObject::kMapOffset));
  // scratch: elements of current prototype
  // scratch2: map of current prototype
  // The prototype must be a plain JS object without access checks or
  // indexed interceptors, and with no own elements, or we defer to |slow|.
  __ CmpInstanceType(scratch2, JS_OBJECT_TYPE);
  __ j(below, slow);
  __ test_b(FieldOperand(scratch2, Map::kBitFieldOffset),
            Immediate((1 << Map::kIsAccessCheckNeeded) |
                      (1 << Map::kHasIndexedInterceptor)));
  __ j(not_zero, slow);
  __ cmp(scratch, masm->isolate()->factory()->empty_fixed_array());
  __ j(not_equal, slow);
  __ jmp(&check_next_prototype);

  __ bind(&absent);
  __ mov(result, masm->isolate()->factory()->undefined_value());
  __ jmp(&done);

  __ bind(&in_bounds);
  // Fast case: Do the load. times_2 works because the smi-tagged key is
  // already the index shifted left by one (pointer size 4 = smi value * 4).
  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
  __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value()));
  // In case the loaded value is the_hole we have to check the prototype chain.
  __ j(equal, &check_prototypes);
  __ Move(result, scratch);
  __ bind(&done);
}
228 | |
229 | |
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name (symbol or internalized string).
// Jumps to |index_string| if the string caches an array index in its hash
// field, and to |not_unique| for any other non-unique key.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  //     key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //     map - used to hold the map of the key.
  //     hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  // Exactly LAST_UNIQUE_NAME_TYPE means a symbol — unique by definition.
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ mov(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset),
            Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}
260 | |
261 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | |
262 // The return address is on the stack. | |
263 Label slow, check_name, index_smi, index_name, property_array_property; | |
264 Label probe_dictionary, check_number_dictionary; | |
265 | |
266 Register receiver = LoadDescriptor::ReceiverRegister(); | |
267 Register key = LoadDescriptor::NameRegister(); | |
268 DCHECK(receiver.is(edx)); | |
269 DCHECK(key.is(ecx)); | |
270 | |
271 // Check that the key is a smi. | |
272 __ JumpIfNotSmi(key, &check_name); | |
273 __ bind(&index_smi); | |
274 // Now the key is known to be a smi. This place is also jumped to from | |
275 // where a numeric string is converted to a smi. | |
276 | |
277 GenerateKeyedLoadReceiverCheck(masm, receiver, eax, | |
278 Map::kHasIndexedInterceptor, &slow); | |
279 | |
280 // Check the receiver's map to see if it has fast elements. | |
281 __ CheckFastElements(eax, &check_number_dictionary); | |
282 | |
283 GenerateFastArrayLoad(masm, receiver, key, eax, ebx, eax, &slow); | |
284 Isolate* isolate = masm->isolate(); | |
285 Counters* counters = isolate->counters(); | |
286 __ IncrementCounter(counters->ic_keyed_load_generic_smi(), 1); | |
287 __ ret(0); | |
288 | |
289 __ bind(&check_number_dictionary); | |
290 __ mov(ebx, key); | |
291 __ SmiUntag(ebx); | |
292 __ mov(eax, FieldOperand(receiver, JSObject::kElementsOffset)); | |
293 | |
294 // Check whether the elements is a number dictionary. | |
295 // ebx: untagged index | |
296 // eax: elements | |
297 __ CheckMap(eax, isolate->factory()->hash_table_map(), &slow, | |
298 DONT_DO_SMI_CHECK); | |
299 Label slow_pop_receiver; | |
300 // Push receiver on the stack to free up a register for the dictionary | |
301 // probing. | |
302 __ push(receiver); | |
303 __ LoadFromNumberDictionary(&slow_pop_receiver, eax, key, ebx, edx, edi, eax); | |
304 // Pop receiver before returning. | |
305 __ pop(receiver); | |
306 __ ret(0); | |
307 | |
308 __ bind(&slow_pop_receiver); | |
309 // Pop the receiver from the stack and jump to runtime. | |
310 __ pop(receiver); | |
311 | |
312 __ bind(&slow); | |
313 // Slow case: jump to runtime. | |
314 __ IncrementCounter(counters->ic_keyed_load_generic_slow(), 1); | |
315 GenerateRuntimeGetProperty(masm); | |
316 | |
317 __ bind(&check_name); | |
318 GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow); | |
319 | |
320 GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor, | |
321 &slow); | |
322 | |
323 // If the receiver is a fast-case object, check the stub cache. Otherwise | |
324 // probe the dictionary. | |
325 __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
326 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | |
327 Immediate(isolate->factory()->hash_table_map())); | |
328 __ j(equal, &probe_dictionary); | |
329 | |
330 // The handlers in the stub cache expect a vector and slot. Since we won't | |
331 // change the IC from any downstream misses, a dummy vector can be used. | |
332 Handle<TypeFeedbackVector> dummy_vector = | |
333 TypeFeedbackVector::DummyVector(isolate); | |
334 int slot = dummy_vector->GetIndex( | |
335 FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); | |
336 __ push(Immediate(Smi::FromInt(slot))); | |
337 __ push(Immediate(dummy_vector)); | |
338 | |
339 masm->isolate()->load_stub_cache()->GenerateProbe(masm, receiver, key, ebx, | |
340 edi); | |
341 | |
342 __ pop(LoadWithVectorDescriptor::VectorRegister()); | |
343 __ pop(LoadDescriptor::SlotRegister()); | |
344 | |
345 // Cache miss. | |
346 GenerateMiss(masm); | |
347 | |
348 // Do a quick inline probe of the receiver's dictionary, if it | |
349 // exists. | |
350 __ bind(&probe_dictionary); | |
351 | |
352 __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset)); | |
353 __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset)); | |
354 GenerateGlobalInstanceTypeCheck(masm, eax, &slow); | |
355 | |
356 GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax); | |
357 __ IncrementCounter(counters->ic_keyed_load_generic_symbol(), 1); | |
358 __ ret(0); | |
359 | |
360 __ bind(&index_name); | |
361 __ IndexFromHash(ebx, key); | |
362 // Now jump to the place where smi keys are handled. | |
363 __ jmp(&index_smi); | |
364 } | |
365 | |
366 | |
367 static void KeyedStoreGenerateMegamorphicHelper( | 123 static void KeyedStoreGenerateMegamorphicHelper( |
368 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, | 124 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, |
369 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) { | 125 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) { |
370 Label transition_smi_elements; | 126 Label transition_smi_elements; |
371 Label finish_object_store, non_double_value, transition_double_elements; | 127 Label finish_object_store, non_double_value, transition_double_elements; |
372 Label fast_double_without_map_check; | 128 Label fast_double_without_map_check; |
373 Register receiver = StoreDescriptor::ReceiverRegister(); | 129 Register receiver = StoreDescriptor::ReceiverRegister(); |
374 Register key = StoreDescriptor::NameRegister(); | 130 Register key = StoreDescriptor::NameRegister(); |
375 Register value = StoreDescriptor::ValueRegister(); | 131 Register value = StoreDescriptor::ValueRegister(); |
376 DCHECK(receiver.is(edx)); | 132 DCHECK(receiver.is(edx)); |
(...skipping 466 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
843 Condition cc = | 599 Condition cc = |
844 (check == ENABLE_INLINED_SMI_CHECK) | 600 (check == ENABLE_INLINED_SMI_CHECK) |
845 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 601 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
846 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 602 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
847 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 603 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
848 } | 604 } |
849 } // namespace internal | 605 } // namespace internal |
850 } // namespace v8 | 606 } // namespace v8 |
851 | 607 |
852 #endif // V8_TARGET_ARCH_IA32 | 608 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |