OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
11 | 11 |
12 namespace v8 { | 12 namespace v8 { |
13 namespace internal { | 13 namespace internal { |
14 | 14 |
15 | 15 |
16 // ---------------------------------------------------------------------------- | 16 // ---------------------------------------------------------------------------- |
17 // Static IC stub generators. | 17 // Static IC stub generators. |
18 // | 18 // |
19 | 19 |
20 #define __ ACCESS_MASM(masm) | 20 #define __ ACCESS_MASM(masm) |
21 | 21 |
22 | |
23 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type, | |
24 Label* global_object) { | |
25 // Register usage: | |
26 // type: holds the receiver instance type on entry. | |
27 __ cmpi(type, Operand(JS_GLOBAL_OBJECT_TYPE)); | |
28 __ beq(global_object); | |
29 __ cmpi(type, Operand(JS_GLOBAL_PROXY_TYPE)); | |
30 __ beq(global_object); | |
31 } | |
32 | |
33 | |
34 // Helper function used from LoadIC GenerateNormal. | 22 // Helper function used from LoadIC GenerateNormal. |
35 // | 23 // |
36 // elements: Property dictionary. It is not clobbered if a jump to the miss | 24 // elements: Property dictionary. It is not clobbered if a jump to the miss |
37 // label is done. | 25 // label is done. |
38 // name: Property name. It is not clobbered if a jump to the miss label is | 26 // name: Property name. It is not clobbered if a jump to the miss label is |
39 // done. | 27 // done. |
40 // result: Register for the result. It is only updated if a jump to the miss | 28 // result: Register for the result. It is only updated if a jump to the miss |
41 // label is not done. Can be the same as elements or name, clobbering | 29 // label is not done. Can be the same as elements or name, clobbering |
42 // one of these in the case of not jumping to the miss label. | 30 // one of these in the case of not jumping to the miss label. |
43 // The two scratch registers need to be different from elements, name and | 31 // The two scratch registers need to be different from elements, name and |
(...skipping 80 matching lines...)
124 const int kValueOffset = kElementsStartOffset + kPointerSize; | 112 const int kValueOffset = kElementsStartOffset + kPointerSize; |
125 __ addi(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); | 113 __ addi(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); |
126 __ StoreP(value, MemOperand(scratch2)); | 114 __ StoreP(value, MemOperand(scratch2)); |
127 | 115 |
128 // Update the write barrier. Make sure not to clobber the value. | 116 // Update the write barrier. Make sure not to clobber the value. |
129 __ mr(scratch1, value); | 117 __ mr(scratch1, value); |
130 __ RecordWrite(elements, scratch2, scratch1, kLRHasNotBeenSaved, | 118 __ RecordWrite(elements, scratch2, scratch1, kLRHasNotBeenSaved, |
131 kDontSaveFPRegs); | 119 kDontSaveFPRegs); |
132 } | 120 } |
133 | 121 |
134 | |
135 // Checks the receiver for special cases (value type, slow case bits). | |
136 // Falls through for regular JS objects. | |
137 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, | |
138 Register receiver, Register map, | |
139 Register scratch, | |
140 int interceptor_bit, Label* slow) { | |
141 // Check that the object isn't a smi. | |
142 __ JumpIfSmi(receiver, slow); | |
143 // Get the map of the receiver. | |
144 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
145 // Check bit field. | |
146 __ lbz(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); | |
147 DCHECK(((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)) < 0x8000); | |
148 __ andi(r0, scratch, | |
149 Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); | |
150 __ bne(slow, cr0); | |
151 // Check that the object is some kind of JS object EXCEPT JS Value type. | |
152 // In the case that the object is a value-wrapper object, | |
153 // we enter the runtime system to make sure that indexing into string | |
154 // objects works as intended. | |
155 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | |
156 __ lbz(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
157 __ cmpi(scratch, Operand(JS_OBJECT_TYPE)); | |
158 __ blt(slow); | |
159 } | |
160 | |
161 | |
162 // Loads an indexed element from a fast case array. | |
163 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | |
164 Register key, Register elements, | |
165 Register scratch1, Register scratch2, | |
166 Register result, Label* slow) { | |
167 // Register use: | |
168 // | |
169 // receiver - holds the receiver on entry. | |
170 // Unchanged unless 'result' is the same register. | |
171 // | |
172 // key - holds the smi key on entry. | |
173 // Unchanged unless 'result' is the same register. | |
174 // | |
175 // result - holds the result on exit if the load succeeded. | |
176 // Allowed to be the same as 'receiver' or 'key'. | |
177 // Unchanged on bailout so 'receiver' and 'key' can be safely | |
178 // used by further computation. | |
179 // | |
180 // Scratch registers: | |
181 // | |
182 // elements - holds the elements of the receiver and its prototypes. | |
183 // | |
184 // scratch1 - used to hold elements length, bit fields, base addresses. | |
185 // | |
186 // scratch2 - used to hold maps, prototypes, and the loaded value. | |
187 Label check_prototypes, check_next_prototype; | |
188 Label done, in_bounds, absent; | |
189 | |
190 __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
191 __ AssertFastElements(elements); | |
192 | |
193 // Check that the key (index) is within bounds. | |
194 __ LoadP(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
195 __ cmpl(key, scratch1); | |
196 __ blt(&in_bounds); | |
197 // Out-of-bounds. Check the prototype chain to see if we can just return | |
198 // 'undefined'. | |
199 __ cmpi(key, Operand::Zero()); | |
200 __ blt(slow); // Negative keys can't take the fast OOB path. | |
201 __ bind(&check_prototypes); | |
202 __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
203 __ bind(&check_next_prototype); | |
204 __ LoadP(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | |
205 // scratch2: current prototype | |
206 __ CompareRoot(scratch2, Heap::kNullValueRootIndex); | |
207 __ beq(&absent); | |
208 __ LoadP(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | |
209 __ LoadP(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | |
210 // elements: elements of current prototype | |
211 // scratch2: map of current prototype | |
212 __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE); | |
213 __ blt(slow); | |
214 __ lbz(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | |
215 __ andi(r0, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | |
216 (1 << Map::kHasIndexedInterceptor))); | |
217 __ bne(slow, cr0); | |
218 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex); | |
219 __ bne(slow); | |
220 __ jmp(&check_next_prototype); | |
221 | |
222 __ bind(&absent); | |
223 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
224 __ jmp(&done); | |
225 | |
226 __ bind(&in_bounds); | |
227 // Fast case: Do the load. | |
228 __ addi(scratch1, elements, | |
229 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
230 // The key is a smi. | |
231 __ SmiToPtrArrayOffset(scratch2, key); | |
232 __ LoadPX(scratch2, MemOperand(scratch2, scratch1)); | |
233 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex); | |
234 // In case the loaded value is the_hole we have to check the prototype chain. | |
235 __ beq(&check_prototypes); | |
236 __ mr(result, scratch2); | |
237 __ bind(&done); | |
238 } | |
239 | |
240 | |
241 // Checks whether a key is an array index string or a unique name. | |
242 // Falls through if a key is a unique name. | |
243 static void GenerateKeyNameCheck(MacroAssembler* masm, Register key, | |
244 Register map, Register hash, | |
245 Label* index_string, Label* not_unique) { | |
246 // The key is not a smi. | |
247 Label unique; | |
248 // Is it a name? | |
249 __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE); | |
250 __ bgt(not_unique); | |
251 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE); | |
252 __ beq(&unique); | |
253 | |
254 // Is the string an array index, with cached numeric value? | |
255 __ lwz(hash, FieldMemOperand(key, Name::kHashFieldOffset)); | |
256 __ mov(r8, Operand(Name::kContainsCachedArrayIndexMask)); | |
257 __ and_(r0, hash, r8, SetRC); | |
258 __ beq(index_string, cr0); | |
259 | |
260 // Is the string internalized? We know it's a string, so a single | |
261 // bit test is enough. | |
262 // map: key map | |
263 __ lbz(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
264 STATIC_ASSERT(kInternalizedTag == 0); | |
265 __ andi(r0, hash, Operand(kIsNotInternalizedMask)); | |
266 __ bne(not_unique, cr0); | |
267 | |
268 __ bind(&unique); | |
269 } | |
270 | |
271 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 122 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
272 Register dictionary = r3; | 123 Register dictionary = r3; |
273 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 124 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
274 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 125 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
275 | 126 |
276 Label slow; | 127 Label slow; |
277 | 128 |
278 __ LoadP(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 129 __ LoadP(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
279 JSObject::kPropertiesOffset)); | 130 JSObject::kPropertiesOffset)); |
280 GenerateDictionaryLoad(masm, &slow, dictionary, | 131 GenerateDictionaryLoad(masm, &slow, dictionary, |
(...skipping 61 matching lines...)
342 | 193 |
343 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 194 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
344 // The return address is in lr. | 195 // The return address is in lr. |
345 | 196 |
346 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 197 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
347 | 198 |
348 // Do tail-call to runtime routine. | 199 // Do tail-call to runtime routine. |
349 __ TailCallRuntime(Runtime::kKeyedGetProperty); | 200 __ TailCallRuntime(Runtime::kKeyedGetProperty); |
350 } | 201 } |
351 | 202 |
352 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | |
353 // The return address is in lr. | |
354 Label slow, check_name, index_smi, index_name, property_array_property; | |
355 Label probe_dictionary, check_number_dictionary; | |
356 | |
357 Register key = LoadDescriptor::NameRegister(); | |
358 Register receiver = LoadDescriptor::ReceiverRegister(); | |
359 DCHECK(key.is(r5)); | |
360 DCHECK(receiver.is(r4)); | |
361 | |
362 Isolate* isolate = masm->isolate(); | |
363 | |
364 // Check that the key is a smi. | |
365 __ JumpIfNotSmi(key, &check_name); | |
366 __ bind(&index_smi); | |
367 // Now the key is known to be a smi. This place is also jumped to from below | |
368 // where a numeric string is converted to a smi. | |
369 | |
370 GenerateKeyedLoadReceiverCheck(masm, receiver, r3, r6, | |
371 Map::kHasIndexedInterceptor, &slow); | |
372 | |
373 // Check the receiver's map to see if it has fast elements. | |
374 __ CheckFastElements(r3, r6, &check_number_dictionary); | |
375 | |
376 GenerateFastArrayLoad(masm, receiver, key, r3, r6, r7, r3, &slow); | |
377 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1, r7, | |
378 r6); | |
379 __ Ret(); | |
380 | |
381 __ bind(&check_number_dictionary); | |
382 __ LoadP(r7, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
383 __ LoadP(r6, FieldMemOperand(r7, JSObject::kMapOffset)); | |
384 | |
385 // Check whether the elements object is a number dictionary. | |
386 // r6: elements map | |
387 // r7: elements | |
388 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | |
389 __ cmp(r6, ip); | |
390 __ bne(&slow); | |
391 __ SmiUntag(r3, key); | |
392 __ LoadFromNumberDictionary(&slow, r7, key, r3, r3, r6, r8); | |
393 __ Ret(); | |
394 | |
395 // Slow case, key and receiver still in r3 and r4. | |
396 __ bind(&slow); | |
397 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_slow(), 1, r7, | |
398 r6); | |
399 GenerateRuntimeGetProperty(masm); | |
400 | |
401 __ bind(&check_name); | |
402 GenerateKeyNameCheck(masm, key, r3, r6, &index_name, &slow); | |
403 | |
404 GenerateKeyedLoadReceiverCheck(masm, receiver, r3, r6, | |
405 Map::kHasNamedInterceptor, &slow); | |
406 | |
407 // If the receiver is a fast-case object, check the stub cache. Otherwise | |
408 // probe the dictionary. | |
409 __ LoadP(r6, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | |
410 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); | |
411 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | |
412 __ cmp(r7, ip); | |
413 __ beq(&probe_dictionary); | |
414 | |
415 | |
416 // The handlers in the stub cache expect a vector and slot. Since we won't | |
417 // change the IC from any downstream misses, a dummy vector can be used. | |
418 Register vector = LoadWithVectorDescriptor::VectorRegister(); | |
419 Register slot = LoadWithVectorDescriptor::SlotRegister(); | |
420 DCHECK(!AreAliased(vector, slot, r7, r8, r9, r10)); | |
421 Handle<TypeFeedbackVector> dummy_vector = | |
422 TypeFeedbackVector::DummyVector(masm->isolate()); | |
423 int slot_index = dummy_vector->GetIndex( | |
424 FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); | |
425 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | |
426 __ LoadSmiLiteral(slot, Smi::FromInt(slot_index)); | |
427 | |
428 masm->isolate()->load_stub_cache()->GenerateProbe(masm, receiver, key, r7, r8, | |
429 r9, r10); | |
430 // Cache miss. | |
431 GenerateMiss(masm); | |
432 | |
433 // Do a quick inline probe of the receiver's dictionary, if it | |
434 // exists. | |
435 __ bind(&probe_dictionary); | |
436 // r6: elements | |
437 __ LoadP(r3, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
438 __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); | |
439 GenerateGlobalInstanceTypeCheck(masm, r3, &slow); | |
440 // Load the property to r3. | |
441 GenerateDictionaryLoad(masm, &slow, r6, key, r3, r8, r7); | |
442 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1, | |
443 r7, r6); | |
444 __ Ret(); | |
445 | |
446 __ bind(&index_name); | |
447 __ IndexFromHash(r6, key); | |
448 // Now jump to the place where smi keys are handled. | |
449 __ b(&index_smi); | |
450 } | |
451 | |
452 | |
453 static void StoreIC_PushArgs(MacroAssembler* masm) { | 203 static void StoreIC_PushArgs(MacroAssembler* masm) { |
454 __ Push(StoreWithVectorDescriptor::ValueRegister(), | 204 __ Push(StoreWithVectorDescriptor::ValueRegister(), |
455 StoreWithVectorDescriptor::SlotRegister(), | 205 StoreWithVectorDescriptor::SlotRegister(), |
456 StoreWithVectorDescriptor::VectorRegister(), | 206 StoreWithVectorDescriptor::VectorRegister(), |
457 StoreWithVectorDescriptor::ReceiverRegister(), | 207 StoreWithVectorDescriptor::ReceiverRegister(), |
458 StoreWithVectorDescriptor::NameRegister()); | 208 StoreWithVectorDescriptor::NameRegister()); |
459 } | 209 } |
460 | 210 |
461 | 211 |
462 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { | 212 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { |
(...skipping 408 matching lines...)
871 patcher.EmitCondition(ne); | 621 patcher.EmitCondition(ne); |
872 } else { | 622 } else { |
873 DCHECK(Assembler::GetCondition(branch_instr) == ne); | 623 DCHECK(Assembler::GetCondition(branch_instr) == ne); |
874 patcher.EmitCondition(eq); | 624 patcher.EmitCondition(eq); |
875 } | 625 } |
876 } | 626 } |
877 } // namespace internal | 627 } // namespace internal |
878 } // namespace v8 | 628 } // namespace v8 |
879 | 629 |
880 #endif // V8_TARGET_ARCH_PPC | 630 #endif // V8_TARGET_ARCH_PPC |