OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 #include "src/v8.h" | 6 #include "src/v8.h" |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS64 | 8 #if V8_TARGET_ARCH_MIPS64 |
9 | 9 |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 144 matching lines...)
155 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 155 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
156 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 156 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
157 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | 157 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); |
158 } | 158 } |
159 | 159 |
160 | 160 |
161 // Loads an indexed element from a fast case array. | 161 // Loads an indexed element from a fast case array. |
162 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 162 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
163 Register key, Register elements, | 163 Register key, Register elements, |
164 Register scratch1, Register scratch2, | 164 Register scratch1, Register scratch2, |
165 Register result, Label* slow, | 165 Register result, Label* slow) { |
166 LanguageMode language_mode) { | |
167 // Register use: | 166 // Register use: |
168 // | 167 // |
169 // receiver - holds the receiver on entry. | 168 // receiver - holds the receiver on entry. |
170 // Unchanged unless 'result' is the same register. | 169 // Unchanged unless 'result' is the same register. |
171 // | 170 // |
172 // key - holds the smi key on entry. | 171 // key - holds the smi key on entry. |
173 // Unchanged unless 'result' is the same register. | 172 // Unchanged unless 'result' is the same register. |
174 // | 173 // |
175 // result - holds the result on exit if the load succeeded. | 174 // result - holds the result on exit if the load succeeded. |
176 // Allowed to be the same as 'receiver' or 'key'. | 175 // Allowed to be the same as 'receiver' or 'key'. |
177 // Unchanged on bailout so 'receiver' and 'key' can be safely | 176 // Unchanged on bailout so 'receiver' and 'key' can be safely |
178 // used by further computation. | 177 // used by further computation. |
179 // | 178 // |
180 // Scratch registers: | 179 // Scratch registers: |
181 // | 180 // |
182 // elements - holds the elements of the receiver and its prototypes. | 181 // elements - holds the elements of the receiver and its prototypes. |
183 // | 182 // |
184 // scratch1 - used to hold elements length, bit fields, base addresses. | 183 // scratch1 - used to hold elements length, bit fields, base addresses. |
185 // | 184 // |
186 // scratch2 - used to hold maps, prototypes, and the loaded value. | 185 // scratch2 - used to hold maps, prototypes, and the loaded value. |
187 Label check_prototypes, check_next_prototype; | 186 Label check_prototypes, check_next_prototype; |
188 Label done, in_bounds, absent; | 187 Label done, in_bounds, return_undefined; |
189 | 188 |
190 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 189 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
191 __ AssertFastElements(elements); | 190 __ AssertFastElements(elements); |
192 | 191 |
193 // Check that the key (index) is within bounds. | 192 // Check that the key (index) is within bounds. |
194 __ ld(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 193 __ ld(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
195 __ Branch(&in_bounds, lo, key, Operand(scratch1)); | 194 __ Branch(&in_bounds, lo, key, Operand(scratch1)); |
196 // Out-of-bounds. Check the prototype chain to see if we can just return | 195 // Out-of-bounds. Check the prototype chain to see if we can just return |
197 // 'undefined'. | 196 // 'undefined'. |
198 // Negative keys can't take the fast OOB path. | 197 // Negative keys can't take the fast OOB path. |
199 __ Branch(slow, lt, key, Operand(zero_reg)); | 198 __ Branch(slow, lt, key, Operand(zero_reg)); |
200 __ bind(&check_prototypes); | 199 __ bind(&check_prototypes); |
201 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 200 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
202 __ bind(&check_next_prototype); | 201 __ bind(&check_next_prototype); |
203 __ ld(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | 202 __ ld(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); |
204 // scratch2: current prototype | 203 // scratch2: current prototype |
205 __ LoadRoot(at, Heap::kNullValueRootIndex); | 204 __ LoadRoot(at, Heap::kNullValueRootIndex); |
206 __ Branch(&absent, eq, scratch2, Operand(at)); | 205 __ Branch(&return_undefined, eq, scratch2, Operand(at)); |
207 __ ld(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | 206 __ ld(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); |
208 __ ld(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | 207 __ ld(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
209 // elements: elements of current prototype | 208 // elements: elements of current prototype |
210 // scratch2: map of current prototype | 209 // scratch2: map of current prototype |
211 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | 210 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); |
212 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); | 211 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); |
213 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | 212 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
214 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | 213 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | |
215 (1 << Map::kHasIndexedInterceptor))); | 214 (1 << Map::kHasIndexedInterceptor))); |
216 __ Branch(slow, ne, at, Operand(zero_reg)); | 215 __ Branch(slow, ne, at, Operand(zero_reg)); |
217 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | 216 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
218 __ Branch(slow, ne, elements, Operand(at)); | 217 __ Branch(slow, ne, elements, Operand(at)); |
219 __ Branch(&check_next_prototype); | 218 __ Branch(&check_next_prototype); |
220 | 219 |
221 __ bind(&absent); | 220 __ bind(&return_undefined); |
222 if (is_strong(language_mode)) { | 221 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
223 __ Branch(slow); | 222 __ Branch(&done); |
224 } else { | |
225 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
226 __ Branch(&done); | |
227 } | |
228 | 223 |
229 __ bind(&in_bounds); | 224 __ bind(&in_bounds); |
230 // Fast case: Do the load. | 225 // Fast case: Do the load. |
231 __ Daddu(scratch1, elements, | 226 __ Daddu(scratch1, elements, |
232 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 227 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
233 // The key is a smi. | 228 // The key is a smi. |
234 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 229 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
235 __ SmiScale(at, key, kPointerSizeLog2); | 230 __ SmiScale(at, key, kPointerSizeLog2); |
236 __ daddu(at, at, scratch1); | 231 __ daddu(at, at, scratch1); |
237 __ ld(scratch2, MemOperand(at)); | 232 __ ld(scratch2, MemOperand(at)); |
(...skipping 43 matching lines...)
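The out-of-bounds branch of GenerateFastArrayLoad shown above is easier to follow outside macro-assembler form. Below is a minimal, self-contained C++ sketch of the control flow the NEW side emits; the struct and names (ProtoObject, OutOfBoundsLoad, OobResult) are hypothetical, for illustration only, and are not V8 APIs. The one behavioral difference captured in the diff: where the OLD side branched to the slow path when is_strong(language_mode), the NEW side unconditionally returns undefined once the null prototype is reached.

// Illustration only: models the out-of-bounds smi-key path emitted by
// GenerateFastArrayLoad (NEW side). Not V8 code; all names are hypothetical.
struct ProtoObject {
  const ProtoObject* prototype = nullptr;  // nullptr stands in for the null prototype
  bool is_js_object = true;                // instance type >= JS_OBJECT_TYPE
  bool needs_access_check = false;         // Map::kIsAccessCheckNeeded bit
  bool has_indexed_interceptor = false;    // Map::kHasIndexedInterceptor bit
  bool has_empty_elements = true;          // elements == empty_fixed_array
};

enum class OobResult { kUndefined, kSlowPath };

OobResult OutOfBoundsLoad(const ProtoObject& receiver, long key) {
  // Negative keys can't take the fast OOB path.
  if (key < 0) return OobResult::kSlowPath;
  // Walk the prototype chain, bailing to the slow path on anything
  // that could observe or intercept the lookup.
  for (const ProtoObject* p = receiver.prototype; p != nullptr; p = p->prototype) {
    if (!p->is_js_object) return OobResult::kSlowPath;
    if (p->needs_access_check || p->has_indexed_interceptor) return OobResult::kSlowPath;
    if (!p->has_empty_elements) return OobResult::kSlowPath;
  }
  // Null prototype reached: the NEW side loads the undefined root and returns;
  // the OLD side instead went to the slow path under strong-mode semantics.
  return OobResult::kUndefined;
}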
281 Label slow; | 276 Label slow; |
282 | 277 |
283 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 278 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
284 JSObject::kPropertiesOffset)); | 279 JSObject::kPropertiesOffset)); |
285 GenerateDictionaryLoad(masm, &slow, dictionary, | 280 GenerateDictionaryLoad(masm, &slow, dictionary, |
286 LoadDescriptor::NameRegister(), v0, a3, a4); | 281 LoadDescriptor::NameRegister(), v0, a3, a4); |
287 __ Ret(); | 282 __ Ret(); |
288 | 283 |
289 // Dictionary load failed, go slow (but don't miss). | 284 // Dictionary load failed, go slow (but don't miss). |
290 __ bind(&slow); | 285 __ bind(&slow); |
291 GenerateSlow(masm); | 286 GenerateRuntimeGetProperty(masm); |
292 } | 287 } |
293 | 288 |
294 | 289 |
295 // A register that isn't one of the parameters to the load ic. | 290 // A register that isn't one of the parameters to the load ic. |
296 static const Register LoadIC_TempRegister() { return a3; } | 291 static const Register LoadIC_TempRegister() { return a3; } |
297 | 292 |
298 | 293 |
299 static void LoadIC_PushArgs(MacroAssembler* masm) { | 294 static void LoadIC_PushArgs(MacroAssembler* masm) { |
300 Register receiver = LoadDescriptor::ReceiverRegister(); | 295 Register receiver = LoadDescriptor::ReceiverRegister(); |
301 Register name = LoadDescriptor::NameRegister(); | 296 Register name = LoadDescriptor::NameRegister(); |
(...skipping 14 matching lines...)
316 | 311 |
317 LoadIC_PushArgs(masm); | 312 LoadIC_PushArgs(masm); |
318 | 313 |
319 // Perform tail call to the entry. | 314 // Perform tail call to the entry. |
320 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 315 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
321 int arg_count = 4; | 316 int arg_count = 4; |
322 __ TailCallExternalReference(ref, arg_count, 1); | 317 __ TailCallExternalReference(ref, arg_count, 1); |
323 } | 318 } |
324 | 319 |
325 | 320 |
326 void LoadIC::GenerateSlow(MacroAssembler* masm) { | 321 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
327 // The return address is in ra. | 322 // The return address is in ra. |
328 | 323 |
329 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 324 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); |
330 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 325 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); |
331 | 326 |
332 ExternalReference ref = | 327 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); |
333 ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate()); | |
334 int arg_count = 2; | |
335 __ TailCallExternalReference(ref, arg_count, 1); | |
336 } | 328 } |
337 | 329 |
338 | 330 |
339 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 331 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
340 // The return address is in ra. | 332 // The return address is in ra. |
341 Isolate* isolate = masm->isolate(); | 333 Isolate* isolate = masm->isolate(); |
342 | 334 |
343 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), | 335 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), |
344 LoadWithVectorDescriptor::VectorRegister())); | 336 LoadWithVectorDescriptor::VectorRegister())); |
345 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); | 337 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); |
346 | 338 |
347 LoadIC_PushArgs(masm); | 339 LoadIC_PushArgs(masm); |
348 | 340 |
349 // Perform tail call to the entry. | 341 // Perform tail call to the entry. |
350 ExternalReference ref = | 342 ExternalReference ref = |
351 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); | 343 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); |
352 | 344 |
353 int arg_count = 4; | 345 int arg_count = 4; |
354 __ TailCallExternalReference(ref, arg_count, 1); | 346 __ TailCallExternalReference(ref, arg_count, 1); |
355 } | 347 } |
356 | 348 |
357 | 349 |
358 void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) { | 350 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
359 // The return address is in ra. | 351 // The return address is in ra. |
360 | 352 |
361 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 353 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
362 | 354 |
363 ExternalReference ref = | 355 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
364 ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate()); | |
365 int arg_count = 2; | |
366 __ TailCallExternalReference(ref, arg_count, 1); | |
367 } | 356 } |
368 | 357 |
369 | 358 |
370 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, | 359 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
371 LanguageMode language_mode) { | |
372 // The return address is in ra. | 360 // The return address is in ra. |
373 Label slow, check_name, index_smi, index_name, property_array_property; | 361 Label slow, check_name, index_smi, index_name, property_array_property; |
374 Label probe_dictionary, check_number_dictionary; | 362 Label probe_dictionary, check_number_dictionary; |
375 | 363 |
376 Register key = LoadDescriptor::NameRegister(); | 364 Register key = LoadDescriptor::NameRegister(); |
377 Register receiver = LoadDescriptor::ReceiverRegister(); | 365 Register receiver = LoadDescriptor::ReceiverRegister(); |
378 DCHECK(key.is(a2)); | 366 DCHECK(key.is(a2)); |
379 DCHECK(receiver.is(a1)); | 367 DCHECK(receiver.is(a1)); |
380 | 368 |
381 Isolate* isolate = masm->isolate(); | 369 Isolate* isolate = masm->isolate(); |
382 | 370 |
383 // Check that the key is a smi. | 371 // Check that the key is a smi. |
384 __ JumpIfNotSmi(key, &check_name); | 372 __ JumpIfNotSmi(key, &check_name); |
385 __ bind(&index_smi); | 373 __ bind(&index_smi); |
386 // Now the key is known to be a smi. This place is also jumped to from below | 374 // Now the key is known to be a smi. This place is also jumped to from below |
387 // where a numeric string is converted to a smi. | 375 // where a numeric string is converted to a smi. |
388 | 376 |
389 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 377 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
390 Map::kHasIndexedInterceptor, &slow); | 378 Map::kHasIndexedInterceptor, &slow); |
391 | 379 |
392 // Check the receiver's map to see if it has fast elements. | 380 // Check the receiver's map to see if it has fast elements. |
393 __ CheckFastElements(a0, a3, &check_number_dictionary); | 381 __ CheckFastElements(a0, a3, &check_number_dictionary); |
394 | 382 |
395 GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, &slow, | 383 GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, &slow); |
396 language_mode); | |
397 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a4, a3); | 384 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a4, a3); |
398 __ Ret(); | 385 __ Ret(); |
399 | 386 |
400 __ bind(&check_number_dictionary); | 387 __ bind(&check_number_dictionary); |
401 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 388 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
402 __ ld(a3, FieldMemOperand(a4, JSObject::kMapOffset)); | 389 __ ld(a3, FieldMemOperand(a4, JSObject::kMapOffset)); |
403 | 390 |
404 // Check whether the elements object is a number dictionary. | 391 // Check whether the elements object is a number dictionary. |
405 // a3: elements map | 392 // a3: elements map |
406 // a4: elements | 393 // a4: elements |
407 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 394 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
408 __ Branch(&slow, ne, a3, Operand(at)); | 395 __ Branch(&slow, ne, a3, Operand(at)); |
409 __ dsra32(a0, key, 0); | 396 __ dsra32(a0, key, 0); |
410 __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5); | 397 __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5); |
411 __ Ret(); | 398 __ Ret(); |
412 | 399 |
413 // Slow case, key and receiver still in a2 and a1. | 400 // Slow case, key and receiver still in a2 and a1. |
414 __ bind(&slow); | 401 __ bind(&slow); |
415 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, a4, | 402 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, a4, |
416 a3); | 403 a3); |
417 GenerateSlow(masm); | 404 GenerateRuntimeGetProperty(masm); |
418 | 405 |
419 __ bind(&check_name); | 406 __ bind(&check_name); |
420 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | 407 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); |
421 | 408 |
422 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 409 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
423 Map::kHasNamedInterceptor, &slow); | 410 Map::kHasNamedInterceptor, &slow); |
424 | 411 |
425 | 412 |
426 // If the receiver is a fast-case object, check the stub cache. Otherwise | 413 // If the receiver is a fast-case object, check the stub cache. Otherwise |
427 // probe the dictionary. | 414 // probe the dictionary. |
(...skipping 452 matching lines...)
880 patcher.ChangeBranchCondition(ne); | 867 patcher.ChangeBranchCondition(ne); |
881 } else { | 868 } else { |
882 DCHECK(Assembler::IsBne(branch_instr)); | 869 DCHECK(Assembler::IsBne(branch_instr)); |
883 patcher.ChangeBranchCondition(eq); | 870 patcher.ChangeBranchCondition(eq); |
884 } | 871 } |
885 } | 872 } |
886 } // namespace internal | 873 } // namespace internal |
887 } // namespace v8 | 874 } // namespace v8 |
888 | 875 |
889 #endif // V8_TARGET_ARCH_MIPS64 | 876 #endif // V8_TARGET_ARCH_MIPS64 |