OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 56 matching lines...)
67 Label* miss) { | 67 Label* miss) { |
68 // Register usage: | 68 // Register usage: |
69 // receiver: holds the receiver on entry and is unchanged. | 69 // receiver: holds the receiver on entry and is unchanged. |
70 // r0: used to hold receiver instance type. | 70 // r0: used to hold receiver instance type. |
71 // Holds the property dictionary on fall through. | 71 // Holds the property dictionary on fall through. |
72 // r1: used to hold receiver's map. | 72 // r1: used to hold receiver's map. |
73 | 73 |
74 __ JumpIfSmi(receiver, miss); | 74 __ JumpIfSmi(receiver, miss); |
75 | 75 |
76 // Check that the receiver is a valid JS object. | 76 // Check that the receiver is a valid JS object. |
77 __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset)); | 77 __ movp(r1, FieldOperand(receiver, HeapObject::kMapOffset)); |
78 __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset)); | 78 __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset)); |
79 __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE)); | 79 __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE)); |
80 __ j(below, miss); | 80 __ j(below, miss); |
81 | 81 |
82 // If this assert fails, we have to check upper bound too. | 82 // If this assert fails, we have to check upper bound too. |
83 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); | 83 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); |
84 | 84 |
85 GenerateGlobalInstanceTypeCheck(masm, r0, miss); | 85 GenerateGlobalInstanceTypeCheck(masm, r0, miss); |
86 | 86 |
87 // Check for non-global object that requires access check. | 87 // Check for non-global object that requires access check. |
88 __ testb(FieldOperand(r1, Map::kBitFieldOffset), | 88 __ testb(FieldOperand(r1, Map::kBitFieldOffset), |
89 Immediate((1 << Map::kIsAccessCheckNeeded) | | 89 Immediate((1 << Map::kIsAccessCheckNeeded) | |
90 (1 << Map::kHasNamedInterceptor))); | 90 (1 << Map::kHasNamedInterceptor))); |
91 __ j(not_zero, miss); | 91 __ j(not_zero, miss); |
92 | 92 |
93 __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset)); | 93 __ movp(r0, FieldOperand(receiver, JSObject::kPropertiesOffset)); |
94 __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset), | 94 __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset), |
95 Heap::kHashTableMapRootIndex); | 95 Heap::kHashTableMapRootIndex); |
96 __ j(not_equal, miss); | 96 __ j(not_equal, miss); |
97 } | 97 } |
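Throughout this hunk the pointer-sized loads switch from movq to movp; movp is the macro assembler's pointer-width move (on stock x64 it should assemble to the same movq), so pointer accesses are written at pointer width while explicitly 64-bit operations such as cmpq and shl are left alone. One other detail of the check above: the access-check and named-interceptor flags are folded into a single testb, so one branch covers both reasons to bail out to the miss label. A minimal standalone sketch of that combined test, with hypothetical bit positions standing in for Map::kIsAccessCheckNeeded and Map::kHasNamedInterceptor:

    #include <cstdint>

    // Hypothetical bit positions; the real values come from V8's Map definition.
    constexpr int kIsAccessCheckNeeded = 1;
    constexpr int kHasNamedInterceptor = 2;

    // True when the receiver must be handled by the miss path: either flag set.
    bool NeedsMissPath(uint8_t map_bit_field) {
      const uint8_t mask =
          (1 << kIsAccessCheckNeeded) | (1 << kHasNamedInterceptor);
      return (map_bit_field & mask) != 0;  // one test, both flags
    }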
98 | 98 |
99 | 99 |
100 | 100 |
101 // Helper function used to load a property from a dictionary backing storage. | 101 // Helper function used to load a property from a dictionary backing storage. |
102 // This function may return false negatives, so miss_label | 102 // This function may return false negatives, so miss_label |
103 // must always call a backup property load that is complete. | 103 // must always call a backup property load that is complete. |
(...skipping 39 matching lines...)
143 NameDictionary::kHeaderSize + | 143 NameDictionary::kHeaderSize + |
144 NameDictionary::kElementsStartIndex * kPointerSize; | 144 NameDictionary::kElementsStartIndex * kPointerSize; |
145 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; | 145 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; |
146 __ Test(Operand(elements, r1, times_pointer_size, | 146 __ Test(Operand(elements, r1, times_pointer_size, |
147 kDetailsOffset - kHeapObjectTag), | 147 kDetailsOffset - kHeapObjectTag), |
148 Smi::FromInt(PropertyDetails::TypeField::kMask)); | 148 Smi::FromInt(PropertyDetails::TypeField::kMask)); |
149 __ j(not_zero, miss_label); | 149 __ j(not_zero, miss_label); |
150 | 150 |
151 // Get the value at the masked, scaled index. | 151 // Get the value at the masked, scaled index. |
152 const int kValueOffset = kElementsStartOffset + kPointerSize; | 152 const int kValueOffset = kElementsStartOffset + kPointerSize; |
153 __ movq(result, | 153 __ movp(result, |
154 Operand(elements, r1, times_pointer_size, | 154 Operand(elements, r1, times_pointer_size, |
155 kValueOffset - kHeapObjectTag)); | 155 kValueOffset - kHeapObjectTag)); |
156 } | 156 } |
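The offset constants above encode the NameDictionary entry layout: entries start kElementsStartIndex slots into the backing FixedArray, and each entry is a (key, value, details) triple, which is why the value sits one pointer past the entry start and the details word two pointers past it. A compilable sketch of that arithmetic, with placeholder values where the real constants come from NameDictionary:

    #include <cstddef>

    constexpr size_t kPointerSize = 8;               // x64
    constexpr size_t kHeaderSize = 2 * kPointerSize; // placeholder for NameDictionary::kHeaderSize
    constexpr size_t kElementsStartIndex = 3;        // placeholder for NameDictionary::kElementsStartIndex
    constexpr size_t kEntrySize = 3;                 // key, value, details

    // Byte offsets of the fields of entry i within the dictionary's FixedArray.
    size_t KeyOffset(size_t i)     { return kHeaderSize + (kElementsStartIndex + i * kEntrySize) * kPointerSize; }
    size_t ValueOffset(size_t i)   { return KeyOffset(i) + 1 * kPointerSize; }
    size_t DetailsOffset(size_t i) { return KeyOffset(i) + 2 * kPointerSize; }

For entry 0 these reproduce kElementsStartOffset, kValueOffset and kDetailsOffset as computed in the generated code (minus the kHeapObjectTag adjustment, which only untags the base pointer).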
157 | 157 |
158 | 158 |
159 // Helper function used to store a property to a dictionary backing | 159 // Helper function used to store a property to a dictionary backing |
160 // storage. This function may fail to store a property even though it | 160 // storage. This function may fail to store a property even though it |
161 // is in the dictionary, so code at miss_label must always call a | 161 // is in the dictionary, so code at miss_label must always call a |
162 // backup property store that is complete. This function is safe to | 162 // backup property store that is complete. This function is safe to |
163 // call if name is not an internalized string, and will jump to the miss_label | 163 // call if name is not an internalized string, and will jump to the miss_label |
(...skipping 45 matching lines...)
209 kDetailsOffset - kHeapObjectTag), | 209 kDetailsOffset - kHeapObjectTag), |
210 Smi::FromInt(kTypeAndReadOnlyMask)); | 210 Smi::FromInt(kTypeAndReadOnlyMask)); |
211 __ j(not_zero, miss_label); | 211 __ j(not_zero, miss_label); |
212 | 212 |
213 // Store the value at the masked, scaled index. | 213 // Store the value at the masked, scaled index. |
214 const int kValueOffset = kElementsStartOffset + kPointerSize; | 214 const int kValueOffset = kElementsStartOffset + kPointerSize; |
215 __ lea(scratch1, Operand(elements, | 215 __ lea(scratch1, Operand(elements, |
216 scratch1, | 216 scratch1, |
217 times_pointer_size, | 217 times_pointer_size, |
218 kValueOffset - kHeapObjectTag)); | 218 kValueOffset - kHeapObjectTag)); |
219 __ movq(Operand(scratch1, 0), value); | 219 __ movp(Operand(scratch1, 0), value); |
220 | 220 |
221 // Update write barrier. Make sure not to clobber the value. | 221 // Update write barrier. Make sure not to clobber the value. |
222 __ movq(scratch0, value); | 222 __ movp(scratch0, value); |
223 __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs); | 223 __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs); |
224 } | 224 } |
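Compared with the load helper, the store helper adds two things visible above: the details word is tested against a combined type-and-read-only mask so non-normal and read-only properties fall through to miss_label, and the write barrier runs on a copy of the value so the value register itself survives. A rough sketch of the details test under an assumed bit layout (the real masks come from PropertyDetails):

    #include <cstdint>

    // Assumed layout: low bits hold the property type, one bit marks read-only.
    constexpr uint32_t kTypeMask = 0x7;
    constexpr uint32_t kReadOnlyBit = 1u << 3;

    // A fast dictionary store is only allowed for writable, normal (type 0) properties.
    bool FastStoreAllowed(uint32_t details) {
      return (details & (kTypeMask | kReadOnlyBit)) == 0;
    }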
225 | 225 |
226 | 226 |
227 // Checks the receiver for special cases (value type, slow case bits). | 227 // Checks the receiver for special cases (value type, slow case bits). |
228 // Falls through for regular JS object. | 228 // Falls through for regular JS object. |
229 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, | 229 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, |
230 Register receiver, | 230 Register receiver, |
231 Register map, | 231 Register map, |
232 int interceptor_bit, | 232 int interceptor_bit, |
(...skipping 44 matching lines...)
277 // | 277 // |
278 // result - holds the result on exit if the load succeeded. | 278 // result - holds the result on exit if the load succeeded. |
279 // Allowed to be the same as 'receiver' or 'key'. | 279 // Allowed to be the same as 'receiver' or 'key'. |
280 // Unchanged on bailout so 'receiver' and 'key' can be safely | 280 // Unchanged on bailout so 'receiver' and 'key' can be safely |
281 // used by further computation. | 281 // used by further computation. |
282 // | 282 // |
283 // Scratch registers: | 283 // Scratch registers: |
284 // | 284 // |
285 // scratch - used to hold elements of the receiver and the loaded value. | 285 // scratch - used to hold elements of the receiver and the loaded value. |
286 | 286 |
287 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset)); | 287 __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
288 if (not_fast_array != NULL) { | 288 if (not_fast_array != NULL) { |
289 // Check that the object is in fast mode and writable. | 289 // Check that the object is in fast mode and writable. |
290 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | 290 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), |
291 Heap::kFixedArrayMapRootIndex); | 291 Heap::kFixedArrayMapRootIndex); |
292 __ j(not_equal, not_fast_array); | 292 __ j(not_equal, not_fast_array); |
293 } else { | 293 } else { |
294 __ AssertFastElements(elements); | 294 __ AssertFastElements(elements); |
295 } | 295 } |
296 // Check that the key (index) is within bounds. | 296 // Check that the key (index) is within bounds. |
297 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset)); | 297 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset)); |
298 // Unsigned comparison rejects negative indices. | 298 // Unsigned comparison rejects negative indices. |
299 __ j(above_equal, out_of_range); | 299 __ j(above_equal, out_of_range); |
300 // Fast case: Do the load. | 300 // Fast case: Do the load. |
301 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2); | 301 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2); |
302 __ movq(scratch, FieldOperand(elements, | 302 __ movp(scratch, FieldOperand(elements, |
303 index.reg, | 303 index.reg, |
304 index.scale, | 304 index.scale, |
305 FixedArray::kHeaderSize)); | 305 FixedArray::kHeaderSize)); |
306 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex); | 306 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex); |
307 // In case the loaded value is the_hole we have to consult GetProperty | 307 // In case the loaded value is the_hole we have to consult GetProperty |
308 // to ensure the prototype chain is searched. | 308 // to ensure the prototype chain is searched. |
309 __ j(equal, out_of_range); | 309 __ j(equal, out_of_range); |
310 if (!result.is(scratch)) { | 310 if (!result.is(scratch)) { |
311 __ movq(result, scratch); | 311 __ movp(result, scratch); |
312 } | 312 } |
313 } | 313 } |
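Two details of the fast path above deserve a note: the bounds check compares smi-tagged values and uses an unsigned condition (above_equal), so negative indices are rejected without a separate test, and SmiToIndex turns the tagged key into a scaled index usable directly in the addressing mode. The unsigned-comparison trick in plain C++:

    #include <cstdint>

    // Casting to unsigned makes any negative index compare as a huge value,
    // so a single comparison rejects both negative and out-of-range keys.
    bool InBounds(int32_t index, int32_t length) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
    }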
314 | 314 |
315 | 315 |
316 // Checks whether a key is an array index string or a unique name. | 316 // Checks whether a key is an array index string or a unique name. |
317 // Falls through if the key is a unique name. | 317 // Falls through if the key is a unique name. |
318 static void GenerateKeyNameCheck(MacroAssembler* masm, | 318 static void GenerateKeyNameCheck(MacroAssembler* masm, |
319 Register key, | 319 Register key, |
320 Register map, | 320 Register map, |
321 Register hash, | 321 Register hash, |
(...skipping 55 matching lines...)
377 rbx, | 377 rbx, |
378 rax, | 378 rax, |
379 NULL, | 379 NULL, |
380 &slow); | 380 &slow); |
381 Counters* counters = masm->isolate()->counters(); | 381 Counters* counters = masm->isolate()->counters(); |
382 __ IncrementCounter(counters->keyed_load_generic_smi(), 1); | 382 __ IncrementCounter(counters->keyed_load_generic_smi(), 1); |
383 __ ret(0); | 383 __ ret(0); |
384 | 384 |
385 __ bind(&check_number_dictionary); | 385 __ bind(&check_number_dictionary); |
386 __ SmiToInteger32(rbx, rax); | 386 __ SmiToInteger32(rbx, rax); |
387 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); | 387 __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); |
388 | 388 |
389 // Check whether the elements object is a number dictionary. | 389 // Check whether the elements object is a number dictionary. |
390 // rdx: receiver | 390 // rdx: receiver |
391 // rax: key | 391 // rax: key |
392 // rbx: key as untagged int32 | 392 // rbx: key as untagged int32 |
393 // rcx: elements | 393 // rcx: elements |
394 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 394 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
395 Heap::kHashTableMapRootIndex); | 395 Heap::kHashTableMapRootIndex); |
396 __ j(not_equal, &slow); | 396 __ j(not_equal, &slow); |
397 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax); | 397 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax); |
398 __ ret(0); | 398 __ ret(0); |
399 | 399 |
400 __ bind(&slow); | 400 __ bind(&slow); |
401 // Slow case: Jump to runtime. | 401 // Slow case: Jump to runtime. |
402 // rdx: receiver | 402 // rdx: receiver |
403 // rax: key | 403 // rax: key |
404 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); | 404 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); |
405 GenerateRuntimeGetProperty(masm); | 405 GenerateRuntimeGetProperty(masm); |
406 | 406 |
407 __ bind(&check_name); | 407 __ bind(&check_name); |
408 GenerateKeyNameCheck(masm, rax, rcx, rbx, &index_name, &slow); | 408 GenerateKeyNameCheck(masm, rax, rcx, rbx, &index_name, &slow); |
409 | 409 |
410 GenerateKeyedLoadReceiverCheck( | 410 GenerateKeyedLoadReceiverCheck( |
411 masm, rdx, rcx, Map::kHasNamedInterceptor, &slow); | 411 masm, rdx, rcx, Map::kHasNamedInterceptor, &slow); |
412 | 412 |
413 // If the receiver is a fast-case object, check the keyed lookup | 413 // If the receiver is a fast-case object, check the keyed lookup |
414 // cache. Otherwise probe the dictionary leaving result in rcx. | 414 // cache. Otherwise probe the dictionary leaving result in rcx. |
415 __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset)); | 415 __ movp(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset)); |
416 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | 416 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), |
417 Heap::kHashTableMapRootIndex); | 417 Heap::kHashTableMapRootIndex); |
418 __ j(equal, &probe_dictionary); | 418 __ j(equal, &probe_dictionary); |
419 | 419 |
420 // Load the map of the receiver, compute the keyed lookup cache hash | 420 // Load the map of the receiver, compute the keyed lookup cache hash |
421 // based on 32 bits of the map pointer and the string hash. | 421 // based on 32 bits of the map pointer and the string hash. |
422 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | 422 __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); |
423 __ movl(rcx, rbx); | 423 __ movl(rcx, rbx); |
424 __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift)); | 424 __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift)); |
425 __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset)); | 425 __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset)); |
426 __ shr(rdi, Immediate(String::kHashShift)); | 426 __ shr(rdi, Immediate(String::kHashShift)); |
427 __ xor_(rcx, rdi); | 427 __ xor_(rcx, rdi); |
428 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); | 428 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); |
429 __ and_(rcx, Immediate(mask)); | 429 __ and_(rcx, Immediate(mask)); |
430 | 430 |
431 // Load the key (consisting of map and internalized string) from the cache and | 431 // Load the key (consisting of map and internalized string) from the cache and |
432 // check for match. | 432 // check for match. |
433 Label load_in_object_property; | 433 Label load_in_object_property; |
434 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | 434 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; |
435 Label hit_on_nth_entry[kEntriesPerBucket]; | 435 Label hit_on_nth_entry[kEntriesPerBucket]; |
436 ExternalReference cache_keys | 436 ExternalReference cache_keys |
437 = ExternalReference::keyed_lookup_cache_keys(masm->isolate()); | 437 = ExternalReference::keyed_lookup_cache_keys(masm->isolate()); |
438 | 438 |
439 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | 439 for (int i = 0; i < kEntriesPerBucket - 1; i++) { |
440 Label try_next_entry; | 440 Label try_next_entry; |
441 __ movq(rdi, rcx); | 441 __ movp(rdi, rcx); |
442 __ shl(rdi, Immediate(kPointerSizeLog2 + 1)); | 442 __ shl(rdi, Immediate(kPointerSizeLog2 + 1)); |
443 __ LoadAddress(kScratchRegister, cache_keys); | 443 __ LoadAddress(kScratchRegister, cache_keys); |
444 int off = kPointerSize * i * 2; | 444 int off = kPointerSize * i * 2; |
445 __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off)); | 445 __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off)); |
446 __ j(not_equal, &try_next_entry); | 446 __ j(not_equal, &try_next_entry); |
447 __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize)); | 447 __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize)); |
448 __ j(equal, &hit_on_nth_entry[i]); | 448 __ j(equal, &hit_on_nth_entry[i]); |
449 __ bind(&try_next_entry); | 449 __ bind(&try_next_entry); |
450 } | 450 } |
451 | 451 |
(...skipping 20 matching lines...)
472 __ j(above_equal, &property_array_property); | 472 __ j(above_equal, &property_array_property); |
473 if (i != 0) { | 473 if (i != 0) { |
474 __ jmp(&load_in_object_property); | 474 __ jmp(&load_in_object_property); |
475 } | 475 } |
476 } | 476 } |
477 | 477 |
478 // Load in-object property. | 478 // Load in-object property. |
479 __ bind(&load_in_object_property); | 479 __ bind(&load_in_object_property); |
480 __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset)); | 480 __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset)); |
481 __ addq(rcx, rdi); | 481 __ addq(rcx, rdi); |
482 __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0)); | 482 __ movp(rax, FieldOperand(rdx, rcx, times_pointer_size, 0)); |
483 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | 483 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); |
484 __ ret(0); | 484 __ ret(0); |
485 | 485 |
486 // Load property array property. | 486 // Load property array property. |
487 __ bind(&property_array_property); | 487 __ bind(&property_array_property); |
488 __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset)); | 488 __ movp(rax, FieldOperand(rdx, JSObject::kPropertiesOffset)); |
489 __ movq(rax, FieldOperand(rax, rdi, times_pointer_size, | 489 __ movp(rax, FieldOperand(rax, rdi, times_pointer_size, |
490 FixedArray::kHeaderSize)); | 490 FixedArray::kHeaderSize)); |
491 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | 491 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); |
492 __ ret(0); | 492 __ ret(0); |
493 | 493 |
494 // Do a quick inline probe of the receiver's dictionary, if it | 494 // Do a quick inline probe of the receiver's dictionary, if it |
495 // exists. | 495 // exists. |
496 __ bind(&probe_dictionary); | 496 __ bind(&probe_dictionary); |
497 // rdx: receiver | 497 // rdx: receiver |
498 // rax: key | 498 // rax: key |
499 // rbx: elements | 499 // rbx: elements |
500 | 500 |
501 __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset)); | 501 __ movp(rcx, FieldOperand(rdx, JSObject::kMapOffset)); |
502 __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); | 502 __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); |
503 GenerateGlobalInstanceTypeCheck(masm, rcx, &slow); | 503 GenerateGlobalInstanceTypeCheck(masm, rcx, &slow); |
504 | 504 |
505 GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax); | 505 GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax); |
506 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1); | 506 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1); |
507 __ ret(0); | 507 __ ret(0); |
508 | 508 |
509 __ bind(&index_name); | 509 __ bind(&index_name); |
510 __ IndexFromHash(rbx, rax); | 510 __ IndexFromHash(rbx, rax); |
511 __ jmp(&index_smi); | 511 __ jmp(&index_smi); |
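The lookup-cache probe above hashes the low 32 bits of the receiver's map pointer together with the key's precomputed string hash, masks the result down to a bucket index, and compares up to kEntriesPerBucket (map, name) pairs before giving up and probing the property dictionary. A sketch of the hash step with stand-in constants (the real ones are KeyedLookupCache::kMapHashShift, String::kHashShift and the capacity/hash masks):

    #include <cstdint>

    // Stand-in constants; see KeyedLookupCache and String for the real values.
    constexpr uint32_t kMapHashShift = 2;
    constexpr uint32_t kStringHashShift = 2;
    constexpr uint32_t kBucketMask = 0x7f;  // kCapacityMask & kHashMask in the real code

    // Mirrors the shr / shr / xor / and sequence that computes the bucket index.
    uint32_t CacheIndex(uint32_t map_low32, uint32_t name_hash_field) {
      uint32_t h = (map_low32 >> kMapHashShift) ^ (name_hash_field >> kStringHashShift);
      return h & kBucketMask;
    }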
(...skipping 41 matching lines...)
553 Label slow; | 553 Label slow; |
554 | 554 |
555 // Check that the receiver isn't a smi. | 555 // Check that the receiver isn't a smi. |
556 __ JumpIfSmi(rdx, &slow); | 556 __ JumpIfSmi(rdx, &slow); |
557 | 557 |
558 // Check that the key is an array index, that is Uint32. | 558 // Check that the key is an array index, that is Uint32. |
559 STATIC_ASSERT(kSmiValueSize <= 32); | 559 STATIC_ASSERT(kSmiValueSize <= 32); |
560 __ JumpUnlessNonNegativeSmi(rax, &slow); | 560 __ JumpUnlessNonNegativeSmi(rax, &slow); |
561 | 561 |
562 // Get the map of the receiver. | 562 // Get the map of the receiver. |
563 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); | 563 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); |
564 | 564 |
565 // Check that it has an indexed interceptor and that access checks | 565 // Check that it has an indexed interceptor and that access checks |
566 // are not enabled for this object. | 566 // are not enabled for this object. |
567 __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset)); | 567 __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset)); |
568 __ andb(rcx, Immediate(kSlowCaseBitFieldMask)); | 568 __ andb(rcx, Immediate(kSlowCaseBitFieldMask)); |
569 __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor)); | 569 __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor)); |
570 __ j(not_zero, &slow); | 570 __ j(not_zero, &slow); |
571 | 571 |
572 // Everything is fine, call runtime. | 572 // Everything is fine, call runtime. |
573 __ PopReturnAddressTo(rcx); | 573 __ PopReturnAddressTo(rcx); |
(...skipping 24 matching lines...)
598 Label finish_object_store, non_double_value, transition_double_elements; | 598 Label finish_object_store, non_double_value, transition_double_elements; |
599 Label fast_double_without_map_check; | 599 Label fast_double_without_map_check; |
600 // Fast case: Do the store, could be either Object or double. | 600 // Fast case: Do the store, could be either Object or double. |
601 __ bind(fast_object); | 601 __ bind(fast_object); |
602 // rax: value | 602 // rax: value |
603 // rbx: receiver's elements array (a FixedArray) | 603 // rbx: receiver's elements array (a FixedArray) |
604 // rcx: index | 604 // rcx: index |
605 // rdx: receiver (a JSArray) | 605 // rdx: receiver (a JSArray) |
606 // r9: map of receiver | 606 // r9: map of receiver |
607 if (check_map == kCheckMap) { | 607 if (check_map == kCheckMap) { |
608 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); | 608 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); |
609 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex); | 609 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex); |
610 __ j(not_equal, fast_double); | 610 __ j(not_equal, fast_double); |
611 } | 611 } |
612 | 612 |
613 // HOLECHECK: guards "A[i] = V" | 613 // HOLECHECK: guards "A[i] = V" |
614 // We have to go to the runtime if the current value is the hole because | 614 // We have to go to the runtime if the current value is the hole because |
615 // there may be a callback on the element. | 615 // there may be a callback on the element. |
616 Label holecheck_passed1; | 616 Label holecheck_passed1; |
617 __ movq(kScratchRegister, FieldOperand(rbx, | 617 __ movp(kScratchRegister, FieldOperand(rbx, |
618 rcx, | 618 rcx, |
619 times_pointer_size, | 619 times_pointer_size, |
620 FixedArray::kHeaderSize)); | 620 FixedArray::kHeaderSize)); |
621 __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 621 __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
622 __ j(not_equal, &holecheck_passed1); | 622 __ j(not_equal, &holecheck_passed1); |
623 __ JumpIfDictionaryInPrototypeChain(rdx, rdi, kScratchRegister, slow); | 623 __ JumpIfDictionaryInPrototypeChain(rdx, rdi, kScratchRegister, slow); |
624 | 624 |
625 __ bind(&holecheck_passed1); | 625 __ bind(&holecheck_passed1); |
626 | 626 |
627 // Smi stores don't require further checks. | 627 // Smi stores don't require further checks. |
628 Label non_smi_value; | 628 Label non_smi_value; |
629 __ JumpIfNotSmi(rax, &non_smi_value); | 629 __ JumpIfNotSmi(rax, &non_smi_value); |
630 if (increment_length == kIncrementLength) { | 630 if (increment_length == kIncrementLength) { |
631 // Add 1 to receiver->length. | 631 // Add 1 to receiver->length. |
632 __ leal(rdi, Operand(rcx, 1)); | 632 __ leal(rdi, Operand(rcx, 1)); |
633 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); | 633 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); |
634 } | 634 } |
635 // It's irrelevant whether array is smi-only or not when writing a smi. | 635 // It's irrelevant whether array is smi-only or not when writing a smi. |
636 __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize), | 636 __ movp(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize), |
637 rax); | 637 rax); |
638 __ ret(0); | 638 __ ret(0); |
639 | 639 |
640 __ bind(&non_smi_value); | 640 __ bind(&non_smi_value); |
641 // Writing a non-smi, check whether array allows non-smi elements. | 641 // Writing a non-smi, check whether array allows non-smi elements. |
642 // r9: receiver's map | 642 // r9: receiver's map |
643 __ CheckFastObjectElements(r9, &transition_smi_elements); | 643 __ CheckFastObjectElements(r9, &transition_smi_elements); |
644 | 644 |
645 __ bind(&finish_object_store); | 645 __ bind(&finish_object_store); |
646 if (increment_length == kIncrementLength) { | 646 if (increment_length == kIncrementLength) { |
647 // Add 1 to receiver->length. | 647 // Add 1 to receiver->length. |
648 __ leal(rdi, Operand(rcx, 1)); | 648 __ leal(rdi, Operand(rcx, 1)); |
649 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); | 649 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); |
650 } | 650 } |
651 __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize), | 651 __ movp(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize), |
652 rax); | 652 rax); |
653 __ movq(rdx, rax); // Preserve the value which is returned. | 653 __ movp(rdx, rax); // Preserve the value which is returned. |
654 __ RecordWriteArray( | 654 __ RecordWriteArray( |
655 rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 655 rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
656 __ ret(0); | 656 __ ret(0); |
657 | 657 |
658 __ bind(fast_double); | 658 __ bind(fast_double); |
659 if (check_map == kCheckMap) { | 659 if (check_map == kCheckMap) { |
660 // Check for fast double array case. If this fails, call through to the | 660 // Check for fast double array case. If this fails, call through to the |
661 // runtime. | 661 // runtime. |
662 // rdi: elements array's map | 662 // rdi: elements array's map |
663 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | 663 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); |
(...skipping 12 matching lines...)
676 __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0, | 676 __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0, |
677 &transition_double_elements); | 677 &transition_double_elements); |
678 if (increment_length == kIncrementLength) { | 678 if (increment_length == kIncrementLength) { |
679 // Add 1 to receiver->length. | 679 // Add 1 to receiver->length. |
680 __ leal(rdi, Operand(rcx, 1)); | 680 __ leal(rdi, Operand(rcx, 1)); |
681 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); | 681 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi); |
682 } | 682 } |
683 __ ret(0); | 683 __ ret(0); |
684 | 684 |
685 __ bind(&transition_smi_elements); | 685 __ bind(&transition_smi_elements); |
686 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | 686 __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); |
687 | 687 |
688 // Transition the array appropriately depending on the value type. | 688 // Transition the array appropriately depending on the value type. |
689 __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset)); | 689 __ movp(r9, FieldOperand(rax, HeapObject::kMapOffset)); |
690 __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex); | 690 __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex); |
691 __ j(not_equal, &non_double_value); | 691 __ j(not_equal, &non_double_value); |
692 | 692 |
693 // Value is a double. Transition FAST_SMI_ELEMENTS -> | 693 // Value is a double. Transition FAST_SMI_ELEMENTS -> |
694 // FAST_DOUBLE_ELEMENTS and complete the store. | 694 // FAST_DOUBLE_ELEMENTS and complete the store. |
695 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 695 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
696 FAST_DOUBLE_ELEMENTS, | 696 FAST_DOUBLE_ELEMENTS, |
697 rbx, | 697 rbx, |
698 rdi, | 698 rdi, |
699 slow); | 699 slow); |
700 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, | 700 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, |
701 FAST_DOUBLE_ELEMENTS); | 701 FAST_DOUBLE_ELEMENTS); |
702 ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow); | 702 ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow); |
703 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | 703 __ movp(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); |
704 __ jmp(&fast_double_without_map_check); | 704 __ jmp(&fast_double_without_map_check); |
705 | 705 |
706 __ bind(&non_double_value); | 706 __ bind(&non_double_value); |
707 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS | 707 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS |
708 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 708 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
709 FAST_ELEMENTS, | 709 FAST_ELEMENTS, |
710 rbx, | 710 rbx, |
711 rdi, | 711 rdi, |
712 slow); | 712 slow); |
713 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); | 713 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); |
714 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode, | 714 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode, |
715 slow); | 715 slow); |
716 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | 716 __ movp(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); |
717 __ jmp(&finish_object_store); | 717 __ jmp(&finish_object_store); |
718 | 718 |
719 __ bind(&transition_double_elements); | 719 __ bind(&transition_double_elements); |
720 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a | 720 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a |
721 // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and | 721 // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and |
722 // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS. | 722 // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS. |
723 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | 723 __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); |
724 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, | 724 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, |
725 FAST_ELEMENTS, | 725 FAST_ELEMENTS, |
726 rbx, | 726 rbx, |
727 rdi, | 727 rdi, |
728 slow); | 728 slow); |
729 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); | 729 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); |
730 ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow); | 730 ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow); |
731 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | 731 __ movp(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); |
732 __ jmp(&finish_object_store); | 732 __ jmp(&finish_object_store); |
733 } | 733 } |
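The transition block above chooses the target elements kind from the type of the incoming value: a HeapNumber stored into FAST_SMI_ELEMENTS takes the array to FAST_DOUBLE_ELEMENTS, any other non-smi heap object takes it to FAST_ELEMENTS, and a non-number stored into FAST_DOUBLE_ELEMENTS also forces FAST_ELEMENTS. As a compact decision table (enum names here are illustrative, not V8's):

    enum class Kind { FastSmi, FastDouble, FastObject };
    enum class Value { Smi, HeapNumber, OtherHeapObject };

    // Elements kind required after storing value 'v' into an array of kind 'k'.
    Kind KindAfterStore(Kind k, Value v) {
      if (k == Kind::FastSmi) {
        if (v == Value::Smi) return Kind::FastSmi;            // no transition
        if (v == Value::HeapNumber) return Kind::FastDouble;  // smi-only -> double
        return Kind::FastObject;                              // smi-only -> object
      }
      if (k == Kind::FastDouble && v == Value::OtherHeapObject) {
        return Kind::FastObject;                              // double -> object
      }
      return k;  // already general enough for this value
    }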
734 | 734 |
735 | 735 |
736 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, | 736 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, |
737 StrictModeFlag strict_mode) { | 737 StrictModeFlag strict_mode) { |
738 // ----------- S t a t e ------------- | 738 // ----------- S t a t e ------------- |
739 // -- rax : value | 739 // -- rax : value |
740 // -- rcx : key | 740 // -- rcx : key |
741 // -- rdx : receiver | 741 // -- rdx : receiver |
742 // -- rsp[0] : return address | 742 // -- rsp[0] : return address |
743 // ----------------------------------- | 743 // ----------------------------------- |
744 Label slow, slow_with_tagged_index, fast_object, fast_object_grow; | 744 Label slow, slow_with_tagged_index, fast_object, fast_object_grow; |
745 Label fast_double, fast_double_grow; | 745 Label fast_double, fast_double_grow; |
746 Label array, extra, check_if_double_array; | 746 Label array, extra, check_if_double_array; |
747 | 747 |
748 // Check that the object isn't a smi. | 748 // Check that the object isn't a smi. |
749 __ JumpIfSmi(rdx, &slow_with_tagged_index); | 749 __ JumpIfSmi(rdx, &slow_with_tagged_index); |
750 // Get the map from the receiver. | 750 // Get the map from the receiver. |
751 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); | 751 __ movp(r9, FieldOperand(rdx, HeapObject::kMapOffset)); |
752 // Check that the receiver does not require access checks and is not observed. | 752 // Check that the receiver does not require access checks and is not observed. |
753 // The generic stub does not perform map checks or handle observed objects. | 753 // The generic stub does not perform map checks or handle observed objects. |
754 __ testb(FieldOperand(r9, Map::kBitFieldOffset), | 754 __ testb(FieldOperand(r9, Map::kBitFieldOffset), |
755 Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved)); | 755 Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved)); |
756 __ j(not_zero, &slow_with_tagged_index); | 756 __ j(not_zero, &slow_with_tagged_index); |
757 // Check that the key is a smi. | 757 // Check that the key is a smi. |
758 __ JumpIfNotSmi(rcx, &slow_with_tagged_index); | 758 __ JumpIfNotSmi(rcx, &slow_with_tagged_index); |
759 __ SmiToInteger32(rcx, rcx); | 759 __ SmiToInteger32(rcx, rcx); |
760 | 760 |
761 __ CmpInstanceType(r9, JS_ARRAY_TYPE); | 761 __ CmpInstanceType(r9, JS_ARRAY_TYPE); |
762 __ j(equal, &array); | 762 __ j(equal, &array); |
763 // Check that the object is some kind of JSObject. | 763 // Check that the object is some kind of JSObject. |
764 __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE); | 764 __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE); |
765 __ j(below, &slow); | 765 __ j(below, &slow); |
766 | 766 |
767 // Object case: Check key against length in the elements array. | 767 // Object case: Check key against length in the elements array. |
768 // rax: value | 768 // rax: value |
769 // rdx: JSObject | 769 // rdx: JSObject |
770 // rcx: index | 770 // rcx: index |
771 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | 771 __ movp(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); |
772 // Check array bounds. | 772 // Check array bounds. |
773 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx); | 773 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx); |
774 // rax: value | 774 // rax: value |
775 // rbx: FixedArray | 775 // rbx: FixedArray |
776 // rcx: index | 776 // rcx: index |
777 __ j(above, &fast_object); | 777 __ j(above, &fast_object); |
778 | 778 |
779 // Slow case: call runtime. | 779 // Slow case: call runtime. |
780 __ bind(&slow); | 780 __ bind(&slow); |
781 __ Integer32ToSmi(rcx, rcx); | 781 __ Integer32ToSmi(rcx, rcx); |
782 __ bind(&slow_with_tagged_index); | 782 __ bind(&slow_with_tagged_index); |
783 GenerateRuntimeSetProperty(masm, strict_mode); | 783 GenerateRuntimeSetProperty(masm, strict_mode); |
784 // Never returns to here. | 784 // Never returns to here. |
785 | 785 |
786 // Extra capacity case: Check if there is extra capacity to | 786 // Extra capacity case: Check if there is extra capacity to |
787 // perform the store and update the length. Used for adding one | 787 // perform the store and update the length. Used for adding one |
788 // element to the array by writing to array[array.length]. | 788 // element to the array by writing to array[array.length]. |
789 __ bind(&extra); | 789 __ bind(&extra); |
790 // rax: value | 790 // rax: value |
791 // rdx: receiver (a JSArray) | 791 // rdx: receiver (a JSArray) |
792 // rbx: receiver's elements array (a FixedArray) | 792 // rbx: receiver's elements array (a FixedArray) |
793 // rcx: index | 793 // rcx: index |
794 // flags: smicompare (rdx.length(), rbx) | 794 // flags: smicompare (rdx.length(), rbx) |
795 __ j(not_equal, &slow); // do not leave holes in the array | 795 __ j(not_equal, &slow); // do not leave holes in the array |
796 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx); | 796 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx); |
797 __ j(below_equal, &slow); | 797 __ j(below_equal, &slow); |
798 // Increment index to get new length. | 798 // Increment index to get new length. |
799 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); | 799 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); |
800 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex); | 800 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex); |
801 __ j(not_equal, &check_if_double_array); | 801 __ j(not_equal, &check_if_double_array); |
802 __ jmp(&fast_object_grow); | 802 __ jmp(&fast_object_grow); |
803 | 803 |
804 __ bind(&check_if_double_array); | 804 __ bind(&check_if_double_array); |
805 // rdi: elements array's map | 805 // rdi: elements array's map |
806 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | 806 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); |
807 __ j(not_equal, &slow); | 807 __ j(not_equal, &slow); |
808 __ jmp(&fast_double_grow); | 808 __ jmp(&fast_double_grow); |
809 | 809 |
810 // Array case: Get the length and the elements array from the JS | 810 // Array case: Get the length and the elements array from the JS |
811 // array. Check that the array is in fast mode (and writable); if it | 811 // array. Check that the array is in fast mode (and writable); if it |
812 // is, the length is always a smi. | 812 // is, the length is always a smi. |
813 __ bind(&array); | 813 __ bind(&array); |
814 // rax: value | 814 // rax: value |
815 // rdx: receiver (a JSArray) | 815 // rdx: receiver (a JSArray) |
816 // rcx: index | 816 // rcx: index |
817 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | 817 __ movp(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); |
818 | 818 |
819 // Check the key against the length in the array, compute the | 819 // Check the key against the length in the array, compute the |
820 // address to store into and fall through to fast case. | 820 // address to store into and fall through to fast case. |
821 __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx); | 821 __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx); |
822 __ j(below_equal, &extra); | 822 __ j(below_equal, &extra); |
823 | 823 |
824 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, | 824 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, |
825 &slow, kCheckMap, kDontIncrementLength); | 825 &slow, kCheckMap, kDontIncrementLength); |
826 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow, | 826 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow, |
827 &slow, kDontCheckMap, kIncrementLength); | 827 &slow, kDontCheckMap, kIncrementLength); |
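For a JSArray the store above ends up on one of three paths: index below the current length (plain fast store into the existing backing store), index equal to the length but still within the backing store's capacity (store plus length bump, i.e. the array[array.length] append case routed through &extra), and everything else (runtime, since it would leave a hole or needs reallocation). In outline:

    #include <cstdint>

    enum class Path { FastStore, StoreAndGrowLength, Runtime };

    // index/length/capacity are untagged element counts; the generated code
    // performs the equivalent comparisons on smi-tagged values.
    Path ClassifyArrayStore(uint32_t index, uint32_t length, uint32_t capacity) {
      if (index < length) return Path::FastStore;
      if (index == length && index < capacity) return Path::StoreAndGrowLength;
      return Path::Runtime;
    }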
(...skipping 93 matching lines...)
921 // rsp[0] : return address | 921 // rsp[0] : return address |
922 // rsp[8] : argument argc | 922 // rsp[8] : argument argc |
923 // rsp[16] : argument argc - 1 | 923 // rsp[16] : argument argc - 1 |
924 // ... | 924 // ... |
925 // rsp[argc * 8] : argument 1 | 925 // rsp[argc * 8] : argument 1 |
926 // rsp[(argc + 1) * 8] : argument 0 = receiver | 926 // rsp[(argc + 1) * 8] : argument 0 = receiver |
927 // ----------------------------------- | 927 // ----------------------------------- |
928 Label miss; | 928 Label miss; |
929 | 929 |
930 StackArgumentsAccessor args(rsp, argc); | 930 StackArgumentsAccessor args(rsp, argc); |
931 __ movq(rdx, args.GetReceiverOperand()); | 931 __ movp(rdx, args.GetReceiverOperand()); |
932 | 932 |
933 GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss); | 933 GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss); |
934 | 934 |
935 // rax: elements | 935 // rax: elements |
936 // Search the dictionary placing the result in rdi. | 936 // Search the dictionary placing the result in rdi. |
937 GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi); | 937 GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi); |
938 | 938 |
939 GenerateFunctionTailCall(masm, argc, &miss); | 939 GenerateFunctionTailCall(masm, argc, &miss); |
940 | 940 |
941 __ bind(&miss); | 941 __ bind(&miss); |
(...skipping 15 matching lines...)
957 // ----------------------------------- | 957 // ----------------------------------- |
958 | 958 |
959 Counters* counters = masm->isolate()->counters(); | 959 Counters* counters = masm->isolate()->counters(); |
960 if (id == IC::kCallIC_Miss) { | 960 if (id == IC::kCallIC_Miss) { |
961 __ IncrementCounter(counters->call_miss(), 1); | 961 __ IncrementCounter(counters->call_miss(), 1); |
962 } else { | 962 } else { |
963 __ IncrementCounter(counters->keyed_call_miss(), 1); | 963 __ IncrementCounter(counters->keyed_call_miss(), 1); |
964 } | 964 } |
965 | 965 |
966 StackArgumentsAccessor args(rsp, argc); | 966 StackArgumentsAccessor args(rsp, argc); |
967 __ movq(rdx, args.GetReceiverOperand()); | 967 __ movp(rdx, args.GetReceiverOperand()); |
968 | 968 |
969 // Enter an internal frame. | 969 // Enter an internal frame. |
970 { | 970 { |
971 FrameScope scope(masm, StackFrame::INTERNAL); | 971 FrameScope scope(masm, StackFrame::INTERNAL); |
972 | 972 |
973 // Push the receiver and the name of the function. | 973 // Push the receiver and the name of the function. |
974 __ push(rdx); | 974 __ push(rdx); |
975 __ push(rcx); | 975 __ push(rcx); |
976 | 976 |
977 // Call the entry. | 977 // Call the entry. |
978 CEntryStub stub(1); | 978 CEntryStub stub(1); |
979 __ Set(rax, 2); | 979 __ Set(rax, 2); |
980 __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate())); | 980 __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate())); |
981 __ CallStub(&stub); | 981 __ CallStub(&stub); |
982 | 982 |
983 // Move result to rdi and exit the internal frame. | 983 // Move result to rdi and exit the internal frame. |
984 __ movq(rdi, rax); | 984 __ movp(rdi, rax); |
985 } | 985 } |
986 | 986 |
987 // Check if the receiver is a global object of some sort. | 987 // Check if the receiver is a global object of some sort. |
988 // This can happen only for regular CallIC but not KeyedCallIC. | 988 // This can happen only for regular CallIC but not KeyedCallIC. |
989 if (id == IC::kCallIC_Miss) { | 989 if (id == IC::kCallIC_Miss) { |
990 Label invoke, global; | 990 Label invoke, global; |
991 __ movq(rdx, args.GetReceiverOperand()); | 991 __ movp(rdx, args.GetReceiverOperand()); |
992 __ JumpIfSmi(rdx, &invoke); | 992 __ JumpIfSmi(rdx, &invoke); |
993 __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx); | 993 __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx); |
994 __ j(equal, &global); | 994 __ j(equal, &global); |
995 __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE); | 995 __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE); |
996 __ j(not_equal, &invoke); | 996 __ j(not_equal, &invoke); |
997 | 997 |
998 // Patch the receiver on the stack. | 998 // Patch the receiver on the stack. |
999 __ bind(&global); | 999 __ bind(&global); |
1000 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | 1000 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
1001 __ movq(args.GetReceiverOperand(), rdx); | 1001 __ movp(args.GetReceiverOperand(), rdx); |
1002 __ bind(&invoke); | 1002 __ bind(&invoke); |
1003 } | 1003 } |
1004 | 1004 |
1005 // Invoke the function. | 1005 // Invoke the function. |
1006 ParameterCount actual(argc); | 1006 ParameterCount actual(argc); |
1007 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); | 1007 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); |
1008 } | 1008 } |
1009 | 1009 |
1010 | 1010 |
1011 void CallIC::GenerateMegamorphic(MacroAssembler* masm, | 1011 void CallIC::GenerateMegamorphic(MacroAssembler* masm, |
1012 int argc, | 1012 int argc, |
1013 ExtraICState extra_ic_state) { | 1013 ExtraICState extra_ic_state) { |
1014 // ----------- S t a t e ------------- | 1014 // ----------- S t a t e ------------- |
1015 // rcx : function name | 1015 // rcx : function name |
1016 // rsp[0] : return address | 1016 // rsp[0] : return address |
1017 // rsp[8] : argument argc | 1017 // rsp[8] : argument argc |
1018 // rsp[16] : argument argc - 1 | 1018 // rsp[16] : argument argc - 1 |
1019 // ... | 1019 // ... |
1020 // rsp[argc * 8] : argument 1 | 1020 // rsp[argc * 8] : argument 1 |
1021 // rsp[(argc + 1) * 8] : argument 0 = receiver | 1021 // rsp[(argc + 1) * 8] : argument 0 = receiver |
1022 // ----------------------------------- | 1022 // ----------------------------------- |
1023 | 1023 |
1024 StackArgumentsAccessor args(rsp, argc); | 1024 StackArgumentsAccessor args(rsp, argc); |
1025 __ movq(rdx, args.GetReceiverOperand()); | 1025 __ movp(rdx, args.GetReceiverOperand()); |
1026 GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state); | 1026 GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state); |
1027 GenerateMiss(masm, argc, extra_ic_state); | 1027 GenerateMiss(masm, argc, extra_ic_state); |
1028 } | 1028 } |
1029 | 1029 |
1030 | 1030 |
1031 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { | 1031 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { |
1032 // ----------- S t a t e ------------- | 1032 // ----------- S t a t e ------------- |
1033 // rcx : function name | 1033 // rcx : function name |
1034 // rsp[0] : return address | 1034 // rsp[0] : return address |
1035 // rsp[8] : argument argc | 1035 // rsp[8] : argument argc |
1036 // rsp[16] : argument argc - 1 | 1036 // rsp[16] : argument argc - 1 |
1037 // ... | 1037 // ... |
1038 // rsp[argc * 8] : argument 1 | 1038 // rsp[argc * 8] : argument 1 |
1039 // rsp[(argc + 1) * 8] : argument 0 = receiver | 1039 // rsp[(argc + 1) * 8] : argument 0 = receiver |
1040 // ----------------------------------- | 1040 // ----------------------------------- |
1041 | 1041 |
1042 StackArgumentsAccessor args(rsp, argc); | 1042 StackArgumentsAccessor args(rsp, argc); |
1043 __ movq(rdx, args.GetReceiverOperand()); | 1043 __ movp(rdx, args.GetReceiverOperand()); |
1044 | 1044 |
1045 Label do_call, slow_call, slow_load; | 1045 Label do_call, slow_call, slow_load; |
1046 Label check_number_dictionary, check_name, lookup_monomorphic_cache; | 1046 Label check_number_dictionary, check_name, lookup_monomorphic_cache; |
1047 Label index_smi, index_name; | 1047 Label index_smi, index_name; |
1048 | 1048 |
1049 // Check that the key is a smi. | 1049 // Check that the key is a smi. |
1050 __ JumpIfNotSmi(rcx, &check_name); | 1050 __ JumpIfNotSmi(rcx, &check_name); |
1051 | 1051 |
1052 __ bind(&index_smi); | 1052 __ bind(&index_smi); |
1053 // Now the key is known to be a smi. This place is also jumped to from below | 1053 // Now the key is known to be a smi. This place is also jumped to from below |
(...skipping 31 matching lines...)
1085 // nor beneficial. | 1085 // nor beneficial. |
1086 __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1); | 1086 __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1); |
1087 { | 1087 { |
1088 FrameScope scope(masm, StackFrame::INTERNAL); | 1088 FrameScope scope(masm, StackFrame::INTERNAL); |
1089 __ push(rcx); // save the key | 1089 __ push(rcx); // save the key |
1090 __ push(rdx); // pass the receiver | 1090 __ push(rdx); // pass the receiver |
1091 __ push(rcx); // pass the key | 1091 __ push(rcx); // pass the key |
1092 __ CallRuntime(Runtime::kKeyedGetProperty, 2); | 1092 __ CallRuntime(Runtime::kKeyedGetProperty, 2); |
1093 __ pop(rcx); // restore the key | 1093 __ pop(rcx); // restore the key |
1094 } | 1094 } |
1095 __ movq(rdi, rax); | 1095 __ movp(rdi, rax); |
1096 __ jmp(&do_call); | 1096 __ jmp(&do_call); |
1097 | 1097 |
1098 __ bind(&check_name); | 1098 __ bind(&check_name); |
1099 GenerateKeyNameCheck(masm, rcx, rax, rbx, &index_name, &slow_call); | 1099 GenerateKeyNameCheck(masm, rcx, rax, rbx, &index_name, &slow_call); |
1100 | 1100 |
1101 // The key is known to be a unique name. | 1101 // The key is known to be a unique name. |
1102 // If the receiver is a regular JS object with slow properties then do | 1102 // If the receiver is a regular JS object with slow properties then do |
1103 // a quick inline probe of the receiver's dictionary. | 1103 // a quick inline probe of the receiver's dictionary. |
1104 // Otherwise do the monomorphic cache probe. | 1104 // Otherwise do the monomorphic cache probe. |
1105 GenerateKeyedLoadReceiverCheck( | 1105 GenerateKeyedLoadReceiverCheck( |
1106 masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); | 1106 masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); |
1107 | 1107 |
1108 __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset)); | 1108 __ movp(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset)); |
1109 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | 1109 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), |
1110 Heap::kHashTableMapRootIndex); | 1110 Heap::kHashTableMapRootIndex); |
1111 __ j(not_equal, &lookup_monomorphic_cache); | 1111 __ j(not_equal, &lookup_monomorphic_cache); |
1112 | 1112 |
1113 GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi); | 1113 GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi); |
1114 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1); | 1114 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1); |
1115 __ jmp(&do_call); | 1115 __ jmp(&do_call); |
1116 | 1116 |
1117 __ bind(&lookup_monomorphic_cache); | 1117 __ bind(&lookup_monomorphic_cache); |
1118 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1); | 1118 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1); |
(...skipping 60 matching lines...)
1179 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1); | 1179 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1); |
1180 __ j(below, slow_case); | 1180 __ j(below, slow_case); |
1181 | 1181 |
1182 // Check that the key is a positive smi. | 1182 // Check that the key is a positive smi. |
1183 Condition check = masm->CheckNonNegativeSmi(key); | 1183 Condition check = masm->CheckNonNegativeSmi(key); |
1184 __ j(NegateCondition(check), slow_case); | 1184 __ j(NegateCondition(check), slow_case); |
1185 | 1185 |
1186 // Load the elements into scratch1 and check its map. If not, jump | 1186 // Load the elements into scratch1 and check its map. If not, jump |
1187 // to the unmapped lookup with the parameter map in scratch1. | 1187 // to the unmapped lookup with the parameter map in scratch1. |
1188 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); | 1188 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); |
1189 __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset)); | 1189 __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset)); |
1190 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); | 1190 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); |
1191 | 1191 |
1192 // Check if element is in the range of mapped arguments. | 1192 // Check if element is in the range of mapped arguments. |
1193 __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset)); | 1193 __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset)); |
1194 __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2)); | 1194 __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2)); |
1195 __ cmpq(key, scratch2); | 1195 __ cmpq(key, scratch2); |
1196 __ j(greater_equal, unmapped_case); | 1196 __ j(greater_equal, unmapped_case); |
1197 | 1197 |
1198 // Load element index and check whether it is the hole. | 1198 // Load element index and check whether it is the hole. |
1199 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize; | 1199 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize; |
1200 __ SmiToInteger64(scratch3, key); | 1200 __ SmiToInteger64(scratch3, key); |
1201 __ movq(scratch2, FieldOperand(scratch1, | 1201 __ movp(scratch2, FieldOperand(scratch1, |
1202 scratch3, | 1202 scratch3, |
1203 times_pointer_size, | 1203 times_pointer_size, |
1204 kHeaderSize)); | 1204 kHeaderSize)); |
1205 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex); | 1205 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex); |
1206 __ j(equal, unmapped_case); | 1206 __ j(equal, unmapped_case); |
1207 | 1207 |
1208 // Load value from context and return it. We can reuse scratch1 because | 1208 // Load value from context and return it. We can reuse scratch1 because |
1209 // we do not jump to the unmapped lookup (which requires the parameter | 1209 // we do not jump to the unmapped lookup (which requires the parameter |
1210 // map in scratch1). | 1210 // map in scratch1). |
1211 __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize)); | 1211 __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize)); |
1212 __ SmiToInteger64(scratch3, scratch2); | 1212 __ SmiToInteger64(scratch3, scratch2); |
1213 return FieldOperand(scratch1, | 1213 return FieldOperand(scratch1, |
1214 scratch3, | 1214 scratch3, |
1215 times_pointer_size, | 1215 times_pointer_size, |
1216 Context::kHeaderSize); | 1216 Context::kHeaderSize); |
1217 } | 1217 } |
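For non-strict arguments objects the elements array is a parameter map: slot 0 holds the context, slot 1 the unaliased backing store, and slots 2..n hold either the hole or a context slot index for arguments that alias a formal parameter. The helper above resolves the mapped case into the context; the companion helper below falls back to the backing store. A simplified model of the two-level lookup (plain vectors stand in for the heap objects; kTheHole is a stand-in sentinel):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr intptr_t kTheHole = -1;  // stand-in for V8's hole value

    // 'mapped' models entries 2..n of the parameter map; 'context' and
    // 'backing_store' correspond to its slots 0 and 1, passed separately here.
    intptr_t LoadNonStrictArgument(const std::vector<intptr_t>& mapped,
                                   const std::vector<intptr_t>& context,
                                   const std::vector<intptr_t>& backing_store,
                                   size_t key) {
      if (key < mapped.size() && mapped[key] != kTheHole) {
        return context[static_cast<size_t>(mapped[key])];  // aliases a formal parameter
      }
      return backing_store[key];  // unaliased: read from the backing store
    }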
1218 | 1218 |
1219 | 1219 |
1220 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, | 1220 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, |
1221 Register key, | 1221 Register key, |
1222 Register parameter_map, | 1222 Register parameter_map, |
1223 Register scratch, | 1223 Register scratch, |
1224 Label* slow_case) { | 1224 Label* slow_case) { |
1225 // Element is in arguments backing store, which is referenced by the | 1225 // Element is in arguments backing store, which is referenced by the |
1226 // second element of the parameter_map. The parameter_map register | 1226 // second element of the parameter_map. The parameter_map register |
1227 // must be loaded with the parameter map of the arguments object and is | 1227 // must be loaded with the parameter map of the arguments object and is |
1228 // overwritten. | 1228 // overwritten. |
1229 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | 1229 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; |
1230 Register backing_store = parameter_map; | 1230 Register backing_store = parameter_map; |
1231 __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset)); | 1231 __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset)); |
1232 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); | 1232 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); |
1233 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); | 1233 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); |
1234 __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); | 1234 __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); |
1235 __ cmpq(key, scratch); | 1235 __ cmpq(key, scratch); |
1236 __ j(greater_equal, slow_case); | 1236 __ j(greater_equal, slow_case); |
1237 __ SmiToInteger64(scratch, key); | 1237 __ SmiToInteger64(scratch, key); |
1238 return FieldOperand(backing_store, | 1238 return FieldOperand(backing_store, |
1239 scratch, | 1239 scratch, |
1240 times_pointer_size, | 1240 times_pointer_size, |
1241 FixedArray::kHeaderSize); | 1241 FixedArray::kHeaderSize); |
1242 } | 1242 } |
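
The unmapped path is just a bounds-checked load from the backing-store FixedArray found in slot 1 of the parameter map, with out-of-range keys sent to the slow case. Continuing the hypothetical stand-ins from the sketch above:

  // Unmapped path: the value sits in the plain backing store at |key|; an
  // out-of-range key corresponds to the j(greater_equal, slow_case) above.
  std::optional<intptr_t> UnmappedLookup(const SloppyArgumentsElements& elements,
                                         size_t key) {
    if (key >= elements.backing_store.size()) return std::nullopt;  // slow case
    return elements.backing_store[key];
  }
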
1243 | 1243 |
1244 | 1244 |
1245 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 1245 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { |
1246 // ----------- S t a t e ------------- | 1246 // ----------- S t a t e ------------- |
1247 // -- rax : key | 1247 // -- rax : key |
1248 // -- rdx : receiver | 1248 // -- rdx : receiver |
1249 // -- rsp[0] : return address | 1249 // -- rsp[0] : return address |
1250 // ----------------------------------- | 1250 // ----------------------------------- |
1251 Label slow, notin; | 1251 Label slow, notin; |
1252 Operand mapped_location = | 1252 Operand mapped_location = |
1253 GenerateMappedArgumentsLookup( | 1253 GenerateMappedArgumentsLookup( |
1254 masm, rdx, rax, rbx, rcx, rdi, &notin, &slow); | 1254 masm, rdx, rax, rbx, rcx, rdi, &notin, &slow); |

1255 __ movq(rax, mapped_location); | 1255 __ movp(rax, mapped_location); |
1256 __ Ret(); | 1256 __ Ret(); |
1257 __ bind(&notin); | 1257 __ bind(&notin); |
1258 // The unmapped lookup expects that the parameter map is in rbx. | 1258 // The unmapped lookup expects that the parameter map is in rbx. |
1259 Operand unmapped_location = | 1259 Operand unmapped_location = |
1260 GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow); | 1260 GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow); |
1261 __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex); | 1261 __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex); |
1262 __ j(equal, &slow); | 1262 __ j(equal, &slow); |
1263 __ movq(rax, unmapped_location); | 1263 __ movp(rax, unmapped_location); |
1264 __ Ret(); | 1264 __ Ret(); |
1265 __ bind(&slow); | 1265 __ bind(&slow); |
1266 GenerateMiss(masm); | 1266 GenerateMiss(masm); |
1267 } | 1267 } |
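
Taken together, the load handler tries the mapped location first, falls back to the backing store, and treats anything else (an out-of-range key or a hole value) as a miss. Roughly, with the same stand-ins as in the sketches above:

  // Combined flow of the keyed arguments load; nullopt plays the role of the
  // slow label (IC miss). Deleted elements show up as holes in the backing
  // store, which is why the hole check comes after the unmapped lookup.
  std::optional<intptr_t> KeyedArgumentsLoad(
      const SloppyArgumentsElements& elements, size_t key) {
    if (auto mapped = MappedLookup(elements, key)) return mapped;
    auto unmapped = UnmappedLookup(elements, key);
    if (unmapped.has_value() && *unmapped != kTheHole) return unmapped;
    return std::nullopt;
  }
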
1268 | 1268 |
1269 | 1269 |
1270 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 1270 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) { |
1271 // ----------- S t a t e ------------- | 1271 // ----------- S t a t e ------------- |
1272 // -- rax : value | 1272 // -- rax : value |
1273 // -- rcx : key | 1273 // -- rcx : key |
1274 // -- rdx : receiver | 1274 // -- rdx : receiver |
1275 // -- rsp[0] : return address | 1275 // -- rsp[0] : return address |
1276 // ----------------------------------- | 1276 // ----------------------------------- |
1277 Label slow, notin; | 1277 Label slow, notin; |
1278 Operand mapped_location = GenerateMappedArgumentsLookup( | 1278 Operand mapped_location = GenerateMappedArgumentsLookup( |
1279 masm, rdx, rcx, rbx, rdi, r8, &notin, &slow); | 1279 masm, rdx, rcx, rbx, rdi, r8, &notin, &slow); |
1280 __ movq(mapped_location, rax); | 1280 __ movp(mapped_location, rax); |
1281 __ lea(r9, mapped_location); | 1281 __ lea(r9, mapped_location); |
1282 __ movq(r8, rax); | 1282 __ movp(r8, rax); |
1283 __ RecordWrite(rbx, | 1283 __ RecordWrite(rbx, |
1284 r9, | 1284 r9, |
1285 r8, | 1285 r8, |
1286 kDontSaveFPRegs, | 1286 kDontSaveFPRegs, |
1287 EMIT_REMEMBERED_SET, | 1287 EMIT_REMEMBERED_SET, |
1288 INLINE_SMI_CHECK); | 1288 INLINE_SMI_CHECK); |
1289 __ Ret(); | 1289 __ Ret(); |
1290 __ bind(&notin); | 1290 __ bind(&notin); |
1291 // The unmapped lookup expects that the parameter map is in rbx. | 1291 // The unmapped lookup expects that the parameter map is in rbx. |
1292 Operand unmapped_location = | 1292 Operand unmapped_location = |
1293 GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow); | 1293 GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow); |
1294 __ movq(unmapped_location, rax); | 1294 __ movp(unmapped_location, rax); |
1295 __ lea(r9, unmapped_location); | 1295 __ lea(r9, unmapped_location); |
1296 __ movq(r8, rax); | 1296 __ movp(r8, rax); |
1297 __ RecordWrite(rbx, | 1297 __ RecordWrite(rbx, |
1298 r9, | 1298 r9, |
1299 r8, | 1299 r8, |
1300 kDontSaveFPRegs, | 1300 kDontSaveFPRegs, |
1301 EMIT_REMEMBERED_SET, | 1301 EMIT_REMEMBERED_SET, |
1302 INLINE_SMI_CHECK); | 1302 INLINE_SMI_CHECK); |
1303 __ Ret(); | 1303 __ Ret(); |
1304 __ bind(&slow); | 1304 __ bind(&slow); |
1305 GenerateMiss(masm); | 1305 GenerateMiss(masm); |
1306 } | 1306 } |
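
The store handler computes the same location, writes the value, and then calls RecordWrite with the slot address in r9 and a copy of the value in r8 so the generational write barrier can record a pointer from an old-space object into new space; INLINE_SMI_CHECK lets the barrier bail out early when the stored value is a smi. A heavily simplified conceptual sketch of such a barrier follows; it is not V8's actual implementation, which uses per-page slot buffers rather than a global set:

  #include <cstdint>
  #include <unordered_set>

  // Hypothetical remembered set keyed by slot address.
  static std::unordered_set<uintptr_t> remembered_set;

  // V8 tags small integers (smis) with a clear low bit, so a smi can never be
  // a heap pointer and never needs a barrier.
  inline bool IsSmi(intptr_t value) { return (value & 1) == 0; }

  // Record an old-to-new pointer so the scavenger can treat the slot as a
  // root. Whether the value is in new space and the holder in old space is
  // passed in, because the real checks depend on V8's heap layout.
  void RecordWriteSketch(uintptr_t slot_address, intptr_t value,
                         bool value_in_new_space, bool holder_in_old_space) {
    if (IsSmi(value)) return;                 // INLINE_SMI_CHECK analogue
    if (!value_in_new_space || !holder_in_old_space) return;
    remembered_set.insert(slot_address);      // EMIT_REMEMBERED_SET analogue
  }
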
1307 | 1307 |
1308 | 1308 |
1309 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, | 1309 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, |
1310 int argc) { | 1310 int argc) { |
1311 // ----------- S t a t e ------------- | 1311 // ----------- S t a t e ------------- |
1312 // rcx : function name | 1312 // rcx : function name |
1313 // rsp[0] : return address | 1313 // rsp[0] : return address |
1314 // rsp[8] : argument argc | 1314 // rsp[8] : argument argc |
1315 // rsp[16] : argument argc - 1 | 1315 // rsp[16] : argument argc - 1 |
1316 // ... | 1316 // ... |
1317 // rsp[argc * 8] : argument 1 | 1317 // rsp[argc * 8] : argument 1 |
1318 // rsp[(argc + 1) * 8] : argument 0 = receiver | 1318 // rsp[(argc + 1) * 8] : argument 0 = receiver |
1319 // ----------------------------------- | 1319 // ----------------------------------- |
1320 Label slow, notin; | 1320 Label slow, notin; |
1321 StackArgumentsAccessor args(rsp, argc); | 1321 StackArgumentsAccessor args(rsp, argc); |
1322 __ movq(rdx, args.GetReceiverOperand()); | 1322 __ movp(rdx, args.GetReceiverOperand()); |
1323 Operand mapped_location = GenerateMappedArgumentsLookup( | 1323 Operand mapped_location = GenerateMappedArgumentsLookup( |
1324 masm, rdx, rcx, rbx, rax, r8, &notin, &slow); | 1324 masm, rdx, rcx, rbx, rax, r8, &notin, &slow); |
1325 __ movq(rdi, mapped_location); | 1325 __ movp(rdi, mapped_location); |
1326 GenerateFunctionTailCall(masm, argc, &slow); | 1326 GenerateFunctionTailCall(masm, argc, &slow); |
1327 __ bind(&notin); | 1327 __ bind(&notin); |
1328 // The unmapped lookup expects that the parameter map is in rbx. | 1328 // The unmapped lookup expects that the parameter map is in rbx. |
1329 Operand unmapped_location = | 1329 Operand unmapped_location = |
1330 GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow); | 1330 GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow); |
1331 __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex); | 1331 __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex); |
1332 __ j(equal, &slow); | 1332 __ j(equal, &slow); |
1333 __ movq(rdi, unmapped_location); | 1333 __ movp(rdi, unmapped_location); |
1334 GenerateFunctionTailCall(masm, argc, &slow); | 1334 GenerateFunctionTailCall(masm, argc, &slow); |
1335 __ bind(&slow); | 1335 __ bind(&slow); |
1336 GenerateMiss(masm, argc); | 1336 GenerateMiss(masm, argc); |
1337 } | 1337 } |
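
Before doing the lookup, the call stub fetches the receiver from the stack; with the return address at rsp[0] and the receiver pushed before the arguments, argument i sits at rsp[(argc + 1 - i) * 8] and the receiver (argument 0) at rsp[(argc + 1) * 8], which is the operand GetReceiverOperand yields here, assuming no extra displacement. A small sketch of that offset arithmetic:

  constexpr int kPointerSize = 8;  // x64

  // Byte offset from rsp of argument |i| for a call with |argc| arguments,
  // following the stack layout in the state comment above (receiver is
  // argument 0, the return address sits at rsp[0]).
  constexpr int ArgumentOffset(int argc, int i) {
    return (argc + 1 - i) * kPointerSize;
  }

  constexpr int ReceiverOffset(int argc) { return ArgumentOffset(argc, 0); }

  static_assert(ReceiverOffset(2) == 24, "receiver of a 2-argument call");
  static_assert(ArgumentOffset(2, 2) == 8,
                "last argument sits just above the return address");
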
1338 | 1338 |
1339 | 1339 |
1340 void LoadIC::GenerateMegamorphic(MacroAssembler* masm, ContextualMode mode) { | 1340 void LoadIC::GenerateMegamorphic(MacroAssembler* masm, ContextualMode mode) { |
1341 // ----------- S t a t e ------------- | 1341 // ----------- S t a t e ------------- |
1342 // -- rax : receiver | 1342 // -- rax : receiver |
1343 // -- rcx : name | 1343 // -- rcx : name |
(...skipping 347 matching lines...) |
1691 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) | 1691 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) |
1692 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 1692 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
1693 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 1693 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
1694 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 1694 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
1695 } | 1695 } |
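
The condition-code patching above relies on the x86 encoding of short conditional jumps: a Jcc rel8 is a single opcode byte (0x70 | cc) followed by an 8-bit displacement, so flipping the test only requires rewriting the opcode byte with a new condition code (carry/not_carry when enabling the inlined smi check, zero/not_zero when disabling it) while the displacement byte stays untouched. A hedged sketch of that byte patching, using the standard x86 condition-code values rather than V8's Assembler constants:

  #include <cstdint>

  // x86 short conditional jump: one opcode byte (0x70 | cc) plus one rel8 byte.
  constexpr uint8_t kJccShortPrefix = 0x70;
  constexpr uint8_t kCarry = 0x2;     // JB/JC
  constexpr uint8_t kNotCarry = 0x3;  // JAE/JNC
  constexpr uint8_t kZero = 0x4;      // JE/JZ
  constexpr uint8_t kNotZero = 0x5;   // JNE/JNZ

  // Rewrite the condition of an existing short Jcc in place. |jmp_address|
  // points at the opcode byte; the rel8 displacement that follows is reused.
  void PatchShortJcc(uint8_t* jmp_address, uint8_t new_cc) {
    *jmp_address = static_cast<uint8_t>(kJccShortPrefix | new_cc);
  }
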
1696 | 1696 |
1697 | 1697 |
1698 } } // namespace v8::internal | 1698 } } // namespace v8::internal |
1699 | 1699 |
1700 #endif // V8_TARGET_ARCH_X64 | 1700 #endif // V8_TARGET_ARCH_X64 |