Chromium Code Reviews

Side by Side Diff: src/ia32/ic-ia32.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 194 matching lines...)
205 Immediate(kTypeAndReadOnlyMask)); 205 Immediate(kTypeAndReadOnlyMask));
206 __ j(not_zero, miss_label); 206 __ j(not_zero, miss_label);
207 207
208 // Store the value at the masked, scaled index. 208 // Store the value at the masked, scaled index.
209 const int kValueOffset = kElementsStartOffset + kPointerSize; 209 const int kValueOffset = kElementsStartOffset + kPointerSize;
210 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); 210 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
211 __ mov(Operand(r0, 0), value); 211 __ mov(Operand(r0, 0), value);
212 212
213 // Update write barrier. Make sure not to clobber the value. 213 // Update write barrier. Make sure not to clobber the value.
214 __ mov(r1, value); 214 __ mov(r1, value);
215 __ RecordWrite(elements, r0, r1); 215 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
216 } 216 }
217 217
218 218
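A note on the dictionary store above: the value slot address is formed by a single lea from the elements pointer, the already masked and scaled probe index, and a constant offset, with the heap-object tag subtracted to untag the pointer; the following RecordWrite call now also passes a SaveFPRegsMode (kDontSaveFPRegs). A minimal standalone sketch of that address arithmetic; the concrete constant values below are placeholders for illustration, not taken from this file:

#include <cstdint>
#include <cstdio>

// Assumed values for illustration only. The diff's own STATIC_ASSERT later in
// this file confirms kPointerSize == 4 on ia32; the 1-bit heap-object tag and
// kElementsStartOffset value are assumptions here.
constexpr uint32_t kPointerSize = 4;
constexpr uint32_t kHeapObjectTag = 1;
constexpr uint32_t kElementsStartOffset = 24;  // placeholder, not the real constant
constexpr uint32_t kValueOffset = kElementsStartOffset + kPointerSize;

// Mirrors: lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag))
uint32_t SlotAddress(uint32_t elements_tagged_ptr, uint32_t scaled_index) {
  return elements_tagged_ptr + scaled_index * 4 + (kValueOffset - kHeapObjectTag);
}

int main() {
  std::printf("slot = 0x%x\n", SlotAddress(0x1000 + kHeapObjectTag, 3));
}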
219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { 219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
220 // ----------- S t a t e ------------- 220 // ----------- S t a t e -------------
221 // -- eax : receiver 221 // -- eax : receiver
222 // -- ecx : name 222 // -- ecx : name
223 // -- esp[0] : return address 223 // -- esp[0] : return address
224 // ----------------------------------- 224 // -----------------------------------
225 Label miss; 225 Label miss;
(...skipping 93 matching lines...)
319 DONT_DO_SMI_CHECK); 319 DONT_DO_SMI_CHECK);
320 } else { 320 } else {
321 __ AssertFastElements(scratch); 321 __ AssertFastElements(scratch);
322 } 322 }
323 // Check that the key (index) is within bounds. 323 // Check that the key (index) is within bounds.
324 __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset)); 324 __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
325 __ j(above_equal, out_of_range); 325 __ j(above_equal, out_of_range);
326 // Fast case: Do the load. 326 // Fast case: Do the load.
327 STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); 327 STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
328 __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize)); 328 __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
329 __ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value())); 329 __ cmp(scratch, Immediate(FACTORY->the_hole_value()));
330 // In case the loaded value is the_hole we have to consult GetProperty 330 // In case the loaded value is the_hole we have to consult GetProperty
331 // to ensure the prototype chain is searched. 331 // to ensure the prototype chain is searched.
332 __ j(equal, out_of_range); 332 __ j(equal, out_of_range);
333 if (!result.is(scratch)) { 333 if (!result.is(scratch)) {
334 __ mov(result, scratch); 334 __ mov(result, scratch);
335 } 335 }
336 } 336 }
337 337
338 338
339 // Checks whether a key is an array index string or a symbol string. 339 // Checks whether a key is an array index string or a symbol string.
(...skipping 47 matching lines...)
387 __ j(not_zero, slow_case); 387 __ j(not_zero, slow_case);
388 388
389 // Load the elements into scratch1 and check its map. 389 // Load the elements into scratch1 and check its map.
390 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); 390 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
391 __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset)); 391 __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
392 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); 392 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
393 393
394 // Check if element is in the range of mapped arguments. If not, jump 394 // Check if element is in the range of mapped arguments. If not, jump
395 // to the unmapped lookup with the parameter map in scratch1. 395 // to the unmapped lookup with the parameter map in scratch1.
396 __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset)); 396 __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
397 __ sub(Operand(scratch2), Immediate(Smi::FromInt(2))); 397 __ sub(scratch2, Immediate(Smi::FromInt(2)));
398 __ cmp(key, Operand(scratch2)); 398 __ cmp(key, scratch2);
399 __ j(greater_equal, unmapped_case); 399 __ j(greater_equal, unmapped_case);
400 400
401 // Load element index and check whether it is the hole. 401 // Load element index and check whether it is the hole.
402 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize; 402 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
403 __ mov(scratch2, FieldOperand(scratch1, 403 __ mov(scratch2, FieldOperand(scratch1,
404 key, 404 key,
405 times_half_pointer_size, 405 times_half_pointer_size,
406 kHeaderSize)); 406 kHeaderSize));
407 __ cmp(scratch2, factory->the_hole_value()); 407 __ cmp(scratch2, factory->the_hole_value());
408 __ j(equal, unmapped_case); 408 __ j(equal, unmapped_case);
(...skipping 16 matching lines...)
425 Register scratch, 425 Register scratch,
426 Label* slow_case) { 426 Label* slow_case) {
427 // Element is in arguments backing store, which is referenced by the 427 // Element is in arguments backing store, which is referenced by the
428 // second element of the parameter_map. 428 // second element of the parameter_map.
429 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; 429 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
430 Register backing_store = parameter_map; 430 Register backing_store = parameter_map;
431 __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset)); 431 __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
432 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); 432 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
433 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); 433 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
434 __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); 434 __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
435 __ cmp(key, Operand(scratch)); 435 __ cmp(key, scratch);
436 __ j(greater_equal, slow_case); 436 __ j(greater_equal, slow_case);
437 return FieldOperand(backing_store, 437 return FieldOperand(backing_store,
438 key, 438 key,
439 times_half_pointer_size, 439 times_half_pointer_size,
440 FixedArray::kHeaderSize); 440 FixedArray::kHeaderSize);
441 } 441 }
442 442
443 443
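The two argument-lookup helpers above encode the layout of a non-strict arguments object's parameter map: the first two FixedArray slots are bookkeeping (the second references the backing store, hence kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize), mapped entries start two pointers in (the kHeaderSize constant above), and a hole entry means the value must be fetched from the backing store instead. A control-flow-only sketch under those assumptions, with hypothetical types standing in for the real heap objects:

#include <cstdio>
#include <optional>
#include <vector>

// Hypothetical stand-in for the parameter map: entry i of 'mapped' models
// parameter_map[2 + i]; a nullopt models the_hole.
struct ArgumentsObject {
  std::vector<std::optional<int>> mapped;
  size_t backing_store_length;
};

// Mirrors GenerateMappedArgumentsLookup / GenerateUnmappedArgumentsLookup:
// which path the generated stub takes for a given smi key.
const char* ArgumentLookupPath(const ArgumentsObject& a, size_t key) {
  if (key < a.mapped.size() && a.mapped[key].has_value())
    return "mapped: use the slot recorded in the parameter map";
  if (key < a.backing_store_length)
    return "unmapped: use the arguments backing store";
  return "slow case";
}

int main() {
  ArgumentsObject a{{std::nullopt, 7}, 3};
  std::printf("%s\n", ArgumentLookupPath(a, 0));  // unmapped: hole at index 0
}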
444 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { 444 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
445 // ----------- S t a t e ------------- 445 // ----------- S t a t e -------------
(...skipping 81 matching lines...)
527 Immediate(isolate->factory()->hash_table_map())); 527 Immediate(isolate->factory()->hash_table_map()));
528 __ j(equal, &probe_dictionary); 528 __ j(equal, &probe_dictionary);
529 529
530 // Load the map of the receiver, compute the keyed lookup cache hash 530 // Load the map of the receiver, compute the keyed lookup cache hash
531 // based on 32 bits of the map pointer and the string hash. 531 // based on 32 bits of the map pointer and the string hash.
532 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); 532 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
533 __ mov(ecx, ebx); 533 __ mov(ecx, ebx);
534 __ shr(ecx, KeyedLookupCache::kMapHashShift); 534 __ shr(ecx, KeyedLookupCache::kMapHashShift);
535 __ mov(edi, FieldOperand(eax, String::kHashFieldOffset)); 535 __ mov(edi, FieldOperand(eax, String::kHashFieldOffset));
536 __ shr(edi, String::kHashShift); 536 __ shr(edi, String::kHashShift);
537 __ xor_(ecx, Operand(edi)); 537 __ xor_(ecx, edi);
538 __ and_(ecx, KeyedLookupCache::kCapacityMask); 538 __ and_(ecx, KeyedLookupCache::kCapacityMask);
539 539
540 // Load the key (consisting of map and symbol) from the cache and 540 // Load the key (consisting of map and symbol) from the cache and
541 // check for match. 541 // check for match.
542 ExternalReference cache_keys = 542 ExternalReference cache_keys =
543 ExternalReference::keyed_lookup_cache_keys(masm->isolate()); 543 ExternalReference::keyed_lookup_cache_keys(masm->isolate());
544 __ mov(edi, ecx); 544 __ mov(edi, ecx);
545 __ shl(edi, kPointerSizeLog2 + 1); 545 __ shl(edi, kPointerSizeLog2 + 1);
546 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); 546 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
547 __ j(not_equal, &slow); 547 __ j(not_equal, &slow);
548 __ add(Operand(edi), Immediate(kPointerSize)); 548 __ add(edi, Immediate(kPointerSize));
549 __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys)); 549 __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
550 __ j(not_equal, &slow); 550 __ j(not_equal, &slow);
551 551
552 // Get field offset. 552 // Get field offset.
553 // edx : receiver 553 // edx : receiver
554 // ebx : receiver's map 554 // ebx : receiver's map
555 // eax : key 555 // eax : key
556 // ecx : lookup cache index 556 // ecx : lookup cache index
557 ExternalReference cache_field_offsets = 557 ExternalReference cache_field_offsets =
558 ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); 558 ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
559 __ mov(edi, 559 __ mov(edi,
560 Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets)); 560 Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
561 __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset)); 561 __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
562 __ sub(edi, Operand(ecx)); 562 __ sub(edi, ecx);
563 __ j(above_equal, &property_array_property); 563 __ j(above_equal, &property_array_property);
564 564
565 // Load in-object property. 565 // Load in-object property.
566 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset)); 566 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
567 __ add(ecx, Operand(edi)); 567 __ add(ecx, edi);
568 __ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0)); 568 __ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
569 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); 569 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
570 __ ret(0); 570 __ ret(0);
571 571
572 // Load property array property. 572 // Load property array property.
573 __ bind(&property_array_property); 573 __ bind(&property_array_property);
574 __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset)); 574 __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
575 __ mov(eax, FieldOperand(eax, edi, times_pointer_size, 575 __ mov(eax, FieldOperand(eax, edi, times_pointer_size,
576 FixedArray::kHeaderSize)); 576 FixedArray::kHeaderSize));
577 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); 577 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
(...skipping 66 matching lines...)
644 // Check that the key is an array index, that is Uint32. 644 // Check that the key is an array index, that is Uint32.
645 __ test(eax, Immediate(kSmiTagMask | kSmiSignMask)); 645 __ test(eax, Immediate(kSmiTagMask | kSmiSignMask));
646 __ j(not_zero, &slow); 646 __ j(not_zero, &slow);
647 647
648 // Get the map of the receiver. 648 // Get the map of the receiver.
649 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 649 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
650 650
651 // Check that it has indexed interceptor and access checks 651 // Check that it has indexed interceptor and access checks
652 // are not enabled for this object. 652 // are not enabled for this object.
653 __ movzx_b(ecx, FieldOperand(ecx, Map::kBitFieldOffset)); 653 __ movzx_b(ecx, FieldOperand(ecx, Map::kBitFieldOffset));
654 __ and_(Operand(ecx), Immediate(kSlowCaseBitFieldMask)); 654 __ and_(ecx, Immediate(kSlowCaseBitFieldMask));
655 __ cmp(Operand(ecx), Immediate(1 << Map::kHasIndexedInterceptor)); 655 __ cmp(ecx, Immediate(1 << Map::kHasIndexedInterceptor));
656 __ j(not_zero, &slow); 656 __ j(not_zero, &slow);
657 657
658 // Everything is fine, call runtime. 658 // Everything is fine, call runtime.
659 __ pop(ecx); 659 __ pop(ecx);
660 __ push(edx); // receiver 660 __ push(edx); // receiver
661 __ push(eax); // key 661 __ push(eax); // key
662 __ push(ecx); // return address 662 __ push(ecx); // return address
663 663
664 // Perform tail call to the entry. 664 // Perform tail call to the entry.
665 ExternalReference ref = 665 ExternalReference ref =
(...skipping 37 matching lines...)
703 // -- ecx : key 703 // -- ecx : key
704 // -- edx : receiver 704 // -- edx : receiver
705 // -- esp[0] : return address 705 // -- esp[0] : return address
706 // ----------------------------------- 706 // -----------------------------------
707 Label slow, notin; 707 Label slow, notin;
708 Operand mapped_location = 708 Operand mapped_location =
709 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow); 709 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow);
710 __ mov(mapped_location, eax); 710 __ mov(mapped_location, eax);
711 __ lea(ecx, mapped_location); 711 __ lea(ecx, mapped_location);
712 __ mov(edx, eax); 712 __ mov(edx, eax);
713 __ RecordWrite(ebx, ecx, edx); 713 __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
714 __ Ret(); 714 __ Ret();
715 __ bind(&notin); 715 __ bind(&notin);
716 // The unmapped lookup expects that the parameter map is in ebx. 716 // The unmapped lookup expects that the parameter map is in ebx.
717 Operand unmapped_location = 717 Operand unmapped_location =
718 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow); 718 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow);
719 __ mov(unmapped_location, eax); 719 __ mov(unmapped_location, eax);
720 __ lea(edi, unmapped_location); 720 __ lea(edi, unmapped_location);
721 __ mov(edx, eax); 721 __ mov(edx, eax);
722 __ RecordWrite(ebx, edi, edx); 722 __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
723 __ Ret(); 723 __ Ret();
724 __ bind(&slow); 724 __ bind(&slow);
725 GenerateMiss(masm, false); 725 GenerateMiss(masm, false);
726 } 726 }
727 727
728 728
729 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, 729 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
730 StrictModeFlag strict_mode) { 730 StrictModeFlag strict_mode) {
731 // ----------- S t a t e ------------- 731 // ----------- S t a t e -------------
732 // -- eax : value 732 // -- eax : value
733 // -- ecx : key 733 // -- ecx : key
734 // -- edx : receiver 734 // -- edx : receiver
735 // -- esp[0] : return address 735 // -- esp[0] : return address
736 // ----------------------------------- 736 // -----------------------------------
737 Label slow, fast, array, extra; 737 Label slow, fast_object_with_map_check, fast_object_without_map_check;
738 Label fast_double_with_map_check, fast_double_without_map_check;
739 Label check_if_double_array, array, extra;
738 740
739 // Check that the object isn't a smi. 741 // Check that the object isn't a smi.
740 __ JumpIfSmi(edx, &slow); 742 __ JumpIfSmi(edx, &slow);
741 // Get the map from the receiver. 743 // Get the map from the receiver.
742 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); 744 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
743 // Check that the receiver does not require access checks. We need 745 // Check that the receiver does not require access checks. We need
744 // to do this because this generic stub does not perform map checks. 746 // to do this because this generic stub does not perform map checks.
745 __ test_b(FieldOperand(edi, Map::kBitFieldOffset), 747 __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
746 1 << Map::kIsAccessCheckNeeded); 748 1 << Map::kIsAccessCheckNeeded);
747 __ j(not_zero, &slow); 749 __ j(not_zero, &slow);
748 // Check that the key is a smi. 750 // Check that the key is a smi.
749 __ JumpIfNotSmi(ecx, &slow); 751 __ JumpIfNotSmi(ecx, &slow);
750 __ CmpInstanceType(edi, JS_ARRAY_TYPE); 752 __ CmpInstanceType(edi, JS_ARRAY_TYPE);
751 __ j(equal, &array); 753 __ j(equal, &array);
752 // Check that the object is some kind of JSObject. 754 // Check that the object is some kind of JSObject.
753 __ CmpInstanceType(edi, FIRST_JS_RECEIVER_TYPE); 755 __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
754 __ j(below, &slow); 756 __ j(below, &slow);
755 __ CmpInstanceType(edi, JS_PROXY_TYPE);
756 __ j(equal, &slow);
757 __ CmpInstanceType(edi, JS_FUNCTION_PROXY_TYPE);
758 __ j(equal, &slow);
759 757
760 // Object case: Check key against length in the elements array. 758 // Object case: Check key against length in the elements array.
761 // eax: value 759 // eax: value
762 // edx: JSObject 760 // edx: JSObject
763 // ecx: key (a smi) 761 // ecx: key (a smi)
764 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); 762 // edi: receiver map
765 // Check that the object is in fast mode and writable. 763 __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
766 __ CheckMap(edi, FACTORY->fixed_array_map(), &slow, DONT_DO_SMI_CHECK); 764 // Check array bounds. Both the key and the length of FixedArray are smis.
767 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); 765 __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
768 __ j(below, &fast); 766 __ j(below, &fast_object_with_map_check);
769 767
770 // Slow case: call runtime. 768 // Slow case: call runtime.
771 __ bind(&slow); 769 __ bind(&slow);
772 GenerateRuntimeSetProperty(masm, strict_mode); 770 GenerateRuntimeSetProperty(masm, strict_mode);
773 771
774 // Extra capacity case: Check if there is extra capacity to 772 // Extra capacity case: Check if there is extra capacity to
775 // perform the store and update the length. Used for adding one 773 // perform the store and update the length. Used for adding one
776 // element to the array by writing to array[array.length]. 774 // element to the array by writing to array[array.length].
777 __ bind(&extra); 775 __ bind(&extra);
778 // eax: value 776 // eax: value
779 // edx: receiver, a JSArray 777 // edx: receiver, a JSArray
780 // ecx: key, a smi. 778 // ecx: key, a smi.
781 // edi: receiver->elements, a FixedArray 779 // ebx: receiver->elements, a FixedArray
780 // edi: receiver map
782 // flags: compare (ecx, edx.length()) 781 // flags: compare (ecx, edx.length())
783 // do not leave holes in the array: 782 // do not leave holes in the array:
784 __ j(not_equal, &slow); 783 __ j(not_equal, &slow);
785 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); 784 __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
786 __ j(above_equal, &slow); 785 __ j(above_equal, &slow);
787 // Add 1 to receiver->length, and go to fast array write. 786 __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
787 __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
788 __ j(not_equal, &check_if_double_array);
789 // Add 1 to receiver->length, and go to common element store code for Objects.
788 __ add(FieldOperand(edx, JSArray::kLengthOffset), 790 __ add(FieldOperand(edx, JSArray::kLengthOffset),
789 Immediate(Smi::FromInt(1))); 791 Immediate(Smi::FromInt(1)));
790 __ jmp(&fast); 792 __ jmp(&fast_object_without_map_check);
793
794 __ bind(&check_if_double_array);
795 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
796 __ j(not_equal, &slow);
797 // Add 1 to receiver->length, and go to common element store code for doubles.
798 __ add(FieldOperand(edx, JSArray::kLengthOffset),
799 Immediate(Smi::FromInt(1)));
800 __ jmp(&fast_double_without_map_check);
791 801
792 // Array case: Get the length and the elements array from the JS 802 // Array case: Get the length and the elements array from the JS
793 // array. Check that the array is in fast mode (and writable); if it 803 // array. Check that the array is in fast mode (and writable); if it
794 // is the length is always a smi. 804 // is the length is always a smi.
795 __ bind(&array); 805 __ bind(&array);
796 // eax: value 806 // eax: value
797 // edx: receiver, a JSArray 807 // edx: receiver, a JSArray
798 // ecx: key, a smi. 808 // ecx: key, a smi.
799 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); 809 // edi: receiver map
800 __ CheckMap(edi, FACTORY->fixed_array_map(), &slow, DONT_DO_SMI_CHECK); 810 __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
801 811
802 // Check the key against the length in the array, compute the 812 // Check the key against the length in the array and fall through to the
803 // address to store into and fall through to fast case. 813 // common store code.
804 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis. 814 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
805 __ j(above_equal, &extra); 815 __ j(above_equal, &extra);
806 816
807 // Fast case: Do the store. 817 // Fast case: Do the store, which could be either Object or double.
808 __ bind(&fast); 818 __ bind(&fast_object_with_map_check);
809 // eax: value 819 // eax: value
810 // ecx: key (a smi) 820 // ecx: key (a smi)
811 // edx: receiver 821 // edx: receiver
812 // edi: FixedArray receiver->elements 822 // ebx: FixedArray receiver->elements
813 __ mov(CodeGenerator::FixedArrayElementOperand(edi, ecx), eax); 823 // edi: receiver map
824 __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
825 __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
826 __ j(not_equal, &fast_double_with_map_check);
827 __ bind(&fast_object_without_map_check);
828 // Smi stores don't require further checks.
829 Label non_smi_value;
830 __ JumpIfNotSmi(eax, &non_smi_value);
831 // It's irrelevant whether array is smi-only or not when writing a smi.
832 __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
833 __ ret(0);
834
835 __ bind(&non_smi_value);
836 if (FLAG_smi_only_arrays) {
837 // Escape to slow case when writing non-smi into smi-only array.
838 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
839 __ CheckFastObjectElements(edi, &slow, Label::kNear);
840 }
841
842 // Fast elements array, store the value to the elements backing store.
843 __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
814 // Update write barrier for the elements array address. 844 // Update write barrier for the elements array address.
815 __ mov(edx, Operand(eax)); 845 __ mov(edx, eax); // Preserve the value which is returned.
816 __ RecordWrite(edi, 0, edx, ecx); 846 __ RecordWriteArray(
847 ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
848 __ ret(0);
849
850 __ bind(&fast_double_with_map_check);
851 // Check for fast double array case. If this fails, call through to the
852 // runtime.
853 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
854 __ j(not_equal, &slow);
855 __ bind(&fast_double_without_map_check);
856 // If the value is a number, store it as a double in the FastDoubleElements
857 // array.
858 __ StoreNumberToDoubleElements(eax, ebx, ecx, edx, xmm0, &slow, false);
817 __ ret(0); 859 __ ret(0);
818 } 860 }
819 861
820 862
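The reworked KeyedStoreIC::GenerateGeneric above now distinguishes the fast element kinds before falling back to the runtime: smi values store directly regardless of elements kind, other pointers require fast object elements (with FLAG_smi_only_arrays enabled, a non-smi written into a smi-only array escapes to the slow case) followed by a write barrier, and numbers written into a FixedDoubleArray are stored unboxed. A standalone sketch of that decision tree, with hypothetical types in place of the real heap objects:

#include <cstdio>

// Hypothetical enums/structs standing in for V8's element kinds and values.
enum class ElementsKind { kFastSmiOnly, kFastObject, kFastDouble, kOther };
struct Value { bool is_smi; bool is_number; };

// Mirrors the fast paths of the generic keyed-store stub in the diff above,
// assuming FLAG_smi_only_arrays is enabled.
const char* KeyedStorePath(ElementsKind kind, Value v) {
  switch (kind) {
    case ElementsKind::kFastSmiOnly:
    case ElementsKind::kFastObject:
      if (v.is_smi) return "store smi directly into the FixedArray";
      if (kind == ElementsKind::kFastObject)
        return "store pointer, then RecordWriteArray for the GC";
      return "runtime SetProperty (non-smi into a smi-only array)";
    case ElementsKind::kFastDouble:
      return v.is_number ? "store unboxed double into the FixedDoubleArray"
                         : "runtime SetProperty";
    default:
      return "runtime SetProperty";
  }
}

int main() {
  std::printf("%s\n", KeyedStorePath(ElementsKind::kFastObject, {false, true}));
}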
821 // The generated code does not accept smi keys. 863 // The generated code does not accept smi keys.
822 // The generated code falls through if both probes miss. 864 // The generated code falls through if both probes miss.
823 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, 865 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
824 int argc, 866 int argc,
825 Code::Kind kind, 867 Code::Kind kind,
826 Code::ExtraICState extra_ic_state) { 868 Code::ExtraICState extra_ic_state) {
(...skipping 117 matching lines...)
944 Counters* counters = masm->isolate()->counters(); 986 Counters* counters = masm->isolate()->counters();
945 if (id == IC::kCallIC_Miss) { 987 if (id == IC::kCallIC_Miss) {
946 __ IncrementCounter(counters->call_miss(), 1); 988 __ IncrementCounter(counters->call_miss(), 1);
947 } else { 989 } else {
948 __ IncrementCounter(counters->keyed_call_miss(), 1); 990 __ IncrementCounter(counters->keyed_call_miss(), 1);
949 } 991 }
950 992
951 // Get the receiver of the function from the stack; 1 ~ return address. 993 // Get the receiver of the function from the stack; 1 ~ return address.
952 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 994 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
953 995
954 // Enter an internal frame. 996 {
955 __ EnterInternalFrame(); 997 FrameScope scope(masm, StackFrame::INTERNAL);
956 998
957 // Push the receiver and the name of the function. 999 // Push the receiver and the name of the function.
958 __ push(edx); 1000 __ push(edx);
959 __ push(ecx); 1001 __ push(ecx);
960 1002
961 // Call the entry. 1003 // Call the entry.
962 CEntryStub stub(1); 1004 CEntryStub stub(1);
963 __ mov(eax, Immediate(2)); 1005 __ mov(eax, Immediate(2));
964 __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate()))); 1006 __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
965 __ CallStub(&stub); 1007 __ CallStub(&stub);
966 1008
967 // Move result to edi and exit the internal frame. 1009 // Move result to edi and exit the internal frame.
968 __ mov(edi, eax); 1010 __ mov(edi, eax);
969 __ LeaveInternalFrame(); 1011 }
970 1012
971 // Check if the receiver is a global object of some sort. 1013 // Check if the receiver is a global object of some sort.
972 // This can happen only for regular CallIC but not KeyedCallIC. 1014 // This can happen only for regular CallIC but not KeyedCallIC.
973 if (id == IC::kCallIC_Miss) { 1015 if (id == IC::kCallIC_Miss) {
974 Label invoke, global; 1016 Label invoke, global;
975 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver 1017 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver
976 __ JumpIfSmi(edx, &invoke, Label::kNear); 1018 __ JumpIfSmi(edx, &invoke, Label::kNear);
977 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); 1019 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
978 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 1020 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
979 __ cmp(ebx, JS_GLOBAL_OBJECT_TYPE); 1021 __ cmp(ebx, JS_GLOBAL_OBJECT_TYPE);
(...skipping 124 matching lines...)
1104 __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1); 1146 __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1105 __ jmp(&do_call); 1147 __ jmp(&do_call);
1106 1148
1107 __ bind(&slow_reload_receiver); 1149 __ bind(&slow_reload_receiver);
1108 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1150 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1109 1151
1110 __ bind(&slow_load); 1152 __ bind(&slow_load);
1111 // This branch is taken when calling KeyedCallIC_Miss is neither required 1153 // This branch is taken when calling KeyedCallIC_Miss is neither required
1112 // nor beneficial. 1154 // nor beneficial.
1113 __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1); 1155 __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
1114 __ EnterInternalFrame(); 1156
1115 __ push(ecx); // save the key 1157 {
1116 __ push(edx); // pass the receiver 1158 FrameScope scope(masm, StackFrame::INTERNAL);
1117 __ push(ecx); // pass the key 1159 __ push(ecx); // save the key
1118 __ CallRuntime(Runtime::kKeyedGetProperty, 2); 1160 __ push(edx); // pass the receiver
1119 __ pop(ecx); // restore the key 1161 __ push(ecx); // pass the key
1120 __ LeaveInternalFrame(); 1162 __ CallRuntime(Runtime::kKeyedGetProperty, 2);
1163 __ pop(ecx); // restore the key
1164 // Leave the internal frame.
1165 }
1166
1121 __ mov(edi, eax); 1167 __ mov(edi, eax);
1122 __ jmp(&do_call); 1168 __ jmp(&do_call);
1123 1169
1124 __ bind(&check_string); 1170 __ bind(&check_string);
1125 GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow_call); 1171 GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow_call);
1126 1172
1127 // The key is known to be a symbol. 1173 // The key is known to be a symbol.
1128 // If the receiver is a regular JS object with slow properties then do 1174 // If the receiver is a regular JS object with slow properties then do
1129 // a quick inline probe of the receiver's dictionary. 1175 // a quick inline probe of the receiver's dictionary.
1130 // Otherwise do the monomorphic cache probe. 1176 // Otherwise do the monomorphic cache probe.
(...skipping 501 matching lines...)
1632 Condition cc = *jmp_address == Assembler::kJncShortOpcode 1678 Condition cc = *jmp_address == Assembler::kJncShortOpcode
1633 ? not_zero 1679 ? not_zero
1634 : zero; 1680 : zero;
1635 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); 1681 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1636 } 1682 }
1637 1683
1638 1684
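The patching helper whose tail is shown just above rewrites a one-byte short-jump opcode in place to toggle the inlined smi check: a jnc short jump becomes jnz, anything else becomes jz. A standalone sketch of that byte patch, using the standard ia32 short-Jcc encoding (0x70 | condition code) as an assumption for the named constants:

#include <cstdint>
#include <cstdio>

// Assumed ia32 encodings mirroring Assembler::kJccShortPrefix,
// Assembler::kJncShortOpcode and the zero/not_zero condition codes.
constexpr uint8_t kJccShortPrefix = 0x70;
constexpr uint8_t kJncShortOpcode = 0x73;  // jnc / jae
constexpr uint8_t kZero = 0x4;             // jz
constexpr uint8_t kNotZero = 0x5;          // jnz

// Mirrors the visible patching logic: flip the jump condition in place.
void PatchShortJump(uint8_t* jmp_address) {
  uint8_t cc = (*jmp_address == kJncShortOpcode) ? kNotZero : kZero;
  *jmp_address = static_cast<uint8_t>(kJccShortPrefix | cc);
}

int main() {
  uint8_t code = kJncShortOpcode;  // jnc ...
  PatchShortJump(&code);           // now jnz ...
  std::printf("patched opcode: 0x%02x\n", code);
}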
1639 } } // namespace v8::internal 1685 } } // namespace v8::internal
1640 1686
1641 #endif // V8_TARGET_ARCH_IA32 1687 #endif // V8_TARGET_ARCH_IA32