| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 90 matching lines...) |
| 101 // |done| label if a property with the given name is found leaving the | 101 // |done| label if a property with the given name is found leaving the |
| 102 // index into the dictionary in |r1|. Jump to the |miss| label | 102 // index into the dictionary in |r1|. Jump to the |miss| label |
| 103 // otherwise. | 103 // otherwise. |
| 104 static void GenerateStringDictionaryProbes(MacroAssembler* masm, | 104 static void GenerateStringDictionaryProbes(MacroAssembler* masm, |
| 105 Label* miss, | 105 Label* miss, |
| 106 Label* done, | 106 Label* done, |
| 107 Register elements, | 107 Register elements, |
| 108 Register name, | 108 Register name, |
| 109 Register r0, | 109 Register r0, |
| 110 Register r1) { | 110 Register r1) { |
| 111 // Assert that name contains a string. |
| 112 if (FLAG_debug_code) __ AbortIfNotString(name); |
| 113 |
| 111 // Compute the capacity mask. | 114 // Compute the capacity mask. |
| 112 const int kCapacityOffset = | 115 const int kCapacityOffset = |
| 113 StringDictionary::kHeaderSize + | 116 StringDictionary::kHeaderSize + |
| 114 StringDictionary::kCapacityIndex * kPointerSize; | 117 StringDictionary::kCapacityIndex * kPointerSize; |
| 115 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); | 118 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); |
| 116 __ decl(r0); | 119 __ decl(r0); |
| 117 | 120 |
| 118 // Generate an unrolled loop that performs a few probes before | 121 // Generate an unrolled loop that performs a few probes before |
| 119 // giving up. Measurements done on Gmail indicate that 2 probes | 122 // giving up. Measurements done on Gmail indicate that 2 probes |
| 120 // cover ~93% of loads from dictionaries. | 123 // cover ~93% of loads from dictionaries. |
| (...skipping 269 matching lines...) |
| 390 // -- rsp[0] : return address | 393 // -- rsp[0] : return address |
| 391 // ----------------------------------- | 394 // ----------------------------------- |
| 392 Label miss; | 395 Label miss; |
| 393 | 396 |
| 394 StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss); | 397 StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss); |
| 395 __ bind(&miss); | 398 __ bind(&miss); |
| 396 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); | 399 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); |
| 397 } | 400 } |
| 398 | 401 |
| 399 | 402 |
| 400 void LoadIC::GenerateStringLength(MacroAssembler* masm) { | 403 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) { |
| 401 // ----------- S t a t e ------------- | 404 // ----------- S t a t e ------------- |
| 402 // -- rax : receiver | 405 // -- rax : receiver |
| 403 // -- rcx : name | 406 // -- rcx : name |
| 404 // -- rsp[0] : return address | 407 // -- rsp[0] : return address |
| 405 // ----------------------------------- | 408 // ----------------------------------- |
| 406 Label miss; | 409 Label miss; |
| 407 | 410 |
| 408 StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss); | 411 StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss, |
| 412 support_wrappers); |
| 409 __ bind(&miss); | 413 __ bind(&miss); |
| 410 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); | 414 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); |
| 411 } | 415 } |
| 412 | 416 |
| 413 | 417 |
| 414 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) { | 418 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) { |
| 415 // ----------- S t a t e ------------- | 419 // ----------- S t a t e ------------- |
| 416 // -- rax : receiver | 420 // -- rax : receiver |
| 417 // -- rcx : name | 421 // -- rcx : name |
| 418 // -- rsp[0] : return address | 422 // -- rsp[0] : return address |
| (...skipping 153 matching lines...) |
| 572 rax, | 576 rax, |
| 573 rcx, | 577 rcx, |
| 574 rbx, | 578 rbx, |
| 575 rax, | 579 rax, |
| 576 NULL, | 580 NULL, |
| 577 &slow); | 581 &slow); |
| 578 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1); | 582 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1); |
| 579 __ ret(0); | 583 __ ret(0); |
| 580 | 584 |
| 581 __ bind(&check_pixel_array); | 585 __ bind(&check_pixel_array); |
| 582 // Check whether the elements object is a pixel array. | 586 GenerateFastPixelArrayLoad(masm, |
| 583 // rdx: receiver | 587 rdx, |
| 584 // rax: key | 588 rax, |
| 585 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); | 589 rcx, |
| 586 __ SmiToInteger32(rbx, rax); // Used on both directions of next branch. | 590 rbx, |
| 587 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 591 rax, |
| 588 Heap::kPixelArrayMapRootIndex); | 592 &check_number_dictionary, |
| 589 __ j(not_equal, &check_number_dictionary); | 593 NULL, |
| 590 __ cmpl(rbx, FieldOperand(rcx, PixelArray::kLengthOffset)); | 594 &slow); |
| 591 __ j(above_equal, &slow); | |
| 592 __ movq(rax, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); | |
| 593 __ movzxbq(rax, Operand(rax, rbx, times_1, 0)); | |
| 594 __ Integer32ToSmi(rax, rax); | |
| 595 __ ret(0); | |
| 596 | 595 |
| 597 __ bind(&check_number_dictionary); | 596 __ bind(&check_number_dictionary); |
| 598 // Check whether the elements is a number dictionary. | 597 // Check whether the elements is a number dictionary. |
| 599 // rdx: receiver | 598 // rdx: receiver |
| 600 // rax: key | 599 // rax: key |
| 601 // rbx: key as untagged int32 | 600 // rbx: key as untagged int32 |
| 602 // rcx: elements | 601 // rcx: elements |
| 603 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 602 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 604 Heap::kHashTableMapRootIndex); | 603 Heap::kHashTableMapRootIndex); |
| 605 __ j(not_equal, &slow); | 604 __ j(not_equal, &slow); |
| (...skipping 114 matching lines...) |
| 720 __ ret(0); | 719 __ ret(0); |
| 721 | 720 |
| 722 StubRuntimeCallHelper call_helper; | 721 StubRuntimeCallHelper call_helper; |
| 723 char_at_generator.GenerateSlow(masm, call_helper); | 722 char_at_generator.GenerateSlow(masm, call_helper); |
| 724 | 723 |
| 725 __ bind(&miss); | 724 __ bind(&miss); |
| 726 GenerateMiss(masm); | 725 GenerateMiss(masm); |
| 727 } | 726 } |
| 728 | 727 |
| 729 | 728 |
| 730 void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm, | |
| 731 ExternalArrayType array_type) { | |
| 732 // ----------- S t a t e ------------- | |
| 733 // -- rax : key | |
| 734 // -- rdx : receiver | |
| 735 // -- rsp[0] : return address | |
| 736 // ----------------------------------- | |
| 737 Label slow; | |
| 738 | |
| 739 // Check that the object isn't a smi. | |
| 740 __ JumpIfSmi(rdx, &slow); | |
| 741 | |
| 742 // Check that the key is a smi. | |
| 743 __ JumpIfNotSmi(rax, &slow); | |
| 744 | |
| 745 // Check that the object is a JS object. | |
| 746 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); | |
| 747 __ j(not_equal, &slow); | |
| 748 // Check that the receiver does not require access checks. We need | |
| 749 // to check this explicitly since this generic stub does not perform | |
| 750 // map checks. The map is already in rdx. | |
| 751 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), | |
| 752 Immediate(1 << Map::kIsAccessCheckNeeded)); | |
| 753 __ j(not_zero, &slow); | |
| 754 | |
| 755 // Check that the elements array is the appropriate type of | |
| 756 // ExternalArray. | |
| 757 // rax: index (as a smi) | |
| 758 // rdx: JSObject | |
| 759 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | |
| 760 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | |
| 761 Heap::RootIndexForExternalArrayType(array_type)); | |
| 762 __ j(not_equal, &slow); | |
| 763 | |
| 764 // Check that the index is in range. | |
| 765 __ SmiToInteger32(rcx, rax); | |
| 766 __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset)); | |
| 767 // Unsigned comparison catches both negative and too-large values. | |
| 768 __ j(above_equal, &slow); | |
| 769 | |
| 770 // rax: index (as a smi) | |
| 771 // rdx: receiver (JSObject) | |
| 772 // rcx: untagged index | |
| 773 // rbx: elements array | |
| 774 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); | |
| 775 // rbx: base pointer of external storage | |
| 776 switch (array_type) { | |
| 777 case kExternalByteArray: | |
| 778 __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0)); | |
| 779 break; | |
| 780 case kExternalUnsignedByteArray: | |
| 781 __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0)); | |
| 782 break; | |
| 783 case kExternalShortArray: | |
| 784 __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0)); | |
| 785 break; | |
| 786 case kExternalUnsignedShortArray: | |
| 787 __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0)); | |
| 788 break; | |
| 789 case kExternalIntArray: | |
| 790 __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0)); | |
| 791 break; | |
| 792 case kExternalUnsignedIntArray: | |
| 793 __ movl(rcx, Operand(rbx, rcx, times_4, 0)); | |
| 794 break; | |
| 795 case kExternalFloatArray: | |
| 796 __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0)); | |
| 797 break; | |
| 798 default: | |
| 799 UNREACHABLE(); | |
| 800 break; | |
| 801 } | |
| 802 | |
| 803 // rax: index | |
| 804 // rdx: receiver | |
| 805 // For integer array types: | |
| 806 // rcx: value | |
| 807 // For floating-point array type: | |
| 808 // xmm0: value as double. | |
| 809 | |
| 810 ASSERT(kSmiValueSize == 32); | |
| 811 if (array_type == kExternalUnsignedIntArray) { | |
| 812 // For the UnsignedInt array type, we need to see whether | |
| 813 // the value can be represented in a Smi. If not, we need to convert | |
| 814 // it to a HeapNumber. | |
| 815 NearLabel box_int; | |
| 816 | |
| 817 __ JumpIfUIntNotValidSmiValue(rcx, &box_int); | |
| 818 | |
| 819 __ Integer32ToSmi(rax, rcx); | |
| 820 __ ret(0); | |
| 821 | |
| 822 __ bind(&box_int); | |
| 823 | |
| 824 // Allocate a HeapNumber for the int and perform int-to-double | |
| 825 // conversion. | |
| 826 // The value is zero-extended since we loaded the value from memory | |
| 827 // with movl. | |
| 828 __ cvtqsi2sd(xmm0, rcx); | |
| 829 | |
| 830 __ AllocateHeapNumber(rcx, rbx, &slow); | |
| 831 // Set the value. | |
| 832 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); | |
| 833 __ movq(rax, rcx); | |
| 834 __ ret(0); | |
| 835 } else if (array_type == kExternalFloatArray) { | |
| 836 // For the floating-point array type, we need to always allocate a | |
| 837 // HeapNumber. | |
| 838 __ AllocateHeapNumber(rcx, rbx, &slow); | |
| 839 // Set the value. | |
| 840 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); | |
| 841 __ movq(rax, rcx); | |
| 842 __ ret(0); | |
| 843 } else { | |
| 844 __ Integer32ToSmi(rax, rcx); | |
| 845 __ ret(0); | |
| 846 } | |
| 847 | |
| 848 // Slow case: Jump to runtime. | |
| 849 __ bind(&slow); | |
| 850 __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1); | |
| 851 GenerateRuntimeGetProperty(masm); | |
| 852 } | |
| 853 | |
| 854 | |
| 855 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { | 729 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { |
| 856 // ----------- S t a t e ------------- | 730 // ----------- S t a t e ------------- |
| 857 // -- rax : key | 731 // -- rax : key |
| 858 // -- rdx : receiver | 732 // -- rdx : receiver |
| 859 // -- rsp[0] : return address | 733 // -- rsp[0] : return address |
| 860 // ----------------------------------- | 734 // ----------------------------------- |
| 861 Label slow; | 735 Label slow; |
| 862 | 736 |
| 863 // Check that the receiver isn't a smi. | 737 // Check that the receiver isn't a smi. |
| 864 __ JumpIfSmi(rdx, &slow); | 738 __ JumpIfSmi(rdx, &slow); |
| (...skipping 76 matching lines...) |
| 941 __ bind(&slow_with_tagged_index); | 815 __ bind(&slow_with_tagged_index); |
| 942 GenerateRuntimeSetProperty(masm); | 816 GenerateRuntimeSetProperty(masm); |
| 943 // Never returns to here. | 817 // Never returns to here. |
| 944 | 818 |
| 945 // Check whether the elements is a pixel array. | 819 // Check whether the elements is a pixel array. |
| 946 // rax: value | 820 // rax: value |
| 947 // rdx: receiver | 821 // rdx: receiver |
| 948 // rbx: receiver's elements array | 822 // rbx: receiver's elements array |
| 949 // rcx: index, zero-extended. | 823 // rcx: index, zero-extended. |
| 950 __ bind(&check_pixel_array); | 824 __ bind(&check_pixel_array); |
| 951 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | 825 GenerateFastPixelArrayStore(masm, |
| 952 Heap::kPixelArrayMapRootIndex); | 826 rdx, |
| 953 __ j(not_equal, &slow); | 827 rcx, |
| 954 // Check that the value is a smi. If a conversion is needed call into the | 828 rax, |
| 955 // runtime to convert and clamp. | 829 rbx, |
| 956 __ JumpIfNotSmi(rax, &slow); | 830 rdi, |
| 957 __ cmpl(rcx, FieldOperand(rbx, PixelArray::kLengthOffset)); | 831 false, |
| 958 __ j(above_equal, &slow); | 832 true, |
| 959 // No more bailouts to slow case on this path, so key not needed. | 833 NULL, |
| 960 __ SmiToInteger32(rdi, rax); | 834 &slow, |
| 961 { // Clamp the value to [0..255]. | 835 &slow, |
| 962 NearLabel done; | 836 &slow); |
| 963 __ testl(rdi, Immediate(0xFFFFFF00)); | |
| 964 __ j(zero, &done); | |
| 965 __ setcc(negative, rdi); // 1 if negative, 0 if positive. | |
| 966 __ decb(rdi); // 0 if negative, 255 if positive. | |
| 967 __ bind(&done); | |
| 968 } | |
| 969 __ movq(rbx, FieldOperand(rbx, PixelArray::kExternalPointerOffset)); | |
| 970 __ movb(Operand(rbx, rcx, times_1, 0), rdi); | |
| 971 __ ret(0); | |
| 972 | 837 |
| 973 // Extra capacity case: Check if there is extra capacity to | 838 // Extra capacity case: Check if there is extra capacity to |
| 974 // perform the store and update the length. Used for adding one | 839 // perform the store and update the length. Used for adding one |
| 975 // element to the array by writing to array[array.length]. | 840 // element to the array by writing to array[array.length]. |
| 976 __ bind(&extra); | 841 __ bind(&extra); |
| 977 // rax: value | 842 // rax: value |
| 978 // rdx: receiver (a JSArray) | 843 // rdx: receiver (a JSArray) |
| 979 // rbx: receiver's elements array (a FixedArray) | 844 // rbx: receiver's elements array (a FixedArray) |
| 980 // rcx: index | 845 // rcx: index |
| 981 // flags: smicompare (rdx.length(), rbx) | 846 // flags: smicompare (rdx.length(), rbx) |
| (...skipping 34 matching lines...) |
| 1016 __ ret(0); | 881 __ ret(0); |
| 1017 __ bind(&non_smi_value); | 882 __ bind(&non_smi_value); |
| 1018 // Slow case that needs to retain rcx for use by RecordWrite. | 883 // Slow case that needs to retain rcx for use by RecordWrite. |
| 1019 // Update write barrier for the elements array address. | 884 // Update write barrier for the elements array address. |
| 1020 __ movq(rdx, rax); | 885 __ movq(rdx, rax); |
| 1021 __ RecordWriteNonSmi(rbx, 0, rdx, rcx, kDontSaveFPRegs); | 886 __ RecordWriteNonSmi(rbx, 0, rdx, rcx, kDontSaveFPRegs); |
| 1022 __ ret(0); | 887 __ ret(0); |
| 1023 } | 888 } |
| 1024 | 889 |
| 1025 | 890 |
| 1026 void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm, | |
| 1027 ExternalArrayType array_type) { | |
| 1028 // ----------- S t a t e ------------- | |
| 1029 // -- rax : value | |
| 1030 // -- rcx : key | |
| 1031 // -- rdx : receiver | |
| 1032 // -- rsp[0] : return address | |
| 1033 // ----------------------------------- | |
| 1034 Label slow; | |
| 1035 | |
| 1036 // Check that the object isn't a smi. | |
| 1037 __ JumpIfSmi(rdx, &slow); | |
| 1038 // Get the map from the receiver. | |
| 1039 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | |
| 1040 // Check that the receiver does not require access checks. We need | |
| 1041 // to do this because this generic stub does not perform map checks. | |
| 1042 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | |
| 1043 Immediate(1 << Map::kIsAccessCheckNeeded)); | |
| 1044 __ j(not_zero, &slow); | |
| 1045 // Check that the key is a smi. | |
| 1046 __ JumpIfNotSmi(rcx, &slow); | |
| 1047 | |
| 1048 // Check that the object is a JS object. | |
| 1049 __ CmpInstanceType(rbx, JS_OBJECT_TYPE); | |
| 1050 __ j(not_equal, &slow); | |
| 1051 | |
| 1052 // Check that the elements array is the appropriate type of | |
| 1053 // ExternalArray. | |
| 1054 // rax: value | |
| 1055 // rcx: key (a smi) | |
| 1056 // rdx: receiver (a JSObject) | |
| 1057 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); | |
| 1058 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | |
| 1059 Heap::RootIndexForExternalArrayType(array_type)); | |
| 1060 __ j(not_equal, &slow); | |
| 1061 | |
| 1062 // Check that the index is in range. | |
| 1063 __ SmiToInteger32(rdi, rcx); // Untag the index. | |
| 1064 __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset)); | |
| 1065 // Unsigned comparison catches both negative and too-large values. | |
| 1066 __ j(above_equal, &slow); | |
| 1067 | |
| 1068 // Handle both smis and HeapNumbers in the fast path. Go to the | |
| 1069 // runtime for all other kinds of values. | |
| 1070 // rax: value | |
| 1071 // rcx: key (a smi) | |
| 1072 // rdx: receiver (a JSObject) | |
| 1073 // rbx: elements array | |
| 1074 // rdi: untagged key | |
| 1075 NearLabel check_heap_number; | |
| 1076 __ JumpIfNotSmi(rax, &check_heap_number); | |
| 1077 // No more branches to slow case on this path. Key and receiver not needed. | |
| 1078 __ SmiToInteger32(rdx, rax); | |
| 1079 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); | |
| 1080 // rbx: base pointer of external storage | |
| 1081 switch (array_type) { | |
| 1082 case kExternalByteArray: | |
| 1083 case kExternalUnsignedByteArray: | |
| 1084 __ movb(Operand(rbx, rdi, times_1, 0), rdx); | |
| 1085 break; | |
| 1086 case kExternalShortArray: | |
| 1087 case kExternalUnsignedShortArray: | |
| 1088 __ movw(Operand(rbx, rdi, times_2, 0), rdx); | |
| 1089 break; | |
| 1090 case kExternalIntArray: | |
| 1091 case kExternalUnsignedIntArray: | |
| 1092 __ movl(Operand(rbx, rdi, times_4, 0), rdx); | |
| 1093 break; | |
| 1094 case kExternalFloatArray: | |
| 1095 // Need to perform int-to-float conversion. | |
| 1096 __ cvtlsi2ss(xmm0, rdx); | |
| 1097 __ movss(Operand(rbx, rdi, times_4, 0), xmm0); | |
| 1098 break; | |
| 1099 default: | |
| 1100 UNREACHABLE(); | |
| 1101 break; | |
| 1102 } | |
| 1103 __ ret(0); | |
| 1104 | |
| 1105 __ bind(&check_heap_number); | |
| 1106 // rax: value | |
| 1107 // rcx: key (a smi) | |
| 1108 // rdx: receiver (a JSObject) | |
| 1109 // rbx: elements array | |
| 1110 // rdi: untagged key | |
| 1111 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister); | |
| 1112 __ j(not_equal, &slow); | |
| 1113 // No more branches to slow case on this path. | |
| 1114 | |
| 1115 // The WebGL specification leaves the behavior of storing NaN and | |
| 1116 // +/-Infinity into integer arrays basically undefined. For more | |
| 1117 // reproducible behavior, convert these to zero. | |
| 1118 __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset)); | |
| 1119 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); | |
| 1120 // rdi: untagged index | |
| 1121 // rbx: base pointer of external storage | |
| 1122 // top of FPU stack: value | |
| 1123 if (array_type == kExternalFloatArray) { | |
| 1124 __ cvtsd2ss(xmm0, xmm0); | |
| 1125 __ movss(Operand(rbx, rdi, times_4, 0), xmm0); | |
| 1126 __ ret(0); | |
| 1127 } else { | |
| 1128 // Need to perform float-to-int conversion. | |
| 1129 // Test the value for NaN. | |
| 1130 | |
| 1131 // Convert to int32 and store the low byte/word. | |
| 1132 // If the value is NaN or +/-infinity, the result is 0x80000000, | |
| 1133 // which is automatically zero when taken mod 2^n, n < 32. | |
| 1134 // rdx: value (converted to an untagged integer) | |
| 1135 // rdi: untagged index | |
| 1136 // rbx: base pointer of external storage | |
| 1137 switch (array_type) { | |
| 1138 case kExternalByteArray: | |
| 1139 case kExternalUnsignedByteArray: | |
| 1140 __ cvtsd2si(rdx, xmm0); | |
| 1141 __ movb(Operand(rbx, rdi, times_1, 0), rdx); | |
| 1142 break; | |
| 1143 case kExternalShortArray: | |
| 1144 case kExternalUnsignedShortArray: | |
| 1145 __ cvtsd2si(rdx, xmm0); | |
| 1146 __ movw(Operand(rbx, rdi, times_2, 0), rdx); | |
| 1147 break; | |
| 1148 case kExternalIntArray: | |
| 1149 case kExternalUnsignedIntArray: { | |
| 1150 // Convert to int64, so that NaN and infinities become | |
| 1151 // 0x8000000000000000, which is zero mod 2^32. | |
| 1152 __ cvtsd2siq(rdx, xmm0); | |
| 1153 __ movl(Operand(rbx, rdi, times_4, 0), rdx); | |
| 1154 break; | |
| 1155 } | |
| 1156 default: | |
| 1157 UNREACHABLE(); | |
| 1158 break; | |
| 1159 } | |
| 1160 __ ret(0); | |
| 1161 } | |
| 1162 | |
| 1163 // Slow case: call runtime. | |
| 1164 __ bind(&slow); | |
| 1165 GenerateRuntimeSetProperty(masm); | |
| 1166 } | |
| 1167 | |
| 1168 | |
| 1169 // The generated code does not accept smi keys. | 891 // The generated code does not accept smi keys. |
| 1170 // The generated code falls through if both probes miss. | 892 // The generated code falls through if both probes miss. |
| 1171 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, | 893 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, |
| 1172 int argc, | 894 int argc, |
| 1173 Code::Kind kind) { | 895 Code::Kind kind) { |
| 1174 // ----------- S t a t e ------------- | 896 // ----------- S t a t e ------------- |
| 1175 // rcx : function name | 897 // rcx : function name |
| 1176 // rdx : receiver | 898 // rdx : receiver |
| 1177 // ----------------------------------- | 899 // ----------------------------------- |
| 1178 Label number, non_number, non_string, boolean, probe, miss; | 900 Label number, non_number, non_string, boolean, probe, miss; |
| 1179 | 901 |
| 1180 // Probe the stub cache. | 902 // Probe the stub cache. |
| 1181 Code::Flags flags = | 903 Code::Flags flags = Code::ComputeFlags(kind, |
| 1182 Code::ComputeFlags(kind, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc); | 904 NOT_IN_LOOP, |
| 905 MONOMORPHIC, |
| 906 Code::kNoExtraICState, |
| 907 NORMAL, |
| 908 argc); |
| 1183 StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, rax); | 909 StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, rax); |
| 1184 | 910 |
| 1185 // If the stub cache probing failed, the receiver might be a value. | 911 // If the stub cache probing failed, the receiver might be a value. |
| 1186 // For value objects, we use the map of the prototype objects for | 912 // For value objects, we use the map of the prototype objects for |
| 1187 // the corresponding JSValue for the cache and that is what we need | 913 // the corresponding JSValue for the cache and that is what we need |
| 1188 // to probe. | 914 // to probe. |
| 1189 // | 915 // |
| 1190 // Check for number. | 916 // Check for number. |
| 1191 __ JumpIfSmi(rdx, &number); | 917 __ JumpIfSmi(rdx, &number); |
| 1192 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx); | 918 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx); |
| (...skipping 301 matching lines...) |
| 1494 // ----------- S t a t e ------------- | 1220 // ----------- S t a t e ------------- |
| 1495 // rcx : function name | 1221 // rcx : function name |
| 1496 // rsp[0] : return address | 1222 // rsp[0] : return address |
| 1497 // rsp[8] : argument argc | 1223 // rsp[8] : argument argc |
| 1498 // rsp[16] : argument argc - 1 | 1224 // rsp[16] : argument argc - 1 |
| 1499 // ... | 1225 // ... |
| 1500 // rsp[argc * 8] : argument 1 | 1226 // rsp[argc * 8] : argument 1 |
| 1501 // rsp[(argc + 1) * 8] : argument 0 = receiver | 1227 // rsp[(argc + 1) * 8] : argument 0 = receiver |
| 1502 // ----------------------------------- | 1228 // ----------------------------------- |
| 1503 | 1229 |
| 1230 // Check if the name is a string. |
| 1231 Label miss; |
| 1232 __ JumpIfSmi(rcx, &miss); |
| 1233 Condition cond = masm->IsObjectStringType(rcx, rax, rax); |
| 1234 __ j(NegateCondition(cond), &miss); |
| 1504 GenerateCallNormal(masm, argc); | 1235 GenerateCallNormal(masm, argc); |
| 1236 __ bind(&miss); |
| 1505 GenerateMiss(masm, argc); | 1237 GenerateMiss(masm, argc); |
| 1506 } | 1238 } |
| 1507 | 1239 |
| 1508 | 1240 |
| 1509 void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) { | 1241 void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) { |
| 1510 // ----------- S t a t e ------------- | 1242 // ----------- S t a t e ------------- |
| 1511 // rcx : function name | 1243 // rcx : function name |
| 1512 // rsp[0] : return address | 1244 // rsp[0] : return address |
| 1513 // rsp[8] : argument argc | 1245 // rsp[8] : argument argc |
| 1514 // rsp[16] : argument argc - 1 | 1246 // rsp[16] : argument argc - 1 |
| (...skipping 219 matching lines...) |
| 1734 __ pop(rbx); | 1466 __ pop(rbx); |
| 1735 __ push(rdx); // receiver | 1467 __ push(rdx); // receiver |
| 1736 __ push(rax); // name | 1468 __ push(rax); // name |
| 1737 __ push(rbx); // return address | 1469 __ push(rbx); // return address |
| 1738 | 1470 |
| 1739 // Perform tail call to the entry. | 1471 // Perform tail call to the entry. |
| 1740 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 1472 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
| 1741 } | 1473 } |
| 1742 | 1474 |
| 1743 | 1475 |
| 1744 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { | 1476 void StoreIC::GenerateMegamorphic(MacroAssembler* masm, |
| 1477 Code::ExtraICState extra_ic_state) { |
| 1745 // ----------- S t a t e ------------- | 1478 // ----------- S t a t e ------------- |
| 1746 // -- rax : value | 1479 // -- rax : value |
| 1747 // -- rcx : name | 1480 // -- rcx : name |
| 1748 // -- rdx : receiver | 1481 // -- rdx : receiver |
| 1749 // -- rsp[0] : return address | 1482 // -- rsp[0] : return address |
| 1750 // ----------------------------------- | 1483 // ----------------------------------- |
| 1751 | 1484 |
| 1752 // Get the receiver from the stack and probe the stub cache. | 1485 // Get the receiver from the stack and probe the stub cache. |
| 1753 Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, | 1486 Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, |
| 1754 NOT_IN_LOOP, | 1487 NOT_IN_LOOP, |
| 1755 MONOMORPHIC); | 1488 MONOMORPHIC, |
| 1489 extra_ic_state); |
| 1756 StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg); | 1490 StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg); |
| 1757 | 1491 |
| 1758 // Cache miss: Jump to runtime. | 1492 // Cache miss: Jump to runtime. |
| 1759 GenerateMiss(masm); | 1493 GenerateMiss(masm); |
| 1760 } | 1494 } |
| 1761 | 1495 |
| 1762 | 1496 |
| 1763 void StoreIC::GenerateMiss(MacroAssembler* masm) { | 1497 void StoreIC::GenerateMiss(MacroAssembler* masm) { |
| 1764 // ----------- S t a t e ------------- | 1498 // ----------- S t a t e ------------- |
| 1765 // -- rax : value | 1499 // -- rax : value |
| (...skipping 168 matching lines...) |
| 1934 return greater_equal; | 1668 return greater_equal; |
| 1935 case Token::GTE: | 1669 case Token::GTE: |
| 1936 return greater_equal; | 1670 return greater_equal; |
| 1937 default: | 1671 default: |
| 1938 UNREACHABLE(); | 1672 UNREACHABLE(); |
| 1939 return no_condition; | 1673 return no_condition; |
| 1940 } | 1674 } |
| 1941 } | 1675 } |
| 1942 | 1676 |
| 1943 | 1677 |
| 1678 static bool HasInlinedSmiCode(Address address) { |
| 1679 // The address of the instruction following the call. |
| 1680 Address test_instruction_address = |
| 1681 address + Assembler::kCallTargetAddressOffset; |
| 1682 |
| 1683 // If the instruction following the call is not a test al, nothing |
| 1684 // was inlined. |
| 1685 return *test_instruction_address == Assembler::kTestAlByte; |
| 1686 } |
| 1687 |
| 1688 |
| 1944 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { | 1689 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) { |
| 1945 HandleScope scope; | 1690 HandleScope scope; |
| 1946 Handle<Code> rewritten; | 1691 Handle<Code> rewritten; |
| 1947 State previous_state = GetState(); | 1692 State previous_state = GetState(); |
| 1948 State state = TargetState(previous_state, false, x, y); | 1693 |
| 1694 State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y); |
| 1949 if (state == GENERIC) { | 1695 if (state == GENERIC) { |
| 1950 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); | 1696 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); |
| 1951 rewritten = stub.GetCode(); | 1697 rewritten = stub.GetCode(); |
| 1952 } else { | 1698 } else { |
| 1953 ICCompareStub stub(op_, state); | 1699 ICCompareStub stub(op_, state); |
| 1954 rewritten = stub.GetCode(); | 1700 rewritten = stub.GetCode(); |
| 1955 } | 1701 } |
| 1956 set_target(*rewritten); | 1702 set_target(*rewritten); |
| 1957 | 1703 |
| 1958 #ifdef DEBUG | 1704 #ifdef DEBUG |
| 1959 if (FLAG_trace_ic) { | 1705 if (FLAG_trace_ic) { |
| 1960 PrintF("[CompareIC (%s->%s)#%s]\n", | 1706 PrintF("[CompareIC (%s->%s)#%s]\n", |
| 1961 GetStateName(previous_state), | 1707 GetStateName(previous_state), |
| 1962 GetStateName(state), | 1708 GetStateName(state), |
| 1963 Token::Name(op_)); | 1709 Token::Name(op_)); |
| 1964 } | 1710 } |
| 1965 #endif | 1711 #endif |
| 1712 |
| 1713 // Activate inlined smi code. |
| 1714 if (previous_state == UNINITIALIZED) { |
| 1715 PatchInlinedSmiCode(address()); |
| 1716 } |
| 1966 } | 1717 } |
| 1967 | 1718 |
| 1968 void PatchInlinedSmiCode(Address address) { | 1719 void PatchInlinedSmiCode(Address address) { |
| 1969 UNIMPLEMENTED(); | 1720 // The address of the instruction following the call. |
| 1721 Address test_instruction_address = |
| 1722 address + Assembler::kCallTargetAddressOffset; |
| 1723 |
| 1724 // If the instruction following the call is not a test al, nothing |
| 1725 // was inlined. |
| 1726 if (*test_instruction_address != Assembler::kTestAlByte) { |
| 1727 ASSERT(*test_instruction_address == Assembler::kNopByte); |
| 1728 return; |
| 1729 } |
| 1730 |
| 1731 Address delta_address = test_instruction_address + 1; |
| 1732 // The delta to the start of the map check instruction and the |
| 1733 // condition code uses at the patched jump. |
| 1734 int8_t delta = *reinterpret_cast<int8_t*>(delta_address); |
| 1735 if (FLAG_trace_ic) { |
| 1736 PrintF("[ patching ic at %p, test=%p, delta=%d\n", |
| 1737 address, test_instruction_address, delta); |
| 1738 } |
| 1739 |
| 1740 // Patch with a short conditional jump. There must be a |
| 1741 // short jump-if-carry/not-carry at this position. |
| 1742 Address jmp_address = test_instruction_address - delta; |
| 1743 ASSERT(*jmp_address == Assembler::kJncShortOpcode || |
| 1744 *jmp_address == Assembler::kJcShortOpcode); |
| 1745 Condition cc = *jmp_address == Assembler::kJncShortOpcode |
| 1746 ? not_zero |
| 1747 : zero; |
| 1748 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
| 1970 } | 1749 } |
| 1971 | 1750 |
| 1972 | 1751 |
| 1973 } } // namespace v8::internal | 1752 } } // namespace v8::internal |
| 1974 | 1753 |
| 1975 #endif // V8_TARGET_ARCH_X64 | 1754 #endif // V8_TARGET_ARCH_X64 |
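A few notes on the hunks above, with small standalone sketches. All sketches are C++, illustrative only, and use invented helper names; none of them is V8's actual implementation.

GenerateStringDictionaryProbes emits an open-addressed lookup: it loads the dictionary capacity, turns it into a mask (capacity - 1), and unrolls a handful of probes before falling back to |miss|. The sketch below mirrors that shape with a plain std::vector-backed table; Entry, HashName, and Probe are names made up for the example, and the hash and probe-offset formulas are stand-ins rather than V8's exact sequence.

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical flat dictionary: power-of-two capacity, empty key == empty slot.
struct Entry {
  std::string key;
  int value;
};

uint32_t HashName(const std::string& name) {
  uint32_t hash = 0;
  for (char c : name) hash = hash * 31 + static_cast<unsigned char>(c);
  return hash;
}

// Mirrors the shape of the generated probing: mask = capacity - 1, then a few
// probes at growing offsets.  The offset formula here is a stand-in, not
// necessarily V8's exact probe sequence.
int Probe(const std::vector<Entry>& table, const std::string& name) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
  const uint32_t hash = HashName(name);
  const int kProbes = 4;  // the stub unrolls only a few probes inline
  for (int i = 0; i < kProbes; ++i) {
    uint32_t index = (hash + i * (i + 1) / 2) & mask;
    if (table[index].key == name) return static_cast<int>(index);  // -> |done|
    if (table[index].key.empty()) break;                           // -> |miss|
  }
  return -1;
}

int main() {
  std::vector<Entry> table(8);
  table[HashName("length") & 7] = {"length", 7};
  std::cout << Probe(table, "length") << "\n";   // the slot index
  std::cout << Probe(table, "missing") << "\n";  // -1
  return 0;
}
```

The reason only a couple of probes are unrolled in the stub is the measurement quoted in the comment: two probes already cover roughly 93% of dictionary loads, so the rare long probe chains can afford the slower fallback path.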
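The old inline pixel-array store (now routed through GenerateFastPixelArrayStore) clamps the stored smi to [0, 255] without branches: testl checks whether any high bits are set, and setcc(negative) followed by decb maps every out-of-range value to 0 or 255. Below is a C++ rendering of that trick; ClampToByte is a hypothetical name used only for this illustration.

```cpp
#include <cstdint>
#include <cstdio>

// Branch-free clamp to [0, 255], mirroring the old testl/setcc(negative)/decb
// sequence: in-range values pass through, negative values become 0, and
// values above 255 become 255.
uint8_t ClampToByte(int32_t value) {
  if ((value & 0xFFFFFF00) == 0) return static_cast<uint8_t>(value);  // testl fast path
  uint8_t is_negative = value < 0 ? 1 : 0;       // setcc(negative)
  return static_cast<uint8_t>(is_negative - 1);  // decb: 1 -> 0, 0 -> 255
}

int main() {
  std::printf("%d %d %d %d\n", ClampToByte(-5), ClampToByte(0),
              ClampToByte(97), ClampToByte(300));  // 0 0 97 255
  return 0;
}
```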
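The removed external-array store path converts doubles with cvtsd2siq before writing 32-bit lanes because, on x86-64, converting NaN, +/-Infinity, or an out-of-range double yields the "integer indefinite" value 0x8000000000000000, whose low 32 bits are zero — which gives the reproducible store-zero behavior the WebGL comment asks for. A sketch of the same instruction via its SSE2 intrinsic (x86-64 only, illustrative):

```cpp
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <immintrin.h>

// cvtsd2si with a 64-bit destination turns NaN, +/-Infinity and out-of-range
// doubles into 0x8000000000000000; its low 32 bits are zero, which is what a
// 32-bit store of the result then writes.
int main() {
  const double inputs[] = {NAN, INFINITY, -INFINITY, 1e40, 3.5};
  for (double d : inputs) {
    int64_t wide = _mm_cvtsd_si64(_mm_set_sd(d));   // cvtsd2siq
    uint32_t stored = static_cast<uint32_t>(wide);  // what the movl would write
    std::printf("%-5g -> %lld (stored as %u)\n",
                d, static_cast<long long>(wide), stored);
  }
  return 0;
}
```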
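HasInlinedSmiCode and PatchInlinedSmiCode rely on a fixed byte pattern at the IC call site: the call is followed either by a one-byte nop (nothing inlined) or by test al, imm8, whose immediate is the distance back to a short jc/jnc guarding the inlined smi check; patching rewrites that jump into jnz/jz so the inlined path becomes live. The sketch below redoes the byte arithmetic on a plain byte buffer with the raw x86 encodings written out (0xA8 test al, 0x90 nop, 0x72/0x73 jc/jnc short, 0x70 | cc short Jcc); PatchInlinedSmiCheck and the toy layout in main() are invented for the illustration, not the real patcher.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

// Raw x86 encodings assumed by this sketch (they correspond to the Assembler
// constants used in the diff: kTestAlByte, kNopByte, kJcShortOpcode, ...).
const uint8_t kTestAlByte = 0xA8;      // test al, imm8
const uint8_t kNopByte = 0x90;         // nop
const uint8_t kJcShortOpcode = 0x72;   // jc  rel8 (jump if carry)
const uint8_t kJncShortOpcode = 0x73;  // jnc rel8 (jump if not carry)
const uint8_t kJccShortPrefix = 0x70;  // 0x70 | cc == short Jcc
const uint8_t kNotZeroCC = 0x5;        // condition code for jnz
const uint8_t kZeroCC = 0x4;           // condition code for jz

// 'test_pos' plays the role of test_instruction_address: if the byte there is
// a nop, nothing was inlined; otherwise its imm8 operand is the distance back
// to the short jc/jnc that guards the inlined smi check, and that jump is
// rewritten into jnz/jz.
void PatchInlinedSmiCheck(std::vector<uint8_t>& code, size_t test_pos) {
  if (code[test_pos] != kTestAlByte) {
    assert(code[test_pos] == kNopByte);  // nothing to patch
    return;
  }
  const int8_t delta = static_cast<int8_t>(code[test_pos + 1]);
  const size_t jmp_pos = test_pos - static_cast<size_t>(delta);
  assert(code[jmp_pos] == kJncShortOpcode || code[jmp_pos] == kJcShortOpcode);
  const uint8_t cc = (code[jmp_pos] == kJncShortOpcode) ? kNotZeroCC : kZeroCC;
  code[jmp_pos] = static_cast<uint8_t>(kJccShortPrefix | cc);
}

int main() {
  // Toy patch site: a short jnc at offset 0, filler nops, then the marker
  // "test al, 4" at offset 4 pointing four bytes back at the jump.
  std::vector<uint8_t> code = {kJncShortOpcode, 0x10, kNopByte, kNopByte,
                               kTestAlByte, 0x04};
  PatchInlinedSmiCheck(code, 4);
  std::printf("patched opcode: 0x%02X\n", code[0]);  // 0x75 == jnz rel8
  return 0;
}
```

As the "Activate inlined smi code" hunk shows, CompareIC::UpdateCaches invokes the patcher only on the first transition out of UNINITIALIZED, so the inlined smi check stays dormant until the IC has recorded its first real operands.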