Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 3388004: Add support for near labels.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 3 months ago
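
This change converts a number of the Labels in these stubs to NearLabels. A NearLabel tells the assembler that every jump referencing it will be bound within an 8-bit displacement, so the 2-byte short jump encodings can be used instead of the 5- or 6-byte rel32 forms, shrinking the generated stub code. The sketch below is a minimal standalone illustration of that size difference, assuming only the documented x64 jump encodings; EmitForwardJmp and EmitForwardJcc are hypothetical helpers written for illustration, not part of the V8 MacroAssembler.

#include <cstdint>
#include <cstdio>
#include <vector>

// Emit a forward unconditional jump, using the 2-byte "jmp rel8" form when
// the target is within an 8-bit displacement and the 5-byte "jmp rel32"
// form otherwise. Binding a label close to its jumps is what lets the
// assembler commit to the short form up front.
void EmitForwardJmp(std::vector<uint8_t>* code, int distance) {
  if (distance >= -128 && distance <= 127) {
    code->push_back(0xEB);  // jmp rel8
    code->push_back(static_cast<uint8_t>(distance));
  } else {
    code->push_back(0xE9);  // jmp rel32
    for (int i = 0; i < 4; i++) {
      code->push_back(static_cast<uint8_t>(distance >> (8 * i)));
    }
  }
}

// Conditional jumps such as __ j(equal, &label) have the same split:
// "jcc rel8" is 2 bytes (0x70 | cc), "jcc rel32" is 6 bytes (0x0F, 0x80 | cc).
void EmitForwardJcc(std::vector<uint8_t>* code, int cc, int distance) {
  if (distance >= -128 && distance <= 127) {
    code->push_back(static_cast<uint8_t>(0x70 | cc));  // jcc rel8
    code->push_back(static_cast<uint8_t>(distance));
  } else {
    code->push_back(0x0F);
    code->push_back(static_cast<uint8_t>(0x80 | cc));  // jcc rel32
    for (int i = 0; i < 4; i++) {
      code->push_back(static_cast<uint8_t>(distance >> (8 * i)));
    }
  }
}

int main() {
  std::vector<uint8_t> short_jump, long_jump, short_je;
  EmitForwardJmp(&short_jump, 16);     // fits in rel8: 2 bytes
  EmitForwardJmp(&long_jump, 4096);    // needs rel32: 5 bytes
  EmitForwardJcc(&short_je, 0x4, 16);  // je rel8 (condition code 0x4 == equal)
  std::printf("jmp near: %zu bytes, jmp far: %zu bytes, je near: %zu bytes\n",
              short_jump.size(), long_jump.size(), short_je.size());
  return 0;
}

In the diff below, the labels converted to NearLabel are all bound a short distance after the jumps that target them, while labels that may end up far away (for example &runtime in the RegExp stub) remain ordinary Labels.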
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 185 matching lines...)
196 196
197 // Return and remove the on-stack parameters. 197 // Return and remove the on-stack parameters.
198 __ ret(3 * kPointerSize); 198 __ ret(3 * kPointerSize);
199 199
200 __ bind(&slow_case); 200 __ bind(&slow_case);
201 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 201 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
202 } 202 }
203 203
204 204
205 void ToBooleanStub::Generate(MacroAssembler* masm) { 205 void ToBooleanStub::Generate(MacroAssembler* masm) {
206 Label false_result, true_result, not_string; 206 NearLabel false_result, true_result, not_string;
207 __ movq(rax, Operand(rsp, 1 * kPointerSize)); 207 __ movq(rax, Operand(rsp, 1 * kPointerSize));
208 208
209 // 'null' => false. 209 // 'null' => false.
210 __ CompareRoot(rax, Heap::kNullValueRootIndex); 210 __ CompareRoot(rax, Heap::kNullValueRootIndex);
211 __ j(equal, &false_result); 211 __ j(equal, &false_result);
212 212
213 // Get the map and type of the heap object. 213 // Get the map and type of the heap object.
214 // We don't use CmpObjectType because we manipulate the type field. 214 // We don't use CmpObjectType because we manipulate the type field.
215 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); 215 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
216 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset)); 216 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
(...skipping 765 matching lines...)
982 } 982 }
983 983
984 984
985 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { 985 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
986 // Input on stack: 986 // Input on stack:
987 // rsp[8]: argument (should be number). 987 // rsp[8]: argument (should be number).
988 // rsp[0]: return address. 988 // rsp[0]: return address.
989 Label runtime_call; 989 Label runtime_call;
990 Label runtime_call_clear_stack; 990 Label runtime_call_clear_stack;
991 Label input_not_smi; 991 Label input_not_smi;
992 Label loaded; 992 NearLabel loaded;
993 // Test that rax is a number. 993 // Test that rax is a number.
994 __ movq(rax, Operand(rsp, kPointerSize)); 994 __ movq(rax, Operand(rsp, kPointerSize));
995 __ JumpIfNotSmi(rax, &input_not_smi); 995 __ JumpIfNotSmi(rax, &input_not_smi);
996 // Input is a smi. Untag and load it onto the FPU stack. 996 // Input is a smi. Untag and load it onto the FPU stack.
997 // Then load the bits of the double into rbx. 997 // Then load the bits of the double into rbx.
998 __ SmiToInteger32(rax, rax); 998 __ SmiToInteger32(rax, rax);
999 __ subq(rsp, Immediate(kPointerSize)); 999 __ subq(rsp, Immediate(kPointerSize));
1000 __ cvtlsi2sd(xmm1, rax); 1000 __ cvtlsi2sd(xmm1, rax);
1001 __ movsd(Operand(rsp, 0), xmm1); 1001 __ movsd(Operand(rsp, 0), xmm1);
1002 __ movq(rbx, xmm1); 1002 __ movq(rbx, xmm1);
(...skipping 59 matching lines...)
1062 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start)); 1062 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
1063 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start)); 1063 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
1064 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start)); 1064 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
1065 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start)); 1065 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
1066 } 1066 }
1067 #endif 1067 #endif
1068 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16]. 1068 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
1069 __ addl(rcx, rcx); 1069 __ addl(rcx, rcx);
1070 __ lea(rcx, Operand(rax, rcx, times_8, 0)); 1070 __ lea(rcx, Operand(rax, rcx, times_8, 0));
1071 // Check if cache matches: Double value is stored in uint32_t[2] array. 1071 // Check if cache matches: Double value is stored in uint32_t[2] array.
1072 Label cache_miss; 1072 NearLabel cache_miss;
1073 __ cmpq(rbx, Operand(rcx, 0)); 1073 __ cmpq(rbx, Operand(rcx, 0));
1074 __ j(not_equal, &cache_miss); 1074 __ j(not_equal, &cache_miss);
1075 // Cache hit! 1075 // Cache hit!
1076 __ movq(rax, Operand(rcx, 2 * kIntSize)); 1076 __ movq(rax, Operand(rcx, 2 * kIntSize));
1077 __ fstp(0); // Clear FPU stack. 1077 __ fstp(0); // Clear FPU stack.
1078 __ ret(kPointerSize); 1078 __ ret(kPointerSize);
1079 1079
1080 __ bind(&cache_miss); 1080 __ bind(&cache_miss);
1081 // Update cache with new value. 1081 // Update cache with new value.
1082 Label nan_result; 1082 Label nan_result;
(...skipping 70 matching lines...)
1153 __ fnstsw_ax(); 1153 __ fnstsw_ax();
1154 // Clear if Illegal Operand or Zero Division exceptions are set. 1154 // Clear if Illegal Operand or Zero Division exceptions are set.
1155 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word. 1155 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
1156 __ j(zero, &no_exceptions); 1156 __ j(zero, &no_exceptions);
1157 __ fnclex(); 1157 __ fnclex();
1158 __ bind(&no_exceptions); 1158 __ bind(&no_exceptions);
1159 } 1159 }
1160 1160
1161 // Compute st(0) % st(1) 1161 // Compute st(0) % st(1)
1162 { 1162 {
1163 Label partial_remainder_loop; 1163 NearLabel partial_remainder_loop;
1164 __ bind(&partial_remainder_loop); 1164 __ bind(&partial_remainder_loop);
1165 __ fprem1(); 1165 __ fprem1();
1166 __ fwait(); 1166 __ fwait();
1167 __ fnstsw_ax(); 1167 __ fnstsw_ax();
1168 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word. 1168 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
1169 // If C2 is set, computation only has partial result. Loop to 1169 // If C2 is set, computation only has partial result. Loop to
1170 // continue computation. 1170 // continue computation.
1171 __ j(not_zero, &partial_remainder_loop); 1171 __ j(not_zero, &partial_remainder_loop);
1172 } 1172 }
1173 // FPU Stack: input, 2*pi, input % 2*pi 1173 // FPU Stack: input, 2*pi, input % 2*pi
(...skipping 21 matching lines...)
1195 void IntegerConvert(MacroAssembler* masm, 1195 void IntegerConvert(MacroAssembler* masm,
1196 Register result, 1196 Register result,
1197 Register source) { 1197 Register source) {
1198 // Result may be rcx. If result and source are the same register, source will 1198 // Result may be rcx. If result and source are the same register, source will
1199 // be overwritten. 1199 // be overwritten.
1200 ASSERT(!result.is(rdi) && !result.is(rbx)); 1200 ASSERT(!result.is(rdi) && !result.is(rbx));
1201 // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use 1201 // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
1202 // cvttsd2si (32-bit version) directly. 1202 // cvttsd2si (32-bit version) directly.
1203 Register double_exponent = rbx; 1203 Register double_exponent = rbx;
1204 Register double_value = rdi; 1204 Register double_value = rdi;
1205 Label done, exponent_63_plus; 1205 NearLabel done, exponent_63_plus;
1206 // Get double and extract exponent. 1206 // Get double and extract exponent.
1207 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset)); 1207 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
1208 // Clear result preemptively, in case we need to return zero. 1208 // Clear result preemptively, in case we need to return zero.
1209 __ xorl(result, result); 1209 __ xorl(result, result);
1210 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there. 1210 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there.
1211 // Double to remove sign bit, shift exponent down to least significant bits. 1211 // Double to remove sign bit, shift exponent down to least significant bits.
1212 // and subtract bias to get the unshifted, unbiased exponent. 1212 // and subtract bias to get the unshifted, unbiased exponent.
1213 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0)); 1213 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
1214 __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits)); 1214 __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
1215 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias)); 1215 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
(...skipping 535 matching lines...)
1751 // Check that the last match info has space for the capture registers and the 1751 // Check that the last match info has space for the capture registers and the
1752 // additional information. Ensure no overflow in add. 1752 // additional information. Ensure no overflow in add.
1753 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); 1753 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
1754 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); 1754 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
1755 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); 1755 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
1756 __ cmpl(rdx, rax); 1756 __ cmpl(rdx, rax);
1757 __ j(greater, &runtime); 1757 __ j(greater, &runtime);
1758 1758
1759 // rcx: RegExp data (FixedArray) 1759 // rcx: RegExp data (FixedArray)
1760 // Check the representation and encoding of the subject string. 1760 // Check the representation and encoding of the subject string.
1761 Label seq_ascii_string, seq_two_byte_string, check_code; 1761 NearLabel seq_ascii_string, seq_two_byte_string, check_code;
1762 __ movq(rax, Operand(rsp, kSubjectOffset)); 1762 __ movq(rax, Operand(rsp, kSubjectOffset));
1763 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 1763 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1764 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 1764 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1765 // First check for flat two byte string. 1765 // First check for flat two byte string.
1766 __ andb(rbx, Immediate( 1766 __ andb(rbx, Immediate(
1767 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask)); 1767 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
1768 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); 1768 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
1769 __ j(zero, &seq_two_byte_string); 1769 __ j(zero, &seq_two_byte_string);
1770 // Any other flat string must be a flat ascii string. 1770 // Any other flat string must be a flat ascii string.
1771 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask)); 1771 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
(...skipping 104 matching lines...)
1876 #endif 1876 #endif
1877 1877
1878 // Keep track on aliasing between argX defined above and the registers used. 1878 // Keep track on aliasing between argX defined above and the registers used.
1879 // rax: subject string 1879 // rax: subject string
1880 // rbx: previous index 1880 // rbx: previous index
1881 // rdi: encoding of subject string (1 if ascii 0 if two_byte); 1881 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
1882 // r11: code 1882 // r11: code
1883 1883
1884 // Argument 4: End of string data 1884 // Argument 4: End of string data
1885 // Argument 3: Start of string data 1885 // Argument 3: Start of string data
1886 Label setup_two_byte, setup_rest; 1886 NearLabel setup_two_byte, setup_rest;
1887 __ testb(rdi, rdi); 1887 __ testb(rdi, rdi);
1888 __ j(zero, &setup_two_byte); 1888 __ j(zero, &setup_two_byte);
1889 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); 1889 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
1890 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize)); 1890 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
1891 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize)); 1891 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
1892 __ jmp(&setup_rest); 1892 __ jmp(&setup_rest);
1893 __ bind(&setup_two_byte); 1893 __ bind(&setup_two_byte);
1894 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); 1894 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
1895 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize)); 1895 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
1896 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize)); 1896 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
1897 1897
1898 __ bind(&setup_rest); 1898 __ bind(&setup_rest);
1899 // Argument 2: Previous index. 1899 // Argument 2: Previous index.
1900 __ movq(arg2, rbx); 1900 __ movq(arg2, rbx);
1901 1901
1902 // Argument 1: Subject string. 1902 // Argument 1: Subject string.
1903 __ movq(arg1, rax); 1903 __ movq(arg1, rax);
1904 1904
1905 // Locate the code entry and call it. 1905 // Locate the code entry and call it.
1906 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); 1906 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
1907 __ CallCFunction(r11, kRegExpExecuteArguments); 1907 __ CallCFunction(r11, kRegExpExecuteArguments);
1908 1908
1909 // rsi is caller save, as it is used to pass parameter. 1909 // rsi is caller save, as it is used to pass parameter.
1910 __ pop(rsi); 1910 __ pop(rsi);
1911 1911
1912 // Check the result. 1912 // Check the result.
1913 Label success; 1913 NearLabel success;
1914 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS)); 1914 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
1915 __ j(equal, &success); 1915 __ j(equal, &success);
1916 Label failure; 1916 NearLabel failure;
1917 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); 1917 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
1918 __ j(equal, &failure); 1918 __ j(equal, &failure);
1919 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); 1919 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
1920 // If not exception it can only be retry. Handle that in the runtime system. 1920 // If not exception it can only be retry. Handle that in the runtime system.
1921 __ j(not_equal, &runtime); 1921 __ j(not_equal, &runtime);
1922 // Result must now be exception. If there is no pending exception already a 1922 // Result must now be exception. If there is no pending exception already a
1923 // stack overflow (on the backtrack stack) was detected in RegExp code but 1923 // stack overflow (on the backtrack stack) was detected in RegExp code but
1924 // haven't created the exception yet. Handle that in the runtime system. 1924 // haven't created the exception yet. Handle that in the runtime system.
1925 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 1925 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1926 ExternalReference pending_exception_address(Top::k_pending_exception_address); 1926 ExternalReference pending_exception_address(Top::k_pending_exception_address);
(...skipping 34 matching lines...)
1961 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 1961 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
1962 __ movq(rcx, rbx); 1962 __ movq(rcx, rbx);
1963 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi); 1963 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
1964 1964
1965 // Get the static offsets vector filled by the native regexp code. 1965 // Get the static offsets vector filled by the native regexp code.
1966 __ movq(rcx, ExternalReference::address_of_static_offsets_vector()); 1966 __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
1967 1967
1968 // rbx: last_match_info backing store (FixedArray) 1968 // rbx: last_match_info backing store (FixedArray)
1969 // rcx: offsets vector 1969 // rcx: offsets vector
1970 // rdx: number of capture registers 1970 // rdx: number of capture registers
1971 Label next_capture, done; 1971 NearLabel next_capture, done;
1972 // Capture register counter starts from number of capture registers and 1972 // Capture register counter starts from number of capture registers and
 1973 // counts down until wrapping after zero. 1973 // counts down until wrapping after zero.
1974 __ bind(&next_capture); 1974 __ bind(&next_capture);
1975 __ subq(rdx, Immediate(1)); 1975 __ subq(rdx, Immediate(1));
1976 __ j(negative, &done); 1976 __ j(negative, &done);
1977 // Read the value from the static offsets vector buffer and make it a smi. 1977 // Read the value from the static offsets vector buffer and make it a smi.
1978 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); 1978 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
1979 __ Integer32ToSmi(rdi, rdi, &runtime); 1979 __ Integer32ToSmi(rdi, rdi, &runtime);
1980 // Store the smi value in the last match info. 1980 // Store the smi value in the last match info.
1981 __ movq(FieldOperand(rbx, 1981 __ movq(FieldOperand(rbx,
(...skipping 133 matching lines...)
2115 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 2115 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
2116 2116
2117 Label check_unequal_objects, done; 2117 Label check_unequal_objects, done;
2118 // The compare stub returns a positive, negative, or zero 64-bit integer 2118 // The compare stub returns a positive, negative, or zero 64-bit integer
2119 // value in rax, corresponding to result of comparing the two inputs. 2119 // value in rax, corresponding to result of comparing the two inputs.
2120 // NOTICE! This code is only reached after a smi-fast-case check, so 2120 // NOTICE! This code is only reached after a smi-fast-case check, so
2121 // it is certain that at least one operand isn't a smi. 2121 // it is certain that at least one operand isn't a smi.
2122 2122
2123 // Two identical objects are equal unless they are both NaN or undefined. 2123 // Two identical objects are equal unless they are both NaN or undefined.
2124 { 2124 {
2125 Label not_identical; 2125 NearLabel not_identical;
2126 __ cmpq(rax, rdx); 2126 __ cmpq(rax, rdx);
2127 __ j(not_equal, &not_identical); 2127 __ j(not_equal, &not_identical);
2128 2128
2129 if (cc_ != equal) { 2129 if (cc_ != equal) {
2130 // Check for undefined. undefined OP undefined is false even though 2130 // Check for undefined. undefined OP undefined is false even though
2131 // undefined == undefined. 2131 // undefined == undefined.
2132 Label check_for_nan; 2132 NearLabel check_for_nan;
2133 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 2133 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2134 __ j(not_equal, &check_for_nan); 2134 __ j(not_equal, &check_for_nan);
2135 __ Set(rax, NegativeComparisonResult(cc_)); 2135 __ Set(rax, NegativeComparisonResult(cc_));
2136 __ ret(0); 2136 __ ret(0);
2137 __ bind(&check_for_nan); 2137 __ bind(&check_for_nan);
2138 } 2138 }
2139 2139
2140 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), 2140 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
2141 // so we do the second best thing - test it ourselves. 2141 // so we do the second best thing - test it ourselves.
2142 // Note: if cc_ != equal, never_nan_nan_ is not used. 2142 // Note: if cc_ != equal, never_nan_nan_ is not used.
2143 // We cannot set rax to EQUAL until just before return because 2143 // We cannot set rax to EQUAL until just before return because
2144 // rax must be unchanged on jump to not_identical. 2144 // rax must be unchanged on jump to not_identical.
2145 2145
2146 if (never_nan_nan_ && (cc_ == equal)) { 2146 if (never_nan_nan_ && (cc_ == equal)) {
2147 __ Set(rax, EQUAL); 2147 __ Set(rax, EQUAL);
2148 __ ret(0); 2148 __ ret(0);
2149 } else { 2149 } else {
2150 Label heap_number; 2150 NearLabel heap_number;
2151 // If it's not a heap number, then return equal for (in)equality operator. 2151 // If it's not a heap number, then return equal for (in)equality operator.
2152 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2152 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2153 Factory::heap_number_map()); 2153 Factory::heap_number_map());
2154 __ j(equal, &heap_number); 2154 __ j(equal, &heap_number);
2155 if (cc_ != equal) { 2155 if (cc_ != equal) {
2156 // Call runtime on identical JSObjects. Otherwise return equal. 2156 // Call runtime on identical JSObjects. Otherwise return equal.
2157 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); 2157 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2158 __ j(above_equal, &not_identical); 2158 __ j(above_equal, &not_identical);
2159 } 2159 }
2160 __ Set(rax, EQUAL); 2160 __ Set(rax, EQUAL);
(...skipping 43 matching lines...)
2204 2204
2205 __ bind(&not_smis); 2205 __ bind(&not_smis);
2206 } 2206 }
2207 2207
2208 // If either operand is a JSObject or an oddball value, then they are not 2208 // If either operand is a JSObject or an oddball value, then they are not
2209 // equal since their pointers are different 2209 // equal since their pointers are different
2210 // There is no test for undetectability in strict equality. 2210 // There is no test for undetectability in strict equality.
2211 2211
2212 // If the first object is a JS object, we have done pointer comparison. 2212 // If the first object is a JS object, we have done pointer comparison.
2213 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 2213 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2214 Label first_non_object; 2214 NearLabel first_non_object;
2215 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); 2215 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2216 __ j(below, &first_non_object); 2216 __ j(below, &first_non_object);
2217 // Return non-zero (eax (not rax) is not zero) 2217 // Return non-zero (eax (not rax) is not zero)
2218 Label return_not_equal; 2218 Label return_not_equal;
2219 STATIC_ASSERT(kHeapObjectTag != 0); 2219 STATIC_ASSERT(kHeapObjectTag != 0);
2220 __ bind(&return_not_equal); 2220 __ bind(&return_not_equal);
2221 __ ret(0); 2221 __ ret(0);
2222 2222
2223 __ bind(&first_non_object); 2223 __ bind(&first_non_object);
2224 // Check for oddballs: true, false, null, undefined. 2224 // Check for oddballs: true, false, null, undefined.
2225 __ CmpInstanceType(rcx, ODDBALL_TYPE); 2225 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2226 __ j(equal, &return_not_equal); 2226 __ j(equal, &return_not_equal);
2227 2227
2228 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx); 2228 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2229 __ j(above_equal, &return_not_equal); 2229 __ j(above_equal, &return_not_equal);
2230 2230
2231 // Check for oddballs: true, false, null, undefined. 2231 // Check for oddballs: true, false, null, undefined.
2232 __ CmpInstanceType(rcx, ODDBALL_TYPE); 2232 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2233 __ j(equal, &return_not_equal); 2233 __ j(equal, &return_not_equal);
2234 2234
2235 // Fall through to the general case. 2235 // Fall through to the general case.
2236 } 2236 }
2237 __ bind(&slow); 2237 __ bind(&slow);
2238 } 2238 }
2239 2239
2240 // Generate the number comparison code. 2240 // Generate the number comparison code.
2241 if (include_number_compare_) { 2241 if (include_number_compare_) {
2242 Label non_number_comparison; 2242 Label non_number_comparison;
2243 Label unordered; 2243 NearLabel unordered;
2244 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); 2244 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
2245 __ xorl(rax, rax); 2245 __ xorl(rax, rax);
2246 __ xorl(rcx, rcx); 2246 __ xorl(rcx, rcx);
2247 __ ucomisd(xmm0, xmm1); 2247 __ ucomisd(xmm0, xmm1);
2248 2248
2249 // Don't base result on EFLAGS when a NaN is involved. 2249 // Don't base result on EFLAGS when a NaN is involved.
2250 __ j(parity_even, &unordered); 2250 __ j(parity_even, &unordered);
2251 // Return a result of -1, 0, or 1, based on EFLAGS. 2251 // Return a result of -1, 0, or 1, based on EFLAGS.
2252 __ setcc(above, rax); 2252 __ setcc(above, rax);
2253 __ setcc(below, rcx); 2253 __ setcc(below, rcx);
(...skipping 43 matching lines...)
2297 2297
2298 #ifdef DEBUG 2298 #ifdef DEBUG
2299 __ Abort("Unexpected fall-through from string comparison"); 2299 __ Abort("Unexpected fall-through from string comparison");
2300 #endif 2300 #endif
2301 2301
2302 __ bind(&check_unequal_objects); 2302 __ bind(&check_unequal_objects);
2303 if (cc_ == equal && !strict_) { 2303 if (cc_ == equal && !strict_) {
2304 // Not strict equality. Objects are unequal if 2304 // Not strict equality. Objects are unequal if
2305 // they are both JSObjects and not undetectable, 2305 // they are both JSObjects and not undetectable,
2306 // and their pointers are different. 2306 // and their pointers are different.
2307 Label not_both_objects, return_unequal; 2307 NearLabel not_both_objects, return_unequal;
2308 // At most one is a smi, so we can test for smi by adding the two. 2308 // At most one is a smi, so we can test for smi by adding the two.
2309 // A smi plus a heap object has the low bit set, a heap object plus 2309 // A smi plus a heap object has the low bit set, a heap object plus
2310 // a heap object has the low bit clear. 2310 // a heap object has the low bit clear.
2311 STATIC_ASSERT(kSmiTag == 0); 2311 STATIC_ASSERT(kSmiTag == 0);
2312 STATIC_ASSERT(kSmiTagMask == 1); 2312 STATIC_ASSERT(kSmiTagMask == 1);
2313 __ lea(rcx, Operand(rax, rdx, times_1, 0)); 2313 __ lea(rcx, Operand(rax, rdx, times_1, 0));
2314 __ testb(rcx, Immediate(kSmiTagMask)); 2314 __ testb(rcx, Immediate(kSmiTagMask));
2315 __ j(not_zero, &not_both_objects); 2315 __ j(not_zero, &not_both_objects);
2316 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); 2316 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2317 __ j(below, &not_both_objects); 2317 __ j(below, &not_both_objects);
(...skipping 137 matching lines...)
2455 __ movq(rsp, Operand(kScratchRegister, 0)); 2455 __ movq(rsp, Operand(kScratchRegister, 0));
2456 // get next in chain 2456 // get next in chain
2457 __ pop(rcx); 2457 __ pop(rcx);
2458 __ movq(Operand(kScratchRegister, 0), rcx); 2458 __ movq(Operand(kScratchRegister, 0), rcx);
2459 __ pop(rbp); // pop frame pointer 2459 __ pop(rbp); // pop frame pointer
2460 __ pop(rdx); // remove state 2460 __ pop(rdx); // remove state
2461 2461
2462 // Before returning we restore the context from the frame pointer if not NULL. 2462 // Before returning we restore the context from the frame pointer if not NULL.
2463 // The frame pointer is NULL in the exception handler of a JS entry frame. 2463 // The frame pointer is NULL in the exception handler of a JS entry frame.
2464 __ xor_(rsi, rsi); // tentatively set context pointer to NULL 2464 __ xor_(rsi, rsi); // tentatively set context pointer to NULL
2465 Label skip; 2465 NearLabel skip;
2466 __ cmpq(rbp, Immediate(0)); 2466 __ cmpq(rbp, Immediate(0));
2467 __ j(equal, &skip); 2467 __ j(equal, &skip);
2468 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2468 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2469 __ bind(&skip); 2469 __ bind(&skip);
2470 __ ret(0); 2470 __ ret(0);
2471 } 2471 }
2472 2472
2473 2473
2474 void ApiGetterEntryStub::Generate(MacroAssembler* masm) { 2474 void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
2475 Label empty_result; 2475 Label empty_result;
(...skipping 139 matching lines...)
2615 __ testl(rcx, Immediate(kFailureTagMask)); 2615 __ testl(rcx, Immediate(kFailureTagMask));
2616 __ j(zero, &failure_returned); 2616 __ j(zero, &failure_returned);
2617 2617
2618 // Exit the JavaScript to C++ exit frame. 2618 // Exit the JavaScript to C++ exit frame.
2619 __ LeaveExitFrame(result_size_); 2619 __ LeaveExitFrame(result_size_);
2620 __ ret(0); 2620 __ ret(0);
2621 2621
2622 // Handling of failure. 2622 // Handling of failure.
2623 __ bind(&failure_returned); 2623 __ bind(&failure_returned);
2624 2624
2625 Label retry; 2625 NearLabel retry;
2626 // If the returned exception is RETRY_AFTER_GC continue at retry label 2626 // If the returned exception is RETRY_AFTER_GC continue at retry label
2627 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 2627 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
2628 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 2628 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2629 __ j(zero, &retry); 2629 __ j(zero, &retry);
2630 2630
2631 // Special handling of out of memory exceptions. 2631 // Special handling of out of memory exceptions.
2632 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE); 2632 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
2633 __ cmpq(rax, kScratchRegister); 2633 __ cmpq(rax, kScratchRegister);
2634 __ j(equal, throw_out_of_memory_exception); 2634 __ j(equal, throw_out_of_memory_exception);
2635 2635
(...skipping 19 matching lines...)
2655 2655
2656 2656
2657 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, 2657 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
2658 UncatchableExceptionType type) { 2658 UncatchableExceptionType type) {
2659 // Fetch top stack handler. 2659 // Fetch top stack handler.
2660 ExternalReference handler_address(Top::k_handler_address); 2660 ExternalReference handler_address(Top::k_handler_address);
2661 __ movq(kScratchRegister, handler_address); 2661 __ movq(kScratchRegister, handler_address);
2662 __ movq(rsp, Operand(kScratchRegister, 0)); 2662 __ movq(rsp, Operand(kScratchRegister, 0));
2663 2663
2664 // Unwind the handlers until the ENTRY handler is found. 2664 // Unwind the handlers until the ENTRY handler is found.
2665 Label loop, done; 2665 NearLabel loop, done;
2666 __ bind(&loop); 2666 __ bind(&loop);
2667 // Load the type of the current stack handler. 2667 // Load the type of the current stack handler.
2668 const int kStateOffset = StackHandlerConstants::kStateOffset; 2668 const int kStateOffset = StackHandlerConstants::kStateOffset;
2669 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); 2669 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
2670 __ j(equal, &done); 2670 __ j(equal, &done);
2671 // Fetch the next handler in the list. 2671 // Fetch the next handler in the list.
2672 const int kNextOffset = StackHandlerConstants::kNextOffset; 2672 const int kNextOffset = StackHandlerConstants::kNextOffset;
2673 __ movq(rsp, Operand(rsp, kNextOffset)); 2673 __ movq(rsp, Operand(rsp, kNextOffset));
2674 __ jmp(&loop); 2674 __ jmp(&loop);
2675 __ bind(&done); 2675 __ bind(&done);
(...skipping 249 matching lines...)
2925 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); 2925 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
2926 __ j(below, &slow); 2926 __ j(below, &slow);
2927 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); 2927 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
2928 __ j(above, &slow); 2928 __ j(above, &slow);
2929 2929
2930 // Get the prototype of the function. 2930 // Get the prototype of the function.
2931 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 2931 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
2932 // rdx is function, rax is map. 2932 // rdx is function, rax is map.
2933 2933
2934 // Look up the function and the map in the instanceof cache. 2934 // Look up the function and the map in the instanceof cache.
2935 Label miss; 2935 NearLabel miss;
2936 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 2936 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2937 __ j(not_equal, &miss); 2937 __ j(not_equal, &miss);
2938 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); 2938 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2939 __ j(not_equal, &miss); 2939 __ j(not_equal, &miss);
2940 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 2940 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2941 __ ret(2 * kPointerSize); 2941 __ ret(2 * kPointerSize);
2942 2942
2943 __ bind(&miss); 2943 __ bind(&miss);
2944 __ TryGetFunctionPrototype(rdx, rbx, &slow); 2944 __ TryGetFunctionPrototype(rdx, rbx, &slow);
2945 2945
2946 // Check that the function prototype is a JS object. 2946 // Check that the function prototype is a JS object.
2947 __ JumpIfSmi(rbx, &slow); 2947 __ JumpIfSmi(rbx, &slow);
2948 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister); 2948 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
2949 __ j(below, &slow); 2949 __ j(below, &slow);
2950 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); 2950 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
2951 __ j(above, &slow); 2951 __ j(above, &slow);
2952 2952
2953 // Register mapping: 2953 // Register mapping:
2954 // rax is object map. 2954 // rax is object map.
2955 // rdx is function. 2955 // rdx is function.
2956 // rbx is function prototype. 2956 // rbx is function prototype.
2957 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 2957 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2958 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 2958 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2959 2959
2960 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); 2960 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
2961 2961
2962 // Loop through the prototype chain looking for the function prototype. 2962 // Loop through the prototype chain looking for the function prototype.
2963 Label loop, is_instance, is_not_instance; 2963 NearLabel loop, is_instance, is_not_instance;
2964 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); 2964 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
2965 __ bind(&loop); 2965 __ bind(&loop);
2966 __ cmpq(rcx, rbx); 2966 __ cmpq(rcx, rbx);
2967 __ j(equal, &is_instance); 2967 __ j(equal, &is_instance);
2968 __ cmpq(rcx, kScratchRegister); 2968 __ cmpq(rcx, kScratchRegister);
2969 // The code at is_not_instance assumes that kScratchRegister contains a 2969 // The code at is_not_instance assumes that kScratchRegister contains a
2970 // non-zero GCable value (the null object in this case). 2970 // non-zero GCable value (the null object in this case).
2971 __ j(equal, &is_not_instance); 2971 __ j(equal, &is_not_instance);
2972 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); 2972 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2973 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); 2973 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
(...skipping 284 matching lines...)
3258 is_smi = masm->CheckSmi(rdx); 3258 is_smi = masm->CheckSmi(rdx);
3259 __ j(is_smi, &string_add_runtime); 3259 __ j(is_smi, &string_add_runtime);
3260 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); 3260 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
3261 __ j(above_equal, &string_add_runtime); 3261 __ j(above_equal, &string_add_runtime);
3262 } 3262 }
3263 3263
3264 // Both arguments are strings. 3264 // Both arguments are strings.
3265 // rax: first string 3265 // rax: first string
3266 // rdx: second string 3266 // rdx: second string
3267 // Check if either of the strings are empty. In that case return the other. 3267 // Check if either of the strings are empty. In that case return the other.
3268 Label second_not_zero_length, both_not_zero_length; 3268 NearLabel second_not_zero_length, both_not_zero_length;
3269 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); 3269 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
3270 __ SmiTest(rcx); 3270 __ SmiTest(rcx);
3271 __ j(not_zero, &second_not_zero_length); 3271 __ j(not_zero, &second_not_zero_length);
3272 // Second string is empty, result is first string which is already in rax. 3272 // Second string is empty, result is first string which is already in rax.
3273 __ IncrementCounter(&Counters::string_add_native, 1); 3273 __ IncrementCounter(&Counters::string_add_native, 1);
3274 __ ret(2 * kPointerSize); 3274 __ ret(2 * kPointerSize);
3275 __ bind(&second_not_zero_length); 3275 __ bind(&second_not_zero_length);
3276 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); 3276 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
3277 __ SmiTest(rbx); 3277 __ SmiTest(rbx);
3278 __ j(not_zero, &both_not_zero_length); 3278 __ j(not_zero, &both_not_zero_length);
(...skipping 235 matching lines...)
3514 bool ascii) { 3514 bool ascii) {
3515 // Copy characters using rep movs of doublewords. Align destination on 4 byte 3515 // Copy characters using rep movs of doublewords. Align destination on 4 byte
3516 // boundary before starting rep movs. Copy remaining characters after running 3516 // boundary before starting rep movs. Copy remaining characters after running
3517 // rep movs. 3517 // rep movs.
3518 // Count is positive int32, dest and src are character pointers. 3518 // Count is positive int32, dest and src are character pointers.
3519 ASSERT(dest.is(rdi)); // rep movs destination 3519 ASSERT(dest.is(rdi)); // rep movs destination
3520 ASSERT(src.is(rsi)); // rep movs source 3520 ASSERT(src.is(rsi)); // rep movs source
3521 ASSERT(count.is(rcx)); // rep movs count 3521 ASSERT(count.is(rcx)); // rep movs count
3522 3522
3523 // Nothing to do for zero characters. 3523 // Nothing to do for zero characters.
3524 Label done; 3524 NearLabel done;
3525 __ testl(count, count); 3525 __ testl(count, count);
3526 __ j(zero, &done); 3526 __ j(zero, &done);
3527 3527
3528 // Make count the number of bytes to copy. 3528 // Make count the number of bytes to copy.
3529 if (!ascii) { 3529 if (!ascii) {
3530 STATIC_ASSERT(2 == sizeof(uc16)); 3530 STATIC_ASSERT(2 == sizeof(uc16));
3531 __ addl(count, count); 3531 __ addl(count, count);
3532 } 3532 }
3533 3533
3534 // Don't enter the rep movs if there are less than 4 bytes to copy. 3534 // Don't enter the rep movs if there are less than 4 bytes to copy.
3535 Label last_bytes; 3535 NearLabel last_bytes;
3536 __ testl(count, Immediate(~7)); 3536 __ testl(count, Immediate(~7));
3537 __ j(zero, &last_bytes); 3537 __ j(zero, &last_bytes);
3538 3538
3539 // Copy from edi to esi using rep movs instruction. 3539 // Copy from edi to esi using rep movs instruction.
3540 __ movl(kScratchRegister, count); 3540 __ movl(kScratchRegister, count);
3541 __ shr(count, Immediate(3)); // Number of doublewords to copy. 3541 __ shr(count, Immediate(3)); // Number of doublewords to copy.
3542 __ repmovsq(); 3542 __ repmovsq();
3543 3543
3544 // Find number of bytes left. 3544 // Find number of bytes left.
3545 __ movl(count, kScratchRegister); 3545 __ movl(count, kScratchRegister);
(...skipping 23 matching lines...)
3569 Register scratch1, 3569 Register scratch1,
3570 Register scratch2, 3570 Register scratch2,
3571 Register scratch3, 3571 Register scratch3,
3572 Register scratch4, 3572 Register scratch4,
3573 Label* not_found) { 3573 Label* not_found) {
3574 // Register scratch3 is the general scratch register in this function. 3574 // Register scratch3 is the general scratch register in this function.
3575 Register scratch = scratch3; 3575 Register scratch = scratch3;
3576 3576
 3577 // Make sure that both characters are not digits as such strings have a 3577 // Make sure that both characters are not digits as such strings have a
3578 // different hash algorithm. Don't try to look for these in the symbol table. 3578 // different hash algorithm. Don't try to look for these in the symbol table.
3579 Label not_array_index; 3579 NearLabel not_array_index;
3580 __ leal(scratch, Operand(c1, -'0')); 3580 __ leal(scratch, Operand(c1, -'0'));
3581 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0'))); 3581 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3582 __ j(above, &not_array_index); 3582 __ j(above, &not_array_index);
3583 __ leal(scratch, Operand(c2, -'0')); 3583 __ leal(scratch, Operand(c2, -'0'));
3584 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0'))); 3584 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3585 __ j(below_equal, not_found); 3585 __ j(below_equal, not_found);
3586 3586
3587 __ bind(&not_array_index); 3587 __ bind(&not_array_index);
3588 // Calculate the two character string hash. 3588 // Calculate the two character string hash.
3589 Register hash = scratch1; 3589 Register hash = scratch1;
(...skipping 303 matching lines...)
3893 3893
3894 // Find minimum length and length difference. 3894 // Find minimum length and length difference.
3895 __ movq(scratch1, FieldOperand(left, String::kLengthOffset)); 3895 __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
3896 __ movq(scratch4, scratch1); 3896 __ movq(scratch4, scratch1);
3897 __ SmiSub(scratch4, 3897 __ SmiSub(scratch4,
3898 scratch4, 3898 scratch4,
3899 FieldOperand(right, String::kLengthOffset), 3899 FieldOperand(right, String::kLengthOffset),
3900 NULL); 3900 NULL);
3901 // Register scratch4 now holds left.length - right.length. 3901 // Register scratch4 now holds left.length - right.length.
3902 const Register length_difference = scratch4; 3902 const Register length_difference = scratch4;
3903 Label left_shorter; 3903 NearLabel left_shorter;
3904 __ j(less, &left_shorter); 3904 __ j(less, &left_shorter);
 3905 // The right string isn't longer than the left one. 3905 // The right string isn't longer than the left one.
3906 // Get the right string's length by subtracting the (non-negative) difference 3906 // Get the right string's length by subtracting the (non-negative) difference
3907 // from the left string's length. 3907 // from the left string's length.
3908 __ SmiSub(scratch1, scratch1, length_difference, NULL); 3908 __ SmiSub(scratch1, scratch1, length_difference, NULL);
3909 __ bind(&left_shorter); 3909 __ bind(&left_shorter);
3910 // Register scratch1 now holds Min(left.length, right.length). 3910 // Register scratch1 now holds Min(left.length, right.length).
3911 const Register min_length = scratch1; 3911 const Register min_length = scratch1;
3912 3912
3913 Label compare_lengths; 3913 NearLabel compare_lengths;
3914 // If min-length is zero, go directly to comparing lengths. 3914 // If min-length is zero, go directly to comparing lengths.
3915 __ SmiTest(min_length); 3915 __ SmiTest(min_length);
3916 __ j(zero, &compare_lengths); 3916 __ j(zero, &compare_lengths);
3917 3917
3918 __ SmiToInteger32(min_length, min_length); 3918 __ SmiToInteger32(min_length, min_length);
3919 3919
3920 // Registers scratch2 and scratch3 are free. 3920 // Registers scratch2 and scratch3 are free.
3921 Label result_not_equal; 3921 NearLabel result_not_equal;
3922 Label loop; 3922 Label loop;
3923 { 3923 {
3924 // Check characters 0 .. min_length - 1 in a loop. 3924 // Check characters 0 .. min_length - 1 in a loop.
3925 // Use scratch3 as loop index, min_length as limit and scratch2 3925 // Use scratch3 as loop index, min_length as limit and scratch2
3926 // for computation. 3926 // for computation.
3927 const Register index = scratch3; 3927 const Register index = scratch3;
3928 __ movl(index, Immediate(0)); // Index into strings. 3928 __ movl(index, Immediate(0)); // Index into strings.
3929 __ bind(&loop); 3929 __ bind(&loop);
3930 // Compare characters. 3930 // Compare characters.
3931 // TODO(lrn): Could we load more than one character at a time? 3931 // TODO(lrn): Could we load more than one character at a time?
(...skipping 15 matching lines...)
3947 // Completed loop without finding different characters. 3947 // Completed loop without finding different characters.
3948 // Compare lengths (precomputed). 3948 // Compare lengths (precomputed).
3949 __ bind(&compare_lengths); 3949 __ bind(&compare_lengths);
3950 __ SmiTest(length_difference); 3950 __ SmiTest(length_difference);
3951 __ j(not_zero, &result_not_equal); 3951 __ j(not_zero, &result_not_equal);
3952 3952
3953 // Result is EQUAL. 3953 // Result is EQUAL.
3954 __ Move(rax, Smi::FromInt(EQUAL)); 3954 __ Move(rax, Smi::FromInt(EQUAL));
3955 __ ret(0); 3955 __ ret(0);
3956 3956
3957 Label result_greater; 3957 NearLabel result_greater;
3958 __ bind(&result_not_equal); 3958 __ bind(&result_not_equal);
3959 // Unequal comparison of left to right, either character or length. 3959 // Unequal comparison of left to right, either character or length.
3960 __ j(greater, &result_greater); 3960 __ j(greater, &result_greater);
3961 3961
3962 // Result is LESS. 3962 // Result is LESS.
3963 __ Move(rax, Smi::FromInt(LESS)); 3963 __ Move(rax, Smi::FromInt(LESS));
3964 __ ret(0); 3964 __ ret(0);
3965 3965
3966 // Result is GREATER. 3966 // Result is GREATER.
3967 __ bind(&result_greater); 3967 __ bind(&result_greater);
3968 __ Move(rax, Smi::FromInt(GREATER)); 3968 __ Move(rax, Smi::FromInt(GREATER));
3969 __ ret(0); 3969 __ ret(0);
3970 } 3970 }
3971 3971
3972 3972
3973 void StringCompareStub::Generate(MacroAssembler* masm) { 3973 void StringCompareStub::Generate(MacroAssembler* masm) {
3974 Label runtime; 3974 Label runtime;
3975 3975
3976 // Stack frame on entry. 3976 // Stack frame on entry.
3977 // rsp[0]: return address 3977 // rsp[0]: return address
3978 // rsp[8]: right string 3978 // rsp[8]: right string
3979 // rsp[16]: left string 3979 // rsp[16]: left string
3980 3980
3981 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left 3981 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
3982 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right 3982 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
3983 3983
3984 // Check for identity. 3984 // Check for identity.
3985 Label not_same; 3985 NearLabel not_same;
3986 __ cmpq(rdx, rax); 3986 __ cmpq(rdx, rax);
3987 __ j(not_equal, &not_same); 3987 __ j(not_equal, &not_same);
3988 __ Move(rax, Smi::FromInt(EQUAL)); 3988 __ Move(rax, Smi::FromInt(EQUAL));
3989 __ IncrementCounter(&Counters::string_compare_native, 1); 3989 __ IncrementCounter(&Counters::string_compare_native, 1);
3990 __ ret(2 * kPointerSize); 3990 __ ret(2 * kPointerSize);
3991 3991
3992 __ bind(&not_same); 3992 __ bind(&not_same);
3993 3993
3994 // Check that both are sequential ASCII strings. 3994 // Check that both are sequential ASCII strings.
3995 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); 3995 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
(...skipping 10 matching lines...)
4006 // tagged as a small integer. 4006 // tagged as a small integer.
4007 __ bind(&runtime); 4007 __ bind(&runtime);
4008 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 4008 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4009 } 4009 }
4010 4010
4011 #undef __ 4011 #undef __
4012 4012
4013 } } // namespace v8::internal 4013 } } // namespace v8::internal
4014 4014
4015 #endif // V8_TARGET_ARCH_X64 4015 #endif // V8_TARGET_ARCH_X64