| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1885 matching lines...) |
| 1896 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1896 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. |
| 1897 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); | 1897 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); |
| 1898 __ mov(r0, Operand(r1)); | 1898 __ mov(r0, Operand(r1)); |
| 1899 } | 1899 } |
| 1900 __ Ret(); | 1900 __ Ret(); |
| 1901 } | 1901 } |
| 1902 | 1902 |
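The eor with HeapNumber::kSignMask above negates the number purely in integer registers: bit 31 of a double's high word is its IEEE-754 sign bit, so XOR-ing it flips the sign with no floating-point arithmetic. A minimal standalone C++ sketch of the same bit trick (the kSignMask value is written out by hand here for illustration, not pulled from the V8 headers):

    #include <cassert>
    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // Bit 31 of the high 32-bit word of an IEEE-754 double is the sign
    // bit; this mirrors the idea behind HeapNumber::kSignMask.
    constexpr uint32_t kSignMask = 0x80000000u;

    double FlipSign(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);         // type-pun safely
      bits ^= static_cast<uint64_t>(kSignMask) << 32;  // flip the sign bit
      std::memcpy(&value, &bits, sizeof value);
      return value;
    }

    int main() {
      assert(FlipSign(1.5) == -1.5);
      assert(!std::signbit(FlipSign(-0.0)));  // -0.0 becomes +0.0
      return 0;
    }
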
| 1903 | 1903 |
| 1904 void UnaryOpStub::GenerateHeapNumberCodeBitNot( | 1904 void UnaryOpStub::GenerateHeapNumberCodeBitNot( |
| 1905 MacroAssembler* masm, Label* slow) { | 1905 MacroAssembler* masm, Label* slow) { |
| 1906 Label impossible; |
| 1907 |
| 1906 EmitCheckForHeapNumber(masm, r0, r1, r6, slow); | 1908 EmitCheckForHeapNumber(masm, r0, r1, r6, slow); |
| 1907 // Convert the heap number in r0 to an untagged integer in r1. | 1909 // Convert the heap number in r0 to an untagged integer in r1. |
| 1908 __ ConvertToInt32(r0, r1, r2, r3, d0, slow); | 1910 __ ConvertToInt32(r0, r1, r2, r3, d0, slow); |
| 1909 | 1911 |
| 1910 // Do the bitwise operation and check if the result fits in a smi. | 1912 // Do the bitwise operation and check if the result fits in a smi. |
| 1911 Label try_float; | 1913 Label try_float; |
| 1912 __ mvn(r1, Operand(r1)); | 1914 __ mvn(r1, Operand(r1)); |
| 1913 __ add(r2, r1, Operand(0x40000000), SetCC); | 1915 __ add(r2, r1, Operand(0x40000000), SetCC); |
| 1914 __ b(mi, &try_float); | 1916 __ b(mi, &try_float); |
| 1915 | 1917 |
| 1916 // Tag the result as a smi and we're done. | 1918 // Tag the result as a smi and we're done. |
| 1917 __ mov(r0, Operand(r1, LSL, kSmiTagSize)); | 1919 __ mov(r0, Operand(r1, LSL, kSmiTagSize)); |
| 1918 __ Ret(); | 1920 __ Ret(); |
| 1919 | 1921 |
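About the add/b mi pair above: on 32-bit V8 a smi payload is a 31-bit signed integer, so the complemented value in r1 is representable only in [-2^30, 2^30 - 1]. Adding 0x40000000 (2^30) wraps the result negative exactly for values outside that range, so one add with SetCC plus a conditional branch stands in for two comparisons. A small C++ model of the check, using illustrative names rather than V8's:

    #include <cassert>
    #include <cstdint>

    // True iff |value| fits in a 31-bit signed smi payload, i.e. lies in
    // [-2^30, 2^30 - 1]. Adding 2^30 wraps the result negative exactly
    // for out-of-range inputs -- the same test as
    // "add r2, r1, #0x40000000, SetCC; b mi, ...".
    // (Relies on two's-complement wraparound, as the ARM code does.)
    bool FitsInSmi(int32_t value) {
      return static_cast<int32_t>(
                 static_cast<uint32_t>(value) + 0x40000000u) >= 0;
    }

    int main() {
      assert(FitsInSmi(0));
      assert(FitsInSmi((1 << 30) - 1));    //  2^30 - 1: largest smi
      assert(!FitsInSmi(1 << 30));         //  2^30: one too big
      assert(FitsInSmi(-(1 << 30)));       // -2^30: smallest smi
      assert(!FitsInSmi(-(1 << 30) - 1));  // one too small
      return 0;
    }
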
| 1920 // Try to store the result in a heap number. | 1922 // Try to store the result in a heap number. |
| 1921 __ bind(&try_float); | 1923 __ bind(&try_float); |
| 1922 if (mode_ == UNARY_NO_OVERWRITE) { | 1924 if (mode_ == UNARY_NO_OVERWRITE) { |
| 1923 Label slow_allocate_heapnumber, heapnumber_allocated; | 1925 Label slow_allocate_heapnumber, heapnumber_allocated; |
| 1924 __ AllocateHeapNumber(r0, r2, r3, r6, &slow_allocate_heapnumber); | 1926 // Allocate a new heap number without zapping r0, which we need if it fails. |
| 1927 __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber); |
| 1925 __ jmp(&heapnumber_allocated); | 1928 __ jmp(&heapnumber_allocated); |
| 1926 | 1929 |
| 1927 __ bind(&slow_allocate_heapnumber); | 1930 __ bind(&slow_allocate_heapnumber); |
| 1928 __ EnterInternalFrame(); | 1931 __ EnterInternalFrame(); |
| 1929 __ push(r1); | 1932 __ push(r0); // Push the heap number, not the untagged int32. |
| 1930 __ CallRuntime(Runtime::kNumberAlloc, 0); | 1933 __ CallRuntime(Runtime::kNumberAlloc, 0); |
| 1931 __ pop(r1); | 1934 __ mov(r2, r0); // Move the new heap number into r2. |
| 1935 // Get the heap number into r0, now that the new heap number is in r2. |
| 1936 __ pop(r0); |
| 1932 __ LeaveInternalFrame(); | 1937 __ LeaveInternalFrame(); |
| 1933 | 1938 |
| 1939 // Convert the heap number in r0 to an untagged integer in r1. |
| 1940 // This can't go slow-case because it's the same number we already |
| 1941 // converted once. |
| 1942 __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible); |
| 1943 __ mvn(r1, Operand(r1)); |
| 1944 |
| 1934 __ bind(&heapnumber_allocated); | 1945 __ bind(&heapnumber_allocated); |
| 1946 __ mov(r0, r2); // Move newly allocated heap number to r0. |
| 1935 } | 1947 } |
| 1936 | 1948 |
| 1937 if (CpuFeatures::IsSupported(VFP3)) { | 1949 if (CpuFeatures::IsSupported(VFP3)) { |
| 1938 // Convert the int32 in r1 to the heap number in r0. r2 is corrupted. | 1950 // Convert the int32 in r1 to the heap number in r0. r2 is corrupted. |
| 1939 CpuFeatures::Scope scope(VFP3); | 1951 CpuFeatures::Scope scope(VFP3); |
| 1940 __ vmov(s0, r1); | 1952 __ vmov(s0, r1); |
| 1941 __ vcvt_f64_s32(d0, s0); | 1953 __ vcvt_f64_s32(d0, s0); |
| 1942 __ sub(r2, r0, Operand(kHeapObjectTag)); | 1954 __ sub(r2, r0, Operand(kHeapObjectTag)); |
| 1943 __ vstr(d0, r2, HeapNumber::kValueOffset); | 1955 __ vstr(d0, r2, HeapNumber::kValueOffset); |
| 1944 __ Ret(); | 1956 __ Ret(); |
| 1945 } else { | 1957 } else { |
| 1946 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | 1958 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not |
| 1947 // have to set up a frame. | 1959 // have to set up a frame. |
| 1948 WriteInt32ToHeapNumberStub stub(r1, r0, r2); | 1960 WriteInt32ToHeapNumberStub stub(r1, r0, r2); |
| 1949 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 1961 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 1950 } | 1962 } |
| 1963 |
| 1964 __ bind(&impossible); |
| 1965 if (FLAG_debug_code) { |
| 1966 __ stop("Incorrect assumption in bit-not stub"); |
| 1967 } |
| 1951 } | 1968 } |
| 1952 | 1969 |
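End to end, this stub computes JavaScript's ~x for a heap-number input: truncate to int32 per ECMA-262 ToInt32 (ConvertToInt32), complement with mvn, then return a smi if the result fits or box it in a heap number otherwise. A rough scalar C++ model of the fast path, assuming a finite input small enough that plain int64 truncation matches full ToInt32:

    #include <cassert>
    #include <cstdint>

    // Rough model of the bit-not path: ~ToInt32(x). Assumes x is finite
    // and within int64 range; the real ConvertToInt32 implements the
    // full ECMA-262 modulo-2^32 truncation for larger doubles.
    int32_t BitNotModel(double x) {
      int32_t as_int = static_cast<int32_t>(static_cast<uint32_t>(
          static_cast<int64_t>(x)));  // truncate toward zero
      return ~as_int;                 // the mvn in the stub
    }

    int main() {
      assert(BitNotModel(5.7) == -6);  // ToInt32(5.7) == 5, ~5 == -6
      assert(BitNotModel(-1.0) == 0);  // ~(-1) == 0
      return 0;
    }
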
| 1953 | 1970 |
| 1954 // TODO(svenpanne): Use virtual functions instead of switch. | 1971 // TODO(svenpanne): Use virtual functions instead of switch. |
| 1955 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | 1972 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
| 1956 switch (op_) { | 1973 switch (op_) { |
| 1957 case Token::SUB: | 1974 case Token::SUB: |
| 1958 GenerateGenericStubSub(masm); | 1975 GenerateGenericStubSub(masm); |
| 1959 break; | 1976 break; |
| 1960 case Token::BIT_NOT: | 1977 case Token::BIT_NOT: |
| (...skipping 4428 matching lines...) |
| 6389 __ mov(result, Operand(0)); | 6406 __ mov(result, Operand(0)); |
| 6390 __ Ret(); | 6407 __ Ret(); |
| 6391 } | 6408 } |
| 6392 | 6409 |
| 6393 | 6410 |
| 6394 #undef __ | 6411 #undef __ |
| 6395 | 6412 |
| 6396 } } // namespace v8::internal | 6413 } } // namespace v8::internal |
| 6397 | 6414 |
| 6398 #endif // V8_TARGET_ARCH_ARM | 6415 #endif // V8_TARGET_ARCH_ARM |