OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1938 matching lines...)
1949 // it. | 1949 // it. |
1950 GenerateJumpFunctionIgnoreReceiver(function); | 1950 GenerateJumpFunctionIgnoreReceiver(function); |
1951 | 1951 |
1952 HandlerFrontendFooter(&miss); | 1952 HandlerFrontendFooter(&miss); |
1953 | 1953 |
1954 // Return the generated code. | 1954 // Return the generated code. |
1955 return GetCode(type, name); | 1955 return GetCode(type, name); |
1956 } | 1956 } |
1957 | 1957 |
1958 | 1958 |
1959 Handle<Code> CallStubCompiler::CompileMathFloorCall( | |
1960 Handle<Object> object, | |
1961 Handle<JSObject> holder, | |
1962 Handle<Cell> cell, | |
1963 Handle<JSFunction> function, | |
1964 Handle<String> name, | |
1965 Code::StubType type) { | |
1966 const int argc = arguments().immediate(); | |
1967 StackArgumentsAccessor args(rsp, argc); | |
1968 | |
1969 // If the object is not a JSObject or we got an unexpected number of | |
1970 // arguments, bail out to the regular call. | |
1971 if (!object->IsJSObject() || argc != 1) { | |
1972 return Handle<Code>::null(); | |
1973 } | |
1974 | |
1975 Label miss, slow; | |
1976 | |
1977 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
1978 if (!cell.is_null()) { | |
1979 ASSERT(cell->value() == *function); | |
1980 GenerateLoadFunctionFromCell(cell, function, &miss); | |
1981 } | |
1982 | |
1983 // Load the (only) argument into rax. | |
1984 __ movq(rax, args.GetArgumentOperand(1)); | |
1985 | |
1986 // Check if the argument is a smi. | |
1987 Label smi; | |
1988 STATIC_ASSERT(kSmiTag == 0); | |
1989 __ JumpIfSmi(rax, &smi); | |
1990 | |
1991 // Check if the argument is a heap number and load its value into xmm0. | |
1992 __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK); | |
1993 __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset)); | |
1994 | |
1995 // Check if the argument is strictly positive. Note this also discards NaN. | |
1996 __ xorpd(xmm1, xmm1); | |
1997 __ ucomisd(xmm0, xmm1); | |
1998 __ j(below_equal, &slow); | |
1999 | |
2000 // Do a truncating conversion. | |
2001 __ cvttsd2si(rax, xmm0); | |
2002 | |
2003   // Check for 0x80000000, which signals a failed conversion. | |
2004 Label conversion_failure; | |
2005 __ cmpl(rax, Immediate(0x80000000)); | |
2006 __ j(equal, &conversion_failure); | |
2007 | |
2008 // Smi tag and return. | |
2009 __ Integer32ToSmi(rax, rax); | |
2010 __ bind(&smi); | |
2011 __ ret(2 * kPointerSize); | |
2012 | |
2013 // Check if the argument is < 2^kMantissaBits. | |
2014 Label already_round; | |
2015 __ bind(&conversion_failure); | |
2016   int64_t kTwoMantissaBits = V8_INT64_C(0x4330000000000000); | |
2017 __ movq(rbx, kTwoMantissaBits); | |
2018 __ movq(xmm1, rbx); | |
2019 __ ucomisd(xmm0, xmm1); | |
2020 __ j(above_equal, &already_round); | |
2021 | |
2022 // Save a copy of the argument. | |
2023 __ movaps(xmm2, xmm0); | |
2024 | |
2025 // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits. | |
2026 __ addsd(xmm0, xmm1); | |
2027 __ subsd(xmm0, xmm1); | |
2028 | |
2029 // Compare the argument and the tentative result to get the right mask: | |
2030 // if xmm2 < xmm0: | |
2031 // xmm2 = 1...1 | |
2032 // else: | |
2033 // xmm2 = 0...0 | |
2034 __ cmpltsd(xmm2, xmm0); | |
2035 | |
2036 // Subtract 1 if the argument was less than the tentative result. | |
2037 int64_t kOne = V8_INT64_C(0x3ff0000000000000); | |
2038 __ movq(rbx, kOne); | |
2039 __ movq(xmm1, rbx); | |
2040 __ andpd(xmm1, xmm2); | |
2041 __ subsd(xmm0, xmm1); | |
2042 | |
2043 // Return a new heap number. | |
2044 __ AllocateHeapNumber(rax, rbx, &slow); | |
2045 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | |
2046 __ ret(2 * kPointerSize); | |
2047 | |
2048 // Return the argument (when it's an already round heap number). | |
2049 __ bind(&already_round); | |
2050 __ movq(rax, args.GetArgumentOperand(1)); | |
2051 __ ret(2 * kPointerSize); | |
2052 | |
2053 __ bind(&slow); | |
2054 // We do not have to patch the receiver because the function makes no use of | |
2055 // it. | |
2056 GenerateJumpFunctionIgnoreReceiver(function); | |
2057 | |
2058 HandlerFrontendFooter(&miss); | |
2059 | |
2060 // Return the generated code. | |
2061 return GetCode(type, name); | |
2062 } | |
2063 | |
2064 | |
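The deleted CompileMathFloorCall stub above encodes a small algorithm worth spelling out: truncate with cvttsd2si when the (strictly positive, non-NaN) argument fits in 32 bits, otherwise round it with the 2^52 add-and-subtract trick and correct downward by one when that rounded up. Below is a minimal scalar sketch of that logic; the function name is illustrative rather than V8 API, and the explicit range check stands in for the stub's 0x80000000 sentinel, since a C++ cast of an out-of-range double to int32 would be undefined behavior.

#include <cassert>
#include <cmath>
#include <cstdint>

// Hypothetical scalar model of the deleted floor stub's fast paths.
// The stub bails out to the generic call for x <= 0 and for NaN.
double StubStyleFloor(double x) {
  assert(x > 0.0 && !std::isnan(x));
  // Fast path: cvttsd2si truncation, valid whenever x fits in int32.
  // (The stub detects overflow via the 0x80000000 sentinel instead.)
  if (x < 2147483648.0) {
    return static_cast<double>(static_cast<int32_t>(x));
  }
  // Doubles >= 2^52 have no fractional bits, so they are already round.
  const double kTwo52 = 4503599627370496.0;  // 0x4330000000000000
  if (x >= kTwo52) return x;
  // (x + 2^52) - 2^52 rounds to the nearest integer under the default
  // rounding mode; subtract 1 when that rounded upward past x.
  double rounded = (x + kTwo52) - kTwo52;
  if (x < rounded) rounded -= 1.0;
  return rounded;
}

For instance, StubStyleFloor(2.5) takes the truncation path and returns 2.0, while 30000000000.75 overflows int32, gets rounded up to 30000000001.0 by the 2^52 trick, and is then corrected down to 30000000000.0.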
2065 Handle<Code> CallStubCompiler::CompileMathAbsCall( | |
2066 Handle<Object> object, | |
2067 Handle<JSObject> holder, | |
2068 Handle<Cell> cell, | |
2069 Handle<JSFunction> function, | |
2070 Handle<String> name, | |
2071 Code::StubType type) { | |
2072 // If the object is not a JSObject or we got an unexpected number of | |
2073 // arguments, bail out to the regular call. | |
2074 const int argc = arguments().immediate(); | |
2075 StackArgumentsAccessor args(rsp, argc); | |
2076 if (!object->IsJSObject() || argc != 1) return Handle<Code>::null(); | |
2077 | |
2078 Label miss; | |
2079 | |
2080 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
2081 if (!cell.is_null()) { | |
2082 ASSERT(cell->value() == *function); | |
2083 GenerateLoadFunctionFromCell(cell, function, &miss); | |
2084 } | |
2085 | |
2086 // Load the (only) argument into rax. | |
2087 __ movq(rax, args.GetArgumentOperand(1)); | |
2088 | |
2089 // Check if the argument is a smi. | |
2090 Label not_smi; | |
2091 STATIC_ASSERT(kSmiTag == 0); | |
2092 __ JumpIfNotSmi(rax, ¬_smi); | |
2093 | |
2094   // Branchless abs implementation; see: | |
2095   // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs | |
2096   // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0 | |
2097   // otherwise. | |
2098 __ movq(rbx, rax); | |
2099 __ sar(rbx, Immediate(kBitsPerPointer - 1)); | |
2100 | |
2101   // Do bitwise not or do nothing depending on rbx. | |
2102 __ xor_(rax, rbx); | |
2103 | |
2104   // Add 1 or do nothing depending on rbx. | |
2105 __ subq(rax, rbx); | |
2106 | |
2107 // If the result is still negative, go to the slow case. | |
2108 // This only happens for the most negative smi. | |
2109 Label slow; | |
2110 __ j(negative, &slow); | |
2111 | |
2112 __ ret(2 * kPointerSize); | |
2113 | |
2114 // Check if the argument is a heap number and load its value. | |
2115 __ bind(¬_smi); | |
2116 __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK); | |
2117 __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); | |
2118 | |
2119 // Check the sign of the argument. If the argument is positive, | |
2120 // just return it. | |
2121 Label negative_sign; | |
2122 const int sign_mask_shift = | |
2123 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte; | |
2124 __ Set(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift); | |
2125 __ testq(rbx, rdi); | |
2126 __ j(not_zero, &negative_sign); | |
2127 __ ret(2 * kPointerSize); | |
2128 | |
2129 // If the argument is negative, clear the sign, and return a new | |
2130 // number. We still have the sign mask in rdi. | |
2131 __ bind(&negative_sign); | |
2132 __ xor_(rbx, rdi); | |
2133 __ AllocateHeapNumber(rax, rdx, &slow); | |
2134 __ MoveDouble(FieldOperand(rax, HeapNumber::kValueOffset), rbx); | |
2135 __ ret(2 * kPointerSize); | |
2136 | |
2137 __ bind(&slow); | |
2138 // We do not have to patch the receiver because the function makes no use of | |
2139 // it. | |
2140 GenerateJumpFunctionIgnoreReceiver(function); | |
2141 | |
2142 HandlerFrontendFooter(&miss); | |
2143 | |
2144 // Return the generated code. | |
2145 return GetCode(type, name); | |
2146 } | |
2147 | |
2148 | |
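The deleted CompileMathAbsCall stub above leans on two tricks: the branchless integer abs from the bithacks page it cites for the smi case, and clearing the IEEE-754 sign bit for the heap-number case. The sketch below is a hedged C++ rendering of both, with illustrative names (not V8 API); it assumes the caller excludes the most negative value, which the stub handles by re-checking the sign and falling through to the slow case.

#include <cstdint>
#include <cstring>

// Smi path: mask is all-ones for a negative input and all-zeros
// otherwise (assuming an arithmetic right shift, as on x64), so
// (x ^ mask) - mask negates without a branch. INT64_MIN would
// overflow, so callers of this sketch are assumed to exclude it.
int64_t BranchlessAbs(int64_t x) {
  int64_t mask = x >> 63;  // 0 or -1
  return (x ^ mask) - mask;
}

// Heap-number path: drop the sign bit of the raw IEEE-754 bit
// pattern, mirroring the stub's xor with HeapNumber::kSignMask
// shifted into the top word.
double SignBitAbs(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  bits &= ~(uint64_t{1} << 63);
  double result;
  std::memcpy(&result, &bits, sizeof result);
  return result;
}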
2149 Handle<Code> CallStubCompiler::CompileFastApiCall( | 1959 Handle<Code> CallStubCompiler::CompileFastApiCall( |
2150 const CallOptimization& optimization, | 1960 const CallOptimization& optimization, |
2151 Handle<Object> object, | 1961 Handle<Object> object, |
2152 Handle<JSObject> holder, | 1962 Handle<JSObject> holder, |
2153 Handle<Cell> cell, | 1963 Handle<Cell> cell, |
2154 Handle<JSFunction> function, | 1964 Handle<JSFunction> function, |
2155 Handle<String> name) { | 1965 Handle<String> name) { |
2156 ASSERT(optimization.is_simple_api_call()); | 1966 ASSERT(optimization.is_simple_api_call()); |
2157 // Bail out if object is a global object as we don't want to | 1967 // Bail out if object is a global object as we don't want to |
2158 // repatch it to global receiver. | 1968 // repatch it to global receiver. |
(...skipping 586 matching lines...)
2745 // ----------------------------------- | 2555 // ----------------------------------- |
2746 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 2556 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
2747 } | 2557 } |
2748 | 2558 |
2749 | 2559 |
2750 #undef __ | 2560 #undef __ |
2751 | 2561 |
2752 } } // namespace v8::internal | 2562 } } // namespace v8::internal |
2753 | 2563 |
2754 #endif // V8_TARGET_ARCH_X64 | 2564 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |