Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/arm/macro-assembler-arm.h" | 7 #include "src/arm/macro-assembler-arm.h" |
| 8 #include "src/compilation-info.h" | 8 #include "src/compilation-info.h" |
| 9 #include "src/compiler/code-generator-impl.h" | 9 #include "src/compiler/code-generator-impl.h" |
| 10 #include "src/compiler/gap-resolver.h" | 10 #include "src/compiler/gap-resolver.h" |
| (...skipping 1873 matching lines...) | |
| 1884 MachineRepresentation rep = LocationOperand::cast(source)->representation(); | 1884 MachineRepresentation rep = LocationOperand::cast(source)->representation(); |
| 1885 if (rep == MachineRepresentation::kFloat64) { | 1885 if (rep == MachineRepresentation::kFloat64) { |
| 1886 DwVfpRegister src = g.ToDoubleRegister(source); | 1886 DwVfpRegister src = g.ToDoubleRegister(source); |
| 1887 if (destination->IsDoubleRegister()) { | 1887 if (destination->IsDoubleRegister()) { |
| 1888 DwVfpRegister dst = g.ToDoubleRegister(destination); | 1888 DwVfpRegister dst = g.ToDoubleRegister(destination); |
| 1889 __ Move(dst, src); | 1889 __ Move(dst, src); |
| 1890 } else { | 1890 } else { |
| 1891 DCHECK(destination->IsDoubleStackSlot()); | 1891 DCHECK(destination->IsDoubleStackSlot()); |
| 1892 __ vstr(src, g.ToMemOperand(destination)); | 1892 __ vstr(src, g.ToMemOperand(destination)); |
| 1893 } | 1893 } |
| 1894 } else { | 1894 } else if (rep == MachineRepresentation::kFloat32) { |
| 1895 DCHECK_EQ(MachineRepresentation::kFloat32, rep); | |
| 1896 // GapResolver may give us reg codes that don't map to actual s-registers. | 1895 // GapResolver may give us reg codes that don't map to actual s-registers. |
| 1897 // Generate code to work around those cases. | 1896 // Generate code to work around those cases. |
| 1898 int src_code = LocationOperand::cast(source)->register_code(); | 1897 int src_code = LocationOperand::cast(source)->register_code(); |
| 1899 if (destination->IsFloatRegister()) { | 1898 if (destination->IsFloatRegister()) { |
| 1900 int dst_code = LocationOperand::cast(destination)->register_code(); | 1899 int dst_code = LocationOperand::cast(destination)->register_code(); |
| 1901 __ VmovExtended(dst_code, src_code, kScratchReg); | 1900 __ VmovExtended(dst_code, src_code, kScratchReg); |
| 1902 } else { | 1901 } else { |
| 1903 DCHECK(destination->IsFloatStackSlot()); | 1902 DCHECK(destination->IsFloatStackSlot()); |
| 1904 __ VmovExtended(g.ToMemOperand(destination), src_code, kScratchReg); | 1903 __ VmovExtended(g.ToMemOperand(destination), src_code, kScratchReg); |
| 1905 } | 1904 } |
| | 1905 } else { |
| | 1906 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
| | 1907 QwNeonRegister src = g.ToSimd128Register(source); |
| | 1908 if (destination->IsSimd128Register()) { |
| | 1909 QwNeonRegister dst = g.ToSimd128Register(destination); |
| | 1910 __ Move(dst, src); |
| | 1911 } else { |
| | 1912 DCHECK(destination->IsSimd128StackSlot()); |
| | 1913 MemOperand dst = g.ToMemOperand(destination); |
| | 1914 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); |
| | 1915 __ vst1(Neon8, NeonListOperand(src.low(), 2), |
| | 1916 NeonMemOperand(kScratchReg)); |
| | 1917 // // TODO(bbudge) Use vst1 when it's available. |
martyn.capewell
2016/11/24 11:40:58
I hadn't realised vld1/vst1 had already been implemented.

bbudge
2016/11/24 13:48:05
I forgot to remove these commented out sections.
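(For readers following the thread: a minimal sketch of the two store strategies under discussion. It reuses only names that appear in this patch — `vst1`, `NeonListOperand`, `NeonMemOperand`, `kScratchReg` — and is an illustrative fragment in the surrounding macro-assembler context, not a standalone program.)

```cpp
// Sketch: storing a 128-bit Q register (src) to a stack slot, both ways.
MemOperand dst = g.ToMemOperand(destination);

// vst1 path (the live code above): vst1 only takes a register address,
// so materialize base + offset in the scratch register first, then store
// all 16 bytes with a single instruction.
__ add(kScratchReg, dst.rn(), Operand(dst.offset()));
__ vst1(Neon8, NeonListOperand(src.low(), 2), NeonMemOperand(kScratchReg));

// Fallback path (the commented-out lines below): two 64-bit vstr stores.
// vstr accepts base + offset directly, but needs two instructions.
__ vstr(src.low(), dst);
dst.set_offset(dst.offset() + kDoubleSize);
__ vstr(src.high(), dst);
```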
| | 1918 // __ vstr(src.low(), dst); |
| | 1919 // dst.set_offset(dst.offset() + kDoubleSize); |
| | 1920 // __ vstr(src.high(), dst); |
| | 1921 } |
| 1906 } | 1922 } |
| 1907 } else if (source->IsFPStackSlot()) { | 1923 } else if (source->IsFPStackSlot()) { |
| 1908 MemOperand src = g.ToMemOperand(source); | 1924 MemOperand src = g.ToMemOperand(source); |
| 1909 MachineRepresentation rep = | 1925 MachineRepresentation rep = |
| 1910 LocationOperand::cast(destination)->representation(); | 1926 LocationOperand::cast(destination)->representation(); |
| 1911 if (destination->IsFPRegister()) { | 1927 if (destination->IsFPRegister()) { |
| 1912 if (rep == MachineRepresentation::kFloat64) { | 1928 if (rep == MachineRepresentation::kFloat64) { |
| 1913 __ vldr(g.ToDoubleRegister(destination), src); | 1929 __ vldr(g.ToDoubleRegister(destination), src); |
| 1914 } else { | 1930 } else if (rep == MachineRepresentation::kFloat32) { |
| 1915 DCHECK_EQ(MachineRepresentation::kFloat32, rep); | |
| 1916 // GapResolver may give us reg codes that don't map to actual | 1931 // GapResolver may give us reg codes that don't map to actual |
| 1917 // s-registers. Generate code to work around those cases. | 1932 // s-registers. Generate code to work around those cases. |
| 1918 int dst_code = LocationOperand::cast(destination)->register_code(); | 1933 int dst_code = LocationOperand::cast(destination)->register_code(); |
| 1919 __ VmovExtended(dst_code, src, kScratchReg); | 1934 __ VmovExtended(dst_code, src, kScratchReg); |
| | 1935 } else { |
| | 1936 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
| | 1937 QwNeonRegister dst = g.ToSimd128Register(destination); |
| | 1938 __ add(kScratchReg, src.rn(), Operand(src.offset())); |
| | 1939 __ vld1(Neon8, NeonListOperand(dst.low(), 2), |
| | 1940 NeonMemOperand(kScratchReg)); |
| | 1941 // // TODO(bbudge) Use vld1 when it's available. |
| | 1942 // __ vldr(dst.low(), src); |
| | 1943 // src.set_offset(src.offset() + kDoubleSize); |
| | 1944 // __ vldr(dst.high(), src); |
| 1920 } | 1945 } |
| 1921 } else { | 1946 } else { |
| 1922 DCHECK(destination->IsFPStackSlot()); | 1947 DCHECK(destination->IsFPStackSlot()); |
| 1923 if (rep == MachineRepresentation::kFloat64) { | 1948 if (rep == MachineRepresentation::kFloat64) { |
| 1924 DwVfpRegister temp = kScratchDoubleReg; | 1949 DwVfpRegister temp = kScratchDoubleReg; |
| 1925 __ vldr(temp, src); | 1950 __ vldr(temp, src); |
| 1926 __ vstr(temp, g.ToMemOperand(destination)); | 1951 __ vstr(temp, g.ToMemOperand(destination)); |
| 1927 } else { | 1952 } else if (rep == MachineRepresentation::kFloat32) { |
| 1928 DCHECK_EQ(MachineRepresentation::kFloat32, rep); | |
| 1929 SwVfpRegister temp = kScratchDoubleReg.low(); | 1953 SwVfpRegister temp = kScratchDoubleReg.low(); |
| 1930 __ vldr(temp, src); | 1954 __ vldr(temp, src); |
| 1931 __ vstr(temp, g.ToMemOperand(destination)); | 1955 __ vstr(temp, g.ToMemOperand(destination)); |
| | 1956 } else { |
| | 1957 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
| | 1958 MemOperand dst = g.ToMemOperand(destination); |
| | 1959 __ add(kScratchReg, src.rn(), Operand(src.offset())); |
| | 1960 __ vld1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
| | 1961 NeonMemOperand(kScratchReg)); |
| | 1962 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); |
| | 1963 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
| | 1964 NeonMemOperand(kScratchReg)); |
| | 1965 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); |
| | 1966 // // TODO(bbudge) Use vld1/vst1 when they're available. |
| | 1967 // __ vldr(kScratchDoubleReg, src); |
| | 1968 // __ vstr(kScratchDoubleReg, dst); |
| | 1969 // src.set_offset(src.offset() + kDoubleSize); |
| | 1970 // dst.set_offset(dst.offset() + kDoubleSize); |
| | 1971 // __ vldr(kScratchDoubleReg, src); |
| | 1972 // __ vstr(kScratchDoubleReg, dst); |
| 1932 } | 1973 } |
| 1933 } | 1974 } |
| 1934 } else { | 1975 } else { |
| 1935 UNREACHABLE(); | 1976 UNREACHABLE(); |
| 1936 } | 1977 } |
| 1937 } | 1978 } |
| 1938 | 1979 |
| 1939 | |
| 1940 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 1980 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
| 1941 InstructionOperand* destination) { | 1981 InstructionOperand* destination) { |
| 1942 ArmOperandConverter g(this, nullptr); | 1982 ArmOperandConverter g(this, nullptr); |
| 1943 // Dispatch on the source and destination operand kinds. Not all | 1983 // Dispatch on the source and destination operand kinds. Not all |
| 1944 // combinations are possible. | 1984 // combinations are possible. |
| 1945 if (source->IsRegister()) { | 1985 if (source->IsRegister()) { |
| 1946 // Register-register. | 1986 // Register-register. |
| 1947 Register temp = kScratchReg; | 1987 Register temp = kScratchReg; |
| 1948 Register src = g.ToRegister(source); | 1988 Register src = g.ToRegister(source); |
| 1949 if (destination->IsRegister()) { | 1989 if (destination->IsRegister()) { |
| (...skipping 18 matching lines...) | |
| 1968 __ vldr(temp_1, dst); | 2008 __ vldr(temp_1, dst); |
| 1969 __ str(temp_0, dst); | 2009 __ str(temp_0, dst); |
| 1970 __ vstr(temp_1, src); | 2010 __ vstr(temp_1, src); |
| 1971 } else if (source->IsFPRegister()) { | 2011 } else if (source->IsFPRegister()) { |
| 1972 MachineRepresentation rep = LocationOperand::cast(source)->representation(); | 2012 MachineRepresentation rep = LocationOperand::cast(source)->representation(); |
| 1973 LowDwVfpRegister temp = kScratchDoubleReg; | 2013 LowDwVfpRegister temp = kScratchDoubleReg; |
| 1974 if (rep == MachineRepresentation::kFloat64) { | 2014 if (rep == MachineRepresentation::kFloat64) { |
| 1975 DwVfpRegister src = g.ToDoubleRegister(source); | 2015 DwVfpRegister src = g.ToDoubleRegister(source); |
| 1976 if (destination->IsFPRegister()) { | 2016 if (destination->IsFPRegister()) { |
| 1977 DwVfpRegister dst = g.ToDoubleRegister(destination); | 2017 DwVfpRegister dst = g.ToDoubleRegister(destination); |
| 1978 __ vswp(src, dst); | 2018 __ Swap(src, dst); |
| 1979 } else { | 2019 } else { |
| 1980 DCHECK(destination->IsFPStackSlot()); | 2020 DCHECK(destination->IsFPStackSlot()); |
| 1981 MemOperand dst = g.ToMemOperand(destination); | 2021 MemOperand dst = g.ToMemOperand(destination); |
| 1982 __ Move(temp, src); | 2022 __ Move(temp, src); |
| 1983 __ vldr(src, dst); | 2023 __ vldr(src, dst); |
| 1984 __ vstr(temp, dst); | 2024 __ vstr(temp, dst); |
| 1985 } | 2025 } |
| 1986 } else { | 2026 } else if (rep == MachineRepresentation::kFloat32) { |
| 1987 DCHECK_EQ(MachineRepresentation::kFloat32, rep); | |
| 1988 int src_code = LocationOperand::cast(source)->register_code(); | 2027 int src_code = LocationOperand::cast(source)->register_code(); |
| 1989 if (destination->IsFPRegister()) { | 2028 if (destination->IsFPRegister()) { |
| 1990 int dst_code = LocationOperand::cast(destination)->register_code(); | 2029 int dst_code = LocationOperand::cast(destination)->register_code(); |
| 1991 __ VmovExtended(temp.low().code(), src_code, kScratchReg); | 2030 __ VmovExtended(temp.low().code(), src_code, kScratchReg); |
| 1992 __ VmovExtended(src_code, dst_code, kScratchReg); | 2031 __ VmovExtended(src_code, dst_code, kScratchReg); |
| 1993 __ VmovExtended(dst_code, temp.low().code(), kScratchReg); | 2032 __ VmovExtended(dst_code, temp.low().code(), kScratchReg); |
| 1994 } else { | 2033 } else { |
| 1995 DCHECK(destination->IsFPStackSlot()); | 2034 DCHECK(destination->IsFPStackSlot()); |
| 1996 MemOperand dst = g.ToMemOperand(destination); | 2035 MemOperand dst = g.ToMemOperand(destination); |
| 1997 __ VmovExtended(temp.low().code(), src_code, kScratchReg); | 2036 __ VmovExtended(temp.low().code(), src_code, kScratchReg); |
| 1998 __ VmovExtended(src_code, dst, kScratchReg); | 2037 __ VmovExtended(src_code, dst, kScratchReg); |
| 1999 __ vstr(temp.low(), dst); | 2038 __ vstr(temp.low(), dst); |
| 2000 } | 2039 } |
| | 2040 } else { |
| | 2041 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
| | 2042 QwNeonRegister src = g.ToSimd128Register(source); |
| | 2043 if (destination->IsFPRegister()) { |
| | 2044 QwNeonRegister dst = g.ToSimd128Register(destination); |
| | 2045 __ Swap(src, dst); |
| | 2046 } else { |
| | 2047 DCHECK(destination->IsFPStackSlot()); |
| | 2048 MemOperand dst = g.ToMemOperand(destination); |
| | 2049 __ Move(kScratchQuadReg, src); |
| | 2050 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); |
| | 2051 __ vld1(Neon8, NeonListOperand(src.low(), 2), |
| | 2052 NeonMemOperand(kScratchReg)); |
| | 2053 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
| | 2054 NeonMemOperand(kScratchReg)); |
| | 2055 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); |
| | 2056 // QwNeonRegister temp = kScratchQuadReg; |
| | 2057 // MemOperand dst2(dst.rn(), dst.offset() + kDoubleSize); |
| | 2058 // // TODO(bbudge) Use vld1 / vst1 when they're available. |
| | 2059 // __ Move(temp, src); |
| | 2060 // __ vldr(src.low(), dst); |
| | 2061 // __ vldr(src.high(), dst2); |
| | 2062 // __ vstr(temp.low(), dst); |
| | 2063 // __ vstr(temp.high(), dst2); |
| | 2064 // // Restore the 0 register. |
| | 2065 // __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); |
| 2001 } | 2067 } |
| 2002 } else if (source->IsFPStackSlot()) { | 2068 } else if (source->IsFPStackSlot()) { |
| 2003 DCHECK(destination->IsFPStackSlot()); | 2069 DCHECK(destination->IsFPStackSlot()); |
| 2004 Register temp_0 = kScratchReg; | 2070 MemOperand src = g.ToMemOperand(source); |
| 2005 LowDwVfpRegister temp_1 = kScratchDoubleReg; | 2071 MemOperand dst = g.ToMemOperand(destination); |
| 2006 MemOperand src0 = g.ToMemOperand(source); | |
| 2007 MemOperand dst0 = g.ToMemOperand(destination); | |
| 2008 MachineRepresentation rep = LocationOperand::cast(source)->representation(); | 2072 MachineRepresentation rep = LocationOperand::cast(source)->representation(); |
| 2009 if (rep == MachineRepresentation::kFloat64) { | 2073 if (rep == MachineRepresentation::kFloat64) { |
| 2010 MemOperand src1(src0.rn(), src0.offset() + kPointerSize); | 2074 __ vldr(kScratchDoubleReg, dst); |
| 2011 MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize); | 2075 __ vldr(kDoubleRegZero, src); |
| 2012 __ vldr(temp_1, dst0); // Save destination in temp_1. | 2076 __ vstr(kScratchDoubleReg, src); |
| 2013 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination. | 2077 __ vstr(kDoubleRegZero, dst); |
| 2014 __ str(temp_0, dst0); | 2078 // Restore the 0 register. |
| 2015 __ ldr(temp_0, src1); | 2079 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); |
| 2016 __ str(temp_0, dst1); | 2080 } else if (rep == MachineRepresentation::kFloat32) { |
| 2017 __ vstr(temp_1, src0); | 2081 __ vldr(kScratchDoubleReg.low(), dst); |
| | 2082 __ vldr(kScratchDoubleReg.high(), src); |
| | 2083 __ vstr(kScratchDoubleReg.low(), src); |
| | 2084 __ vstr(kScratchDoubleReg.high(), dst); |
| 2018 } else { | 2085 } else { |
| 2019 DCHECK_EQ(MachineRepresentation::kFloat32, rep); | 2086 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
| 2020 __ vldr(temp_1.low(), dst0); // Save destination in temp_1. | 2087 __ vldr(kScratchDoubleReg, dst); |
| 2021 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination. | 2088 __ vldr(kDoubleRegZero, src); |
| 2022 __ str(temp_0, dst0); | 2089 __ vstr(kScratchDoubleReg, src); |
| 2023 __ vstr(temp_1.low(), src0); | 2090 __ vstr(kDoubleRegZero, dst); |
| | 2091 src.set_offset(src.offset() + kDoubleSize); |
| | 2092 dst.set_offset(dst.offset() + kDoubleSize); |
| | 2093 __ vldr(kScratchDoubleReg, dst); |
| | 2094 __ vldr(kDoubleRegZero, src); |
| | 2095 __ vstr(kScratchDoubleReg, src); |
| | 2096 __ vstr(kDoubleRegZero, dst); |
| | 2097 // Restore the 0 register. |
| | 2098 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); |
| 2024 } | 2099 } |
| 2025 } else { | 2100 } else { |
| 2026 // No other combinations are possible. | 2101 // No other combinations are possible. |
| 2027 UNREACHABLE(); | 2102 UNREACHABLE(); |
| 2028 } | 2103 } |
| 2029 } | 2104 } |
| 2030 | 2105 |
| 2031 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { | 2106 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { |
| 2032 // On 32-bit ARM we emit the jump tables inline. | 2107 // On 32-bit ARM we emit the jump tables inline. |
| 2033 UNREACHABLE(); | 2108 UNREACHABLE(); |
| (...skipping 19 matching lines...) | |
| 2053 padding_size -= v8::internal::Assembler::kInstrSize; | 2128 padding_size -= v8::internal::Assembler::kInstrSize; |
| 2054 } | 2129 } |
| 2055 } | 2130 } |
| 2056 } | 2131 } |
| 2057 | 2132 |
| 2058 #undef __ | 2133 #undef __ |
| 2059 | 2134 |
| 2060 } // namespace compiler | 2135 } // namespace compiler |
| 2061 } // namespace internal | 2136 } // namespace internal |
| 2062 } // namespace v8 | 2137 } // namespace v8 |
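A side note on the stack-to-stack FP swaps in the new `AssembleSwap`: lacking two free FP scratch registers, the code borrows `kDoubleRegZero` as a second temporary and re-zeroes it afterwards. A minimal sketch of that pattern, assuming only the macro-assembler calls already used in this patch:

```cpp
// Sketch: swap two double-width stack slots with two scratch D registers.
MemOperand src = g.ToMemOperand(source);
MemOperand dst = g.ToMemOperand(destination);

__ vldr(kScratchDoubleReg, dst);  // temp1 = *dst
__ vldr(kDoubleRegZero, src);     // temp2 = *src (borrow the zero register)
__ vstr(kScratchDoubleReg, src);  // *src = temp1
__ vstr(kDoubleRegZero, dst);     // *dst = temp2

// The rest of the code generator assumes kDoubleRegZero holds 0.0, so
// restore it: veor of a register with itself produces zero.
__ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero);
```

A kSimd128 swap repeats this once more at `offset + kDoubleSize`, which is what the patch's kSimd128 branch in the stack-to-stack case does.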