| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. |
| 6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
| (...skipping 1809 matching lines...) |
| 1820 __ ldr(length_reg, | 1820 __ ldr(length_reg, |
| 1821 FieldAddress(value_reg, | 1821 FieldAddress(value_reg, |
| 1822 field().guarded_list_length_in_object_offset())); | 1822 field().guarded_list_length_in_object_offset())); |
| 1823 __ CompareImmediate(length_reg, | 1823 __ CompareImmediate(length_reg, |
| 1824 Smi::RawValue(field().guarded_list_length())); | 1824 Smi::RawValue(field().guarded_list_length())); |
| 1825 __ b(deopt, NE); | 1825 __ b(deopt, NE); |
| 1826 } | 1826 } |
| 1827 } | 1827 } |
| 1828 | 1828 |
| 1829 | 1829 |
| 1830 class StoreInstanceFieldSlowPath : public SlowPathCode { | 1830 class BoxAllocationSlowPath : public SlowPathCode { |
| 1831 public: | 1831 public: |
| 1832 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, | 1832 BoxAllocationSlowPath(Instruction* instruction, |
| 1833 const Class& cls) | 1833 const Class& cls, |
| 1834 : instruction_(instruction), cls_(cls) { } | 1834 Register result) |
| 1835 : instruction_(instruction), |
| 1836 cls_(cls), |
| 1837 result_(result) { } |
| 1835 | 1838 |
| 1836 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 1839 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1837 Isolate* isolate = compiler->isolate(); | 1840 Isolate* isolate = compiler->isolate(); |
| 1838 StubCode* stub_code = isolate->stub_code(); | 1841 StubCode* stub_code = isolate->stub_code(); |
| 1839 | 1842 |
| 1840 __ Comment("StoreInstanceFieldSlowPath"); | 1843 if (Assembler::EmittingComments()) { |
| 1844 __ Comment("%s slow path allocation of %s", |
| 1845 instruction_->DebugName(), |
| 1846 String::Handle(cls_.PrettyName()).ToCString()); |
| 1847 } |
| 1841 __ Bind(entry_label()); | 1848 __ Bind(entry_label()); |
| 1842 | 1849 |
| 1843 const Code& stub = | 1850 const Code& stub = |
| 1844 Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_)); | 1851 Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_)); |
| 1845 const ExternalLabel label(stub.EntryPoint()); | 1852 const ExternalLabel label(stub.EntryPoint()); |
| 1846 | 1853 |
| 1847 LocationSummary* locs = instruction_->locs(); | 1854 LocationSummary* locs = instruction_->locs(); |
| 1848 locs->live_registers()->Remove(locs->temp(0)); | 1855 |
| 1856 locs->live_registers()->Remove(Location::RegisterLocation(result_)); |
| 1849 | 1857 |
| 1850 compiler->SaveLiveRegisters(locs); | 1858 compiler->SaveLiveRegisters(locs); |
| 1851 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 1859 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 1852 &label, | 1860 &label, |
| 1853 RawPcDescriptors::kOther, | 1861 RawPcDescriptors::kOther, |
| 1854 locs); | 1862 locs); |
| 1855 __ MoveRegister(locs->temp(0).reg(), R0); | 1863 __ MoveRegister(result_, R0); |
| 1856 compiler->RestoreLiveRegisters(locs); | 1864 compiler->RestoreLiveRegisters(locs); |
| 1857 | 1865 |
| 1858 __ b(exit_label()); | 1866 __ b(exit_label()); |
| 1859 } | 1867 } |
| 1860 | 1868 |
| 1869 static void Allocate(FlowGraphCompiler* compiler, |
| 1870 Instruction* instruction, |
| 1871 const Class& cls, |
| 1872 Register result, |
| 1873 Register temp) { |
| 1874 BoxAllocationSlowPath* slow_path = |
| 1875 new BoxAllocationSlowPath(instruction, cls, result); |
| 1876 compiler->AddSlowPathCode(slow_path); |
| 1877 |
| 1878 __ TryAllocate(cls, |
| 1879 slow_path->entry_label(), |
| 1880 result, |
| 1881 temp); |
| 1882 __ Bind(slow_path->exit_label()); |
| 1883 } |
| 1884 |
| 1861 private: | 1885 private: |
| 1862 StoreInstanceFieldInstr* instruction_; | 1886 Instruction* instruction_; |
| 1863 const Class& cls_; | 1887 const Class& cls_; |
| 1888 Register result_; |
| 1864 }; | 1889 }; |
| 1865 | 1890 |
| 1866 | 1891 |
| 1867 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate, | 1892 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate, |
| 1868 bool opt) const { | 1893 bool opt) const { |
| 1869 const intptr_t kNumInputs = 2; | 1894 const intptr_t kNumInputs = 2; |
| 1870 const intptr_t kNumTemps = | 1895 const intptr_t kNumTemps = |
| 1871 (IsUnboxedStore() && opt) ? 2 : | 1896 (IsUnboxedStore() && opt) ? 2 : |
| 1872 ((IsPotentialUnboxedStore()) ? 3 : 0); | 1897 ((IsPotentialUnboxedStore()) ? 3 : 0); |
| 1873 LocationSummary* summary = new(isolate) LocationSummary( | 1898 LocationSummary* summary = new(isolate) LocationSummary( |
| (...skipping 18 matching lines...) |
| 1892 : Location::FpuRegisterLocation(Q1)); | 1917 : Location::FpuRegisterLocation(Q1)); |
| 1893 } else { | 1918 } else { |
| 1894 summary->set_in(1, ShouldEmitStoreBarrier() | 1919 summary->set_in(1, ShouldEmitStoreBarrier() |
| 1895 ? Location::WritableRegister() | 1920 ? Location::WritableRegister() |
| 1896 : Location::RegisterOrConstant(value())); | 1921 : Location::RegisterOrConstant(value())); |
| 1897 } | 1922 } |
| 1898 return summary; | 1923 return summary; |
| 1899 } | 1924 } |
| 1900 | 1925 |
| 1901 | 1926 |
| 1927 static void EnsureMutableBox(FlowGraphCompiler* compiler, |
| 1928 StoreInstanceFieldInstr* instruction, |
| 1929 Register box_reg, |
| 1930 const Class& cls, |
| 1931 Register instance_reg, |
| 1932 intptr_t offset, |
| 1933 Register temp) { |
| 1934 Label done; |
| 1935 __ ldr(box_reg, FieldAddress(instance_reg, offset)); |
| 1936 __ CompareImmediate(box_reg, |
| 1937 reinterpret_cast<intptr_t>(Object::null())); |
| 1938 __ b(&done, NE); |
| 1939 |
| 1940 BoxAllocationSlowPath::Allocate( |
| 1941 compiler, instruction, cls, box_reg, temp); |
| 1942 |
| 1943 __ MoveRegister(temp, box_reg); |
| 1944 __ StoreIntoObjectOffset(instance_reg, offset, temp); |
| 1945 __ Bind(&done); |
| 1946 } |
| 1947 |
| 1948 |
| 1902 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1949 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1903 Label skip_store; | 1950 Label skip_store; |
| 1904 | 1951 |
| 1905 const Register instance_reg = locs()->in(0).reg(); | 1952 const Register instance_reg = locs()->in(0).reg(); |
| 1906 | 1953 |
| 1907 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1954 if (IsUnboxedStore() && compiler->is_optimizing()) { |
| 1908 const DRegister value = EvenDRegisterOf(locs()->in(1).fpu_reg()); | 1955 const DRegister value = EvenDRegisterOf(locs()->in(1).fpu_reg()); |
| 1909 const Register temp = locs()->temp(0).reg(); | 1956 const Register temp = locs()->temp(0).reg(); |
| 1910 const Register temp2 = locs()->temp(1).reg(); | 1957 const Register temp2 = locs()->temp(1).reg(); |
| 1911 const intptr_t cid = field().UnboxedFieldCid(); | 1958 const intptr_t cid = field().UnboxedFieldCid(); |
| 1912 | 1959 |
| 1913 if (is_initialization_) { | 1960 if (is_initialization_) { |
| 1914 const Class* cls = NULL; | 1961 const Class* cls = NULL; |
| 1915 switch (cid) { | 1962 switch (cid) { |
| 1916 case kDoubleCid: | 1963 case kDoubleCid: |
| 1917 cls = &compiler->double_class(); | 1964 cls = &compiler->double_class(); |
| 1918 break; | 1965 break; |
| 1919 case kFloat32x4Cid: | 1966 case kFloat32x4Cid: |
| 1920 cls = &compiler->float32x4_class(); | 1967 cls = &compiler->float32x4_class(); |
| 1921 break; | 1968 break; |
| 1922 case kFloat64x2Cid: | 1969 case kFloat64x2Cid: |
| 1923 cls = &compiler->float64x2_class(); | 1970 cls = &compiler->float64x2_class(); |
| 1924 break; | 1971 break; |
| 1925 default: | 1972 default: |
| 1926 UNREACHABLE(); | 1973 UNREACHABLE(); |
| 1927 } | 1974 } |
| 1928 | 1975 |
| 1929 StoreInstanceFieldSlowPath* slow_path = | 1976 BoxAllocationSlowPath::Allocate( |
| 1930 new StoreInstanceFieldSlowPath(this, *cls); | 1977 compiler, this, *cls, temp, temp2); |
| 1931 compiler->AddSlowPathCode(slow_path); | |
| 1932 | |
| 1933 __ TryAllocate(*cls, | |
| 1934 slow_path->entry_label(), | |
| 1935 temp, | |
| 1936 temp2); | |
| 1937 __ Bind(slow_path->exit_label()); | |
| 1938 __ MoveRegister(temp2, temp); | 1978 __ MoveRegister(temp2, temp); |
| 1939 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2); | 1979 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2); |
| 1940 } else { | 1980 } else { |
| 1941 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes_)); | 1981 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes_)); |
| 1942 } | 1982 } |
| 1943 switch (cid) { | 1983 switch (cid) { |
| 1944 case kDoubleCid: | 1984 case kDoubleCid: |
| 1945 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); | 1985 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); |
| 1946 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); | 1986 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
| 1947 break; | 1987 break; |
| (...skipping 50 matching lines...) |
| 1998 // Fall through. | 2038 // Fall through. |
| 1999 __ b(&store_pointer); | 2039 __ b(&store_pointer); |
| 2000 | 2040 |
| 2001 if (!compiler->is_optimizing()) { | 2041 if (!compiler->is_optimizing()) { |
| 2002 locs()->live_registers()->Add(locs()->in(0)); | 2042 locs()->live_registers()->Add(locs()->in(0)); |
| 2003 locs()->live_registers()->Add(locs()->in(1)); | 2043 locs()->live_registers()->Add(locs()->in(1)); |
| 2004 } | 2044 } |
| 2005 | 2045 |
| 2006 { | 2046 { |
| 2007 __ Bind(&store_double); | 2047 __ Bind(&store_double); |
| 2008 Label copy_double; | 2048 EnsureMutableBox(compiler, |
| 2009 StoreInstanceFieldSlowPath* slow_path = | 2049 this, |
| 2010 new StoreInstanceFieldSlowPath(this, compiler->double_class()); | 2050 temp, |
| 2011 compiler->AddSlowPathCode(slow_path); | 2051 compiler->double_class(), |
| 2012 | 2052 instance_reg, |
| 2013 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes_)); | 2053 offset_in_bytes_, |
| 2014 __ CompareImmediate(temp, | 2054 temp2); |
| 2015 reinterpret_cast<intptr_t>(Object::null())); | |
| 2016 __ b(©_double, NE); | |
| 2017 | |
| 2018 __ TryAllocate(compiler->double_class(), | |
| 2019 slow_path->entry_label(), | |
| 2020 temp, | |
| 2021 temp2); | |
| 2022 __ Bind(slow_path->exit_label()); | |
| 2023 __ MoveRegister(temp2, temp); | |
| 2024 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2); | |
| 2025 __ Bind(©_double); | |
| 2026 __ CopyDoubleField(temp, value_reg, TMP, temp2, fpu_temp); | 2055 __ CopyDoubleField(temp, value_reg, TMP, temp2, fpu_temp); |
| 2027 __ b(&skip_store); | 2056 __ b(&skip_store); |
| 2028 } | 2057 } |
| 2029 | 2058 |
| 2030 { | 2059 { |
| 2031 __ Bind(&store_float32x4); | 2060 __ Bind(&store_float32x4); |
| 2032 Label copy_float32x4; | 2061 EnsureMutableBox(compiler, |
| 2033 StoreInstanceFieldSlowPath* slow_path = | 2062 this, |
| 2034 new StoreInstanceFieldSlowPath(this, compiler->float32x4_class()); | 2063 temp, |
| 2035 compiler->AddSlowPathCode(slow_path); | 2064 compiler->float32x4_class(), |
| 2036 | 2065 instance_reg, |
| 2037 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes_)); | 2066 offset_in_bytes_, |
| 2038 __ CompareImmediate(temp, | 2067 temp2); |
| 2039 reinterpret_cast<intptr_t>(Object::null())); | |
| 2040 __ b(©_float32x4, NE); | |
| 2041 | |
| 2042 __ TryAllocate(compiler->float32x4_class(), | |
| 2043 slow_path->entry_label(), | |
| 2044 temp, | |
| 2045 temp2); | |
| 2046 __ Bind(slow_path->exit_label()); | |
| 2047 __ MoveRegister(temp2, temp); | |
| 2048 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2); | |
| 2049 __ Bind(©_float32x4); | |
| 2050 __ CopyFloat32x4Field(temp, value_reg, TMP, temp2, fpu_temp); | 2068 __ CopyFloat32x4Field(temp, value_reg, TMP, temp2, fpu_temp); |
| 2051 __ b(&skip_store); | 2069 __ b(&skip_store); |
| 2052 } | 2070 } |
| 2053 | 2071 |
| 2054 { | 2072 { |
| 2055 __ Bind(&store_float64x2); | 2073 __ Bind(&store_float64x2); |
| 2056 Label copy_float64x2; | 2074 EnsureMutableBox(compiler, |
| 2057 StoreInstanceFieldSlowPath* slow_path = | 2075 this, |
| 2058 new StoreInstanceFieldSlowPath(this, compiler->float64x2_class()); | 2076 temp, |
| 2059 compiler->AddSlowPathCode(slow_path); | 2077 compiler->float64x2_class(), |
| 2060 | 2078 instance_reg, |
| 2061 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes_)); | 2079 offset_in_bytes_, |
| 2062 __ CompareImmediate(temp, | 2080 temp2); |
| 2063 reinterpret_cast<intptr_t>(Object::null())); | |
| 2064 __ b(©_float64x2, NE); | |
| 2065 | |
| 2066 __ TryAllocate(compiler->float64x2_class(), | |
| 2067 slow_path->entry_label(), | |
| 2068 temp, | |
| 2069 temp2); | |
| 2070 __ Bind(slow_path->exit_label()); | |
| 2071 __ MoveRegister(temp2, temp); | |
| 2072 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2); | |
| 2073 __ Bind(©_float64x2); | |
| 2074 __ CopyFloat64x2Field(temp, value_reg, TMP, temp2, fpu_temp); | 2081 __ CopyFloat64x2Field(temp, value_reg, TMP, temp2, fpu_temp); |
| 2075 __ b(&skip_store); | 2082 __ b(&skip_store); |
| 2076 } | 2083 } |
| 2077 | 2084 |
| 2078 __ Bind(&store_pointer); | 2085 __ Bind(&store_pointer); |
| 2079 } | 2086 } |
| 2080 | 2087 |
| 2081 if (ShouldEmitStoreBarrier()) { | 2088 if (ShouldEmitStoreBarrier()) { |
| 2082 const Register value_reg = locs()->in(1).reg(); | 2089 const Register value_reg = locs()->in(1).reg(); |
| 2083 __ StoreIntoObjectOffset(instance_reg, | 2090 __ StoreIntoObjectOffset(instance_reg, |
| (...skipping 215 matching lines...) |
| 2299 | 2306 |
| 2300 StubCode* stub_code = compiler->isolate()->stub_code(); | 2307 StubCode* stub_code = compiler->isolate()->stub_code(); |
| 2301 compiler->GenerateCall(token_pos(), | 2308 compiler->GenerateCall(token_pos(), |
| 2302 &stub_code->AllocateArrayLabel(), | 2309 &stub_code->AllocateArrayLabel(), |
| 2303 RawPcDescriptors::kOther, | 2310 RawPcDescriptors::kOther, |
| 2304 locs()); | 2311 locs()); |
| 2305 ASSERT(locs()->out(0).reg() == kResultReg); | 2312 ASSERT(locs()->out(0).reg() == kResultReg); |
| 2306 } | 2313 } |
| 2307 | 2314 |
| 2308 | 2315 |
| 2309 class BoxDoubleSlowPath : public SlowPathCode { | |
| 2310 public: | |
| 2311 explicit BoxDoubleSlowPath(Instruction* instruction) | |
| 2312 : instruction_(instruction) { } | |
| 2313 | |
| 2314 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
| 2315 __ Comment("BoxDoubleSlowPath"); | |
| 2316 __ Bind(entry_label()); | |
| 2317 Isolate* isolate = compiler->isolate(); | |
| 2318 StubCode* stub_code = isolate->stub_code(); | |
| 2319 const Class& double_class = compiler->double_class(); | |
| 2320 const Code& stub = | |
| 2321 Code::Handle(isolate, | |
| 2322 stub_code->GetAllocationStubForClass(double_class)); | |
| 2323 const ExternalLabel label(stub.EntryPoint()); | |
| 2324 | |
| 2325 LocationSummary* locs = instruction_->locs(); | |
| 2326 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
| 2327 | |
| 2328 compiler->SaveLiveRegisters(locs); | |
| 2329 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
| 2330 &label, | |
| 2331 RawPcDescriptors::kOther, | |
| 2332 locs); | |
| 2333 __ MoveRegister(locs->out(0).reg(), R0); | |
| 2334 compiler->RestoreLiveRegisters(locs); | |
| 2335 | |
| 2336 __ b(exit_label()); | |
| 2337 } | |
| 2338 | |
| 2339 private: | |
| 2340 Instruction* instruction_; | |
| 2341 }; | |
| 2342 | |
| 2343 | |
| 2344 class BoxFloat32x4SlowPath : public SlowPathCode { | |
| 2345 public: | |
| 2346 explicit BoxFloat32x4SlowPath(Instruction* instruction) | |
| 2347 : instruction_(instruction) { } | |
| 2348 | |
| 2349 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
| 2350 __ Comment("BoxFloat32x4SlowPath"); | |
| 2351 __ Bind(entry_label()); | |
| 2352 Isolate* isolate = compiler->isolate(); | |
| 2353 StubCode* stub_code = isolate->stub_code(); | |
| 2354 const Class& float32x4_class = compiler->float32x4_class(); | |
| 2355 const Code& stub = | |
| 2356 Code::Handle(isolate, | |
| 2357 stub_code->GetAllocationStubForClass(float32x4_class)); | |
| 2358 const ExternalLabel label(stub.EntryPoint()); | |
| 2359 | |
| 2360 LocationSummary* locs = instruction_->locs(); | |
| 2361 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
| 2362 | |
| 2363 compiler->SaveLiveRegisters(locs); | |
| 2364 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
| 2365 &label, | |
| 2366 RawPcDescriptors::kOther, | |
| 2367 locs); | |
| 2368 __ mov(locs->out(0).reg(), Operand(R0)); | |
| 2369 compiler->RestoreLiveRegisters(locs); | |
| 2370 | |
| 2371 __ b(exit_label()); | |
| 2372 } | |
| 2373 | |
| 2374 private: | |
| 2375 Instruction* instruction_; | |
| 2376 }; | |
| 2377 | |
| 2378 | |
| 2379 class BoxFloat64x2SlowPath : public SlowPathCode { | |
| 2380 public: | |
| 2381 explicit BoxFloat64x2SlowPath(Instruction* instruction) | |
| 2382 : instruction_(instruction) { } | |
| 2383 | |
| 2384 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
| 2385 __ Comment("BoxFloat64x2SlowPath"); | |
| 2386 __ Bind(entry_label()); | |
| 2387 Isolate* isolate = compiler->isolate(); | |
| 2388 StubCode* stub_code = isolate->stub_code(); | |
| 2389 const Class& float64x2_class = compiler->float64x2_class(); | |
| 2390 const Code& stub = | |
| 2391 Code::Handle(isolate, | |
| 2392 stub_code->GetAllocationStubForClass(float64x2_class)); | |
| 2393 const ExternalLabel label(stub.EntryPoint()); | |
| 2394 | |
| 2395 LocationSummary* locs = instruction_->locs(); | |
| 2396 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
| 2397 | |
| 2398 compiler->SaveLiveRegisters(locs); | |
| 2399 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
| 2400 &label, | |
| 2401 RawPcDescriptors::kOther, | |
| 2402 locs); | |
| 2403 __ mov(locs->out(0).reg(), Operand(R0)); | |
| 2404 compiler->RestoreLiveRegisters(locs); | |
| 2405 | |
| 2406 __ b(exit_label()); | |
| 2407 } | |
| 2408 | |
| 2409 private: | |
| 2410 Instruction* instruction_; | |
| 2411 }; | |
| 2412 | |
| 2413 | |
| 2414 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate, | 2316 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate, |
| 2415 bool opt) const { | 2317 bool opt) const { |
| 2416 const intptr_t kNumInputs = 1; | 2318 const intptr_t kNumInputs = 1; |
| 2417 const intptr_t kNumTemps = | 2319 const intptr_t kNumTemps = |
| 2418 (IsUnboxedLoad() && opt) ? 1 : | 2320 (IsUnboxedLoad() && opt) ? 1 : |
| 2419 ((IsPotentialUnboxedLoad()) ? 3 : 0); | 2321 ((IsPotentialUnboxedLoad()) ? 3 : 0); |
| 2420 | 2322 |
| 2421 LocationSummary* locs = new(isolate) LocationSummary( | 2323 LocationSummary* locs = new(isolate) LocationSummary( |
| 2422 isolate, kNumInputs, kNumTemps, | 2324 isolate, kNumInputs, kNumTemps, |
| 2423 (opt && !IsPotentialUnboxedLoad()) | 2325 (opt && !IsPotentialUnboxedLoad()) |
| (...skipping 80 matching lines...) |
| 2504 | 2406 |
| 2505 // Fall through. | 2407 // Fall through. |
| 2506 __ b(&load_pointer); | 2408 __ b(&load_pointer); |
| 2507 | 2409 |
| 2508 if (!compiler->is_optimizing()) { | 2410 if (!compiler->is_optimizing()) { |
| 2509 locs()->live_registers()->Add(locs()->in(0)); | 2411 locs()->live_registers()->Add(locs()->in(0)); |
| 2510 } | 2412 } |
| 2511 | 2413 |
| 2512 { | 2414 { |
| 2513 __ Bind(&load_double); | 2415 __ Bind(&load_double); |
| 2514 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | 2416 BoxAllocationSlowPath::Allocate( |
| 2515 compiler->AddSlowPathCode(slow_path); | 2417 compiler, |
| 2516 | 2418 this, |
| 2517 __ TryAllocate(compiler->double_class(), | 2419 compiler->double_class(), |
| 2518 slow_path->entry_label(), | 2420 result_reg, |
| 2519 result_reg, | 2421 temp); |
| 2520 temp); | |
| 2521 __ Bind(slow_path->exit_label()); | |
| 2522 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2422 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| 2523 __ CopyDoubleField(result_reg, temp, TMP, temp2, value); | 2423 __ CopyDoubleField(result_reg, temp, TMP, temp2, value); |
| 2524 __ b(&done); | 2424 __ b(&done); |
| 2525 } | 2425 } |
| 2526 | 2426 |
| 2527 { | 2427 { |
| 2528 __ Bind(&load_float32x4); | 2428 __ Bind(&load_float32x4); |
| 2529 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); | 2429 BoxAllocationSlowPath::Allocate( |
| 2530 compiler->AddSlowPathCode(slow_path); | 2430 compiler, |
| 2531 | 2431 this, |
| 2532 __ TryAllocate(compiler->float32x4_class(), | 2432 compiler->float32x4_class(), |
| 2533 slow_path->entry_label(), | 2433 result_reg, |
| 2534 result_reg, | 2434 temp); |
| 2535 temp); | |
| 2536 __ Bind(slow_path->exit_label()); | |
| 2537 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2435 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| 2538 __ CopyFloat32x4Field(result_reg, temp, TMP, temp2, value); | 2436 __ CopyFloat32x4Field(result_reg, temp, TMP, temp2, value); |
| 2539 __ b(&done); | 2437 __ b(&done); |
| 2540 } | 2438 } |
| 2541 | 2439 |
| 2542 { | 2440 { |
| 2543 __ Bind(&load_float64x2); | 2441 __ Bind(&load_float64x2); |
| 2544 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this); | 2442 BoxAllocationSlowPath::Allocate( |
| 2545 compiler->AddSlowPathCode(slow_path); | 2443 compiler, |
| 2546 | 2444 this, |
| 2547 __ TryAllocate(compiler->float64x2_class(), | 2445 compiler->float64x2_class(), |
| 2548 slow_path->entry_label(), | 2446 result_reg, |
| 2549 result_reg, | 2447 temp); |
| 2550 temp); | |
| 2551 __ Bind(slow_path->exit_label()); | |
| 2552 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2448 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| 2553 __ CopyFloat64x2Field(result_reg, temp, TMP, temp2, value); | 2449 __ CopyFloat64x2Field(result_reg, temp, TMP, temp2, value); |
| 2554 __ b(&done); | 2450 __ b(&done); |
| 2555 } | 2451 } |
| 2556 | 2452 |
| 2557 __ Bind(&load_pointer); | 2453 __ Bind(&load_pointer); |
| 2558 } | 2454 } |
| 2559 __ LoadFromOffset(kWord, result_reg, | 2455 __ LoadFromOffset(kWord, result_reg, |
| 2560 instance_reg, offset_in_bytes() - kHeapObjectTag); | 2456 instance_reg, offset_in_bytes() - kHeapObjectTag); |
| 2561 __ Bind(&done); | 2457 __ Bind(&done); |
| (...skipping 818 matching lines...) |
| 3380 kNumTemps, | 3276 kNumTemps, |
| 3381 LocationSummary::kCallOnSlowPath); | 3277 LocationSummary::kCallOnSlowPath); |
| 3382 summary->set_in(0, Location::RequiresFpuRegister()); | 3278 summary->set_in(0, Location::RequiresFpuRegister()); |
| 3383 summary->set_temp(0, Location::RequiresRegister()); | 3279 summary->set_temp(0, Location::RequiresRegister()); |
| 3384 summary->set_out(0, Location::RequiresRegister()); | 3280 summary->set_out(0, Location::RequiresRegister()); |
| 3385 return summary; | 3281 return summary; |
| 3386 } | 3282 } |
| 3387 | 3283 |
| 3388 | 3284 |
| 3389 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3285 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3390 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | |
| 3391 compiler->AddSlowPathCode(slow_path); | |
| 3392 | |
| 3393 const Register out_reg = locs()->out(0).reg(); | 3286 const Register out_reg = locs()->out(0).reg(); |
| 3394 const DRegister value = EvenDRegisterOf(locs()->in(0).fpu_reg()); | 3287 const DRegister value = EvenDRegisterOf(locs()->in(0).fpu_reg()); |
| 3395 | 3288 |
| 3396 __ TryAllocate(compiler->double_class(), | 3289 BoxAllocationSlowPath::Allocate( |
| 3397 slow_path->entry_label(), | 3290 compiler, |
| 3398 out_reg, | 3291 this, |
| 3399 locs()->temp(0).reg()); | 3292 compiler->double_class(), |
| 3400 __ Bind(slow_path->exit_label()); | 3293 out_reg, |
| 3294 locs()->temp(0).reg()); |
| 3401 __ StoreDToOffset(value, out_reg, Double::value_offset() - kHeapObjectTag); | 3295 __ StoreDToOffset(value, out_reg, Double::value_offset() - kHeapObjectTag); |
| 3402 } | 3296 } |
| 3403 | 3297 |
| 3404 | 3298 |
| 3405 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate, | 3299 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate, |
| 3406 bool opt) const { | 3300 bool opt) const { |
| 3407 const intptr_t kNumInputs = 1; | 3301 const intptr_t kNumInputs = 1; |
| 3408 const intptr_t value_cid = value()->Type()->ToCid(); | 3302 const intptr_t value_cid = value()->Type()->ToCid(); |
| 3409 const bool needs_temp = ((value_cid != kSmiCid) && (value_cid != kDoubleCid)); | 3303 const bool needs_temp = ((value_cid != kSmiCid) && (value_cid != kDoubleCid)); |
| 3410 const intptr_t kNumTemps = needs_temp ? 1 : 0; | 3304 const intptr_t kNumTemps = needs_temp ? 1 : 0; |
| (...skipping 57 matching lines...) |
| 3468 kNumTemps, | 3362 kNumTemps, |
| 3469 LocationSummary::kCallOnSlowPath); | 3363 LocationSummary::kCallOnSlowPath); |
| 3470 summary->set_in(0, Location::RequiresFpuRegister()); | 3364 summary->set_in(0, Location::RequiresFpuRegister()); |
| 3471 summary->set_temp(0, Location::RequiresRegister()); | 3365 summary->set_temp(0, Location::RequiresRegister()); |
| 3472 summary->set_out(0, Location::RequiresRegister()); | 3366 summary->set_out(0, Location::RequiresRegister()); |
| 3473 return summary; | 3367 return summary; |
| 3474 } | 3368 } |
| 3475 | 3369 |
| 3476 | 3370 |
| 3477 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3371 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3478 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); | |
| 3479 compiler->AddSlowPathCode(slow_path); | |
| 3480 | |
| 3481 const Register out_reg = locs()->out(0).reg(); | 3372 const Register out_reg = locs()->out(0).reg(); |
| 3482 const QRegister value = locs()->in(0).fpu_reg(); | 3373 const QRegister value = locs()->in(0).fpu_reg(); |
| 3483 const DRegister dvalue0 = EvenDRegisterOf(value); | 3374 const DRegister dvalue0 = EvenDRegisterOf(value); |
| 3484 | 3375 |
| 3485 __ TryAllocate(compiler->float32x4_class(), | 3376 BoxAllocationSlowPath::Allocate( |
| 3486 slow_path->entry_label(), | 3377 compiler, |
| 3487 out_reg, | 3378 this, |
| 3488 locs()->temp(0).reg()); | 3379 compiler->float32x4_class(), |
| 3489 __ Bind(slow_path->exit_label()); | 3380 out_reg, |
| 3490 | 3381 locs()->temp(0).reg()); |
| 3491 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, | 3382 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, |
| 3492 Float32x4::value_offset() - kHeapObjectTag); | 3383 Float32x4::value_offset() - kHeapObjectTag); |
| 3493 } | 3384 } |
| 3494 | 3385 |
| 3495 | 3386 |
| 3496 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, | 3387 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, |
| 3497 bool opt) const { | 3388 bool opt) const { |
| 3498 const intptr_t value_cid = value()->Type()->ToCid(); | 3389 const intptr_t value_cid = value()->Type()->ToCid(); |
| 3499 const intptr_t kNumInputs = 1; | 3390 const intptr_t kNumInputs = 1; |
| 3500 const intptr_t kNumTemps = value_cid == kFloat32x4Cid ? 0 : 1; | 3391 const intptr_t kNumTemps = value_cid == kFloat32x4Cid ? 0 : 1; |
| (...skipping 38 matching lines...) |
| 3539 kNumTemps, | 3430 kNumTemps, |
| 3540 LocationSummary::kCallOnSlowPath); | 3431 LocationSummary::kCallOnSlowPath); |
| 3541 summary->set_in(0, Location::RequiresFpuRegister()); | 3432 summary->set_in(0, Location::RequiresFpuRegister()); |
| 3542 summary->set_temp(0, Location::RequiresRegister()); | 3433 summary->set_temp(0, Location::RequiresRegister()); |
| 3543 summary->set_out(0, Location::RequiresRegister()); | 3434 summary->set_out(0, Location::RequiresRegister()); |
| 3544 return summary; | 3435 return summary; |
| 3545 } | 3436 } |
| 3546 | 3437 |
| 3547 | 3438 |
| 3548 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3439 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3549 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this); | |
| 3550 compiler->AddSlowPathCode(slow_path); | |
| 3551 | |
| 3552 const Register out_reg = locs()->out(0).reg(); | 3440 const Register out_reg = locs()->out(0).reg(); |
| 3553 const QRegister value = locs()->in(0).fpu_reg(); | 3441 const QRegister value = locs()->in(0).fpu_reg(); |
| 3554 const DRegister dvalue0 = EvenDRegisterOf(value); | 3442 const DRegister dvalue0 = EvenDRegisterOf(value); |
| 3555 | 3443 |
| 3556 __ TryAllocate(compiler->float64x2_class(), | 3444 BoxAllocationSlowPath::Allocate( |
| 3557 slow_path->entry_label(), | 3445 compiler, |
| 3558 out_reg, | 3446 this, |
| 3559 locs()->temp(0).reg()); | 3447 compiler->float64x2_class(), |
| 3560 __ Bind(slow_path->exit_label()); | 3448 out_reg, |
| 3561 | 3449 locs()->temp(0).reg()); |
| 3562 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, | 3450 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, |
| 3563 Float64x2::value_offset() - kHeapObjectTag); | 3451 Float64x2::value_offset() - kHeapObjectTag); |
| 3564 } | 3452 } |
| 3565 | 3453 |
| 3566 | 3454 |
| 3567 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, | 3455 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, |
| 3568 bool opt) const { | 3456 bool opt) const { |
| 3569 const intptr_t value_cid = value()->Type()->ToCid(); | 3457 const intptr_t value_cid = value()->Type()->ToCid(); |
| 3570 const intptr_t kNumInputs = 1; | 3458 const intptr_t kNumInputs = 1; |
| 3571 const intptr_t kNumTemps = value_cid == kFloat64x2Cid ? 0 : 1; | 3459 const intptr_t kNumTemps = value_cid == kFloat64x2Cid ? 0 : 1; |
| (...skipping 37 matching lines...) |
| 3609 isolate, kNumInputs, | 3497 isolate, kNumInputs, |
| 3610 kNumTemps, | 3498 kNumTemps, |
| 3611 LocationSummary::kCallOnSlowPath); | 3499 LocationSummary::kCallOnSlowPath); |
| 3612 summary->set_in(0, Location::RequiresFpuRegister()); | 3500 summary->set_in(0, Location::RequiresFpuRegister()); |
| 3613 summary->set_temp(0, Location::RequiresRegister()); | 3501 summary->set_temp(0, Location::RequiresRegister()); |
| 3614 summary->set_out(0, Location::RequiresRegister()); | 3502 summary->set_out(0, Location::RequiresRegister()); |
| 3615 return summary; | 3503 return summary; |
| 3616 } | 3504 } |
| 3617 | 3505 |
| 3618 | 3506 |
| 3619 class BoxInt32x4SlowPath : public SlowPathCode { | |
| 3620 public: | |
| 3621 explicit BoxInt32x4SlowPath(BoxInt32x4Instr* instruction) | |
| 3622 : instruction_(instruction) { } | |
| 3623 | |
| 3624 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
| 3625 __ Comment("BoxInt32x4SlowPath"); | |
| 3626 __ Bind(entry_label()); | |
| 3627 Isolate* isolate = compiler->isolate(); | |
| 3628 StubCode* stub_code = isolate->stub_code(); | |
| 3629 const Class& int32x4_class = compiler->int32x4_class(); | |
| 3630 const Code& stub = | |
| 3631 Code::Handle(isolate, | |
| 3632 stub_code->GetAllocationStubForClass(int32x4_class)); | |
| 3633 const ExternalLabel label(stub.EntryPoint()); | |
| 3634 | |
| 3635 LocationSummary* locs = instruction_->locs(); | |
| 3636 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
| 3637 | |
| 3638 compiler->SaveLiveRegisters(locs); | |
| 3639 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
| 3640 &label, | |
| 3641 RawPcDescriptors::kOther, | |
| 3642 locs); | |
| 3643 __ mov(locs->out(0).reg(), Operand(R0)); | |
| 3644 compiler->RestoreLiveRegisters(locs); | |
| 3645 | |
| 3646 __ b(exit_label()); | |
| 3647 } | |
| 3648 | |
| 3649 private: | |
| 3650 BoxInt32x4Instr* instruction_; | |
| 3651 }; | |
| 3652 | |
| 3653 | |
| 3654 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3507 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3655 BoxInt32x4SlowPath* slow_path = new BoxInt32x4SlowPath(this); | |
| 3656 compiler->AddSlowPathCode(slow_path); | |
| 3657 | |
| 3658 const Register out_reg = locs()->out(0).reg(); | 3508 const Register out_reg = locs()->out(0).reg(); |
| 3659 const QRegister value = locs()->in(0).fpu_reg(); | 3509 const QRegister value = locs()->in(0).fpu_reg(); |
| 3660 const DRegister dvalue0 = EvenDRegisterOf(value); | 3510 const DRegister dvalue0 = EvenDRegisterOf(value); |
| 3661 | 3511 |
| 3662 __ TryAllocate(compiler->int32x4_class(), | 3512 BoxAllocationSlowPath::Allocate( |
| 3663 slow_path->entry_label(), | 3513 compiler, |
| 3664 out_reg, | 3514 this, |
| 3665 locs()->temp(0).reg()); | 3515 compiler->int32x4_class(), |
| 3666 __ Bind(slow_path->exit_label()); | 3516 out_reg, |
| 3517 locs()->temp(0).reg()); |
| 3667 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, | 3518 __ StoreMultipleDToOffset(dvalue0, 2, out_reg, |
| 3668 Int32x4::value_offset() - kHeapObjectTag); | 3519 Int32x4::value_offset() - kHeapObjectTag); |
| 3669 } | 3520 } |
| 3670 | 3521 |
| 3671 | 3522 |
| 3672 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate, | 3523 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate, |
| 3673 bool opt) const { | 3524 bool opt) const { |
| 3674 const intptr_t value_cid = value()->Type()->ToCid(); | 3525 const intptr_t value_cid = value()->Type()->ToCid(); |
| 3675 const intptr_t kNumInputs = 1; | 3526 const intptr_t kNumInputs = 1; |
| 3676 const intptr_t kNumTemps = value_cid == kInt32x4Cid ? 0 : 1; | 3527 const intptr_t kNumTemps = value_cid == kInt32x4Cid ? 0 : 1; |
| (...skipping 2196 matching lines...) |
| 5873 summary->set_in(0, Location::Pair(Location::RequiresRegister(), | 5724 summary->set_in(0, Location::Pair(Location::RequiresRegister(), |
| 5874 Location::RequiresRegister())); | 5725 Location::RequiresRegister())); |
| 5875 if (!is_smi()) { | 5726 if (!is_smi()) { |
| 5876 summary->set_temp(0, Location::RequiresRegister()); | 5727 summary->set_temp(0, Location::RequiresRegister()); |
| 5877 } | 5728 } |
| 5878 summary->set_out(0, Location::RequiresRegister()); | 5729 summary->set_out(0, Location::RequiresRegister()); |
| 5879 return summary; | 5730 return summary; |
| 5880 } | 5731 } |
| 5881 | 5732 |
| 5882 | 5733 |
| 5883 class BoxIntegerSlowPath : public SlowPathCode { | |
| 5884 public: | |
| 5885 explicit BoxIntegerSlowPath(Definition* instruction) | |
| 5886 : instruction_(instruction) { } | |
| 5887 | |
| 5888 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
| 5889 __ Comment("BoxIntegerSlowPath"); | |
| 5890 __ Bind(entry_label()); | |
| 5891 Isolate* isolate = compiler->isolate(); | |
| 5892 StubCode* stub_code = isolate->stub_code(); | |
| 5893 const Class& mint_class = | |
| 5894 Class::ZoneHandle(isolate->object_store()->mint_class()); | |
| 5895 const Code& stub = | |
| 5896 Code::Handle(isolate, stub_code->GetAllocationStubForClass(mint_class)); | |
| 5897 const ExternalLabel label(stub.EntryPoint()); | |
| 5898 | |
| 5899 LocationSummary* locs = instruction_->locs(); | |
| 5900 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
| 5901 | |
| 5902 compiler->SaveLiveRegisters(locs); | |
| 5903 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
| 5904 &label, | |
| 5905 RawPcDescriptors::kOther, | |
| 5906 locs); | |
| 5907 __ mov(locs->out(0).reg(), Operand(R0)); | |
| 5908 compiler->RestoreLiveRegisters(locs); | |
| 5909 | |
| 5910 __ b(exit_label()); | |
| 5911 } | |
| 5912 | |
| 5913 private: | |
| 5914 Definition* instruction_; | |
| 5915 }; | |
| 5916 | |
| 5917 | |
| 5918 void BoxIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5734 void BoxIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 5919 if (is_smi()) { | 5735 if (is_smi()) { |
| 5920 PairLocation* value_pair = locs()->in(0).AsPairLocation(); | 5736 PairLocation* value_pair = locs()->in(0).AsPairLocation(); |
| 5921 Register value_lo = value_pair->At(0).reg(); | 5737 Register value_lo = value_pair->At(0).reg(); |
| 5922 Register out_reg = locs()->out(0).reg(); | 5738 Register out_reg = locs()->out(0).reg(); |
| 5923 __ mov(out_reg, Operand(value_lo)); | 5739 __ mov(out_reg, Operand(value_lo)); |
| 5924 __ SmiTag(out_reg); | 5740 __ SmiTag(out_reg); |
| 5925 return; | 5741 return; |
| 5926 } | 5742 } |
| 5927 | 5743 |
| 5928 BoxIntegerSlowPath* slow_path = new BoxIntegerSlowPath(this); | |
| 5929 compiler->AddSlowPathCode(slow_path); | |
| 5930 PairLocation* value_pair = locs()->in(0).AsPairLocation(); | 5744 PairLocation* value_pair = locs()->in(0).AsPairLocation(); |
| 5931 Register value_lo = value_pair->At(0).reg(); | 5745 Register value_lo = value_pair->At(0).reg(); |
| 5932 Register value_hi = value_pair->At(1).reg(); | 5746 Register value_hi = value_pair->At(1).reg(); |
| 5933 Register tmp = locs()->temp(0).reg(); | 5747 Register tmp = locs()->temp(0).reg(); |
| 5934 Register out_reg = locs()->out(0).reg(); | 5748 Register out_reg = locs()->out(0).reg(); |
| 5935 | 5749 |
| 5936 // Unboxed operations produce smis or mint-sized values. | 5750 // Unboxed operations produce smis or mint-sized values. |
| 5937 // Check if value fits into a smi. | 5751 // Check if value fits into a smi. |
| 5938 __ Comment("BoxIntegerInstr"); | 5752 __ Comment("BoxIntegerInstr"); |
| 5939 Label not_smi, done, maybe_pos_smi, maybe_neg_smi, is_smi; | 5753 Label not_smi, done, maybe_pos_smi, maybe_neg_smi, is_smi; |
| (...skipping 17 matching lines...) |
| 5957 __ b(¬_smi, LT); | 5771 __ b(¬_smi, LT); |
| 5958 | 5772 |
| 5959 // lo is a Smi. Tag it and return. | 5773 // lo is a Smi. Tag it and return. |
| 5960 __ Bind(&is_smi); | 5774 __ Bind(&is_smi); |
| 5961 __ mov(out_reg, Operand(value_lo)); | 5775 __ mov(out_reg, Operand(value_lo)); |
| 5962 __ SmiTag(out_reg); | 5776 __ SmiTag(out_reg); |
| 5963 __ b(&done); | 5777 __ b(&done); |
| 5964 | 5778 |
| 5965 // Not a smi. Box it. | 5779 // Not a smi. Box it. |
| 5966 __ Bind(¬_smi); | 5780 __ Bind(¬_smi); |
| 5967 __ TryAllocate( | 5781 BoxAllocationSlowPath::Allocate( |
| 5968 Class::ZoneHandle(Isolate::Current()->object_store()->mint_class()), | 5782 compiler, |
| 5969 slow_path->entry_label(), | 5783 this, |
| 5784 compiler->mint_class(), |
| 5970 out_reg, | 5785 out_reg, |
| 5971 tmp); | 5786 tmp); |
| 5972 __ Bind(slow_path->exit_label()); | |
| 5973 __ StoreToOffset(kWord, | 5787 __ StoreToOffset(kWord, |
| 5974 value_lo, | 5788 value_lo, |
| 5975 out_reg, | 5789 out_reg, |
| 5976 Mint::value_offset() - kHeapObjectTag); | 5790 Mint::value_offset() - kHeapObjectTag); |
| 5977 __ StoreToOffset(kWord, | 5791 __ StoreToOffset(kWord, |
| 5978 value_hi, | 5792 value_hi, |
| 5979 out_reg, | 5793 out_reg, |
| 5980 Mint::value_offset() - kHeapObjectTag + kWordSize); | 5794 Mint::value_offset() - kHeapObjectTag + kWordSize); |
| 5981 __ Bind(&done); | 5795 __ Bind(&done); |
| 5982 } | 5796 } |
| (...skipping 457 matching lines...) |
| 6440 LocationSummary* summary = new(isolate) LocationSummary( | 6254 LocationSummary* summary = new(isolate) LocationSummary( |
| 6441 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); | 6255 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
| 6442 summary->set_in(0, Location::RequiresRegister()); | 6256 summary->set_in(0, Location::RequiresRegister()); |
| 6443 summary->set_temp(0, Location::RequiresRegister()); | 6257 summary->set_temp(0, Location::RequiresRegister()); |
| 6444 summary->set_out(0, Location::RequiresRegister()); | 6258 summary->set_out(0, Location::RequiresRegister()); |
| 6445 return summary; | 6259 return summary; |
| 6446 } | 6260 } |
| 6447 | 6261 |
| 6448 | 6262 |
| 6449 void BoxUint32Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 6263 void BoxUint32Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 6450 BoxIntegerSlowPath* slow_path = new BoxIntegerSlowPath(this); | |
| 6451 compiler->AddSlowPathCode(slow_path); | |
| 6452 Register value = locs()->in(0).reg(); | 6264 Register value = locs()->in(0).reg(); |
| 6453 Register out = locs()->out(0).reg(); | 6265 Register out = locs()->out(0).reg(); |
| 6454 Register temp = locs()->temp(0).reg(); | 6266 Register temp = locs()->temp(0).reg(); |
| 6455 ASSERT(value != out); | 6267 ASSERT(value != out); |
| 6456 | 6268 |
| 6457 Label not_smi, done; | 6269 Label not_smi, done; |
| 6458 | 6270 |
| 6459 // TODO(johnmccutchan): Use range information to fast path smi / mint boxing. | 6271 // TODO(johnmccutchan): Use range information to fast path smi / mint boxing. |
| 6460 | 6272 |
| 6461 // Test if this value is <= kSmiMax. | 6273 // Test if this value is <= kSmiMax. |
| 6462 __ CompareImmediate(value, kSmiMax); | 6274 __ CompareImmediate(value, kSmiMax); |
| 6463 __ b(¬_smi, HI); | 6275 __ b(¬_smi, HI); |
| 6464 // Smi. | 6276 // Smi. |
| 6465 __ mov(out, Operand(value)); | 6277 __ mov(out, Operand(value)); |
| 6466 __ SmiTag(out); | 6278 __ SmiTag(out); |
| 6467 __ b(&done); | 6279 __ b(&done); |
| 6468 __ Bind(¬_smi); | 6280 __ Bind(¬_smi); |
| 6469 // Allocate a mint. | 6281 // Allocate a mint. |
| 6470 __ TryAllocate( | 6282 BoxAllocationSlowPath::Allocate( |
| 6471 Class::ZoneHandle(Isolate::Current()->object_store()->mint_class()), | 6283 compiler, |
| 6472 slow_path->entry_label(), | 6284 this, |
| 6285 compiler->mint_class(), |
| 6473 out, | 6286 out, |
| 6474 temp); | 6287 temp); |
| 6475 __ Bind(slow_path->exit_label()); | |
| 6476 // Copy low word into mint. | 6288 // Copy low word into mint. |
| 6477 __ StoreToOffset(kWord, | 6289 __ StoreToOffset(kWord, |
| 6478 value, | 6290 value, |
| 6479 out, | 6291 out, |
| 6480 Mint::value_offset() - kHeapObjectTag); | 6292 Mint::value_offset() - kHeapObjectTag); |
| 6481 // Zero high word. | 6293 // Zero high word. |
| 6482 __ eor(temp, temp, Operand(temp)); | 6294 __ eor(temp, temp, Operand(temp)); |
| 6483 __ StoreToOffset(kWord, | 6295 __ StoreToOffset(kWord, |
| 6484 temp, | 6296 temp, |
| 6485 out, | 6297 out, |
| (...skipping 318 matching lines...) |
| 6804 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); | 6616 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); |
| 6805 #if defined(DEBUG) | 6617 #if defined(DEBUG) |
| 6806 __ LoadImmediate(R4, kInvalidObjectPointer); | 6618 __ LoadImmediate(R4, kInvalidObjectPointer); |
| 6807 __ LoadImmediate(R5, kInvalidObjectPointer); | 6619 __ LoadImmediate(R5, kInvalidObjectPointer); |
| 6808 #endif | 6620 #endif |
| 6809 } | 6621 } |
| 6810 | 6622 |
| 6811 } // namespace dart | 6623 } // namespace dart |
| 6812 | 6624 |
| 6813 #endif // defined TARGET_ARCH_ARM | 6625 #endif // defined TARGET_ARCH_ARM |
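The change above collapses the per-type slow paths (StoreInstanceFieldSlowPath, BoxDoubleSlowPath, BoxFloat32x4SlowPath, BoxFloat64x2SlowPath, BoxInt32x4SlowPath, BoxIntegerSlowPath) into a single BoxAllocationSlowPath parameterized by class and result register, with a static Allocate helper that emits the inline TryAllocate fast path and binds the slow-path labels at every call site. The sketch below is a minimal, self-contained analogue of that pattern, not VM code: Compiler, the deferred-emitter queue, and the register names are hypothetical stand-ins; only the overall shape of BoxAllocationSlowPath::Allocate mirrors the diff.

// Simplified, self-contained illustration of the slow-path consolidation
// pattern used in the change above. All types here are hypothetical
// stand-ins for the VM's real classes; only the division of labor (one
// parameterized slow path plus a static Allocate helper) mirrors the diff.
#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Compiler {
  // Deferred slow-path emitters, flushed after the main instruction stream.
  std::vector<std::function<void()>> slow_paths;
  void AddSlowPathCode(std::function<void()> emit) {
    slow_paths.push_back(std::move(emit));
  }
  void EmitFastAllocation(const std::string& cls, const std::string& result) {
    std::cout << "  TryAllocate " << cls << " -> " << result
              << "  (branch to slow path on failure)\n";
  }
};

// One slow path parameterized by class and result register, replacing the
// former BoxDoubleSlowPath / BoxFloat32x4SlowPath / ... duplicates.
class BoxAllocationSlowPath {
 public:
  BoxAllocationSlowPath(std::string cls, std::string result)
      : cls_(std::move(cls)), result_(std::move(result)) {}

  void EmitNativeCode() const {
    std::cout << "  [slow path] call allocation stub for " << cls_
              << ", move R0 -> " << result_ << "\n";
  }

  // Emits the inline fast path and registers the slow path -- the same
  // pairing performed by the static Allocate helper in the diff.
  static void Allocate(Compiler* compiler, const std::string& cls,
                       const std::string& result) {
    auto path = std::make_shared<BoxAllocationSlowPath>(cls, result);
    compiler->AddSlowPathCode([path] { path->EmitNativeCode(); });
    compiler->EmitFastAllocation(cls, result);
  }

 private:
  std::string cls_;
  std::string result_;
};

int main() {
  Compiler compiler;
  // Every boxing site goes through the same helper, differing only in the
  // class and destination register it passes.
  BoxAllocationSlowPath::Allocate(&compiler, "Double", "R1");
  BoxAllocationSlowPath::Allocate(&compiler, "Float32x4", "R2");
  for (const auto& emit : compiler.slow_paths) emit();
  return 0;
}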