| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
| 6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| (...skipping 1529 matching lines...) |
| 1540 | 1540 |
| 1541 for (intptr_t i = 0; i < num_temps; i++) { | 1541 for (intptr_t i = 0; i < num_temps; i++) { |
| 1542 summary->set_temp(i, Location::RequiresRegister()); | 1542 summary->set_temp(i, Location::RequiresRegister()); |
| 1543 } | 1543 } |
| 1544 | 1544 |
| 1545 return summary; | 1545 return summary; |
| 1546 } | 1546 } |
| 1547 | 1547 |
| 1548 | 1548 |
| 1549 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1549 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1550 ASSERT(sizeof(classid_t) == kInt32Size); | 1550 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1551 const intptr_t value_cid = value()->Type()->ToCid(); | 1551 const intptr_t value_cid = value()->Type()->ToCid(); |
| 1552 const intptr_t field_cid = field().guarded_cid(); | 1552 const intptr_t field_cid = field().guarded_cid(); |
| 1553 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; | 1553 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; |
| 1554 | 1554 |
| 1555 if (field_cid == kDynamicCid) { | 1555 if (field_cid == kDynamicCid) { |
| 1556 if (Compiler::IsBackgroundCompilation()) { | 1556 if (Compiler::IsBackgroundCompilation()) { |
| 1557 // Field state changed while compiling. | 1557 // Field state changed while compiling. |
| 1558 Compiler::AbortBackgroundCompilation( | 1558 Compiler::AbortBackgroundCompilation( |
| 1559 deopt_id(), | 1559 deopt_id(), |
| 1560 "GuardFieldClassInstr: field state changed while compiling"); | 1560 "GuardFieldClassInstr: field state changed while compiling"); |
| (...skipping 25 matching lines...) |
| 1586 compiler->is_optimizing() | 1586 compiler->is_optimizing() |
| 1587 ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) | 1587 ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) |
| 1588 : NULL; | 1588 : NULL; |
| 1589 | 1589 |
| 1590 Label* fail = (deopt != NULL) ? deopt : &fail_label; | 1590 Label* fail = (deopt != NULL) ? deopt : &fail_label; |
| 1591 | 1591 |
| 1592 if (emit_full_guard) { | 1592 if (emit_full_guard) { |
| 1593 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); | 1593 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); |
| 1594 | 1594 |
| 1595 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset(), | 1595 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset(), |
| 1596 kUnsignedWord); | 1596 kUnsignedHalfword); |
| 1597 FieldAddress field_nullability_operand( | 1597 FieldAddress field_nullability_operand( |
| 1598 field_reg, Field::is_nullable_offset(), kUnsignedWord); | 1598 field_reg, Field::is_nullable_offset(), kUnsignedHalfword); |
| 1599 | 1599 |
| 1600 if (value_cid == kDynamicCid) { | 1600 if (value_cid == kDynamicCid) { |
| 1601 LoadValueCid(compiler, value_cid_reg, value_reg); | 1601 LoadValueCid(compiler, value_cid_reg, value_reg); |
| 1602 Label skip_length_check; | 1602 Label skip_length_check; |
| 1603 __ ldr(TMP, field_cid_operand, kUnsignedWord); | 1603 __ ldr(TMP, field_cid_operand, kUnsignedHalfword); |
| 1604 __ CompareRegisters(value_cid_reg, TMP); | 1604 __ CompareRegisters(value_cid_reg, TMP); |
| 1605 __ b(&ok, EQ); | 1605 __ b(&ok, EQ); |
| 1606 __ ldr(TMP, field_nullability_operand, kUnsignedWord); | 1606 __ ldr(TMP, field_nullability_operand, kUnsignedHalfword); |
| 1607 __ CompareRegisters(value_cid_reg, TMP); | 1607 __ CompareRegisters(value_cid_reg, TMP); |
| 1608 } else if (value_cid == kNullCid) { | 1608 } else if (value_cid == kNullCid) { |
| 1609 __ ldr(value_cid_reg, field_nullability_operand, kUnsignedWord); | 1609 __ ldr(value_cid_reg, field_nullability_operand, kUnsignedHalfword); |
| 1610 __ CompareImmediate(value_cid_reg, value_cid); | 1610 __ CompareImmediate(value_cid_reg, value_cid); |
| 1611 } else { | 1611 } else { |
| 1612 Label skip_length_check; | 1612 Label skip_length_check; |
| 1613 __ ldr(value_cid_reg, field_cid_operand, kUnsignedWord); | 1613 __ ldr(value_cid_reg, field_cid_operand, kUnsignedHalfword); |
| 1614 __ CompareImmediate(value_cid_reg, value_cid); | 1614 __ CompareImmediate(value_cid_reg, value_cid); |
| 1615 } | 1615 } |
| 1616 __ b(&ok, EQ); | 1616 __ b(&ok, EQ); |
| 1617 | 1617 |
| 1618 // Check if the tracked state of the guarded field can be initialized | 1618 // Check if the tracked state of the guarded field can be initialized |
| 1619 // inline. If the field needs length check we fall through to runtime | 1619 // inline. If the field needs length check we fall through to runtime |
| 1620 // which is responsible for computing offset of the length field | 1620 // which is responsible for computing offset of the length field |
| 1621 // based on the class id. | 1621 // based on the class id. |
| 1622 // Length guard will be emitted separately when needed via GuardFieldLength | 1622 // Length guard will be emitted separately when needed via GuardFieldLength |
| 1623 // instruction after GuardFieldClass. | 1623 // instruction after GuardFieldClass. |
| 1624 if (!field().needs_length_check()) { | 1624 if (!field().needs_length_check()) { |
| 1625 // Uninitialized field can be handled inline. Check if the | 1625 // Uninitialized field can be handled inline. Check if the |
| 1626 // field is still uninitialized. | 1626 // field is still uninitialized. |
| 1627 __ ldr(TMP, field_cid_operand, kUnsignedWord); | 1627 __ ldr(TMP, field_cid_operand, kUnsignedHalfword); |
| 1628 __ CompareImmediate(TMP, kIllegalCid); | 1628 __ CompareImmediate(TMP, kIllegalCid); |
| 1629 __ b(fail, NE); | 1629 __ b(fail, NE); |
| 1630 | 1630 |
| 1631 if (value_cid == kDynamicCid) { | 1631 if (value_cid == kDynamicCid) { |
| 1632 __ str(value_cid_reg, field_cid_operand, kUnsignedWord); | 1632 __ str(value_cid_reg, field_cid_operand, kUnsignedHalfword); |
| 1633 __ str(value_cid_reg, field_nullability_operand, kUnsignedWord); | 1633 __ str(value_cid_reg, field_nullability_operand, kUnsignedHalfword); |
| 1634 } else { | 1634 } else { |
| 1635 __ LoadImmediate(TMP, value_cid); | 1635 __ LoadImmediate(TMP, value_cid); |
| 1636 __ str(TMP, field_cid_operand, kUnsignedWord); | 1636 __ str(TMP, field_cid_operand, kUnsignedHalfword); |
| 1637 __ str(TMP, field_nullability_operand, kUnsignedWord); | 1637 __ str(TMP, field_nullability_operand, kUnsignedHalfword); |
| 1638 } | 1638 } |
| 1639 | 1639 |
| 1640 if (deopt == NULL) { | 1640 if (deopt == NULL) { |
| 1641 ASSERT(!compiler->is_optimizing()); | 1641 ASSERT(!compiler->is_optimizing()); |
| 1642 __ b(&ok); | 1642 __ b(&ok); |
| 1643 } | 1643 } |
| 1644 } | 1644 } |
| 1645 | 1645 |
| 1646 if (deopt == NULL) { | 1646 if (deopt == NULL) { |
| 1647 ASSERT(!compiler->is_optimizing()); | 1647 ASSERT(!compiler->is_optimizing()); |
| 1648 __ Bind(fail); | 1648 __ Bind(fail); |
| 1649 | 1649 |
| 1650 __ LoadFieldFromOffset(TMP, field_reg, Field::guarded_cid_offset(), | 1650 __ LoadFieldFromOffset(TMP, field_reg, Field::guarded_cid_offset(), |
| 1651 kUnsignedWord); | 1651 kUnsignedHalfword); |
| 1652 __ CompareImmediate(TMP, kDynamicCid); | 1652 __ CompareImmediate(TMP, kDynamicCid); |
| 1653 __ b(&ok, EQ); | 1653 __ b(&ok, EQ); |
| 1654 | 1654 |
| 1655 __ Push(field_reg); | 1655 __ Push(field_reg); |
| 1656 __ Push(value_reg); | 1656 __ Push(value_reg); |
| 1657 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); | 1657 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); |
| 1658 __ Drop(2); // Drop the field and the value. | 1658 __ Drop(2); // Drop the field and the value. |
| 1659 } | 1659 } |
| 1660 } else { | 1660 } else { |
| 1661 ASSERT(compiler->is_optimizing()); | 1661 ASSERT(compiler->is_optimizing()); |
| (...skipping 218 matching lines...) |
| 1880 } else { | 1880 } else { |
| 1881 summary->set_in(1, ShouldEmitStoreBarrier() | 1881 summary->set_in(1, ShouldEmitStoreBarrier() |
| 1882 ? Location::WritableRegister() | 1882 ? Location::WritableRegister() |
| 1883 : Location::RegisterOrConstant(value())); | 1883 : Location::RegisterOrConstant(value())); |
| 1884 } | 1884 } |
| 1885 return summary; | 1885 return summary; |
| 1886 } | 1886 } |
| 1887 | 1887 |
| 1888 | 1888 |
| 1889 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1889 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1890 ASSERT(sizeof(classid_t) == kInt32Size); | 1890 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1891 Label skip_store; | 1891 Label skip_store; |
| 1892 | 1892 |
| 1893 const Register instance_reg = locs()->in(0).reg(); | 1893 const Register instance_reg = locs()->in(0).reg(); |
| 1894 | 1894 |
| 1895 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1895 if (IsUnboxedStore() && compiler->is_optimizing()) { |
| 1896 const VRegister value = locs()->in(1).fpu_reg(); | 1896 const VRegister value = locs()->in(1).fpu_reg(); |
| 1897 const Register temp = locs()->temp(0).reg(); | 1897 const Register temp = locs()->temp(0).reg(); |
| 1898 const Register temp2 = locs()->temp(1).reg(); | 1898 const Register temp2 = locs()->temp(1).reg(); |
| 1899 const intptr_t cid = field().UnboxedFieldCid(); | 1899 const intptr_t cid = field().UnboxedFieldCid(); |
| 1900 | 1900 |
| (...skipping 51 matching lines...) |
| 1952 } | 1952 } |
| 1953 | 1953 |
| 1954 Label store_pointer; | 1954 Label store_pointer; |
| 1955 Label store_double; | 1955 Label store_double; |
| 1956 Label store_float32x4; | 1956 Label store_float32x4; |
| 1957 Label store_float64x2; | 1957 Label store_float64x2; |
| 1958 | 1958 |
| 1959 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); | 1959 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); |
| 1960 | 1960 |
| 1961 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(), | 1961 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(), |
| 1962 kUnsignedWord); | 1962 kUnsignedHalfword); |
| 1963 __ CompareImmediate(temp2, kNullCid); | 1963 __ CompareImmediate(temp2, kNullCid); |
| 1964 __ b(&store_pointer, EQ); | 1964 __ b(&store_pointer, EQ); |
| 1965 | 1965 |
| 1966 __ LoadFromOffset(temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, | 1966 __ LoadFromOffset(temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, |
| 1967 kUnsignedByte); | 1967 kUnsignedByte); |
| 1968 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1968 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
| 1969 __ b(&store_pointer, EQ); | 1969 __ b(&store_pointer, EQ); |
| 1970 | 1970 |
| 1971 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1971 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1972 kUnsignedWord); | 1972 kUnsignedHalfword); |
| 1973 __ CompareImmediate(temp2, kDoubleCid); | 1973 __ CompareImmediate(temp2, kDoubleCid); |
| 1974 __ b(&store_double, EQ); | 1974 __ b(&store_double, EQ); |
| 1975 | 1975 |
| 1976 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1976 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1977 kUnsignedWord); | 1977 kUnsignedHalfword); |
| 1978 __ CompareImmediate(temp2, kFloat32x4Cid); | 1978 __ CompareImmediate(temp2, kFloat32x4Cid); |
| 1979 __ b(&store_float32x4, EQ); | 1979 __ b(&store_float32x4, EQ); |
| 1980 | 1980 |
| 1981 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1981 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1982 kUnsignedWord); | 1982 kUnsignedHalfword); |
| 1983 __ CompareImmediate(temp2, kFloat64x2Cid); | 1983 __ CompareImmediate(temp2, kFloat64x2Cid); |
| 1984 __ b(&store_float64x2, EQ); | 1984 __ b(&store_float64x2, EQ); |
| 1985 | 1985 |
| 1986 // Fall through. | 1986 // Fall through. |
| 1987 __ b(&store_pointer); | 1987 __ b(&store_pointer); |
| 1988 | 1988 |
| 1989 if (!compiler->is_optimizing()) { | 1989 if (!compiler->is_optimizing()) { |
| 1990 locs()->live_registers()->Add(locs()->in(0)); | 1990 locs()->live_registers()->Add(locs()->in(0)); |
| 1991 locs()->live_registers()->Add(locs()->in(1)); | 1991 locs()->live_registers()->Add(locs()->in(1)); |
| 1992 } | 1992 } |
| (...skipping 243 matching lines...) |
| 2236 locs->set_temp(0, Location::RequiresRegister()); | 2236 locs->set_temp(0, Location::RequiresRegister()); |
| 2237 } else if (IsPotentialUnboxedLoad()) { | 2237 } else if (IsPotentialUnboxedLoad()) { |
| 2238 locs->set_temp(0, Location::RequiresRegister()); | 2238 locs->set_temp(0, Location::RequiresRegister()); |
| 2239 } | 2239 } |
| 2240 locs->set_out(0, Location::RequiresRegister()); | 2240 locs->set_out(0, Location::RequiresRegister()); |
| 2241 return locs; | 2241 return locs; |
| 2242 } | 2242 } |
| 2243 | 2243 |
| 2244 | 2244 |
| 2245 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2245 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2246 ASSERT(sizeof(classid_t) == kInt32Size); | 2246 ASSERT(sizeof(classid_t) == kInt16Size); |
| 2247 const Register instance_reg = locs()->in(0).reg(); | 2247 const Register instance_reg = locs()->in(0).reg(); |
| 2248 if (IsUnboxedLoad() && compiler->is_optimizing()) { | 2248 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
| 2249 const VRegister result = locs()->out(0).fpu_reg(); | 2249 const VRegister result = locs()->out(0).fpu_reg(); |
| 2250 const Register temp = locs()->temp(0).reg(); | 2250 const Register temp = locs()->temp(0).reg(); |
| 2251 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes()); | 2251 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes()); |
| 2252 const intptr_t cid = field()->UnboxedFieldCid(); | 2252 const intptr_t cid = field()->UnboxedFieldCid(); |
| 2253 switch (cid) { | 2253 switch (cid) { |
| 2254 case kDoubleCid: | 2254 case kDoubleCid: |
| 2255 __ Comment("UnboxedDoubleLoadFieldInstr"); | 2255 __ Comment("UnboxedDoubleLoadFieldInstr"); |
| 2256 __ LoadDFieldFromOffset(result, temp, Double::value_offset()); | 2256 __ LoadDFieldFromOffset(result, temp, Double::value_offset()); |
| (...skipping 16 matching lines...) |
| 2273 const Register temp = locs()->temp(0).reg(); | 2273 const Register temp = locs()->temp(0).reg(); |
| 2274 | 2274 |
| 2275 Label load_pointer; | 2275 Label load_pointer; |
| 2276 Label load_double; | 2276 Label load_double; |
| 2277 Label load_float32x4; | 2277 Label load_float32x4; |
| 2278 Label load_float64x2; | 2278 Label load_float64x2; |
| 2279 | 2279 |
| 2280 __ LoadObject(result_reg, Field::ZoneHandle(field()->Original())); | 2280 __ LoadObject(result_reg, Field::ZoneHandle(field()->Original())); |
| 2281 | 2281 |
| 2282 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset(), | 2282 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset(), |
| 2283 kUnsignedWord); | 2283 kUnsignedHalfword); |
| 2284 FieldAddress field_nullability_operand( | 2284 FieldAddress field_nullability_operand( |
| 2285 result_reg, Field::is_nullable_offset(), kUnsignedWord); | 2285 result_reg, Field::is_nullable_offset(), kUnsignedHalfword); |
| 2286 | 2286 |
| 2287 __ ldr(temp, field_nullability_operand, kUnsignedWord); | 2287 __ ldr(temp, field_nullability_operand, kUnsignedHalfword); |
| 2288 __ CompareImmediate(temp, kNullCid); | 2288 __ CompareImmediate(temp, kNullCid); |
| 2289 __ b(&load_pointer, EQ); | 2289 __ b(&load_pointer, EQ); |
| 2290 | 2290 |
| 2291 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2291 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2292 __ CompareImmediate(temp, kDoubleCid); | 2292 __ CompareImmediate(temp, kDoubleCid); |
| 2293 __ b(&load_double, EQ); | 2293 __ b(&load_double, EQ); |
| 2294 | 2294 |
| 2295 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2295 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2296 __ CompareImmediate(temp, kFloat32x4Cid); | 2296 __ CompareImmediate(temp, kFloat32x4Cid); |
| 2297 __ b(&load_float32x4, EQ); | 2297 __ b(&load_float32x4, EQ); |
| 2298 | 2298 |
| 2299 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2299 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2300 __ CompareImmediate(temp, kFloat64x2Cid); | 2300 __ CompareImmediate(temp, kFloat64x2Cid); |
| 2301 __ b(&load_float64x2, EQ); | 2301 __ b(&load_float64x2, EQ); |
| 2302 | 2302 |
| 2303 // Fall through. | 2303 // Fall through. |
| 2304 __ b(&load_pointer); | 2304 __ b(&load_pointer); |
| 2305 | 2305 |
| 2306 if (!compiler->is_optimizing()) { | 2306 if (!compiler->is_optimizing()) { |
| 2307 locs()->live_registers()->Add(locs()->in(0)); | 2307 locs()->live_registers()->Add(locs()->in(0)); |
| 2308 } | 2308 } |
| 2309 | 2309 |
| (...skipping 3780 matching lines...) |
| 6090 compiler->GenerateRuntimeCall(TokenPosition::kNoSource, deopt_id(), | 6090 compiler->GenerateRuntimeCall(TokenPosition::kNoSource, deopt_id(), |
| 6091 kGrowRegExpStackRuntimeEntry, 1, locs()); | 6091 kGrowRegExpStackRuntimeEntry, 1, locs()); |
| 6092 __ Drop(1); | 6092 __ Drop(1); |
| 6093 __ Pop(result); | 6093 __ Pop(result); |
| 6094 } | 6094 } |
| 6095 | 6095 |
| 6096 | 6096 |
| 6097 } // namespace dart | 6097 } // namespace dart |
| 6098 | 6098 |
| 6099 #endif // defined TARGET_ARCH_ARM64 | 6099 #endif // defined TARGET_ARCH_ARM64 |
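
The substantive change throughout this file is that `classid_t` is now asserted to be 16 bits (`kInt16Size`), so the `guarded_cid` and `is_nullable` slots of `Field` are read and written with halfword (`kUnsignedHalfword`) accesses instead of word-sized (`kUnsignedWord`) ones. Below is a minimal standalone sketch of why the access width must agree with `sizeof(classid_t)`; the type alias, struct layout, and field values are illustrative assumptions, not Dart VM declarations.

```cpp
// Sketch only: illustrates why a 16-bit class-id slot must be accessed with
// halfword loads, as the ARM64 instruction selection above now does.
#include <cassert>
#include <cstdint>
#include <cstring>

using classid_t = std::uint16_t;  // assumption: classid_t narrowed to 16 bits

struct FieldLayout {                // hypothetical layout of two adjacent slots
  classid_t guarded_cid;            // guarded class id
  classid_t is_nullable;            // stored as a cid (kNullCid / kIllegalCid)
};

int main() {
  static_assert(sizeof(classid_t) == 2,
                "halfword accesses assume 16-bit class ids");

  FieldLayout f{42, 7};

  // A halfword (16-bit) load at the guarded_cid offset reads exactly that slot.
  std::uint16_t cid16;
  std::memcpy(&cid16, &f.guarded_cid, sizeof(cid16));
  assert(cid16 == 42);

  // A word (32-bit) load at the same offset would also pull in the adjacent
  // is_nullable slot; on a little-endian target the cid ends up in the low
  // half and the neighbor in the high half, which is why the operand size
  // in the diff changed from kUnsignedWord to kUnsignedHalfword.
  std::uint32_t word;
  std::memcpy(&word, &f.guarded_cid, sizeof(word));
  assert((word & 0xFFFFu) == 42u);
  return 0;
}
```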