| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
| 6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| (...skipping 1530 matching lines...) |
| 1541 | 1541 |
| 1542 for (intptr_t i = 0; i < num_temps; i++) { | 1542 for (intptr_t i = 0; i < num_temps; i++) { |
| 1543 summary->set_temp(i, Location::RequiresRegister()); | 1543 summary->set_temp(i, Location::RequiresRegister()); |
| 1544 } | 1544 } |
| 1545 | 1545 |
| 1546 return summary; | 1546 return summary; |
| 1547 } | 1547 } |
| 1548 | 1548 |
| 1549 | 1549 |
| 1550 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1550 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1551 ASSERT(sizeof(classid_t) == kInt32Size); | 1551 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1552 const intptr_t value_cid = value()->Type()->ToCid(); | 1552 const intptr_t value_cid = value()->Type()->ToCid(); |
| 1553 const intptr_t field_cid = field().guarded_cid(); | 1553 const intptr_t field_cid = field().guarded_cid(); |
| 1554 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; | 1554 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; |
| 1555 | 1555 |
| 1556 if (field_cid == kDynamicCid) { | 1556 if (field_cid == kDynamicCid) { |
| 1557 if (Compiler::IsBackgroundCompilation()) { | 1557 if (Compiler::IsBackgroundCompilation()) { |
| 1558 // Field state changed while compiling. | 1558 // Field state changed while compiling. |
| 1559 Compiler::AbortBackgroundCompilation( | 1559 Compiler::AbortBackgroundCompilation( |
| 1560 deopt_id(), | 1560 deopt_id(), |
| 1561 "GuardFieldClassInstr: field state changed while compiling"); | 1561 "GuardFieldClassInstr: field state changed while compiling"); |
| (...skipping 25 matching lines...) |
| 1587 compiler->is_optimizing() | 1587 compiler->is_optimizing() |
| 1588 ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) | 1588 ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) |
| 1589 : NULL; | 1589 : NULL; |
| 1590 | 1590 |
| 1591 Label* fail = (deopt != NULL) ? deopt : &fail_label; | 1591 Label* fail = (deopt != NULL) ? deopt : &fail_label; |
| 1592 | 1592 |
| 1593 if (emit_full_guard) { | 1593 if (emit_full_guard) { |
| 1594 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); | 1594 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); |
| 1595 | 1595 |
| 1596 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset(), | 1596 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset(), |
| 1597 kUnsignedWord); | 1597 kUnsignedHalfword); |
| 1598 FieldAddress field_nullability_operand( | 1598 FieldAddress field_nullability_operand( |
| 1599 field_reg, Field::is_nullable_offset(), kUnsignedWord); | 1599 field_reg, Field::is_nullable_offset(), kUnsignedHalfword); |
| 1600 | 1600 |
| 1601 if (value_cid == kDynamicCid) { | 1601 if (value_cid == kDynamicCid) { |
| 1602 LoadValueCid(compiler, value_cid_reg, value_reg); | 1602 LoadValueCid(compiler, value_cid_reg, value_reg); |
| 1603 Label skip_length_check; | 1603 Label skip_length_check; |
| 1604 __ ldr(TMP, field_cid_operand, kUnsignedWord); | 1604 __ ldr(TMP, field_cid_operand, kUnsignedHalfword); |
| 1605 __ CompareRegisters(value_cid_reg, TMP); | 1605 __ CompareRegisters(value_cid_reg, TMP); |
| 1606 __ b(&ok, EQ); | 1606 __ b(&ok, EQ); |
| 1607 __ ldr(TMP, field_nullability_operand, kUnsignedWord); | 1607 __ ldr(TMP, field_nullability_operand, kUnsignedHalfword); |
| 1608 __ CompareRegisters(value_cid_reg, TMP); | 1608 __ CompareRegisters(value_cid_reg, TMP); |
| 1609 } else if (value_cid == kNullCid) { | 1609 } else if (value_cid == kNullCid) { |
| 1610 __ ldr(value_cid_reg, field_nullability_operand, kUnsignedWord); | 1610 __ ldr(value_cid_reg, field_nullability_operand, kUnsignedHalfword); |
| 1611 __ CompareImmediate(value_cid_reg, value_cid); | 1611 __ CompareImmediate(value_cid_reg, value_cid); |
| 1612 } else { | 1612 } else { |
| 1613 Label skip_length_check; | 1613 Label skip_length_check; |
| 1614 __ ldr(value_cid_reg, field_cid_operand, kUnsignedWord); | 1614 __ ldr(value_cid_reg, field_cid_operand, kUnsignedHalfword); |
| 1615 __ CompareImmediate(value_cid_reg, value_cid); | 1615 __ CompareImmediate(value_cid_reg, value_cid); |
| 1616 } | 1616 } |
| 1617 __ b(&ok, EQ); | 1617 __ b(&ok, EQ); |
| 1618 | 1618 |
| 1619 // Check if the tracked state of the guarded field can be initialized | 1619 // Check if the tracked state of the guarded field can be initialized |
| 1620 // inline. If the field needs length check we fall through to runtime | 1620 // inline. If the field needs length check we fall through to runtime |
| 1621 // which is responsible for computing offset of the length field | 1621 // which is responsible for computing offset of the length field |
| 1622 // based on the class id. | 1622 // based on the class id. |
| 1623 // Length guard will be emitted separately when needed via GuardFieldLength | 1623 // Length guard will be emitted separately when needed via GuardFieldLength |
| 1624 // instruction after GuardFieldClass. | 1624 // instruction after GuardFieldClass. |
| 1625 if (!field().needs_length_check()) { | 1625 if (!field().needs_length_check()) { |
| 1626 // Uninitialized field can be handled inline. Check if the | 1626 // Uninitialized field can be handled inline. Check if the |
| 1627 // field is still uninitialized. | 1627 // field is still uninitialized. |
| 1628 __ ldr(TMP, field_cid_operand, kUnsignedWord); | 1628 __ ldr(TMP, field_cid_operand, kUnsignedHalfword); |
| 1629 __ CompareImmediate(TMP, kIllegalCid); | 1629 __ CompareImmediate(TMP, kIllegalCid); |
| 1630 __ b(fail, NE); | 1630 __ b(fail, NE); |
| 1631 | 1631 |
| 1632 if (value_cid == kDynamicCid) { | 1632 if (value_cid == kDynamicCid) { |
| 1633 __ str(value_cid_reg, field_cid_operand, kUnsignedWord); | 1633 __ str(value_cid_reg, field_cid_operand, kUnsignedHalfword); |
| 1634 __ str(value_cid_reg, field_nullability_operand, kUnsignedWord); | 1634 __ str(value_cid_reg, field_nullability_operand, kUnsignedHalfword); |
| 1635 } else { | 1635 } else { |
| 1636 __ LoadImmediate(TMP, value_cid); | 1636 __ LoadImmediate(TMP, value_cid); |
| 1637 __ str(TMP, field_cid_operand, kUnsignedWord); | 1637 __ str(TMP, field_cid_operand, kUnsignedHalfword); |
| 1638 __ str(TMP, field_nullability_operand, kUnsignedWord); | 1638 __ str(TMP, field_nullability_operand, kUnsignedHalfword); |
| 1639 } | 1639 } |
| 1640 | 1640 |
| 1641 if (deopt == NULL) { | 1641 if (deopt == NULL) { |
| 1642 ASSERT(!compiler->is_optimizing()); | 1642 ASSERT(!compiler->is_optimizing()); |
| 1643 __ b(&ok); | 1643 __ b(&ok); |
| 1644 } | 1644 } |
| 1645 } | 1645 } |
| 1646 | 1646 |
| 1647 if (deopt == NULL) { | 1647 if (deopt == NULL) { |
| 1648 ASSERT(!compiler->is_optimizing()); | 1648 ASSERT(!compiler->is_optimizing()); |
| 1649 __ Bind(fail); | 1649 __ Bind(fail); |
| 1650 | 1650 |
| 1651 __ LoadFieldFromOffset(TMP, field_reg, Field::guarded_cid_offset(), | 1651 __ LoadFieldFromOffset(TMP, field_reg, Field::guarded_cid_offset(), |
| 1652 kUnsignedWord); | 1652 kUnsignedHalfword); |
| 1653 __ CompareImmediate(TMP, kDynamicCid); | 1653 __ CompareImmediate(TMP, kDynamicCid); |
| 1654 __ b(&ok, EQ); | 1654 __ b(&ok, EQ); |
| 1655 | 1655 |
| 1656 __ Push(field_reg); | 1656 __ Push(field_reg); |
| 1657 __ Push(value_reg); | 1657 __ Push(value_reg); |
| 1658 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); | 1658 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); |
| 1659 __ Drop(2); // Drop the field and the value. | 1659 __ Drop(2); // Drop the field and the value. |
| 1660 } | 1660 } |
| 1661 } else { | 1661 } else { |
| 1662 ASSERT(compiler->is_optimizing()); | 1662 ASSERT(compiler->is_optimizing()); |
| (...skipping 218 matching lines...) |
| 1881 } else { | 1881 } else { |
| 1882 summary->set_in(1, ShouldEmitStoreBarrier() | 1882 summary->set_in(1, ShouldEmitStoreBarrier() |
| 1883 ? Location::WritableRegister() | 1883 ? Location::WritableRegister() |
| 1884 : Location::RegisterOrConstant(value())); | 1884 : Location::RegisterOrConstant(value())); |
| 1885 } | 1885 } |
| 1886 return summary; | 1886 return summary; |
| 1887 } | 1887 } |
| 1888 | 1888 |
| 1889 | 1889 |
| 1890 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1890 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1891 ASSERT(sizeof(classid_t) == kInt32Size); | 1891 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1892 Label skip_store; | 1892 Label skip_store; |
| 1893 | 1893 |
| 1894 const Register instance_reg = locs()->in(0).reg(); | 1894 const Register instance_reg = locs()->in(0).reg(); |
| 1895 | 1895 |
| 1896 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1896 if (IsUnboxedStore() && compiler->is_optimizing()) { |
| 1897 const VRegister value = locs()->in(1).fpu_reg(); | 1897 const VRegister value = locs()->in(1).fpu_reg(); |
| 1898 const Register temp = locs()->temp(0).reg(); | 1898 const Register temp = locs()->temp(0).reg(); |
| 1899 const Register temp2 = locs()->temp(1).reg(); | 1899 const Register temp2 = locs()->temp(1).reg(); |
| 1900 const intptr_t cid = field().UnboxedFieldCid(); | 1900 const intptr_t cid = field().UnboxedFieldCid(); |
| 1901 | 1901 |
| (...skipping 51 matching lines...) |
| 1953 } | 1953 } |
| 1954 | 1954 |
| 1955 Label store_pointer; | 1955 Label store_pointer; |
| 1956 Label store_double; | 1956 Label store_double; |
| 1957 Label store_float32x4; | 1957 Label store_float32x4; |
| 1958 Label store_float64x2; | 1958 Label store_float64x2; |
| 1959 | 1959 |
| 1960 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); | 1960 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); |
| 1961 | 1961 |
| 1962 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(), | 1962 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(), |
| 1963 kUnsignedWord); | 1963 kUnsignedHalfword); |
| 1964 __ CompareImmediate(temp2, kNullCid); | 1964 __ CompareImmediate(temp2, kNullCid); |
| 1965 __ b(&store_pointer, EQ); | 1965 __ b(&store_pointer, EQ); |
| 1966 | 1966 |
| 1967 __ LoadFromOffset(temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, | 1967 __ LoadFromOffset(temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, |
| 1968 kUnsignedByte); | 1968 kUnsignedByte); |
| 1969 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1969 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
| 1970 __ b(&store_pointer, EQ); | 1970 __ b(&store_pointer, EQ); |
| 1971 | 1971 |
| 1972 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1972 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1973 kUnsignedWord); | 1973 kUnsignedHalfword); |
| 1974 __ CompareImmediate(temp2, kDoubleCid); | 1974 __ CompareImmediate(temp2, kDoubleCid); |
| 1975 __ b(&store_double, EQ); | 1975 __ b(&store_double, EQ); |
| 1976 | 1976 |
| 1977 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1977 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1978 kUnsignedWord); | 1978 kUnsignedHalfword); |
| 1979 __ CompareImmediate(temp2, kFloat32x4Cid); | 1979 __ CompareImmediate(temp2, kFloat32x4Cid); |
| 1980 __ b(&store_float32x4, EQ); | 1980 __ b(&store_float32x4, EQ); |
| 1981 | 1981 |
| 1982 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), | 1982 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), |
| 1983 kUnsignedWord); | 1983 kUnsignedHalfword); |
| 1984 __ CompareImmediate(temp2, kFloat64x2Cid); | 1984 __ CompareImmediate(temp2, kFloat64x2Cid); |
| 1985 __ b(&store_float64x2, EQ); | 1985 __ b(&store_float64x2, EQ); |
| 1986 | 1986 |
| 1987 // Fall through. | 1987 // Fall through. |
| 1988 __ b(&store_pointer); | 1988 __ b(&store_pointer); |
| 1989 | 1989 |
| 1990 if (!compiler->is_optimizing()) { | 1990 if (!compiler->is_optimizing()) { |
| 1991 locs()->live_registers()->Add(locs()->in(0)); | 1991 locs()->live_registers()->Add(locs()->in(0)); |
| 1992 locs()->live_registers()->Add(locs()->in(1)); | 1992 locs()->live_registers()->Add(locs()->in(1)); |
| 1993 } | 1993 } |
| (...skipping 243 matching lines...) |
| 2237 locs->set_temp(0, Location::RequiresRegister()); | 2237 locs->set_temp(0, Location::RequiresRegister()); |
| 2238 } else if (IsPotentialUnboxedLoad()) { | 2238 } else if (IsPotentialUnboxedLoad()) { |
| 2239 locs->set_temp(0, Location::RequiresRegister()); | 2239 locs->set_temp(0, Location::RequiresRegister()); |
| 2240 } | 2240 } |
| 2241 locs->set_out(0, Location::RequiresRegister()); | 2241 locs->set_out(0, Location::RequiresRegister()); |
| 2242 return locs; | 2242 return locs; |
| 2243 } | 2243 } |
| 2244 | 2244 |
| 2245 | 2245 |
| 2246 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2246 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2247 ASSERT(sizeof(classid_t) == kInt32Size); | 2247 ASSERT(sizeof(classid_t) == kInt16Size); |
| 2248 const Register instance_reg = locs()->in(0).reg(); | 2248 const Register instance_reg = locs()->in(0).reg(); |
| 2249 if (IsUnboxedLoad() && compiler->is_optimizing()) { | 2249 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
| 2250 const VRegister result = locs()->out(0).fpu_reg(); | 2250 const VRegister result = locs()->out(0).fpu_reg(); |
| 2251 const Register temp = locs()->temp(0).reg(); | 2251 const Register temp = locs()->temp(0).reg(); |
| 2252 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes()); | 2252 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes()); |
| 2253 const intptr_t cid = field()->UnboxedFieldCid(); | 2253 const intptr_t cid = field()->UnboxedFieldCid(); |
| 2254 switch (cid) { | 2254 switch (cid) { |
| 2255 case kDoubleCid: | 2255 case kDoubleCid: |
| 2256 __ Comment("UnboxedDoubleLoadFieldInstr"); | 2256 __ Comment("UnboxedDoubleLoadFieldInstr"); |
| 2257 __ LoadDFieldFromOffset(result, temp, Double::value_offset()); | 2257 __ LoadDFieldFromOffset(result, temp, Double::value_offset()); |
| (...skipping 16 matching lines...) |
| 2274 const Register temp = locs()->temp(0).reg(); | 2274 const Register temp = locs()->temp(0).reg(); |
| 2275 | 2275 |
| 2276 Label load_pointer; | 2276 Label load_pointer; |
| 2277 Label load_double; | 2277 Label load_double; |
| 2278 Label load_float32x4; | 2278 Label load_float32x4; |
| 2279 Label load_float64x2; | 2279 Label load_float64x2; |
| 2280 | 2280 |
| 2281 __ LoadObject(result_reg, Field::ZoneHandle(field()->Original())); | 2281 __ LoadObject(result_reg, Field::ZoneHandle(field()->Original())); |
| 2282 | 2282 |
| 2283 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset(), | 2283 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset(), |
| 2284 kUnsignedWord); | 2284 kUnsignedHalfword); |
| 2285 FieldAddress field_nullability_operand( | 2285 FieldAddress field_nullability_operand( |
| 2286 result_reg, Field::is_nullable_offset(), kUnsignedWord); | 2286 result_reg, Field::is_nullable_offset(), kUnsignedHalfword); |
| 2287 | 2287 |
| 2288 __ ldr(temp, field_nullability_operand, kUnsignedWord); | 2288 __ ldr(temp, field_nullability_operand, kUnsignedHalfword); |
| 2289 __ CompareImmediate(temp, kNullCid); | 2289 __ CompareImmediate(temp, kNullCid); |
| 2290 __ b(&load_pointer, EQ); | 2290 __ b(&load_pointer, EQ); |
| 2291 | 2291 |
| 2292 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2292 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2293 __ CompareImmediate(temp, kDoubleCid); | 2293 __ CompareImmediate(temp, kDoubleCid); |
| 2294 __ b(&load_double, EQ); | 2294 __ b(&load_double, EQ); |
| 2295 | 2295 |
| 2296 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2296 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2297 __ CompareImmediate(temp, kFloat32x4Cid); | 2297 __ CompareImmediate(temp, kFloat32x4Cid); |
| 2298 __ b(&load_float32x4, EQ); | 2298 __ b(&load_float32x4, EQ); |
| 2299 | 2299 |
| 2300 __ ldr(temp, field_cid_operand, kUnsignedWord); | 2300 __ ldr(temp, field_cid_operand, kUnsignedHalfword); |
| 2301 __ CompareImmediate(temp, kFloat64x2Cid); | 2301 __ CompareImmediate(temp, kFloat64x2Cid); |
| 2302 __ b(&load_float64x2, EQ); | 2302 __ b(&load_float64x2, EQ); |
| 2303 | 2303 |
| 2304 // Fall through. | 2304 // Fall through. |
| 2305 __ b(&load_pointer); | 2305 __ b(&load_pointer); |
| 2306 | 2306 |
| 2307 if (!compiler->is_optimizing()) { | 2307 if (!compiler->is_optimizing()) { |
| 2308 locs()->live_registers()->Add(locs()->in(0)); | 2308 locs()->live_registers()->Add(locs()->in(0)); |
| 2309 } | 2309 } |
| 2310 | 2310 |
| (...skipping 3766 matching lines...) |
| 6077 compiler->GenerateRuntimeCall(TokenPosition::kNoSource, deopt_id(), | 6077 compiler->GenerateRuntimeCall(TokenPosition::kNoSource, deopt_id(), |
| 6078 kGrowRegExpStackRuntimeEntry, 1, locs()); | 6078 kGrowRegExpStackRuntimeEntry, 1, locs()); |
| 6079 __ Drop(1); | 6079 __ Drop(1); |
| 6080 __ Pop(result); | 6080 __ Pop(result); |
| 6081 } | 6081 } |
| 6082 | 6082 |
| 6083 | 6083 |
| 6084 } // namespace dart | 6084 } // namespace dart |
| 6085 | 6085 |
| 6086 #endif // defined TARGET_ARCH_ARM64 | 6086 #endif // defined TARGET_ARCH_ARM64 |