| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| (...skipping 1480 matching lines...) |
| 1491 for (intptr_t i = 0; i < num_temps; i++) { | 1491 for (intptr_t i = 0; i < num_temps; i++) { |
| 1492 summary->set_temp(i, Location::RequiresRegister()); | 1492 summary->set_temp(i, Location::RequiresRegister()); |
| 1493 } | 1493 } |
| 1494 | 1494 |
| 1495 | 1495 |
| 1496 return summary; | 1496 return summary; |
| 1497 } | 1497 } |
| 1498 | 1498 |
| 1499 | 1499 |
| 1500 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1500 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1501 ASSERT(sizeof(classid_t) == kInt32Size); | 1501 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1502 const intptr_t value_cid = value()->Type()->ToCid(); | 1502 const intptr_t value_cid = value()->Type()->ToCid(); |
| 1503 const intptr_t field_cid = field().guarded_cid(); | 1503 const intptr_t field_cid = field().guarded_cid(); |
| 1504 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; | 1504 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; |
| 1505 | 1505 |
| 1506 if (field_cid == kDynamicCid) { | 1506 if (field_cid == kDynamicCid) { |
| 1507 if (Compiler::IsBackgroundCompilation()) { | 1507 if (Compiler::IsBackgroundCompilation()) { |
| 1508 // Field state changed while compiling. | 1508 // Field state changed while compiling. |
| 1509 Compiler::AbortBackgroundCompilation( | 1509 Compiler::AbortBackgroundCompilation( |
| 1510 deopt_id(), | 1510 deopt_id(), |
| 1511 "GuardFieldClassInstr: field state changed while compiling"); | 1511 "GuardFieldClassInstr: field state changed while compiling"); |
| (...skipping 31 matching lines...) |
| 1543 if (emit_full_guard) { | 1543 if (emit_full_guard) { |
| 1544 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); | 1544 __ LoadObject(field_reg, Field::ZoneHandle(field().Original())); |
| 1545 | 1545 |
| 1546 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); | 1546 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
| 1547 FieldAddress field_nullability_operand(field_reg, | 1547 FieldAddress field_nullability_operand(field_reg, |
| 1548 Field::is_nullable_offset()); | 1548 Field::is_nullable_offset()); |
| 1549 | 1549 |
| 1550 if (value_cid == kDynamicCid) { | 1550 if (value_cid == kDynamicCid) { |
| 1551 LoadValueCid(compiler, value_cid_reg, value_reg); | 1551 LoadValueCid(compiler, value_cid_reg, value_reg); |
| 1552 | 1552 |
| 1553 __ cmpl(value_cid_reg, field_cid_operand); | 1553 __ cmpw(value_cid_reg, field_cid_operand); |
| 1554 __ j(EQUAL, &ok); | 1554 __ j(EQUAL, &ok); |
| 1555 __ cmpl(value_cid_reg, field_nullability_operand); | 1555 __ cmpw(value_cid_reg, field_nullability_operand); |
| 1556 } else if (value_cid == kNullCid) { | 1556 } else if (value_cid == kNullCid) { |
| 1557 __ cmpl(field_nullability_operand, Immediate(value_cid)); | 1557 __ cmpw(field_nullability_operand, Immediate(value_cid)); |
| 1558 } else { | 1558 } else { |
| 1559 __ cmpl(field_cid_operand, Immediate(value_cid)); | 1559 __ cmpw(field_cid_operand, Immediate(value_cid)); |
| 1560 } | 1560 } |
| 1561 __ j(EQUAL, &ok); | 1561 __ j(EQUAL, &ok); |
| 1562 | 1562 |
| 1563 // Check if the tracked state of the guarded field can be initialized | 1563 // Check if the tracked state of the guarded field can be initialized |
| 1564 // inline. If the field needs length check we fall through to runtime | 1564 // inline. If the field needs length check we fall through to runtime |
| 1565 // which is responsible for computing offset of the length field | 1565 // which is responsible for computing offset of the length field |
| 1566 // based on the class id. | 1566 // based on the class id. |
| 1567 if (!field().needs_length_check()) { | 1567 if (!field().needs_length_check()) { |
| 1568 // Uninitialized field can be handled inline. Check if the | 1568 // Uninitialized field can be handled inline. Check if the |
| 1569 // field is still uninitialized. | 1569 // field is still uninitialized. |
| 1570 __ cmpl(field_cid_operand, Immediate(kIllegalCid)); | 1570 __ cmpw(field_cid_operand, Immediate(kIllegalCid)); |
| 1571 __ j(NOT_EQUAL, fail); | 1571 __ j(NOT_EQUAL, fail); |
| 1572 | 1572 |
| 1573 if (value_cid == kDynamicCid) { | 1573 if (value_cid == kDynamicCid) { |
| 1574 __ movl(field_cid_operand, value_cid_reg); | 1574 __ movw(field_cid_operand, value_cid_reg); |
| 1575 __ movl(field_nullability_operand, value_cid_reg); | 1575 __ movw(field_nullability_operand, value_cid_reg); |
| 1576 } else { | 1576 } else { |
| 1577 ASSERT(field_reg != kNoRegister); | 1577 ASSERT(field_reg != kNoRegister); |
| 1578 __ movl(field_cid_operand, Immediate(value_cid)); | 1578 __ movw(field_cid_operand, Immediate(value_cid)); |
| 1579 __ movl(field_nullability_operand, Immediate(value_cid)); | 1579 __ movw(field_nullability_operand, Immediate(value_cid)); |
| 1580 } | 1580 } |
| 1581 | 1581 |
| 1582 if (deopt == NULL) { | 1582 if (deopt == NULL) { |
| 1583 ASSERT(!compiler->is_optimizing()); | 1583 ASSERT(!compiler->is_optimizing()); |
| 1584 __ jmp(&ok); | 1584 __ jmp(&ok); |
| 1585 } | 1585 } |
| 1586 } | 1586 } |
| 1587 | 1587 |
| 1588 if (deopt == NULL) { | 1588 if (deopt == NULL) { |
| 1589 ASSERT(!compiler->is_optimizing()); | 1589 ASSERT(!compiler->is_optimizing()); |
| 1590 __ Bind(fail); | 1590 __ Bind(fail); |
| 1591 | 1591 |
| 1592 __ cmpl(FieldAddress(field_reg, Field::guarded_cid_offset()), | 1592 __ cmpw(FieldAddress(field_reg, Field::guarded_cid_offset()), |
| 1593 Immediate(kDynamicCid)); | 1593 Immediate(kDynamicCid)); |
| 1594 __ j(EQUAL, &ok); | 1594 __ j(EQUAL, &ok); |
| 1595 | 1595 |
| 1596 __ pushq(field_reg); | 1596 __ pushq(field_reg); |
| 1597 __ pushq(value_reg); | 1597 __ pushq(value_reg); |
| 1598 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); | 1598 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); |
| 1599 __ Drop(2); // Drop the field and the value. | 1599 __ Drop(2); // Drop the field and the value. |
| 1600 } | 1600 } |
| 1601 } else { | 1601 } else { |
| 1602 ASSERT(compiler->is_optimizing()); | 1602 ASSERT(compiler->is_optimizing()); |
| (...skipping 222 matching lines...) |
| 1825 __ j(NOT_EQUAL, &done); | 1825 __ j(NOT_EQUAL, &done); |
| 1826 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg, temp); | 1826 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg, temp); |
| 1827 __ movq(temp, box_reg); | 1827 __ movq(temp, box_reg); |
| 1828 __ StoreIntoObject(instance_reg, FieldAddress(instance_reg, offset), temp); | 1828 __ StoreIntoObject(instance_reg, FieldAddress(instance_reg, offset), temp); |
| 1829 | 1829 |
| 1830 __ Bind(&done); | 1830 __ Bind(&done); |
| 1831 } | 1831 } |
| 1832 | 1832 |
| 1833 | 1833 |
| 1834 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1834 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1835 ASSERT(sizeof(classid_t) == kInt32Size); | 1835 ASSERT(sizeof(classid_t) == kInt16Size); |
| 1836 Label skip_store; | 1836 Label skip_store; |
| 1837 | 1837 |
| 1838 Register instance_reg = locs()->in(0).reg(); | 1838 Register instance_reg = locs()->in(0).reg(); |
| 1839 | 1839 |
| 1840 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1840 if (IsUnboxedStore() && compiler->is_optimizing()) { |
| 1841 XmmRegister value = locs()->in(1).fpu_reg(); | 1841 XmmRegister value = locs()->in(1).fpu_reg(); |
| 1842 Register temp = locs()->temp(0).reg(); | 1842 Register temp = locs()->temp(0).reg(); |
| 1843 Register temp2 = locs()->temp(1).reg(); | 1843 Register temp2 = locs()->temp(1).reg(); |
| 1844 const intptr_t cid = field().UnboxedFieldCid(); | 1844 const intptr_t cid = field().UnboxedFieldCid(); |
| 1845 | 1845 |
| (...skipping 51 matching lines...) |
| 1897 locs()->live_registers()->Add(locs()->in(1), kTagged); | 1897 locs()->live_registers()->Add(locs()->in(1), kTagged); |
| 1898 } | 1898 } |
| 1899 | 1899 |
| 1900 Label store_pointer; | 1900 Label store_pointer; |
| 1901 Label store_double; | 1901 Label store_double; |
| 1902 Label store_float32x4; | 1902 Label store_float32x4; |
| 1903 Label store_float64x2; | 1903 Label store_float64x2; |
| 1904 | 1904 |
| 1905 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); | 1905 __ LoadObject(temp, Field::ZoneHandle(Z, field().Original())); |
| 1906 | 1906 |
| 1907 __ cmpl(FieldAddress(temp, Field::is_nullable_offset()), | 1907 __ cmpw(FieldAddress(temp, Field::is_nullable_offset()), |
| 1908 Immediate(kNullCid)); | 1908 Immediate(kNullCid)); |
| 1909 __ j(EQUAL, &store_pointer); | 1909 __ j(EQUAL, &store_pointer); |
| 1910 | 1910 |
| 1911 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1911 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
| 1912 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1912 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
| 1913 __ j(ZERO, &store_pointer); | 1913 __ j(ZERO, &store_pointer); |
| 1914 | 1914 |
| 1915 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | 1915 __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()), |
| 1916 Immediate(kDoubleCid)); | 1916 Immediate(kDoubleCid)); |
| 1917 __ j(EQUAL, &store_double); | 1917 __ j(EQUAL, &store_double); |
| 1918 | 1918 |
| 1919 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | 1919 __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()), |
| 1920 Immediate(kFloat32x4Cid)); | 1920 Immediate(kFloat32x4Cid)); |
| 1921 __ j(EQUAL, &store_float32x4); | 1921 __ j(EQUAL, &store_float32x4); |
| 1922 | 1922 |
| 1923 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | 1923 __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()), |
| 1924 Immediate(kFloat64x2Cid)); | 1924 Immediate(kFloat64x2Cid)); |
| 1925 __ j(EQUAL, &store_float64x2); | 1925 __ j(EQUAL, &store_float64x2); |
| 1926 | 1926 |
| 1927 // Fall through. | 1927 // Fall through. |
| 1928 __ jmp(&store_pointer); | 1928 __ jmp(&store_pointer); |
| 1929 | 1929 |
| 1930 if (!compiler->is_optimizing()) { | 1930 if (!compiler->is_optimizing()) { |
| 1931 locs()->live_registers()->Add(locs()->in(0)); | 1931 locs()->live_registers()->Add(locs()->in(0)); |
| 1932 locs()->live_registers()->Add(locs()->in(1)); | 1932 locs()->live_registers()->Add(locs()->in(1)); |
| 1933 } | 1933 } |
| (...skipping 249 matching lines...) |
| 2183 locs->set_temp(0, opt ? Location::RequiresFpuRegister() | 2183 locs->set_temp(0, opt ? Location::RequiresFpuRegister() |
| 2184 : Location::FpuRegisterLocation(XMM1)); | 2184 : Location::FpuRegisterLocation(XMM1)); |
| 2185 locs->set_temp(1, Location::RequiresRegister()); | 2185 locs->set_temp(1, Location::RequiresRegister()); |
| 2186 } | 2186 } |
| 2187 locs->set_out(0, Location::RequiresRegister()); | 2187 locs->set_out(0, Location::RequiresRegister()); |
| 2188 return locs; | 2188 return locs; |
| 2189 } | 2189 } |
| 2190 | 2190 |
| 2191 | 2191 |
| 2192 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2192 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2193 ASSERT(sizeof(classid_t) == kInt32Size); | 2193 ASSERT(sizeof(classid_t) == kInt16Size); |
| 2194 Register instance_reg = locs()->in(0).reg(); | 2194 Register instance_reg = locs()->in(0).reg(); |
| 2195 if (IsUnboxedLoad() && compiler->is_optimizing()) { | 2195 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
| 2196 XmmRegister result = locs()->out(0).fpu_reg(); | 2196 XmmRegister result = locs()->out(0).fpu_reg(); |
| 2197 Register temp = locs()->temp(0).reg(); | 2197 Register temp = locs()->temp(0).reg(); |
| 2198 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2198 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| 2199 intptr_t cid = field()->UnboxedFieldCid(); | 2199 intptr_t cid = field()->UnboxedFieldCid(); |
| 2200 switch (cid) { | 2200 switch (cid) { |
| 2201 case kDoubleCid: | 2201 case kDoubleCid: |
| 2202 __ Comment("UnboxedDoubleLoadFieldInstr"); | 2202 __ Comment("UnboxedDoubleLoadFieldInstr"); |
| 2203 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 2203 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| (...skipping 18 matching lines...) |
| 2222 Register temp = locs()->temp(1).reg(); | 2222 Register temp = locs()->temp(1).reg(); |
| 2223 XmmRegister value = locs()->temp(0).fpu_reg(); | 2223 XmmRegister value = locs()->temp(0).fpu_reg(); |
| 2224 | 2224 |
| 2225 Label load_pointer; | 2225 Label load_pointer; |
| 2226 Label load_double; | 2226 Label load_double; |
| 2227 Label load_float32x4; | 2227 Label load_float32x4; |
| 2228 Label load_float64x2; | 2228 Label load_float64x2; |
| 2229 | 2229 |
| 2230 __ LoadObject(result, Field::ZoneHandle(field()->Original())); | 2230 __ LoadObject(result, Field::ZoneHandle(field()->Original())); |
| 2231 | 2231 |
| 2232 __ cmpl(FieldAddress(result, Field::is_nullable_offset()), | 2232 FieldAddress field_cid_operand(result, Field::guarded_cid_offset()); |
| 2233 Immediate(kNullCid)); | 2233 FieldAddress field_nullability_operand(result, Field::is_nullable_offset()); |
| | 2234 |
| | 2235 __ cmpw(field_nullability_operand, Immediate(kNullCid)); |
| 2234 __ j(EQUAL, &load_pointer); | 2236 __ j(EQUAL, &load_pointer); |
| 2235 | 2237 |
| 2236 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), | 2238 __ cmpw(field_cid_operand, Immediate(kDoubleCid)); |
| 2237 Immediate(kDoubleCid)); | |
| 2238 __ j(EQUAL, &load_double); | 2239 __ j(EQUAL, &load_double); |
| 2239 | 2240 |
| 2240 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), | 2241 __ cmpw(field_cid_operand, Immediate(kFloat32x4Cid)); |
| 2241 Immediate(kFloat32x4Cid)); | |
| 2242 __ j(EQUAL, &load_float32x4); | 2242 __ j(EQUAL, &load_float32x4); |
| 2243 | 2243 |
| 2244 __ cmpl(FieldAddress(result, Field::guarded_cid_offset()), | 2244 __ cmpw(field_cid_operand, Immediate(kFloat64x2Cid)); |
| 2245 Immediate(kFloat64x2Cid)); | |
| 2246 __ j(EQUAL, &load_float64x2); | 2245 __ j(EQUAL, &load_float64x2); |
| 2247 | 2246 |
| 2248 // Fall through. | 2247 // Fall through. |
| 2249 __ jmp(&load_pointer); | 2248 __ jmp(&load_pointer); |
| 2250 | 2249 |
| 2251 if (!compiler->is_optimizing()) { | 2250 if (!compiler->is_optimizing()) { |
| 2252 locs()->live_registers()->Add(locs()->in(0)); | 2251 locs()->live_registers()->Add(locs()->in(0)); |
| 2253 } | 2252 } |
| 2254 | 2253 |
| 2255 { | 2254 { |
| (...skipping 4534 matching lines...) |
| 6790 __ Drop(1); | 6789 __ Drop(1); |
| 6791 __ popq(result); | 6790 __ popq(result); |
| 6792 } | 6791 } |
| 6793 | 6792 |
| 6794 | 6793 |
| 6795 } // namespace dart | 6794 } // namespace dart |
| 6796 | 6795 |
| 6797 #undef __ | 6796 #undef __ |
| 6798 | 6797 |
| 6799 #endif // defined TARGET_ARCH_X64 | 6798 #endif // defined TARGET_ARCH_X64 |
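
The change narrows classid_t to 16 bits (the ASSERTs above switch from kInt32Size to kInt16Size), so accesses to Field's guarded_cid_ and is_nullable_ slots switch from 32-bit cmpl/movl to 16-bit cmpw/movw operands. The following standalone C++ sketch makes the same point using a hypothetical FieldState struct and placeholder cid values, not the VM's real Field layout: on little-endian x64, a 32-bit access would also read the adjacent 16-bit slot, which is exactly what the word-sized instructions avoid.

// Standalone illustration (not VM code): the operand width must match the
// declared width of the class-id slots once classid_t is 16 bits.
#include <cstdint>
#include <cstdio>
#include <cstring>

// Hypothetical layout mirroring the guarded_cid_ / is_nullable_ pair that the
// assembly above compares and stores with cmpw/movw.
struct FieldState {
  uint16_t guarded_cid;  // 16-bit class id (sizeof(classid_t) == kInt16Size)
  uint16_t is_nullable;  // kNullCid when nullable, kIllegalCid otherwise
};

// Placeholder cid values for the sketch, not the VM's real constants.
constexpr uint16_t kIllegalCid = 0;
constexpr uint16_t kNullCid = 1;
constexpr uint16_t kDoubleCid = 42;

// Word-sized access (the cmpw/movw analogue): touches only guarded_cid.
bool GuardWord(FieldState* f, uint16_t value_cid) {
  if (f->guarded_cid == kIllegalCid) {
    f->guarded_cid = value_cid;  // initialize the guard in place
    return true;
  }
  return f->guarded_cid == value_cid;
}

// Dword-sized access (the old cmpl analogue): with 16-bit slots this also
// pulls in the neighbouring is_nullable value, so the compare goes wrong.
bool GuardDword(FieldState* f, uint32_t value_cid) {
  uint32_t wide = 0;
  std::memcpy(&wide, &f->guarded_cid, sizeof(wide));  // reads both 16-bit slots
  return wide == value_cid;
}

int main() {
  FieldState f = {kDoubleCid, kNullCid};
  std::printf("16-bit guard matches: %d\n", GuardWord(&f, kDoubleCid));   // prints 1
  std::printf("32-bit guard matches: %d\n", GuardDword(&f, kDoubleCid));  // prints 0: high half holds kNullCid
  return 0;
}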