OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1743 matching lines...) |
1754 hash = smi_get_hash(Smi::cast(number)) & mask; | 1754 hash = smi_get_hash(Smi::cast(number)) & mask; |
1755 number_string_cache()->set(hash * 2, Smi::cast(number)); | 1755 number_string_cache()->set(hash * 2, Smi::cast(number)); |
1756 } else { | 1756 } else { |
1757 hash = double_get_hash(number->Number()) & mask; | 1757 hash = double_get_hash(number->Number()) & mask; |
1758 number_string_cache()->set(hash * 2, number); | 1758 number_string_cache()->set(hash * 2, number); |
1759 } | 1759 } |
1760 number_string_cache()->set(hash * 2 + 1, string); | 1760 number_string_cache()->set(hash * 2 + 1, string); |
1761 } | 1761 } |
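
The cache written above is a direct-mapped table packed into one flat array: bucket i keeps the number key at slot 2*i and the cached string at slot 2*i + 1, and a colliding write simply overwrites the previous pair. A minimal standalone sketch of that layout, assuming hypothetical names (NumberStringCache, Set, Get) rather than V8's API, and glossing over the separate Smi/double hash paths:

    #include <cstddef>
    #include <optional>
    #include <string>
    #include <utility>
    #include <vector>

    // Hypothetical stand-in for number_string_cache(): one logical bucket
    // spans two adjacent slots, key at 2*i and value at 2*i + 1.
    class NumberStringCache {
     public:
      // `buckets` must be a power of two so `hash & mask_` picks a bucket.
      explicit NumberStringCache(std::size_t buckets)
          : mask_(buckets - 1), keys_(buckets), values_(buckets) {}

      // Mirrors SetNumberStringCache: set(hash * 2, key); set(hash * 2 + 1, str).
      void Set(std::size_t hash, double number, std::string str) {
        std::size_t i = hash & mask_;   // direct-mapped: collisions overwrite
        keys_[i] = number;
        values_[i] = std::move(str);
      }

      // Mirrors GetNumberStringCache: an empty result plays the role of
      // undefined_value() on a miss.
      std::optional<std::string> Get(std::size_t hash, double number) const {
        std::size_t i = hash & mask_;
        if (keys_[i] && *keys_[i] == number) return values_[i];
        return std::nullopt;
      }

     private:
      std::size_t mask_;
      std::vector<std::optional<double>> keys_;
      std::vector<std::string> values_;
    };

Because the table is direct-mapped, Set never probes: the newest (number, string) pair for a bucket evicts whatever was there, a reasonable trade-off for a small hot cache.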
1762 | 1762 |
1763 | 1763 |
1764 Object* Heap::SmiOrNumberFromDouble(double value, | |
1765 bool new_object, | |
1766 PretenureFlag pretenure) { | |
1767 // We need to distinguish the minus zero value and this cannot be | |
1768 // done after conversion to int. Doing this by comparing bit | |
1769 // patterns is faster than using fpclassify() et al. | |
1770 static const DoubleRepresentation plus_zero(0.0); | |
1771 static const DoubleRepresentation minus_zero(-0.0); | |
1772 static const DoubleRepresentation nan(OS::nan_value()); | |
1773 ASSERT(minus_zero_value() != NULL); | |
1774 ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits)); | |
1775 | |
1776 DoubleRepresentation rep(value); | |
1777 if (rep.bits == plus_zero.bits) return Smi::FromInt(0); // not uncommon | |
1778 if (rep.bits == minus_zero.bits) { | |
1779 return new_object ? AllocateHeapNumber(-0.0, pretenure) | |
1780 : minus_zero_value(); | |
1781 } | |
1782 if (rep.bits == nan.bits) { | |
1783 return new_object | |
1784 ? AllocateHeapNumber(OS::nan_value(), pretenure) | |
1785 : nan_value(); | |
1786 } | |
1787 | |
1788 // Try to represent the value as a tagged small integer. | |
1789 int int_value = FastD2I(value); | |
1790 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { | |
1791 return Smi::FromInt(int_value); | |
1792 } | |
1793 | |
1794 // Materialize the value in the heap. | |
1795 return AllocateHeapNumber(value, pretenure); | |
1796 } | |
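
The bit-pattern trick in the function above works because IEEE 754 comparison semantics hide exactly the two cases it needs: +0.0 and -0.0 compare equal with == even though their sign bits differ, and NaN compares unequal to everything, including itself. A self-contained sketch, assuming a memcpy-based DoubleBits helper in place of V8's DoubleRepresentation:

    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Stand-in for V8's DoubleRepresentation: view the 64 bits of a double
    // directly, bypassing floating-point comparison semantics.
    static std::uint64_t DoubleBits(double value) {
      std::uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return bits;
    }

    int main() {
      // == cannot tell the zeros apart, but the sign bit differs.
      std::printf("0.0 == -0.0 : %d\n", 0.0 == -0.0);                         // 1
      std::printf("bits equal  : %d\n", DoubleBits(0.0) == DoubleBits(-0.0)); // 0

      // NaN never compares equal to itself, yet a fixed NaN bit pattern
      // is still recognizable by comparing bits.
      double nan = std::nan("");
      std::printf("nan == nan  : %d\n", nan == nan);                          // 0
      std::printf("bits equal  : %d\n", DoubleBits(nan) == DoubleBits(nan));  // 1
      return 0;
    }

Note that NaN has many bit patterns; the function only short-circuits on the canonical one from OS::nan_value(), and any other NaN falls through the Smi check (every comparison against NaN is false) into AllocateHeapNumber.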
1797 | |
1798 | |
1799 Object* Heap::NumberToString(Object* number, bool check_number_string_cache) { | 1764 Object* Heap::NumberToString(Object* number, bool check_number_string_cache) { |
1800 Counters::number_to_string_runtime.Increment(); | 1765 Counters::number_to_string_runtime.Increment(); |
1801 if (check_number_string_cache) { | 1766 if (check_number_string_cache) { |
1802 Object* cached = GetNumberStringCache(number); | 1767 Object* cached = GetNumberStringCache(number); |
1803 if (cached != undefined_value()) { | 1768 if (cached != undefined_value()) { |
1804 return cached; | 1769 return cached; |
1805 } | 1770 } |
1806 } | 1771 } |
1807 | 1772 |
1808 char arr[100]; | 1773 char arr[100]; |
(...skipping 37 matching lines...) |
1846 return kExternalUnsignedIntArrayMapRootIndex; | 1811 return kExternalUnsignedIntArrayMapRootIndex; |
1847 case kExternalFloatArray: | 1812 case kExternalFloatArray: |
1848 return kExternalFloatArrayMapRootIndex; | 1813 return kExternalFloatArrayMapRootIndex; |
1849 default: | 1814 default: |
1850 UNREACHABLE(); | 1815 UNREACHABLE(); |
1851 return kUndefinedValueRootIndex; | 1816 return kUndefinedValueRootIndex; |
1852 } | 1817 } |
1853 } | 1818 } |
1854 | 1819 |
1855 | 1820 |
1856 Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) { | |
1857 return SmiOrNumberFromDouble(value, | |
1858 true /* number object must be new */, | |
1859 pretenure); | |
1860 } | |
1861 | |
1862 | |
1863 Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { | 1821 Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { |
1864 return SmiOrNumberFromDouble(value, | 1822 // We need to distinguish the minus zero value and this cannot be |
1865 false /* use preallocated NaN, -0.0 */, | 1823 // done after conversion to int. Doing this by comparing bit |
1866 pretenure); | 1824 // patterns is faster than using fpclassify() et al. |
| 1825 static const DoubleRepresentation minus_zero(-0.0); |
| 1826 |
| 1827 DoubleRepresentation rep(value); |
| 1828 if (rep.bits == minus_zero.bits) { |
| 1829 return AllocateHeapNumber(-0.0, pretenure); |
| 1830 } |
| 1831 |
| 1832 int int_value = FastD2I(value); |
| 1833 if (value == int_value && Smi::IsValid(int_value)) { |
| 1834 return Smi::FromInt(int_value); |
| 1835 } |
| 1836 |
| 1837 // Materialize the value in the heap. |
| 1838 return AllocateHeapNumber(value, pretenure); |
1867 } | 1839 } |
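
In the rewritten NumberFromDouble, the round-trip test `value == int_value` rejects fractional doubles and, together with Smi::IsValid, anything outside the tagged-integer range; the explicit FastI2D of the old code is gone because the implicit int-to-double promotion performs the same comparison. A hedged standalone sketch (the 31-bit Smi range below is illustrative; V8's real limit depends on platform word size and tagging scheme):

    #include <cstdio>

    // Illustrative 31-bit Smi payload range; V8's actual limits depend on
    // the platform word size and tagging scheme.
    static const int kSmiMaxValue = (1 << 30) - 1;
    static const int kSmiMinValue = -(1 << 30);

    // Mirrors the check in NumberFromDouble: range-check first so the
    // double-to-int cast (standing in for FastD2I) is well defined, then
    // verify the truncation round-trips exactly.
    static bool IsSmiRepresentable(double value) {
      if (!(kSmiMinValue <= value && value <= kSmiMaxValue)) return false;
      int int_value = static_cast<int>(value);
      return value == static_cast<double>(int_value);
    }

    int main() {
      std::printf("%d\n", IsSmiRepresentable(42.0));   // 1
      std::printf("%d\n", IsSmiRepresentable(2.5));    // 0: fractional part
      std::printf("%d\n", IsSmiRepresentable(3e9));    // 0: outside Smi range
      std::printf("%d\n", IsSmiRepresentable(-0.0));   // 1 (!) rounds to Smi 0
      return 0;
    }

The last case is why the minus-zero bit check must run before the Smi check: -0.0 round-trips through 0 and would otherwise be tagged as the integer zero, losing its sign.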
1868 | 1840 |
1869 | 1841 |
1870 Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) { | 1842 Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) { |
1871 // Statically ensure that it is safe to allocate proxies in paged spaces. | 1843 // Statically ensure that it is safe to allocate proxies in paged spaces. |
1872 STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize); | 1844 STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize); |
1873 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 1845 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
1874 Object* result = Allocate(proxy_map(), space); | 1846 Object* result = Allocate(proxy_map(), space); |
1875 if (result->IsFailure()) return result; | 1847 if (result->IsFailure()) return result; |
1876 | 1848 |
(...skipping 2515 matching lines...) |
4392 void ExternalStringTable::TearDown() { | 4364 void ExternalStringTable::TearDown() { |
4393 new_space_strings_.Free(); | 4365 new_space_strings_.Free(); |
4394 old_space_strings_.Free(); | 4366 old_space_strings_.Free(); |
4395 } | 4367 } |
4396 | 4368 |
4397 | 4369 |
4398 List<Object*> ExternalStringTable::new_space_strings_; | 4370 List<Object*> ExternalStringTable::new_space_strings_; |
4399 List<Object*> ExternalStringTable::old_space_strings_; | 4371 List<Object*> ExternalStringTable::old_space_strings_; |
4400 | 4372 |
4401 } } // namespace v8::internal | 4373 } } // namespace v8::internal |