OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1749 matching lines...) |
1760 return large_objects_[index]; // s.page_offset() is ignored. | 1760 return large_objects_[index]; // s.page_offset() is ignored. |
1761 } | 1761 } |
1762 UNREACHABLE(); | 1762 UNREACHABLE(); |
1763 return NULL; | 1763 return NULL; |
1764 } | 1764 } |
1765 | 1765 |
1766 | 1766 |
1767 Deserializer2::Deserializer2(SnapshotByteSource* source) | 1767 Deserializer2::Deserializer2(SnapshotByteSource* source) |
1768 : source_(source), | 1768 : source_(source), |
1769 external_reference_decoder_(NULL) { | 1769 external_reference_decoder_(NULL) { |
1770 for (int i = 0; i <= LAST_SPACE; i++) { | |
1771 fullness_[i] = 0; | |
1772 } | |
1773 } | 1770 } |
1774 | 1771 |
1775 | 1772 |
1776 // This routine both allocates a new object, and also keeps | 1773 // This routine both allocates a new object, and also keeps |
1777 // track of where objects have been allocated so that we can | 1774 // track of where objects have been allocated so that we can |
1778 // fix back references when deserializing. | 1775 // fix back references when deserializing. |
1779 Address Deserializer2::Allocate(int space_index, int size) { | 1776 Address Deserializer2::Allocate(int space_index, Space* space, int size) { |
1780 HeapObject* new_object; | 1777 Address address; |
1781 int old_fullness = CurrentAllocationAddress(space_index); | 1778 if (!SpaceIsLarge(space_index)) { |
1782 // When we start a new page we need to record its location. | 1779 ASSERT(!SpaceIsPaged(space_index) || |
1783 bool record_page = (old_fullness == 0); | 1780 size <= Page::kPageSize - Page::kObjectStartOffset); |
1784 if (SpaceIsPaged(space_index)) { | 1781 Object* new_allocation; |
1785 PagedSpace* space; | 1782 if (space_index == NEW_SPACE) { |
1786 switch (space_index) { | 1783 new_allocation = reinterpret_cast<NewSpace*>(space)->AllocateRaw(size); |
1787 case OLD_DATA_SPACE: space = Heap::old_data_space(); break; | 1784 } else { |
1788 case OLD_POINTER_SPACE: space = Heap::old_pointer_space(); break; | 1785 new_allocation = reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size); |
1789 case MAP_SPACE: space = Heap::map_space(); break; | |
1790 case CODE_SPACE: space = Heap::code_space(); break; | |
1791 case CELL_SPACE: space = Heap::cell_space(); break; | |
1792 default: UNREACHABLE(); space = NULL; break; | |
1793 } | 1786 } |
1794 ASSERT(size <= Page::kPageSize - Page::kObjectStartOffset); | 1787 HeapObject* new_object = HeapObject::cast(new_allocation); |
1795 int current_page = old_fullness >> Page::kPageSizeBits; | |
1796 int new_fullness = old_fullness + size; | |
1797 int new_page = new_fullness >> Page::kPageSizeBits; | |
1798 // What is our new position within the current page. | |
1799 int intra_page_offset = new_fullness - current_page * Page::kPageSize; | |
1800 if (intra_page_offset > Page::kPageSize - Page::kObjectStartOffset) { | |
1801 // This object will not fit in a page and we have to move to the next. | |
1802 new_page = current_page + 1; | |
1803 old_fullness = new_page << Page::kPageSizeBits; | |
1804 new_fullness = old_fullness + size; | |
1805 record_page = true; | |
1806 } | |
1807 fullness_[space_index] = new_fullness; | |
1808 Object* new_allocation = space->AllocateRaw(size); | |
1809 new_object = HeapObject::cast(new_allocation); | |
1810 ASSERT(!new_object->IsFailure()); | 1788 ASSERT(!new_object->IsFailure()); |
1811 ASSERT((reinterpret_cast<intptr_t>(new_object->address()) & | 1789 address = new_object->address(); |
1812 Page::kPageAlignmentMask) == | 1790 high_water_[space_index] = address + size; |
1813 (old_fullness & Page::kPageAlignmentMask) + | 1791 } else { |
1814 Page::kObjectStartOffset); | 1792 ASSERT(SpaceIsLarge(space_index)); |
1815 } else if (SpaceIsLarge(space_index)) { | |
1816 ASSERT(size > Page::kPageSize - Page::kObjectStartOffset); | 1793 ASSERT(size > Page::kPageSize - Page::kObjectStartOffset); |
1817 fullness_[LO_SPACE]++; | 1794 LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space); |
1818 LargeObjectSpace* lo_space = Heap::lo_space(); | |
1819 Object* new_allocation; | 1795 Object* new_allocation; |
1820 if (space_index == kLargeData) { | 1796 if (space_index == kLargeData) { |
1821 new_allocation = lo_space->AllocateRaw(size); | 1797 new_allocation = lo_space->AllocateRaw(size); |
1822 } else if (space_index == kLargeFixedArray) { | 1798 } else if (space_index == kLargeFixedArray) { |
1823 new_allocation = lo_space->AllocateRawFixedArray(size); | 1799 new_allocation = lo_space->AllocateRawFixedArray(size); |
1824 } else { | 1800 } else { |
1825 ASSERT(space_index == kLargeCode); | 1801 ASSERT(space_index == kLargeCode); |
1826 new_allocation = lo_space->AllocateRawCode(size); | 1802 new_allocation = lo_space->AllocateRawCode(size); |
1827 } | 1803 } |
1828 ASSERT(!new_allocation->IsFailure()); | 1804 ASSERT(!new_allocation->IsFailure()); |
1829 new_object = HeapObject::cast(new_allocation); | 1805 HeapObject* new_object = HeapObject::cast(new_allocation); |
1830 record_page = true; | 1806 // Record all large objects in the same space. |
1831 // The page recording below records all large objects in the same space. | 1807 address = new_object->address(); |
1832 space_index = LO_SPACE; | 1808 high_water_[LO_SPACE] = address + size; |
1833 } else { | |
1834 ASSERT(space_index == NEW_SPACE); | |
1835 Object* new_allocation = Heap::new_space()->AllocateRaw(size); | |
1836 fullness_[space_index] += size; | |
1837 ASSERT(!new_allocation->IsFailure()); | |
1838 new_object = HeapObject::cast(new_allocation); | |
1839 } | 1809 } |
1840 Address address = new_object->address(); | 1810 last_object_address_ = address; |
1841 if (record_page) { | |
1842 pages_[space_index].Add(address); | |
1843 } | |
1844 return address; | 1811 return address; |
1845 } | 1812 } |
1846 | 1813 |
1847 | 1814 |
1848 // This returns the address of an object that has been described in the | 1815 // This returns the address of an object that has been described in the |
1849 // snapshot as being offset bytes back in a particular space. | 1816 // snapshot as being offset bytes back in a particular space. |
1850 HeapObject* Deserializer2::GetAddress(int space) { | 1817 HeapObject* Deserializer2::GetAddressFromEnd(int space) { |
| 1818 int offset = source_->GetInt(); |
| 1819 ASSERT(!SpaceIsLarge(space)); |
| 1820 offset <<= kObjectAlignmentBits; |
| 1821 return HeapObject::FromAddress(high_water_[space] - offset); |
| 1822 } |
| 1823 |
| 1824 |
| 1825 // This returns the address of an object that has been described in the |
| 1826 // snapshot as being offset bytes into a particular space. |
| 1827 HeapObject* Deserializer2::GetAddressFromStart(int space) { |
1851 int offset = source_->GetInt(); | 1828 int offset = source_->GetInt(); |
1852 if (SpaceIsLarge(space)) { | 1829 if (SpaceIsLarge(space)) { |
1853 // Large spaces have one object per 'page'. | 1830 // Large spaces have one object per 'page'. |
1854 return HeapObject::FromAddress( | 1831 return HeapObject::FromAddress(pages_[LO_SPACE][offset]); |
1855 pages_[LO_SPACE][fullness_[LO_SPACE] - offset]); | |
1856 } | 1832 } |
1857 offset <<= kObjectAlignmentBits; | 1833 offset <<= kObjectAlignmentBits; |
1858 if (space == NEW_SPACE) { | 1834 if (space == NEW_SPACE) { |
1859 // New space has only one 'page' - numbered 0. | 1835 // New space has only one 'page' - numbered 0. |
1860 return HeapObject::FromAddress( | 1836 return HeapObject::FromAddress(pages_[space][0] + offset); |
1861 pages_[space][0] + fullness_[space] - offset); | |
1862 } | 1837 } |
1863 ASSERT(SpaceIsPaged(space)); | 1838 ASSERT(SpaceIsPaged(space)); |
1864 int virtual_address = fullness_[space] - offset; | 1839 int page_of_pointee = offset >> Page::kPageSizeBits; |
1865 int page_of_pointee = (virtual_address) >> Page::kPageSizeBits; | |
1866 Address object_address = pages_[space][page_of_pointee] + | 1840 Address object_address = pages_[space][page_of_pointee] + |
1867 (virtual_address & Page::kPageAlignmentMask); | 1841 (offset & Page::kPageAlignmentMask); |
1868 return HeapObject::FromAddress(object_address); | 1842 return HeapObject::FromAddress(object_address); |
1869 } | 1843 } |
1870 | 1844 |
1871 | 1845 |
1872 void Deserializer2::Deserialize() { | 1846 void Deserializer2::Deserialize() { |
1873 // Don't GC while deserializing - just expand the heap. | 1847 // Don't GC while deserializing - just expand the heap. |
1874 AlwaysAllocateScope always_allocate; | 1848 AlwaysAllocateScope always_allocate; |
1875 // Don't use the free lists while deserializing. | 1849 // Don't use the free lists while deserializing. |
1876 LinearAllocationScope allocate_linearly; | 1850 LinearAllocationScope allocate_linearly; |
1877 // No active threads. | 1851 // No active threads. |
1878 ASSERT_EQ(NULL, ThreadState::FirstInUse()); | 1852 ASSERT_EQ(NULL, ThreadState::FirstInUse()); |
1879 // No active handles. | 1853 // No active handles. |
1880 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); | 1854 ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty()); |
1881 ASSERT(external_reference_decoder_ == NULL); | 1855 ASSERT(external_reference_decoder_ == NULL); |
1882 external_reference_decoder_ = new ExternalReferenceDecoder(); | 1856 external_reference_decoder_ = new ExternalReferenceDecoder(); |
1883 Heap::IterateRoots(this); | 1857 Heap::IterateRoots(this); |
1884 ASSERT(source_->AtEOF()); | 1858 ASSERT(source_->AtEOF()); |
1885 delete external_reference_decoder_; | 1859 delete external_reference_decoder_; |
1886 external_reference_decoder_ = NULL; | 1860 external_reference_decoder_ = NULL; |
1887 } | 1861 } |
1888 | 1862 |
1889 | 1863 |
1890 // This is called on the roots. It is the driver of the deserialization | 1864 // This is called on the roots. It is the driver of the deserialization |
1891 // process. | 1865 // process. It is also called on the body of each function. |
1892 void Deserializer2::VisitPointers(Object** start, Object** end) { | 1866 void Deserializer2::VisitPointers(Object** start, Object** end) { |
1893 for (Object** current = start; current < end; current++) { | 1867 // The space must be new space. Any other space would cause ReadChunk to try |
1894 DataType data = static_cast<DataType>(source_->Get()); | 1868 // to update the remembered set using NULL as the address. |
1895 if (data == SMI_SERIALIZATION) { | 1869 ReadChunk(start, end, NEW_SPACE, NULL); |
1896 *current = Smi::FromInt(source_->GetInt() - kSmiBias); | |
1897 } else if (data == BACKREF_SERIALIZATION) { | |
1898 int space = source_->Get(); | |
1899 *current = GetAddress(space); | |
1900 } else { | |
1901 ASSERT(data == OBJECT_SERIALIZATION); | |
1902 ReadObject(current); | |
1903 } | |
1904 } | |
1905 } | 1870 } |
1906 | 1871 |
1907 | 1872 |
1908 // This routine writes the new object into the pointer provided and then | 1873 // This routine writes the new object into the pointer provided and then |
1909 // returns true if the new object was in young space and false otherwise. | 1874 // deserializes its body, rather than returning the object on completion. |
1910 // The reason for this strange interface is that otherwise the object is | 1875 // The reason for this strange interface is that otherwise the object is |
1911 // written very late, which means the ByteArray map is not set up by the | 1876 // written very late, which means the ByteArray map is not set up by the |
1912 // time we need to use it to mark the space at the end of a page free (by | 1877 // time we need to use it to mark the space at the end of a page free (by |
1913 // making it into a byte array). | 1878 // making it into a byte array). |
1914 bool Deserializer2::ReadObject(Object** write_back) { | 1879 void Deserializer2::ReadObject(int space_number, |
1915 int space = source_->Get(); | 1880 Space* space, |
| 1881 Object** write_back) { |
1916 int size = source_->GetInt() << kObjectAlignmentBits; | 1882 int size = source_->GetInt() << kObjectAlignmentBits; |
1917 Address address = Allocate(space, size); | 1883 Address address = Allocate(space_number, space, size); |
1918 *write_back = HeapObject::FromAddress(address); | 1884 *write_back = HeapObject::FromAddress(address); |
1919 Object** current = reinterpret_cast<Object**>(address); | 1885 Object** current = reinterpret_cast<Object**>(address); |
1920 Object** limit = current + (size >> kPointerSizeLog2); | 1886 Object** limit = current + (size >> kPointerSizeLog2); |
| 1887 ReadChunk(current, limit, space_number, address); |
| 1888 } |
| 1889 |
| 1890 |
| 1891 #define ONE_CASE_PER_SPACE(base_tag) \ |
| 1892 case (base_tag) + NEW_SPACE: /* NOLINT */ \ |
| 1893 case (base_tag) + OLD_POINTER_SPACE: /* NOLINT */ \ |
| 1894 case (base_tag) + OLD_DATA_SPACE: /* NOLINT */ \ |
| 1895 case (base_tag) + CODE_SPACE: /* NOLINT */ \ |
| 1896 case (base_tag) + MAP_SPACE: /* NOLINT */ \ |
| 1897 case (base_tag) + CELL_SPACE: /* NOLINT */ \ |
| 1898 case (base_tag) + kLargeData: /* NOLINT */ \ |
| 1899 case (base_tag) + kLargeCode: /* NOLINT */ \ |
| 1900 case (base_tag) + kLargeFixedArray: /* NOLINT */ |
| 1901 |
| 1902 |
| 1903 void Deserializer2::ReadChunk(Object** current, |
| 1904 Object** limit, |
| 1905 int space, |
| 1906 Address address) { |
1921 while (current < limit) { | 1907 while (current < limit) { |
1922 DataType data = static_cast<DataType>(source_->Get()); | 1908 int data = source_->Get(); |
1923 switch (data) { | 1909 switch (data) { |
1924 case SMI_SERIALIZATION: | 1910 #define RAW_CASE(index, size) \ |
1925 *current++ = Smi::FromInt(source_->GetInt() - kSmiBias); | 1911 case RAW_DATA_SERIALIZATION + index: { \ |
1926 break; | 1912 byte* raw_data_out = reinterpret_cast<byte*>(current); \ |
| 1913 source_->CopyRaw(raw_data_out, size); \ |
| 1914 current = reinterpret_cast<Object**>(raw_data_out + size); \ |
| 1915 break; \ |
| 1916 } |
| 1917 COMMON_RAW_LENGTHS(RAW_CASE) |
| 1918 #undef RAW_CASE |
1927 case RAW_DATA_SERIALIZATION: { | 1919 case RAW_DATA_SERIALIZATION: { |
1928 int size = source_->GetInt(); | 1920 int size = source_->GetInt(); |
1929 byte* raw_data_out = reinterpret_cast<byte*>(current); | 1921 byte* raw_data_out = reinterpret_cast<byte*>(current); |
1930 source_->CopyRaw(raw_data_out, size); | 1922 source_->CopyRaw(raw_data_out, size); |
1931 current = reinterpret_cast<Object**>(raw_data_out + size); | 1923 current = reinterpret_cast<Object**>(raw_data_out + size); |
1932 break; | 1924 break; |
1933 } | 1925 } |
1934 case OBJECT_SERIALIZATION: { | 1926 case OBJECT_SERIALIZATION + NEW_SPACE: { |
1935 // Recurse to unpack an object that is forward-referenced from here. | 1927 ReadObject(NEW_SPACE, Heap::new_space(), current); |
1936 bool in_new_space = ReadObject(current); | 1928 if (space != NEW_SPACE) { |
1937 if (in_new_space && space != NEW_SPACE) { | |
1938 Heap::RecordWrite(address, | 1929 Heap::RecordWrite(address, |
1939 reinterpret_cast<Address>(current) - address); | 1930 reinterpret_cast<Address>(current) - address); |
1940 } | 1931 } |
1941 current++; | 1932 current++; |
1942 break; | 1933 break; |
1943 } | 1934 } |
1944 case CODE_OBJECT_SERIALIZATION: { | 1935 case OBJECT_SERIALIZATION + OLD_DATA_SPACE: |
| 1936 ReadObject(OLD_DATA_SPACE, Heap::old_data_space(), current++); |
| 1937 break; |
| 1938 case OBJECT_SERIALIZATION + OLD_POINTER_SPACE: |
| 1939 ReadObject(OLD_POINTER_SPACE, Heap::old_pointer_space(), current++); |
| 1940 break; |
| 1941 case OBJECT_SERIALIZATION + MAP_SPACE: |
| 1942 ReadObject(MAP_SPACE, Heap::map_space(), current++); |
| 1943 break; |
| 1944 case OBJECT_SERIALIZATION + CODE_SPACE: |
| 1945 ReadObject(CODE_SPACE, Heap::code_space(), current++); |
| 1946 break; |
| 1947 case OBJECT_SERIALIZATION + CELL_SPACE: |
| 1948 ReadObject(CELL_SPACE, Heap::cell_space(), current++); |
| 1949 break; |
| 1950 case OBJECT_SERIALIZATION + kLargeData: |
| 1951 ReadObject(kLargeData, Heap::lo_space(), current++); |
| 1952 break; |
| 1953 case OBJECT_SERIALIZATION + kLargeCode: |
| 1954 ReadObject(kLargeCode, Heap::lo_space(), current++); |
| 1955 break; |
| 1956 case OBJECT_SERIALIZATION + kLargeFixedArray: |
| 1957 ReadObject(kLargeFixedArray, Heap::lo_space(), current++); |
| 1958 break; |
| 1959 case CODE_OBJECT_SERIALIZATION + kLargeCode: { |
1945 Object* new_code_object = NULL; | 1960 Object* new_code_object = NULL; |
1946 ReadObject(&new_code_object); | 1961 ReadObject(kLargeCode, Heap::lo_space(), &new_code_object); |
1947 Code* code_object = reinterpret_cast<Code*>(new_code_object); | 1962 Code* code_object = reinterpret_cast<Code*>(new_code_object); |
1948 // Setting a branch/call to another code object from code. | 1963 // Setting a branch/call to another code object from code. |
1949 Address location_of_branch_data = reinterpret_cast<Address>(current); | 1964 Address location_of_branch_data = reinterpret_cast<Address>(current); |
1950 Assembler::set_target_at(location_of_branch_data, | 1965 Assembler::set_target_at(location_of_branch_data, |
1951 code_object->instruction_start()); | 1966 code_object->instruction_start()); |
1952 location_of_branch_data += Assembler::kCallTargetSize; | 1967 location_of_branch_data += Assembler::kCallTargetSize; |
1953 current = reinterpret_cast<Object**>(location_of_branch_data); | 1968 current = reinterpret_cast<Object**>(location_of_branch_data); |
1954 break; | 1969 break; |
1955 } | 1970 } |
1956 case BACKREF_SERIALIZATION: { | 1971 case CODE_OBJECT_SERIALIZATION + CODE_SPACE: { |
| 1972 Object* new_code_object = NULL; |
| 1973 ReadObject(CODE_SPACE, Heap::code_space(), &new_code_object); |
| 1974 Code* code_object = reinterpret_cast<Code*>(new_code_object); |
| 1975 // Setting a branch/call to another code object from code. |
| 1976 Address location_of_branch_data = reinterpret_cast<Address>(current); |
| 1977 Assembler::set_target_at(location_of_branch_data, |
| 1978 code_object->instruction_start()); |
| 1979 location_of_branch_data += Assembler::kCallTargetSize; |
| 1980 current = reinterpret_cast<Object**>(location_of_branch_data); |
| 1981 break; |
| 1982 } |
| 1983 ONE_CASE_PER_SPACE(BACKREF_SERIALIZATION) { |
1957 // Write a backreference to an object we unpacked earlier. | 1984 // Write a backreference to an object we unpacked earlier. |
1958 int backref_space = source_->Get(); | 1985 int backref_space = (data & 15); |
1959 if (backref_space == NEW_SPACE && space != NEW_SPACE) { | 1986 if (backref_space == NEW_SPACE && space != NEW_SPACE) { |
1960 Heap::RecordWrite(address, | 1987 Heap::RecordWrite(address, |
1961 reinterpret_cast<Address>(current) - address); | 1988 reinterpret_cast<Address>(current) - address); |
1962 } | 1989 } |
1963 *current++ = GetAddress(backref_space); | 1990 *current++ = GetAddressFromEnd(backref_space); |
1964 break; | 1991 break; |
1965 } | 1992 } |
1966 case CODE_BACKREF_SERIALIZATION: { | 1993 ONE_CASE_PER_SPACE(REFERENCE_SERIALIZATION) { |
1967 int backref_space = source_->Get(); | 1994 // Write a reference to an object we unpacked earlier. |
| 1995 int reference_space = (data & 15); |
| 1996 if (reference_space == NEW_SPACE && space != NEW_SPACE) { |
| 1997 Heap::RecordWrite(address, |
| 1998 reinterpret_cast<Address>(current) - address); |
| 1999 } |
| 2000 *current++ = GetAddressFromStart(reference_space); |
| 2001 break; |
| 2002 } |
| 2003 #define COMMON_REFS_CASE(index, reference_space, address) \ |
| 2004 case REFERENCE_SERIALIZATION + index: { \ |
| 2005 ASSERT(SpaceIsPaged(reference_space)); \ |
| 2006 Address object_address = \ |
| 2007 pages_[reference_space][0] + (address << kObjectAlignmentBits); \ |
| 2008 *current++ = HeapObject::FromAddress(object_address); \ |
| 2009 break; \ |
| 2010 } |
| 2011 COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE) |
| 2012 #undef COMMON_REFS_CASE |
| 2013 ONE_CASE_PER_SPACE(CODE_BACKREF_SERIALIZATION) { |
| 2014 int backref_space = (data & 15); |
1968 // Can't use Code::cast because heap is not set up yet and assertions | 2015 // Can't use Code::cast because heap is not set up yet and assertions |
1969 // will fail. | 2016 // will fail. |
1970 Code* code_object = reinterpret_cast<Code*>(GetAddress(backref_space)); | 2017 Code* code_object = |
| 2018 reinterpret_cast<Code*>(GetAddressFromEnd(backref_space)); |
1971 // Setting a branch/call to previously decoded code object from code. | 2019 // Setting a branch/call to previously decoded code object from code. |
1972 Address location_of_branch_data = reinterpret_cast<Address>(current); | 2020 Address location_of_branch_data = reinterpret_cast<Address>(current); |
1973 Assembler::set_target_at(location_of_branch_data, | 2021 Assembler::set_target_at(location_of_branch_data, |
1974 code_object->instruction_start()); | 2022 code_object->instruction_start()); |
1975 location_of_branch_data += Assembler::kCallTargetSize; | 2023 location_of_branch_data += Assembler::kCallTargetSize; |
1976 current = reinterpret_cast<Object**>(location_of_branch_data); | 2024 current = reinterpret_cast<Object**>(location_of_branch_data); |
1977 break; | 2025 break; |
1978 } | 2026 } |
| 2027 ONE_CASE_PER_SPACE(CODE_REFERENCE_SERIALIZATION) { |
| 2028 int backref_space = (data & 15); |
| 2029 // Can't use Code::cast because heap is not set up yet and assertions |
| 2030 // will fail. |
| 2031 Code* code_object = |
| 2032 reinterpret_cast<Code*>(GetAddressFromStart(backref_space)); |
| 2033 // Setting a branch/call to previously decoded code object from code. |
| 2034 Address location_of_branch_data = reinterpret_cast<Address>(current); |
| 2035 Assembler::set_target_at(location_of_branch_data, |
| 2036 code_object->instruction_start()); |
| 2037 location_of_branch_data += Assembler::kCallTargetSize; |
| 2038 current = reinterpret_cast<Object**>(location_of_branch_data); |
| 2039 break; |
| 2040 } |
1979 case EXTERNAL_REFERENCE_SERIALIZATION: { | 2041 case EXTERNAL_REFERENCE_SERIALIZATION: { |
1980 int reference_id = source_->GetInt(); | 2042 int reference_id = source_->GetInt(); |
1981 Address address = external_reference_decoder_->Decode(reference_id); | 2043 Address address = external_reference_decoder_->Decode(reference_id); |
1982 *current++ = reinterpret_cast<Object*>(address); | 2044 *current++ = reinterpret_cast<Object*>(address); |
1983 break; | 2045 break; |
1984 } | 2046 } |
1985 case EXTERNAL_BRANCH_TARGET_SERIALIZATION: { | 2047 case EXTERNAL_BRANCH_TARGET_SERIALIZATION: { |
1986 int reference_id = source_->GetInt(); | 2048 int reference_id = source_->GetInt(); |
1987 Address address = external_reference_decoder_->Decode(reference_id); | 2049 Address address = external_reference_decoder_->Decode(reference_id); |
1988 Address location_of_branch_data = reinterpret_cast<Address>(current); | 2050 Address location_of_branch_data = reinterpret_cast<Address>(current); |
1989 Assembler::set_external_target_at(location_of_branch_data, address); | 2051 Assembler::set_external_target_at(location_of_branch_data, address); |
1990 location_of_branch_data += Assembler::kExternalTargetSize; | 2052 location_of_branch_data += Assembler::kExternalTargetSize; |
1991 current = reinterpret_cast<Object**>(location_of_branch_data); | 2053 current = reinterpret_cast<Object**>(location_of_branch_data); |
1992 break; | 2054 break; |
1993 } | 2055 } |
| 2056 case START_NEW_PAGE_SERIALIZATION: { |
| 2057 int space = source_->Get(); |
| 2058 pages_[space].Add(last_object_address_); |
| 2059 break; |
| 2060 } |
1994 default: | 2061 default: |
1995 UNREACHABLE(); | 2062 UNREACHABLE(); |
1996 } | 2063 } |
1997 } | 2064 } |
1998 ASSERT(current == limit); | 2065 ASSERT(current == limit); |
1999 return space == NEW_SPACE; | |
2000 } | 2066 } |
2001 | 2067 |
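The space-tagged cases in ReadChunk above rely on a combined tag splitting back into a base tag and a space number via data & 15. A minimal illustration of that arithmetic, using hypothetical stand-in constants (the real tag values live in serialize.h and are not part of this diff); the only property assumed is that base tags keep their low four bits clear:

// Stand-in constants for illustration only; the real enum is in serialize.h.
const int kBackRefSerialization = 0x20;  // assumed: low four bits clear
const int kOldDataSpace = 1;             // assumed space number

inline int SpaceOfTag(int tag) { return tag & 15; }   // space in the low bits
inline int BaseOfTag(int tag) { return tag & ~15; }   // base serialization tag

// BaseOfTag(kBackRefSerialization + kOldDataSpace) == kBackRefSerialization
// SpaceOfTag(kBackRefSerialization + kOldDataSpace) == kOldDataSpace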
2002 | 2068 |
2003 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) { | 2069 void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) { |
2004 const int max_shift = ((kPointerSize * kBitsPerByte) / 7) * 7; | 2070 const int max_shift = ((kPointerSize * kBitsPerByte) / 7) * 7; |
2005 for (int shift = max_shift; shift > 0; shift -= 7) { | 2071 for (int shift = max_shift; shift > 0; shift -= 7) { |
2006 if (integer >= 1u << shift) { | 2072 if (integer >= 1u << shift) { |
2007 Put(((integer >> shift) & 0x7f) | 0x80, "intpart"); | 2073 Put(((integer >> shift) & 0x7f) | 0x80, "intpart"); |
2008 } | 2074 } |
2009 } | 2075 } |
2010 Put(integer & 0x7f, "intlastpart"); | 2076 Put(integer & 0x7f, "intlastpart\n"); |
2011 } | 2077 } |
2012 | 2078 |
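PutInt above emits an integer seven bits per byte, most significant group first, with the top bit set on every byte except the last. For reference, a minimal sketch of the inverse operation, assuming only a callback that yields successive stream bytes; this is not the real SnapshotByteSource::GetInt, which is outside this diff:

#include <stdint.h>

// Sketch of the decoder matching SnapshotByteSink::PutInt.  next_byte is
// assumed to return the next raw byte of the snapshot stream.
uintptr_t DecodeInt(int (*next_byte)()) {
  uintptr_t answer = 0;
  int b;
  do {
    b = next_byte();
    answer = (answer << 7) | (b & 0x7f);  // each byte carries 7 payload bits
  } while ((b & 0x80) != 0);              // top bit set: more bytes follow
  return answer;
}
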
2013 #ifdef DEBUG | 2079 #ifdef DEBUG |
2014 | 2080 |
2015 void Deserializer2::Synchronize(const char* tag) { | 2081 void Deserializer2::Synchronize(const char* tag) { |
2016 int data = source_->Get(); | 2082 int data = source_->Get(); |
2017 // If this assert fails then that indicates that you have a mismatch between | 2083 // If this assert fails then that indicates that you have a mismatch between |
2018 // the number of GC roots when serializing and deserializing. | 2084 // the number of GC roots when serializing and deserializing. |
2019 ASSERT(data == SYNCHRONIZE); | 2085 ASSERT(data == SYNCHRONIZE); |
2020 do { | 2086 do { |
2021 int character = source_->Get(); | 2087 int character = source_->Get(); |
2022 if (character == 0) break; | 2088 if (character == 0) break; |
2023 if (FLAG_debug_serialization) { | 2089 if (FLAG_debug_serialization) { |
2024 PrintF("%c", character); | 2090 PrintF("%c", character); |
2025 } | 2091 } |
2026 } while (true); | 2092 } while (true); |
2027 if (FLAG_debug_serialization) { | 2093 if (FLAG_debug_serialization) { |
2028 PrintF("\n"); | 2094 PrintF("\n"); |
2029 } | 2095 } |
2030 } | 2096 } |
2031 | 2097 |
2032 | 2098 |
2033 void Serializer2::Synchronize(const char* tag) { | 2099 void Serializer2::Synchronize(const char* tag) { |
2034 sink_->Put(SYNCHRONIZE, tag); | 2100 sink_->Put(SYNCHRONIZE, tag); |
2035 int character; | 2101 int character; |
2036 do { | 2102 do { |
2037 character = *tag++; | 2103 character = *tag++; |
2038 sink_->Put(character, "tagcharacter"); | 2104 sink_->Put(character, "tagcharacter\n"); |
2039 } while (character != 0); | 2105 } while (character != 0); |
2040 } | 2106 } |
2041 | 2107 |
2042 #endif | 2108 #endif |
2043 | 2109 |
2044 Serializer2::Serializer2(SnapshotByteSink* sink) | 2110 Serializer2::Serializer2(SnapshotByteSink* sink) |
2045 : sink_(sink), | 2111 : sink_(sink), |
2046 current_root_index_(0), | 2112 current_root_index_(0), |
2047 external_reference_encoder_(NULL) { | 2113 external_reference_encoder_(NULL) { |
2048 for (int i = 0; i <= LAST_SPACE; i++) { | 2114 for (int i = 0; i <= LAST_SPACE; i++) { |
(...skipping 11 matching lines...) |
2060 ASSERT(external_reference_encoder_ == NULL); | 2126 ASSERT(external_reference_encoder_ == NULL); |
2061 external_reference_encoder_ = new ExternalReferenceEncoder(); | 2127 external_reference_encoder_ = new ExternalReferenceEncoder(); |
2062 Heap::IterateRoots(this); | 2128 Heap::IterateRoots(this); |
2063 delete external_reference_encoder_; | 2129 delete external_reference_encoder_; |
2064 external_reference_encoder_ = NULL; | 2130 external_reference_encoder_ = NULL; |
2065 } | 2131 } |
2066 | 2132 |
2067 | 2133 |
2068 void Serializer2::VisitPointers(Object** start, Object** end) { | 2134 void Serializer2::VisitPointers(Object** start, Object** end) { |
2069 for (Object** current = start; current < end; current++) { | 2135 for (Object** current = start; current < end; current++) { |
2070 SerializeObject(*current, TAGGED_REPRESENTATION); | 2136 if ((*current)->IsSmi()) { |
| 2137 sink_->Put(RAW_DATA_SERIALIZATION, "raw data"); |
| 2138 sink_->PutInt(kPointerSize, "length"); |
| 2139 for (int i = 0; i < kPointerSize; i++) { |
| 2140 sink_->Put(reinterpret_cast<byte*>(current)[i], "byte"); |
| 2141 } |
| 2142 } else { |
| 2143 SerializeObject(*current, TAGGED_REPRESENTATION); |
| 2144 } |
2071 } | 2145 } |
2072 } | 2146 } |
2073 | 2147 |
2074 | 2148 |
2075 void Serializer2::SerializeObject( | 2149 void Serializer2::SerializeObject( |
2076 Object* o, | 2150 Object* o, |
2077 ReferenceRepresentation reference_representation) { | 2151 ReferenceRepresentation reference_representation) { |
2078 if (o->IsHeapObject()) { | 2152 ASSERT(o->IsHeapObject()); |
2079 HeapObject* heap_object = HeapObject::cast(o); | 2153 HeapObject* heap_object = HeapObject::cast(o); |
2080 MapWord map_word = heap_object->map_word(); | 2154 MapWord map_word = heap_object->map_word(); |
2081 if (map_word.IsSerializationAddress()) { | 2155 if (map_word.IsSerializationAddress()) { |
2082 int space = SpaceOfAlreadySerializedObject(heap_object); | 2156 int space = SpaceOfAlreadySerializedObject(heap_object); |
2083 int offset = | 2157 int address = map_word.ToSerializationAddress(); |
2084 CurrentAllocationAddress(space) - map_word.ToSerializationAddress(); | 2158 int offset = CurrentAllocationAddress(space) - address; |
2085 // If we are actually dealing with real offsets (and not a numbering of | 2159 bool from_start = true; |
2086 // all objects) then we should shift out the bits that are always 0. | 2160 if (SpaceIsPaged(space)) { |
2087 if (!SpaceIsLarge(space)) offset >>= kObjectAlignmentBits; | 2161 if ((CurrentAllocationAddress(space) >> Page::kPageSizeBits) == |
2088 if (reference_representation == CODE_TARGET_REPRESENTATION) { | 2162 (address >> Page::kPageSizeBits)) { |
2089 sink_->Put(CODE_BACKREF_SERIALIZATION, "BackRefCodeSerialization"); | 2163 from_start = false; |
| 2164 address = offset; |
| 2165 } |
| 2166 } else if (space == NEW_SPACE) { |
| 2167 if (offset < address) { |
| 2168 from_start = false; |
| 2169 address = offset; |
| 2170 } |
| 2171 } |
| 2172 // If we are actually dealing with real offsets (and not a numbering of |
| 2173 // all objects) then we should shift out the bits that are always 0. |
| 2174 if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits; |
| 2175 if (reference_representation == CODE_TARGET_REPRESENTATION) { |
| 2176 if (from_start) { |
| 2177 sink_->Put(CODE_REFERENCE_SERIALIZATION + space, "RefCodeSer"); |
| 2178 sink_->PutInt(address, "address"); |
2090 } else { | 2179 } else { |
2091 ASSERT(reference_representation == TAGGED_REPRESENTATION); | 2180 sink_->Put(CODE_BACKREF_SERIALIZATION + space, "BackRefCodeSer"); |
2092 sink_->Put(BACKREF_SERIALIZATION, "BackRefSerialization"); | 2181 sink_->PutInt(address, "address"); |
2093 } | 2182 } |
2094 sink_->Put(space, "space"); | |
2095 sink_->PutInt(offset, "offset"); | |
2096 } else { | 2183 } else { |
2097 // Object has not yet been serialized. Serialize it here. | 2184 ASSERT(reference_representation == TAGGED_REPRESENTATION); |
2098 ObjectSerializer serializer(this, | 2185 if (from_start) { |
2099 heap_object, | 2186 #define COMMON_REFS_CASE(tag, common_space, common_offset) \ |
2100 sink_, | 2187 if (space == common_space && address == common_offset) { \ |
2101 reference_representation); | 2188 sink_->Put(tag + REFERENCE_SERIALIZATION, "RefSer\n"); \ |
2102 serializer.Serialize(); | 2189 } else /* NOLINT */ |
| 2190 COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE) |
| 2191 #undef COMMON_REFS_CASE |
| 2192 { /* NOLINT */ |
| 2193 sink_->Put(REFERENCE_SERIALIZATION + space, "RefSer"); |
| 2194 sink_->PutInt(address, "address"); |
| 2195 } |
| 2196 } else { |
| 2197 sink_->Put(BACKREF_SERIALIZATION + space, "BackRefSer"); |
| 2198 sink_->PutInt(address, "address"); |
| 2199 } |
2103 } | 2200 } |
2104 } else { | 2201 } else { |
2105 // Serialize a Smi. | 2202 // Object has not yet been serialized. Serialize it here. |
2106 unsigned int value = Smi::cast(o)->value() + kSmiBias; | 2203 ObjectSerializer serializer(this, |
2107 sink_->Put(SMI_SERIALIZATION, "SmiSerialization"); | 2204 heap_object, |
2108 sink_->PutInt(value, "smi"); | 2205 sink_, |
| 2206 reference_representation); |
| 2207 serializer.Serialize(); |
2109 } | 2208 } |
2110 } | 2209 } |
2111 | 2210 |
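For the paged-space branch in SerializeObject above, a worked example of the back-reference decision, using made-up numbers (8 KB pages are assumed purely for illustration; the real Page::kPageSizeBits is defined elsewhere in V8):

// Illustration only; none of these names are part of the patch.
void BackRefDecisionExample() {
  const int kAssumedPageSizeBits = 13;     // pretend pages are 8 KB
  int current = 0x2040;                    // CurrentAllocationAddress(space)
  int address = 0x2010;                    // object's serialization address
  int offset = current - address;          // 0x30
  bool same_page =
      (current >> kAssumedPageSizeBits) == (address >> kAssumedPageSizeBits);
  // same_page is true, so from_start becomes false and the object is emitted
  // as BACKREF_SERIALIZATION + space with the 0x30 offset.  An object at
  // 0x1ff0, on the previous page, would instead be emitted as a
  // REFERENCE_SERIALIZATION resolved from the start of the space.
  (void)offset;
  (void)same_page;
}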
2112 | 2211 |
| 2212 |
2113 void Serializer2::ObjectSerializer::Serialize() { | 2213 void Serializer2::ObjectSerializer::Serialize() { |
2114 int space = Serializer2::SpaceOfObject(object_); | 2214 int space = Serializer2::SpaceOfObject(object_); |
2115 int size = object_->Size(); | 2215 int size = object_->Size(); |
2116 | 2216 |
2117 if (reference_representation_ == TAGGED_REPRESENTATION) { | 2217 if (reference_representation_ == TAGGED_REPRESENTATION) { |
2118 sink_->Put(OBJECT_SERIALIZATION, "ObjectSerialization"); | 2218 sink_->Put(OBJECT_SERIALIZATION + space, "ObjectSerialization"); |
2119 } else { | 2219 } else { |
2120 ASSERT(reference_representation_ == CODE_TARGET_REPRESENTATION); | 2220 ASSERT(reference_representation_ == CODE_TARGET_REPRESENTATION); |
2121 sink_->Put(CODE_OBJECT_SERIALIZATION, "ObjectSerialization"); | 2221 sink_->Put(CODE_OBJECT_SERIALIZATION + space, "ObjectSerialization"); |
2122 } | 2222 } |
2123 sink_->Put(space, "space"); | |
2124 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); | 2223 sink_->PutInt(size >> kObjectAlignmentBits, "Size in words"); |
2125 | 2224 |
2126 // Get the map before overwriting it. | 2225 // Get the map before overwriting it. |
2127 Map* map = object_->map(); | 2226 Map* map = object_->map(); |
2128 // Mark this object as already serialized. | 2227 // Mark this object as already serialized. |
2129 object_->set_map_word( | 2228 bool start_new_page; |
2130 MapWord::FromSerializationAddress(serializer_->Allocate(space, size))); | 2229 object_->set_map_word(MapWord::FromSerializationAddress( |
| 2230 serializer_->Allocate(space, size, &start_new_page))); |
| 2231 if (start_new_page) { |
| 2232 sink_->Put(START_NEW_PAGE_SERIALIZATION, "NewPage"); |
| 2233 sink_->Put(space, "NewPage space\n"); |
| 2234 } |
2131 | 2235 |
2132 // Serialize the map (first word of the object). | 2236 // Serialize the map (first word of the object). |
2133 serializer_->SerializeObject(map, TAGGED_REPRESENTATION); | 2237 serializer_->SerializeObject(map, TAGGED_REPRESENTATION); |
2134 | 2238 |
2135 // Serialize the rest of the object. | 2239 // Serialize the rest of the object. |
2136 ASSERT(bytes_processed_so_far_ == 0); | 2240 ASSERT(bytes_processed_so_far_ == 0); |
2137 bytes_processed_so_far_ = kPointerSize; | 2241 bytes_processed_so_far_ = kPointerSize; |
2138 object_->IterateBody(map->instance_type(), size, this); | 2242 object_->IterateBody(map->instance_type(), size, this); |
2139 OutputRawData(object_->address() + size); | 2243 OutputRawData(object_->address() + size); |
2140 } | 2244 } |
2141 | 2245 |
2142 | 2246 |
2143 void Serializer2::ObjectSerializer::VisitPointers(Object** start, | 2247 void Serializer2::ObjectSerializer::VisitPointers(Object** start, |
2144 Object** end) { | 2248 Object** end) { |
2145 Address pointers_start = reinterpret_cast<Address>(start); | 2249 Object** current = start; |
2146 OutputRawData(pointers_start); | 2250 while (current < end) { |
| 2251 while (current < end && (*current)->IsSmi()) current++; |
| 2252 OutputRawData(reinterpret_cast<Address>(current)); |
2147 | 2253 |
2148 for (Object** current = start; current < end; current++) { | 2254 while (current < end && !(*current)->IsSmi()) { |
2149 serializer_->SerializeObject(*current, TAGGED_REPRESENTATION); | 2255 serializer_->SerializeObject(*current, TAGGED_REPRESENTATION); |
| 2256 bytes_processed_so_far_ += kPointerSize; |
| 2257 current++; |
| 2258 } |
2150 } | 2259 } |
2151 bytes_processed_so_far_ += (end - start) * kPointerSize; | |
2152 } | 2260 } |
2153 | 2261 |
2154 | 2262 |
2155 void Serializer2::ObjectSerializer::VisitExternalReferences(Address* start, | 2263 void Serializer2::ObjectSerializer::VisitExternalReferences(Address* start, |
2156 Address* end) { | 2264 Address* end) { |
2157 Address references_start = reinterpret_cast<Address>(start); | 2265 Address references_start = reinterpret_cast<Address>(start); |
2158 OutputRawData(references_start); | 2266 OutputRawData(references_start); |
2159 | 2267 |
2160 for (Address* current = start; current < end; current++) { | 2268 for (Address* current = start; current < end; current++) { |
2161 sink_->Put(EXTERNAL_REFERENCE_SERIALIZATION, "External reference"); | 2269 sink_->Put(EXTERNAL_REFERENCE_SERIALIZATION, "External reference"); |
(...skipping 27 matching lines...) |
2189 | 2297 |
2190 | 2298 |
2191 void Serializer2::ObjectSerializer::OutputRawData(Address up_to) { | 2299 void Serializer2::ObjectSerializer::OutputRawData(Address up_to) { |
2192 Address object_start = object_->address(); | 2300 Address object_start = object_->address(); |
2193 int up_to_offset = up_to - object_start; | 2301 int up_to_offset = up_to - object_start; |
2194 int skipped = up_to_offset - bytes_processed_so_far_; | 2302 int skipped = up_to_offset - bytes_processed_so_far_; |
2195 // This assert will fail if the reloc info gives us the target_address_address | 2303 // This assert will fail if the reloc info gives us the target_address_address |
2196 // locations in a non-ascending order. Luckily that doesn't happen. | 2304 // locations in a non-ascending order. Luckily that doesn't happen. |
2197 ASSERT(skipped >= 0); | 2305 ASSERT(skipped >= 0); |
2198 if (skipped != 0) { | 2306 if (skipped != 0) { |
2199 sink_->Put(RAW_DATA_SERIALIZATION, "raw data"); | 2307 Address base = object_start + bytes_processed_so_far_; |
2200 sink_->PutInt(skipped, "length"); | 2308 #define RAW_CASE(index, length) \ |
| 2309 if (skipped == length) { \ |
| 2310 sink_->Put(RAW_DATA_SERIALIZATION + index, "raw data fixed\n"); \ |
| 2311 } else /* NOLINT */ |
| 2312 COMMON_RAW_LENGTHS(RAW_CASE) |
| 2313 #undef RAW_CASE |
| 2314 { /* NOLINT */ |
| 2315 sink_->Put(RAW_DATA_SERIALIZATION, "raw data"); |
| 2316 sink_->PutInt(skipped, "length"); |
| 2317 } |
2201 for (int i = 0; i < skipped; i++) { | 2318 for (int i = 0; i < skipped; i++) { |
2202 unsigned int data = object_start[bytes_processed_so_far_ + i]; | 2319 unsigned int data = base[i]; |
2203 sink_->Put(data, "byte"); | 2320 sink_->Put(data, "byte\n"); |
2204 } | 2321 } |
2205 bytes_processed_so_far_ += skipped; | 2322 bytes_processed_so_far_ += skipped; |
2206 } | 2323 } |
2207 } | 2324 } |
2208 | 2325 |
2209 | 2326 |
2210 int Serializer2::SpaceOfObject(HeapObject* object) { | 2327 int Serializer2::SpaceOfObject(HeapObject* object) { |
2211 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { | 2328 for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { |
2212 AllocationSpace s = static_cast<AllocationSpace>(i); | 2329 AllocationSpace s = static_cast<AllocationSpace>(i); |
2213 if (Heap::InSpace(object, s)) { | 2330 if (Heap::InSpace(object, s)) { |
(...skipping 19 matching lines...) |
2233 AllocationSpace s = static_cast<AllocationSpace>(i); | 2350 AllocationSpace s = static_cast<AllocationSpace>(i); |
2234 if (Heap::InSpace(object, s)) { | 2351 if (Heap::InSpace(object, s)) { |
2235 return i; | 2352 return i; |
2236 } | 2353 } |
2237 } | 2354 } |
2238 UNREACHABLE(); | 2355 UNREACHABLE(); |
2239 return 0; | 2356 return 0; |
2240 } | 2357 } |
2241 | 2358 |
2242 | 2359 |
2243 int Serializer2::Allocate(int space, int size) { | 2360 int Serializer2::Allocate(int space, int size, bool* new_page) { |
2244 ASSERT(space >= 0 && space < kNumberOfSpaces); | 2361 ASSERT(space >= 0 && space < kNumberOfSpaces); |
2245 if (SpaceIsLarge(space)) { | 2362 if (SpaceIsLarge(space)) { |
2246 // In large object space we merely number the objects instead of trying to | 2363 // In large object space we merely number the objects instead of trying to |
2247 // determine some sort of address. | 2364 // determine some sort of address. |
| 2365 *new_page = true; |
2248 return fullness_[LO_SPACE]++; | 2366 return fullness_[LO_SPACE]++; |
2249 } | 2367 } |
| 2368 *new_page = false; |
| 2369 if (fullness_[space] == 0) { |
| 2370 *new_page = true; |
| 2371 } |
2250 if (SpaceIsPaged(space)) { | 2372 if (SpaceIsPaged(space)) { |
2251 // Paged spaces are a little special. We encode their addresses as if the | 2373 // Paged spaces are a little special. We encode their addresses as if the |
2252 // pages were all contiguous and each page were filled up in the range | 2374 // pages were all contiguous and each page were filled up in the range |
2253 // 0 - Page::kObjectAreaSize. In practice the pages may not be contiguous | 2375 // 0 - Page::kObjectAreaSize. In practice the pages may not be contiguous |
2254 // and allocation does not start at offset 0 in the page, but this scheme | 2376 // and allocation does not start at offset 0 in the page, but this scheme |
2255 // means the deserializer can get the page number quickly by shifting the | 2377 // means the deserializer can get the page number quickly by shifting the |
2256 // serialized address. | 2378 // serialized address. |
2257 ASSERT(IsPowerOf2(Page::kPageSize)); | 2379 ASSERT(IsPowerOf2(Page::kPageSize)); |
2258 int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1)); | 2380 int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1)); |
2259 ASSERT(size <= Page::kObjectAreaSize); | 2381 ASSERT(size <= Page::kObjectAreaSize); |
2260 if (used_in_this_page + size > Page::kObjectAreaSize) { | 2382 if (used_in_this_page + size > Page::kObjectAreaSize) { |
| 2383 *new_page = true; |
2261 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); | 2384 fullness_[space] = RoundUp(fullness_[space], Page::kPageSize); |
2262 } | 2385 } |
2263 } | 2386 } |
2264 int allocation_address = fullness_[space]; | 2387 int allocation_address = fullness_[space]; |
2265 fullness_[space] = allocation_address + size; | 2388 fullness_[space] = allocation_address + size; |
2266 return allocation_address; | 2389 return allocation_address; |
2267 } | 2390 } |
2268 | 2391 |
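The contiguous-page encoding described in the comment above is what lets the deserializer's GetAddressFromStart recover a location with one shift and one mask, using the same Page constants that appear in the code. Illustrative helpers only; these names are not part of the patch:

// How a serialized paged-space address splits back into a page index and an
// intra-page offset, mirroring GetAddressFromStart.
inline int SerializedPageIndex(int serialized_address) {
  return serialized_address >> Page::kPageSizeBits;
}
inline int SerializedPageOffset(int serialized_address) {
  return serialized_address & Page::kPageAlignmentMask;
}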
2269 | 2392 |
2270 } } // namespace v8::internal | 2393 } } // namespace v8::internal |