Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 15562008: Recording array buffer views. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: CR feedback (created 7 years, 6 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 161 matching lines...)
 
   if (max_virtual > 0) {
     if (code_range_size_ > 0) {
       // Reserve no more than 1/8 of the memory for the code range.
       code_range_size_ = Min(code_range_size_, max_virtual >> 3);
     }
   }
 
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
   native_contexts_list_ = NULL;
+  array_buffers_list_ = Smi::FromInt(0);
   mark_compact_collector_.heap_ = this;
   external_string_table_.heap_ = this;
   // Put a dummy entry in the remembered pages so we can find the list in
   // the minidump even if there are no real unmapped pages.
   RememberUnmappedPage(NULL, false);
 
   ClearObjectStats(true);
 }
 
 
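Note (illustrative, not part of the patch): the new list head is seeded with Smi::FromInt(0) rather than undefined_value(), presumably because the Heap constructor runs before the root objects exist (roots_ was just zeroed two lines up), while a Smi is a valid tagged value from the start. The same Smi zero then serves as the list terminator throughout the patch. A minimal standalone sketch of such a sentinel-terminated intrusive list, with all names hypothetical:

#include <cstdio>

struct ArrayBufferLike {
  int id;
  ArrayBufferLike* weak_next;  // intrusive weak link
};

// kEmpty plays the role of Smi::FromInt(0): a cheap, always-valid sentinel.
ArrayBufferLike* const kEmpty = nullptr;

// New buffers are pushed on the front, as an allocation site would do.
static ArrayBufferLike* Push(ArrayBufferLike* head, ArrayBufferLike* buf) {
  buf->weak_next = head;
  return buf;
}

int main() {
  ArrayBufferLike a = {1, kEmpty};
  ArrayBufferLike b = {2, kEmpty};
  ArrayBufferLike* list = Push(Push(kEmpty, &a), &b);
  for (ArrayBufferLike* p = list; p != kEmpty; p = p->weak_next)
    std::printf("buffer %d\n", p->id);  // prints 2, then 1
  return 0;
}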
(...skipping 1339 matching lines...)
   // Terminate the list if there are one or more elements.
   if (tail != NULL) {
     tail->set_next_function_link(undefined);
   }
 
   return head;
 }
 
 
 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
-  Object* undefined = undefined_value();
-  Object* head = undefined;
-  Context* tail = NULL;
-  Object* candidate = native_contexts_list_;
-
   // We don't record weak slots during marking or scavenges.
   // Instead we do it once when we complete a mark-compact cycle.
   // Note that the write barrier has no effect if we are already in the
   // middle of a compacting mark-sweep cycle and we have to record slots
   // manually.
   bool record_slots =
       gc_state() == MARK_COMPACT &&
       mark_compact_collector()->is_compacting();
+  ProcessArrayBuffers(retainer, record_slots);
+  ProcessNativeContexts(retainer, record_slots);
+}
+
+void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
+                                 bool record_slots) {
+  Object* undefined = undefined_value();
+  Object* head = undefined;
+  Context* tail = NULL;
+  Object* candidate = native_contexts_list_;
 
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
       if (head == undefined) {
         // First element in the list.
         head = retain;
       } else {
(...skipping 48 matching lines...)
                         Context::NEXT_CONTEXT_LINK,
                         Heap::undefined_value(),
                         UPDATE_WRITE_BARRIER);
   }
 
   // Update the head of the list of contexts.
   native_contexts_list_ = head;
 }
 
 
+template <class T>
+struct WeakListVisitor;
+
+
+template <class T>
+static Object* VisitWeakList(Object* list,
+                             MarkCompactCollector* collector,
+                             WeakObjectRetainer* retainer, bool record_slots) {
+  Object* head = Smi::FromInt(0);
+  T* tail = NULL;
+  while (list != Smi::FromInt(0)) {
+    Object* retained = retainer->RetainAs(list);
+    if (retained != NULL) {
+      if (head == Smi::FromInt(0)) {
+        head = retained;
+      } else {
+        ASSERT(tail != NULL);
+        WeakListVisitor<T>::set_weak_next(tail, retained);
+        if (record_slots) {
+          Object** next_slot =
+              HeapObject::RawField(tail, WeakListVisitor<T>::kWeakNextOffset);
+          collector->RecordSlot(next_slot, next_slot, retained);
+        }
+      }
+      tail = reinterpret_cast<T*>(retained);
+      WeakListVisitor<T>::VisitLiveObject(
+          tail, collector, retainer, record_slots);
+    }
+    list = WeakListVisitor<T>::get_weak_next(reinterpret_cast<T*>(list));
+  }
+  if (tail != NULL) {
+    tail->set_weak_next(Smi::FromInt(0));
+  }
+  return head;
+}
+
+
+template<>
+struct WeakListVisitor<JSTypedArray> {
+  static void set_weak_next(JSTypedArray* obj, Object* next) {
+    obj->set_weak_next(next);
+  }
+
+  static Object* get_weak_next(JSTypedArray* obj) {
+    return obj->weak_next();
+  }
+
+  static void VisitLiveObject(JSTypedArray* obj,
+                              MarkCompactCollector* collector,
+                              WeakObjectRetainer* retainer,
+                              bool record_slots) {}
+
+  static const int kWeakNextOffset = JSTypedArray::kWeakNextOffset;
+};
+
+
+template<>
+struct WeakListVisitor<JSArrayBuffer> {
+  static void set_weak_next(JSArrayBuffer* obj, Object* next) {
+    obj->set_weak_next(next);
+  }
+
+  static Object* get_weak_next(JSArrayBuffer* obj) {
+    return obj->weak_next();
+  }
+
+  static void VisitLiveObject(JSArrayBuffer* array_buffer,
+                              MarkCompactCollector* collector,
+                              WeakObjectRetainer* retainer,
+                              bool record_slots) {
+    Object* typed_array_obj =
+        VisitWeakList<JSTypedArray>(array_buffer->weak_first_array(),
+                                    collector, retainer, record_slots);
+    array_buffer->set_weak_first_array(typed_array_obj);
+    if (typed_array_obj != Smi::FromInt(0) && record_slots) {
+      Object** slot = HeapObject::RawField(
+          array_buffer, JSArrayBuffer::kWeakFirstArrayOffset);
+      collector->RecordSlot(slot, slot, typed_array_obj);
+    }
+  }
+
+  static const int kWeakNextOffset = JSArrayBuffer::kWeakNextOffset;
+};
+
+
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
+                               bool record_slots) {
+  Object* array_buffer_obj =
+      VisitWeakList<JSArrayBuffer>(array_buffers_list(),
+                                   mark_compact_collector(),
+                                   retainer, record_slots);
+  set_array_buffers_list(array_buffer_obj);
+}
+
+
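Note (illustrative, not part of the patch): the VisitWeakList/WeakListVisitor machinery above is a trait-driven walk over an intrusive weak list. The sketch below reduces it to self-contained C++: a retainer decides which elements survive the GC cycle, the walker relinks the survivors and unlinks the dead, and when record_slots is set each rewritten link is reported to the collector, standing in for the manual slot recording that substitutes for the write barrier during a compacting mark-sweep. Buffer, Collector, and RetainAs are hypothetical stand-ins, not V8 API.

#include <cstdio>
#include <vector>

struct Buffer {
  int id;
  bool alive;         // stands in for "still reachable after this GC"
  Buffer* weak_next;  // intrusive weak link, NULL-terminated in this sketch
};

struct Collector {
  std::vector<Buffer**> recorded;  // slots to revisit if objects move
  void RecordSlot(Buffer** slot) { recorded.push_back(slot); }
};

// Trait in the style of WeakListVisitor<T>.
template <class T> struct WeakVisitor;

template <> struct WeakVisitor<Buffer> {
  static void set_weak_next(Buffer* b, Buffer* n) { b->weak_next = n; }
  static Buffer* get_weak_next(Buffer* b) { return b->weak_next; }
};

// Analogue of WeakObjectRetainer::RetainAs: NULL means "dead, unlink it".
static Buffer* RetainAs(Buffer* obj) { return obj->alive ? obj : nullptr; }

template <class T>
static T* VisitWeakList(T* list, Collector* collector, bool record_slots) {
  T* head = nullptr;
  T* tail = nullptr;
  while (list != nullptr) {
    T* retained = RetainAs(list);
    if (retained != nullptr) {
      if (head == nullptr) {
        head = retained;  // first live element becomes the new head
      } else {
        WeakVisitor<T>::set_weak_next(tail, retained);
        if (record_slots) collector->RecordSlot(&tail->weak_next);
      }
      tail = retained;
    }
    list = WeakVisitor<T>::get_weak_next(list);
  }
  if (tail != nullptr) WeakVisitor<T>::set_weak_next(tail, nullptr);
  return head;
}

int main() {
  Collector collector;
  Buffer c = {3, true, nullptr};
  Buffer b = {2, false, &c};  // dead: will be unlinked
  Buffer a = {1, true, &b};
  Buffer* head = VisitWeakList(&a, &collector, true);
  for (Buffer* p = head; p != nullptr; p = p->weak_next)
    std::printf("kept buffer %d\n", p->id);  // prints 1, then 3
  std::printf("%d slot(s) recorded\n",
              static_cast<int>(collector.recorded.size()));  // prints 1
  return 0;
}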
 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
   DisallowHeapAllocation no_allocation;
 
   // Both the external string table and the string table may contain
   // external strings, but neither lists them exhaustively, nor is the
   // intersection set empty. Therefore we iterate over the external string
   // table first, ignoring internalized strings, and then over the
   // internalized string table.
 
   class ExternalStringTableVisitorAdapter : public ObjectVisitor {
(...skipping 155 matching lines...)
                       template VisitSpecialized<Symbol::kSize>);
 
   table_.Register(kVisitSharedFunctionInfo,
                   &ObjectEvacuationStrategy<POINTER_OBJECT>::
                       template VisitSpecialized<SharedFunctionInfo::kSize>);
 
   table_.Register(kVisitJSWeakMap,
                   &ObjectEvacuationStrategy<POINTER_OBJECT>::
                   Visit);
 
+  table_.Register(kVisitJSArrayBuffer,
+                  &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                  Visit);
+
+  table_.Register(kVisitJSTypedArray,
+                  &ObjectEvacuationStrategy<POINTER_OBJECT>::
+                  Visit);
+
   table_.Register(kVisitJSRegExp,
                   &ObjectEvacuationStrategy<POINTER_OBJECT>::
                   Visit);
 
   if (marks_handling == IGNORE_MARKS) {
     table_.Register(kVisitJSFunction,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
                         template VisitSpecialized<JSFunction::kSize>);
   } else {
     table_.Register(kVisitJSFunction, &EvacuateJSFunction);
(...skipping 6137 matching lines...)
   if (FLAG_parallel_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }
 
 } }  // namespace v8::internal
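Overall shape (a schematic summary; the inner-list field is kWeakFirstArrayOffset in the patch, field names below are illustrative): the heap keeps a weak list of all live array buffers, and each buffer keeps a weak list of the typed-array views onto it. WeakListVisitor<JSArrayBuffer>::VisitLiveObject walks the inner list whenever the outer list is pruned, so after every mark-compact cycle both levels drop dead objects without holding strong references to any of them.

// Two-level weak structure traversed by ProcessArrayBuffers (sketch).
struct TypedArrayLike {
  TypedArrayLike* weak_next;        // next view onto the same buffer
};

struct ArrayBufferLike {
  ArrayBufferLike* weak_next;       // next buffer in the heap's list
  TypedArrayLike* weak_first_view;  // head of this buffer's view list
};

struct HeapLike {
  ArrayBufferLike* array_buffers_list;  // mirrors Heap::array_buffers_list_
};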