| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1766 matching lines...) |
| 1777 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { | 1777 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { |
| 1778 // We don't record weak slots during marking or scavenges. | 1778 // We don't record weak slots during marking or scavenges. |
| 1779 // Instead we do it once when we complete mark-compact cycle. | 1779 // Instead we do it once when we complete mark-compact cycle. |
| 1780 // Note that write barrier has no effect if we are already in the middle of | 1780 // Note that write barrier has no effect if we are already in the middle of |
| 1781 // compacting mark-sweep cycle and we have to record slots manually. | 1781 // compacting mark-sweep cycle and we have to record slots manually. |
| 1782 bool record_slots = | 1782 bool record_slots = |
| 1783 gc_state() == MARK_COMPACT && | 1783 gc_state() == MARK_COMPACT && |
| 1784 mark_compact_collector()->is_compacting(); | 1784 mark_compact_collector()->is_compacting(); |
| 1785 ProcessArrayBuffers(retainer, record_slots); | 1785 ProcessArrayBuffers(retainer, record_slots); |
| 1786 ProcessNativeContexts(retainer, record_slots); | 1786 ProcessNativeContexts(retainer, record_slots); |
| | 1787 // TODO(mvstanton): AllocationSites only need to be processed during |
| | 1788 // MARK_COMPACT, as they live in old space. Verify and address. |
| 1787 ProcessAllocationSites(retainer, record_slots); | 1789 ProcessAllocationSites(retainer, record_slots); |
| 1788 } | 1790 } |
| 1789 | 1791 |
| 1790 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, | 1792 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, |
| 1791 bool record_slots) { | 1793 bool record_slots) { |
| 1792 Object* head = | 1794 Object* head = |
| 1793 VisitWeakList<Context>( | 1795 VisitWeakList<Context>( |
| 1794 this, native_contexts_list(), retainer, record_slots); | 1796 this, native_contexts_list(), retainer, record_slots); |
| 1795 // Update the head of the list of contexts. | 1797 // Update the head of the list of contexts. |
| 1796 native_contexts_list_ = head; | 1798 native_contexts_list_ = head; |
| (...skipping 85 matching lines...) |
| 1882 struct WeakListVisitor<AllocationSite> { | 1884 struct WeakListVisitor<AllocationSite> { |
| 1883 static void SetWeakNext(AllocationSite* obj, Object* next) { | 1885 static void SetWeakNext(AllocationSite* obj, Object* next) { |
| 1884 obj->set_weak_next(next); | 1886 obj->set_weak_next(next); |
| 1885 } | 1887 } |
| 1886 | 1888 |
| 1887 static Object* WeakNext(AllocationSite* obj) { | 1889 static Object* WeakNext(AllocationSite* obj) { |
| 1888 return obj->weak_next(); | 1890 return obj->weak_next(); |
| 1889 } | 1891 } |
| 1890 | 1892 |
| 1891 static void VisitLiveObject(Heap* heap, | 1893 static void VisitLiveObject(Heap* heap, |
| 1892 AllocationSite* array_buffer, | 1894 AllocationSite* site, |
| 1893 WeakObjectRetainer* retainer, | 1895 WeakObjectRetainer* retainer, |
| 1894 bool record_slots) {} | 1896 bool record_slots) {} |
| 1895 | 1897 |
| 1896 static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {} | 1898 static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {} |
| 1897 | 1899 |
| 1898 static int WeakNextOffset() { | 1900 static int WeakNextOffset() { |
| 1899 return AllocationSite::kWeakNextOffset; | 1901 return AllocationSite::kWeakNextOffset; |
| 1900 } | 1902 } |
| 1901 }; | 1903 }; |
| 1902 | 1904 |
| (...skipping 6055 matching lines...) |
| 7958 if (FLAG_concurrent_recompilation) { | 7960 if (FLAG_concurrent_recompilation) { |
| 7959 heap_->relocation_mutex_->Lock(); | 7961 heap_->relocation_mutex_->Lock(); |
| 7960 #ifdef DEBUG | 7962 #ifdef DEBUG |
| 7961 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7963 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 7962 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7964 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 7963 #endif // DEBUG | 7965 #endif // DEBUG |
| 7964 } | 7966 } |
| 7965 } | 7967 } |
| 7966 | 7968 |
| 7967 } } // namespace v8::internal | 7969 } } // namespace v8::internal |
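For context, the `WeakListVisitor<AllocationSite>` specialization in this patch plugs into V8's generic `VisitWeakList<T>` helper, which walks a weak list threaded through each object's `weak_next` field, asks the retainer which objects survive, splices out the rest, and returns the new head (as `ProcessNativeContexts` stores back into `native_contexts_list_`). The sketch below is a minimal, self-contained illustration of that trait-driven traversal, not V8's actual implementation: `Site`, `SiteVisitor`, and `Retainer` are hypothetical stand-ins, the signatures are simplified, and the `Heap*`, `record_slots`, and `WeakNextOffset` plumbing is omitted.

```cpp
// Minimal sketch (not V8's actual code) of a trait-driven weak-list
// traversal in the style of VisitWeakList<T>: walk a singly linked list
// threaded through weak_next, keep the objects the retainer retains,
// unlink the dead ones, and return the new head.
#include <cstdio>

struct Site {          // hypothetical stand-in for AllocationSite
  int id;
  bool alive;
  Site* weak_next = nullptr;
};

// Trait mirroring the shape of the WeakListVisitor<T> specializations.
struct SiteVisitor {
  static Site* WeakNext(Site* s) { return s->weak_next; }
  static void SetWeakNext(Site* s, Site* next) { s->weak_next = next; }
  static void VisitLiveObject(Site* /*s*/) {}     // empty for AllocationSite
  static void VisitPhantomObject(Site* /*s*/) {}  // likewise
};

// Stand-in retainer: returns the object if it should be kept, null otherwise.
struct Retainer {
  Site* RetainAs(Site* s) { return s->alive ? s : nullptr; }
};

template <typename T, typename Visitor>
T* VisitWeakList(T* list, Retainer* retainer) {
  T* head = nullptr;
  T* tail = nullptr;
  while (list != nullptr) {
    T* next = Visitor::WeakNext(list);
    if (T* retained = retainer->RetainAs(list)) {
      if (head == nullptr) head = retained;       // first survivor is the head
      else Visitor::SetWeakNext(tail, retained);  // splice after previous one
      tail = retained;
      Visitor::VisitLiveObject(retained);
    } else {
      Visitor::VisitPhantomObject(list);          // dropped from the list
    }
    list = next;
  }
  if (tail != nullptr) Visitor::SetWeakNext(tail, nullptr);
  return head;  // new head, e.g. stored back into native_contexts_list_
}

int main() {
  Site a{1, true}, b{2, false}, c{3, true};
  a.weak_next = &b;
  b.weak_next = &c;
  Retainer retainer;
  for (Site* s = VisitWeakList<Site, SiteVisitor>(&a, &retainer); s;
       s = s->weak_next) {
    std::printf("retained site %d\n", s->id);  // prints 1, then 3
  }
  return 0;
}
```

In the real code, `record_slots` is threaded through this traversal so that, during a compacting mark-sweep, the re-linked `weak_next` slots are recorded manually (the write barrier is a no-op at that point), which is exactly what the comment at the top of `ProcessWeakReferences` describes.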