OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1681 matching lines...) |
1692 IncrementalMarking* incremental_marking() { | 1692 IncrementalMarking* incremental_marking() { |
1693 return &incremental_marking_; | 1693 return &incremental_marking_; |
1694 } | 1694 } |
1695 | 1695 |
1696 bool IsSweepingComplete() { | 1696 bool IsSweepingComplete() { |
1697 return !mark_compact_collector()->IsConcurrentSweepingInProgress() && | 1697 return !mark_compact_collector()->IsConcurrentSweepingInProgress() && |
1698 old_data_space()->IsLazySweepingComplete() && | 1698 old_data_space()->IsLazySweepingComplete() && |
1699 old_pointer_space()->IsLazySweepingComplete(); | 1699 old_pointer_space()->IsLazySweepingComplete(); |
1700 } | 1700 } |
1701 | 1701 |
1702 bool AdvanceSweepers(int step_size) { | 1702 bool AdvanceSweepers(int step_size); |
1703 ASSERT(!FLAG_parallel_sweeping && !FLAG_concurrent_sweeping); | |
1704 bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size); | |
1705 sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size); | |
1706 return sweeping_complete; | |
1707 } | |
1708 | 1703 |
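The new side replaces the inline AdvanceSweepers body with a bare declaration, so the definition presumably moves out of the header (most likely into heap.cc). A minimal sketch of what that out-of-line definition could look like, assuming it simply reuses the old inline body shown above (whether the new version keeps the ASSERT is not visible in this diff):

bool Heap::AdvanceSweepers(int step_size) {
  // Lazy sweeping only; parallel/concurrent sweeping takes a different path.
  ASSERT(!FLAG_parallel_sweeping && !FLAG_concurrent_sweeping);
  // Advance both lazily swept old spaces and report whether both finished.
  bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
  sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
  return sweeping_complete;
}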
1709 bool EnsureSweepersProgressed(int step_size) { | 1704 bool EnsureSweepersProgressed(int step_size) { |
1710 bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size); | 1705 bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size); |
1711 sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size); | 1706 sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size); |
1712 return sweeping_complete; | 1707 return sweeping_complete; |
1713 } | 1708 } |
1714 | 1709 |
1715 ExternalStringTable* external_string_table() { | 1710 ExternalStringTable* external_string_table() { |
1716 return &external_string_table_; | 1711 return &external_string_table_; |
1717 } | 1712 } |
(...skipping 108 matching lines...) |
1826 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++; | 1821 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++; |
1827 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size; | 1822 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size; |
1828 } | 1823 } |
1829 | 1824 |
1830 void CheckpointObjectStats(); | 1825 void CheckpointObjectStats(); |
1831 | 1826 |
1832 // We don't use a LockGuard here since we want to lock the heap | 1827 // We don't use a LockGuard here since we want to lock the heap |
1833 // only when FLAG_concurrent_recompilation is true. | 1828 // only when FLAG_concurrent_recompilation is true. |
1834 class RelocationLock { | 1829 class RelocationLock { |
1835 public: | 1830 public: |
1836 explicit RelocationLock(Heap* heap); | 1831 explicit RelocationLock(Heap* heap) : heap_(heap) { |
| 1832 if (FLAG_concurrent_recompilation) { |
| 1833 heap_->relocation_mutex_->Lock(); |
| 1834 } |
| 1835 } |
| 1836 |
1837 | 1837 |
1838 ~RelocationLock() { | 1838 ~RelocationLock() { |
1839 if (FLAG_concurrent_recompilation) { | 1839 if (FLAG_concurrent_recompilation) { |
1840 #ifdef DEBUG | |
1841 heap_->relocation_mutex_locked_by_optimizer_thread_ = false; | |
1842 #endif // DEBUG | |
1843 heap_->relocation_mutex_->Unlock(); | 1840 heap_->relocation_mutex_->Unlock(); |
1844 } | 1841 } |
1845 } | 1842 } |
1846 | 1843 |
1847 #ifdef DEBUG | |
1848 static bool IsLockedByOptimizerThread(Heap* heap) { | |
1849 return heap->relocation_mutex_locked_by_optimizer_thread_; | |
1850 } | |
1851 #endif // DEBUG | |
1852 | |
1853 private: | 1844 private: |
1854 Heap* heap_; | 1845 Heap* heap_; |
1855 }; | 1846 }; |
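For context, RelocationLock is a scope guard: the constructor acquires relocation_mutex_ only when FLAG_concurrent_recompilation is set, and the destructor releases it under the same condition. A hypothetical caller-side sketch (the function EvacuateDuringGC and its body are invented for illustration; only Heap and RelocationLock come from this header):

void EvacuateDuringGC(Heap* heap) {
  // Locks heap->relocation_mutex_ if FLAG_concurrent_recompilation is on.
  Heap::RelocationLock relocation_lock(heap);
  // ... relocate objects; the concurrent recompilation thread is kept out ...
}  // Destructor unlocks the mutex when the scope exits.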
1856 | 1847 |
1857 MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep); | 1848 MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep); |
1858 | 1849 |
1859 DependentCode* LookupWeakObjectToCodeDependency(Object* obj); | 1850 DependentCode* LookupWeakObjectToCodeDependency(Object* obj); |
1860 | 1851 |
1861 void InitializeWeakObjectToCodeTable() { | 1852 void InitializeWeakObjectToCodeTable() { |
1862 set_weak_object_to_code_table(undefined_value()); | 1853 set_weak_object_to_code_table(undefined_value()); |
(...skipping 1227 matching lines...) |
3090 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 3081 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
3091 | 3082 |
3092 private: | 3083 private: |
3093 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 3084 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
3094 }; | 3085 }; |
3095 #endif // DEBUG | 3086 #endif // DEBUG |
3096 | 3087 |
3097 } } // namespace v8::internal | 3088 } } // namespace v8::internal |
3098 | 3089 |
3099 #endif // V8_HEAP_H_ | 3090 #endif // V8_HEAP_H_ |