Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 | 9 |
| 10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
| (...skipping 1434 matching lines...) | |
| 1445 void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) { | 1445 void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) { |
| 1446 DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); | 1446 DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); |
| 1447 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++; | 1447 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++; |
| 1448 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size; | 1448 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size; |
| 1449 } | 1449 } |
| 1450 | 1450 |
| 1451 void TraceObjectStats(); | 1451 void TraceObjectStats(); |
| 1452 void TraceObjectStat(const char* name, int count, int size, double time); | 1452 void TraceObjectStat(const char* name, int count, int size, double time); |
| 1453 void CheckpointObjectStats(); | 1453 void CheckpointObjectStats(); |
| 1454 | 1454 |
| 1455 // We don't use a LockGuard here since we want to lock the heap | 1455 struct StrongRootsList { |
| 1456 // only when FLAG_concurrent_recompilation is true. | 1456 Object** start_; |
| | 1457 Object** end_; |
| | 1458 StrongRootsList* next_; |
| | 1459 }; |
| | 1460 |
| | 1461 void RegisterStrongRoots(Object** start, Object** end) { |
| | 1462 StrongRootsList* list = new StrongRootsList(); |
| | 1463 list->next_ = strong_roots_list_; |
| | 1464 list->start_ = start; |
| | 1465 list->end_ = end; |
| | 1466 strong_roots_list_ = list; |
| | 1467 } |
| | 1468 |
| | 1469 void UnregisterStrongRoots(Object** start) { |
| | 1470 StrongRootsList* prev = NULL; |
| | 1471 for (StrongRootsList* list = strong_roots_list_; list; list = list->next_) { |
| | 1472 if (list->start_ == start) { |
| | 1473 if (prev) { |
| | 1474 prev->next_ = list->next_; |
| | 1475 } else { |
| | 1476 strong_roots_list_ = list->next_; |
| | 1477 } |
| | 1478 delete list; |

Erik Corry 2015/04/27 15:39:13
Might we not just as well return here?

titzer 2015/04/27 16:36:48
Doesn't really matter. As written, the code will b

| | 1479 } |
| | 1480 prev = list; |
| | 1481 } |

Erik Corry 2015/04/27 15:39:13
unreachable?

titzer 2015/04/27 16:36:48
Don't need it if we just allow the above.

| | 1482 } |
| | 1483 |
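
As an aside from the review (not part of this CL), here is a minimal sketch of how a caller might use the new strong-roots API; the `heap` pointer and the `retained` buffer are illustrative names, not code from the patch:

```cpp
// Hypothetical caller (illustration only): pin a block of Object* slots so
// the GC treats them as strong roots until they are unregistered.
Object* retained[4] = {NULL, NULL, NULL, NULL};
heap->RegisterStrongRoots(&retained[0], &retained[4]);
// ... store objects into retained[0..3]; they are kept alive across GCs ...
heap->UnregisterStrongRoots(&retained[0]);  // must pass the same start pointer
```

UnregisterStrongRoots matches entries by their `start` pointer, so callers have to unregister with exactly the pointer they registered.
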
| | 1484 class OptionalRelocationLock { |

Erik Corry 2015/04/27 15:39:13
Perhaps you could spend a couple of lines of comme

titzer 2015/04/27 16:36:48
Done.

| | 1485 public: |
| | 1486 explicit OptionalRelocationLock(Heap* heap, bool concurrent) |
| | 1487 : heap_(heap), concurrent_(concurrent) { |
| | 1488 if (concurrent_) heap_->relocation_mutex_.Lock(); |
| | 1489 } |
| | 1490 |
| | 1491 ~OptionalRelocationLock() { |
| | 1492 if (concurrent_) heap_->relocation_mutex_.Unlock(); |
| | 1493 } |
| | 1494 |
| | 1495 private: |
| | 1496 Heap* heap_; |
| | 1497 bool concurrent_; |
| | 1498 }; |
| | 1499 |
| 1457 class RelocationLock { | 1500 class RelocationLock { |
| 1458 public: | 1501 public: |
| 1459 explicit RelocationLock(Heap* heap) : heap_(heap) { | 1502 explicit RelocationLock(Heap* heap) : heap_(heap) { |
| 1460 heap_->relocation_mutex_.Lock(); | 1503 heap_->relocation_mutex_.Lock(); |
| 1461 } | 1504 } |
| 1462 | 1505 |
| 1463 | |
| 1464 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); } | 1506 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); } |
| 1465 | 1507 |
| 1466 private: | 1508 private: |
| 1467 Heap* heap_; | 1509 Heap* heap_; |
| 1468 }; | 1510 }; |
| 1469 | 1511 |
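
For comparison (a sketch under assumed call-site scaffolding, not code from this CL), the two guards might be used like this; `FLAG_concurrent_recompilation` is taken from the comment deleted at old lines 1455-1456, and the surrounding scope is made up:

```cpp
// RelocationLock: always serializes against other users of relocation_mutex_.
{
  Heap::RelocationLock relocation_lock(heap);
  // ... relocate objects while holding relocation_mutex_ ...
}

// OptionalRelocationLock: only takes relocation_mutex_ when the caller knows
// a concurrent client (e.g. concurrent recompilation) may touch the heap.
{
  Heap::OptionalRelocationLock relocation_lock(heap, FLAG_concurrent_recompilation);
  // ... relocate objects; when the flag is off, no lock is taken at all ...
}
```

Skipping the lock when no concurrent work can be running is the behavior the deleted comment above describes.
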
| 1470 void AddWeakObjectToCodeDependency(Handle<HeapObject> obj, | 1512 void AddWeakObjectToCodeDependency(Handle<HeapObject> obj, |
| 1471 Handle<DependentCode> dep); | 1513 Handle<DependentCode> dep); |
| 1472 | 1514 |
| 1473 DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj); | 1515 DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj); |
| (...skipping 671 matching lines...) | |
| 2145 MemoryChunk* chunks_queued_for_free_; | 2187 MemoryChunk* chunks_queued_for_free_; |
| 2146 | 2188 |
| 2147 base::Mutex relocation_mutex_; | 2189 base::Mutex relocation_mutex_; |
| 2148 | 2190 |
| 2149 int gc_callbacks_depth_; | 2191 int gc_callbacks_depth_; |
| 2150 | 2192 |
| 2151 bool deserialization_complete_; | 2193 bool deserialization_complete_; |
| 2152 | 2194 |
| 2153 bool concurrent_sweeping_enabled_; | 2195 bool concurrent_sweeping_enabled_; |
| 2154 | 2196 |
| | 2197 StrongRootsList* strong_roots_list_; |
| | 2198 |
| 2155 friend class AlwaysAllocateScope; | 2199 friend class AlwaysAllocateScope; |
| 2156 friend class Deserializer; | 2200 friend class Deserializer; |
| 2157 friend class Factory; | 2201 friend class Factory; |
| 2158 friend class GCCallbacksScope; | 2202 friend class GCCallbacksScope; |
| 2159 friend class GCTracer; | 2203 friend class GCTracer; |
| 2160 friend class HeapIterator; | 2204 friend class HeapIterator; |
| 2161 friend class Isolate; | 2205 friend class Isolate; |
| 2162 friend class MarkCompactCollector; | 2206 friend class MarkCompactCollector; |
| 2163 friend class MarkCompactMarkingVisitor; | 2207 friend class MarkCompactMarkingVisitor; |
| 2164 friend class MapCompact; | 2208 friend class MapCompact; |
| (...skipping 441 matching lines...) | |
| 2606 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2650 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2607 | 2651 |
| 2608 private: | 2652 private: |
| 2609 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2653 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2610 }; | 2654 }; |
| 2611 #endif // DEBUG | 2655 #endif // DEBUG |
| 2612 } | 2656 } |
| 2613 } // namespace v8::internal | 2657 } // namespace v8::internal |
| 2614 | 2658 |
| 2615 #endif // V8_HEAP_HEAP_H_ | 2659 #endif // V8_HEAP_HEAP_H_ |