Chromium Code Reviews

Diff: src/heap/heap.h

Issue 1324023007: [heap] introduce ArrayBufferTracker (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: destructor | Created 5 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_HEAP_H_
 #define V8_HEAP_HEAP_H_

 #include <cmath>
 #include <map>

(...skipping 402 matching lines...)
   V(BooleanMap)                        \
   V(UninitializedMap)                  \
   V(ArgumentsMarkerMap)                \
   V(JSMessageObjectMap)                \
   V(ForeignMap)                        \
   V(NeanderMap)                        \
   V(empty_string)                      \
   PRIVATE_SYMBOL_LIST(V)

 // Forward declarations.
+class ArrayBufferTracker;
 class HeapObjectsFilter;
 class HeapStats;
 class Isolate;
 class MemoryReducer;
 class ObjectStats;
 class WeakObjectRetainer;


 // A queue of objects promoted during scavenge. Each object is accompanied
 // by it's size to avoid dereferencing a map pointer for scanning.
(...skipping 544 matching lines...)
   int global_ic_age() { return global_ic_age_; }

   void AgeInlineCaches() {
     global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
   }

   int64_t amount_of_external_allocated_memory() {
     return amount_of_external_allocated_memory_;
   }

+  void update_amount_of_external_allocated_memory(int64_t delta) {
+    amount_of_external_allocated_memory_ += delta;
+  }
+
   void DeoptMarkedAllocationSites();

   bool DeoptMaybeTenuredAllocationSites() {
     return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
   }

   void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                      Handle<DependentCode> dep);

   DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

   void AddRetainedMap(Handle<Map> map);

   // This event is triggered after successful allocation of a new object made
   // by runtime. Allocations of target space for object evacuation do not
   // trigger the event. In order to track ALL allocations one must turn off
   // FLAG_inline_new and FLAG_use_allocation_folding.
   inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

   // This event is triggered after object is moved to a new place.
   inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                           int size_in_bytes);

   bool deserialization_complete() const { return deserialization_complete_; }

-  // The following methods are used to track raw C++ pointers to externally
-  // allocated memory used as backing store in live array buffers.
-
-  // A new ArrayBuffer was created with |data| as backing store.
-  void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);
-
-  // The backing store |data| is no longer owned by V8.
-  void UnregisterArrayBuffer(bool in_new_space, void* data);
-
-  // A live ArrayBuffer was discovered during marking/scavenge.
-  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
-
-  // Frees all backing store pointers that weren't discovered in the previous
-  // marking or scavenge phase.
-  void FreeDeadArrayBuffers(bool from_scavenge);
-
-  // Prepare for a new scavenge phase. A new marking phase is implicitly
-  // prepared by finishing the previous one.
-  void PrepareArrayBufferDiscoveryInNewSpace();
-
-  // An ArrayBuffer moved from new space to old space.
-  void PromoteArrayBuffer(Object* buffer);
-
   bool HasLowAllocationRate();
   bool HasHighFragmentation();
   bool HasHighFragmentation(intptr_t used, intptr_t committed);

   // ===========================================================================
   // Initialization. ===========================================================
   // ===========================================================================

   // Configure heap size in MB before setup. Return false if the heap has been
   // set up already.
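The block removed above is the raw-pointer bookkeeping this CL pulls out of Heap: backing stores are registered when an ArrayBuffer is created, re-discovered during marking or scavenge, and anything not re-discovered is freed. The real replacement class is declared in src/heap/array-buffer-tracker.h, which is not part of this file; what follows is only a simplified, self-contained model of that register/discover/free protocol, reconstructed from the deleted comments. All names here (ArrayBufferTrackerModel, RegisterNew, MarkLive, FreeDead, ...) are placeholders, not V8's actual identifiers.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <map>

    // Simplified stand-in for the new tracker; NOT the real
    // v8::internal::ArrayBufferTracker.
    class ArrayBufferTrackerModel {
     public:
      ~ArrayBufferTrackerModel();  // defined in a later sketch (heap tear-down)

      // A new ArrayBuffer was created with |data| as backing store.
      void RegisterNew(void* data, size_t length) {
        live_[data] = length;
        external_memory_ += static_cast<int64_t>(length);
      }

      // The backing store |data| is no longer owned by the GC (e.g. it was
      // externalized): stop tracking it without freeing it.
      void Unregister(void* data) {
        auto it = live_.find(data);
        if (it == live_.end()) return;
        external_memory_ -= static_cast<int64_t>(it->second);
        not_yet_discovered_.erase(data);
        live_.erase(it);
      }

      // At the start of a GC cycle every tracked buffer is "not yet discovered".
      void PrepareDiscovery() { not_yet_discovered_ = live_; }

      // A live ArrayBuffer was discovered during marking/scavenge.
      void MarkLive(void* data) { not_yet_discovered_.erase(data); }

      // Whatever was never discovered is dead: free it and forget it.
      void FreeDead() {
        for (auto& entry : not_yet_discovered_) {
          external_memory_ -= static_cast<int64_t>(entry.second);
          live_.erase(entry.first);
          free(entry.first);  // stands in for the array-buffer allocator's Free()
        }
        not_yet_discovered_.clear();
      }

      int64_t external_memory() const { return external_memory_; }

     private:
      std::map<void*, size_t> live_;                // backing store -> length
      std::map<void*, size_t> not_yet_discovered_;  // snapshot minus live hits
      int64_t external_memory_ = 0;                 // mirrors Heap's counter
    };

The Heap::update_amount_of_external_allocated_memory(delta) added earlier in this hunk is presumably the hook such a tracker uses to push its external-memory deltas back into the heap's accounting, now that the bookkeeping no longer lives inside Heap itself.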
(...skipping 470 matching lines...)
                                               int filler_size);

   // Creates a filler object if needed for alignment and returns a heap object
   // immediately after it. If any space is left after the returned object,
   // another filler object is created so the over allocated memory is iterable.
   MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
                                               int object_size,
                                               int allocation_size,
                                               AllocationAlignment alignment);

+  // ===========================================================================
+  // ArrayBufferTracker. =======================================================
+  // ===========================================================================
+  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
+  void UnregisterArrayBuffer(JSArrayBuffer* buffer);
+
+  inline ArrayBufferTracker* array_buffer_tracker() {
+    return array_buffer_tracker_;
+  }
+
   // =============================================================================

 #ifdef VERIFY_HEAP
   // Verify the heap is in its normal state before or after a GC.
   void Verify();
 #endif

 #ifdef DEBUG
   void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

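After this hunk, Heap's public surface for ArrayBuffers shrinks to the two declarations and the accessor added above. Their bodies live in src/heap/heap.cc (also part of this CL, not shown here) and presumably just forward to the tracker. Below is a minimal sketch of that ownership/forwarding shape, reusing the ArrayBufferTrackerModel placeholder from the previous sketch; HeapModel and the method names are illustrative, not the CL's code.

    // Only the forwarding shape is shown; the real Heap stores a
    // heap-allocated ArrayBufferTracker* in array_buffer_tracker_.
    struct HeapModel {
      ArrayBufferTrackerModel* array_buffer_tracker() { return &tracker_; }

      // Thin entry points mirroring Heap::RegisterNewArrayBuffer and
      // Heap::UnregisterArrayBuffer (the real ones take a JSArrayBuffer*).
      void RegisterNewArrayBuffer(void* data, size_t length) {
        tracker_.RegisterNew(data, length);
      }
      void UnregisterArrayBuffer(void* data) { tracker_.Unregister(data); }

     private:
      ArrayBufferTrackerModel tracker_;  // stands in for array_buffer_tracker_
    };

Keeping only thin forwarders in heap.h means collectors reach the bookkeeping through heap->array_buffer_tracker(), and the JSArrayBuffer-based signatures hide the raw data/length/in_new_space arguments that the old API exposed.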
(...skipping 213 matching lines...)

   // Deopts all code that contains allocation instruction which are tenured or
   // not tenured. Moreover it clears the pretenuring allocation site statistics.
   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

   // Evaluates local pretenuring for the old space and calls
   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

-  // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
-  void TearDownArrayBuffers();
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();

   // Creates and installs the full-sized number string cache.
   int FullSizeNumberStringCacheLength();
   // Flush the number to string cache.
   void FlushNumberStringCache();

   // Sets used allocation sites entries to undefined.
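The removed TearDownArrayBuffers() freed every backing store still registered when the heap was destroyed. Since this patch set is titled "destructor", that responsibility presumably moves into ~ArrayBufferTracker(); expressed against the simplified model from the earlier sketch (an assumption, not the CL's actual code):

    ArrayBufferTrackerModel::~ArrayBufferTrackerModel() {
      // Free whatever is still registered at heap tear-down, the job the
      // removed Heap::TearDownArrayBuffers() used to do.
      for (auto& entry : live_) {
        free(entry.first);
      }
      live_.clear();
    }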
(...skipping 561 matching lines...)
   base::Semaphore pending_unmapping_tasks_semaphore_;

   base::Mutex relocation_mutex_;

   int gc_callbacks_depth_;

   bool deserialization_complete_;

   bool concurrent_sweeping_enabled_;

-  // |live_array_buffers_| maps externally allocated memory used as backing
-  // store for ArrayBuffers to the length of the respective memory blocks.
-  //
-  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
-  // a copy of |live_array_buffers_| and we remove pointers as we discover live
-  // ArrayBuffer objects during marking. At the end of mark/compact, the
-  // remaining memory blocks can be freed.
-  std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_;
-
-  // To be able to free memory held by ArrayBuffers during scavenge as well, we
-  // have a separate list of allocated memory held by ArrayBuffers in new space.
-  //
-  // Since mark/compact also evacuates the new space, all pointers in the
-  // |live_array_buffers_for_scavenge_| list are also in the
-  // |live_array_buffers_| list.
-  std::map<void*, size_t> live_array_buffers_for_scavenge_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
-
   StrongRootsList* strong_roots_list_;

+  ArrayBufferTracker* array_buffer_tracker_;
+
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class GCCallbacksScope;
   friend class GCTracer;
   friend class HeapIterator;
   friend class IncrementalMarking;
   friend class MarkCompactCollector;
   friend class MarkCompactMarkingVisitor;
   friend class Page;
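The member comments removed above also explain the new-space twist: buffers in new space are tracked in a second pair of maps so that a scavenge can free dead new-space backing stores without waiting for a full mark/compact, and every pointer in the scavenge maps is also present in the full maps. A small self-contained sketch of just that part of the protocol, with placeholder names (the real logic now lives behind ArrayBufferTracker; external-memory accounting omitted for brevity):

    #include <cstddef>
    #include <cstdlib>
    #include <map>

    using BufferMap = std::map<void*, size_t>;  // backing store -> length

    // A buffer survived a scavenge and was promoted to old space: it leaves
    // the new-space maps but stays in the full map (it was registered in both).
    void Promote(void* data, BufferMap* live_new_space,
                 BufferMap* not_yet_discovered_new_space) {
      live_new_space->erase(data);
      not_yet_discovered_new_space->erase(data);
    }

    // After a scavenge, anything still undiscovered in new space is dead:
    // free it and drop it from both the new-space map and the full map.
    void FreeDeadAfterScavenge(BufferMap* live, BufferMap* live_new_space,
                               BufferMap* not_yet_discovered_new_space) {
      for (auto& entry : *not_yet_discovered_new_space) {
        live->erase(entry.first);
        live_new_space->erase(entry.first);
        free(entry.first);  // stands in for the array-buffer allocator's Free()
      }
      not_yet_discovered_new_space->clear();
    }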
(...skipping 369 matching lines...)
   DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
 };
 #endif  // DEBUG
 }
 }  // namespace v8::internal

 #endif  // V8_HEAP_HEAP_H_
