Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(6)

Side by Side Diff: src/heap/heap.cc

Issue 1844413002: Use EmbedderHeapTracer instead of object grouping when trace_embedder_heap flag is set (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Move logic from heap to mark compact Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/heap.h ('k') | src/heap/incremental-marking.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/heap.h" 5 #include "src/heap/heap.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/ast/scopeinfo.h" 9 #include "src/ast/scopeinfo.h"
10 #include "src/base/bits.h" 10 #include "src/base/bits.h"
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after
108 remembered_unmapped_pages_index_(0), 108 remembered_unmapped_pages_index_(0),
109 #ifdef DEBUG 109 #ifdef DEBUG
110 allocation_timeout_(0), 110 allocation_timeout_(0),
111 #endif // DEBUG 111 #endif // DEBUG
112 old_generation_allocation_limit_(initial_old_generation_size_), 112 old_generation_allocation_limit_(initial_old_generation_size_),
113 old_gen_exhausted_(false), 113 old_gen_exhausted_(false),
114 optimize_for_memory_usage_(false), 114 optimize_for_memory_usage_(false),
115 inline_allocation_disabled_(false), 115 inline_allocation_disabled_(false),
116 total_regexp_code_generated_(0), 116 total_regexp_code_generated_(0),
117 tracer_(nullptr), 117 tracer_(nullptr),
118 embedder_heap_tracer_(nullptr),
119 high_survival_rate_period_length_(0), 118 high_survival_rate_period_length_(0),
120 promoted_objects_size_(0), 119 promoted_objects_size_(0),
121 promotion_ratio_(0), 120 promotion_ratio_(0),
122 semi_space_copied_object_size_(0), 121 semi_space_copied_object_size_(0),
123 previous_semi_space_copied_object_size_(0), 122 previous_semi_space_copied_object_size_(0),
124 semi_space_copied_rate_(0), 123 semi_space_copied_rate_(0),
125 nodes_died_in_new_space_(0), 124 nodes_died_in_new_space_(0),
126 nodes_copied_in_new_space_(0), 125 nodes_copied_in_new_space_(0),
127 nodes_promoted_(0), 126 nodes_promoted_(0),
128 maximum_size_scavenges_(0), 127 maximum_size_scavenges_(0),
(...skipping 1412 matching lines...) Expand 10 before | Expand all | Expand 10 after
1541 return heap->InNewSpace(*p) && 1540 return heap->InNewSpace(*p) &&
1542 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); 1541 !HeapObject::cast(*p)->map_word().IsForwardingAddress();
1543 } 1542 }
1544 1543
1545 1544
// Returns true only for a JSObject that was constructed from an API function
// and whose map is still the constructor's initial map, i.e. the object has
// not been modified since construction. The new (right-column) version
// replaces the manual constructor/IsApiFunction checks with
// JSObject::WasConstructedFromApiFunction(), then casts the constructor
// unconditionally — presumably safe once WasConstructedFromApiFunction()
// holds; verify that invariant against the JSObject definition.
1546 static bool IsUnmodifiedHeapObject(Object** p) { 1545 static bool IsUnmodifiedHeapObject(Object** p) {
1547 Object* object = *p; 1546 Object* object = *p;
1548 if (object->IsSmi()) return false; 1547 if (object->IsSmi()) return false;
1549 HeapObject* heap_object = HeapObject::cast(object); 1548 HeapObject* heap_object = HeapObject::cast(object);
1550 if (!object->IsJSObject()) return false; 1549 if (!object->IsJSObject()) return false;
// Old version inspected the map's constructor directly; new version asks the
// object itself whether it came from an API function.
1551 Object* obj_constructor = (JSObject::cast(object))->map()->GetConstructor(); 1550 JSObject* js_object = JSObject::cast(object);
1552 if (!obj_constructor->IsJSFunction()) return false; 1551 if (!js_object->WasConstructedFromApiFunction()) return false;
1553 JSFunction* constructor = JSFunction::cast(obj_constructor); 1552 JSFunction* constructor =
1554 if (!constructor->shared()->IsApiFunction()) return false; 1553 JSFunction::cast(js_object->map()->GetConstructor());
// Note: the old code's `constructor != nullptr` check was dead — a successful
// JSFunction::cast never yields nullptr; the new code drops it.
1555 if (constructor != nullptr && 1554
1556 constructor->initial_map() == heap_object->map()) { 1555 return constructor->initial_map() == heap_object->map();
1557 return true;
1558 }
1559 return false;
1560 } 1556 }
1561 1557
1562 1558
1563 void PromotionQueue::Initialize() { 1559 void PromotionQueue::Initialize() {
1564 // The last to-space page may be used for promotion queue. On promotion 1560 // The last to-space page may be used for promotion queue. On promotion
1565 // conflict, we use the emergency stack. 1561 // conflict, we use the emergency stack.
1566 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == 1562 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) ==
1567 0); 1563 0);
1568 front_ = rear_ = 1564 front_ = rear_ =
1569 reinterpret_cast<struct Entry*>(heap_->new_space()->ToSpaceEnd()); 1565 reinterpret_cast<struct Entry*>(heap_->new_space()->ToSpaceEnd());
(...skipping 2001 matching lines...) Expand 10 before | Expand all | Expand 10 after
3571 3567
3572 AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) { 3568 AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
3573 // Make the clone. 3569 // Make the clone.
3574 Map* map = source->map(); 3570 Map* map = source->map();
3575 3571
3576 // We can only clone regexps, normal objects, api objects or arrays. Copying 3572 // We can only clone regexps, normal objects, api objects or arrays. Copying
3577 // anything else will break invariants. 3573 // anything else will break invariants.
3578 CHECK(map->instance_type() == JS_REGEXP_TYPE || 3574 CHECK(map->instance_type() == JS_REGEXP_TYPE ||
3579 map->instance_type() == JS_OBJECT_TYPE || 3575 map->instance_type() == JS_OBJECT_TYPE ||
3580 map->instance_type() == JS_ARRAY_TYPE || 3576 map->instance_type() == JS_ARRAY_TYPE ||
3577 map->instance_type() == JS_API_OBJECT_TYPE ||
3581 map->instance_type() == JS_SPECIAL_API_OBJECT_TYPE); 3578 map->instance_type() == JS_SPECIAL_API_OBJECT_TYPE);
3582 3579
3583 int object_size = map->instance_size(); 3580 int object_size = map->instance_size();
3584 HeapObject* clone = nullptr; 3581 HeapObject* clone = nullptr;
3585 3582
3586 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type())); 3583 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type()));
3587 3584
3588 int adjusted_object_size = 3585 int adjusted_object_size =
3589 site != NULL ? object_size + AllocationMemento::kSize : object_size; 3586 site != NULL ? object_size + AllocationMemento::kSize : object_size;
3590 AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE); 3587 AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE);
(...skipping 1776 matching lines...) Expand 10 before | Expand all | Expand 10 after
5367 #ifdef DEBUG 5364 #ifdef DEBUG
5368 // All pages right after bootstrapping must be marked as never-evacuate. 5365 // All pages right after bootstrapping must be marked as never-evacuate.
5369 PagedSpaces spaces(this); 5366 PagedSpaces spaces(this);
5370 for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) { 5367 for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) {
5371 PageIterator it(s); 5368 PageIterator it(s);
5372 while (it.has_next()) CHECK(it.next()->NeverEvacuate()); 5369 while (it.has_next()) CHECK(it.next()->NeverEvacuate());
5373 } 5370 }
5374 #endif // DEBUG 5371 #endif // DEBUG
5375 } 5372 }
5376 5373
// New in this patch: Heap no longer stores the embedder heap tracer itself
// (see the removed embedder_heap_tracer_ member and old setter below); it
// forwards registration to the mark-compact collector, which now owns the
// wrapper-tracing state.
5374 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
5375 mark_compact_collector()->SetEmbedderHeapTracer(tracer);
5376 }
5377
// Thin query delegate: reports whether the mark-compact collector currently
// has an embedder heap tracer configured (i.e. wrapper tracing replaces
// object grouping).
5378 bool Heap::UsingEmbedderHeapTracer() {
5379 return mark_compact_collector()->UsingEmbedderHeapTracer();
5380 }
5381
// Thin delegate: hands a JSObject that may be an embedder-managed wrapper to
// the mark-compact collector for later reporting to the embedder tracer.
5382 void Heap::TracePossibleWrapper(JSObject* js_object) {
5383 mark_compact_collector()->TracePossibleWrapper(js_object);
5384 }
5385
// Called for objects the embedder reports as externally referenced. The old
// (left-column) version performed the marking inline on the Heap: asserting
// the collector is in use, computing the mark bit, and marking the object.
// The new (right-column) version moves that logic into
// MarkCompactCollector::RegisterExternallyReferencedObject and delegates,
// consistent with the other tracer plumbing moved in this patch set.
5377 void Heap::RegisterExternallyReferencedObject(Object** object) { 5386 void Heap::RegisterExternallyReferencedObject(Object** object) {
5378 DCHECK(mark_compact_collector()->in_use()); 5387 mark_compact_collector()->RegisterExternallyReferencedObject(object);
5379 HeapObject* heap_object = HeapObject::cast(*object);
5380 DCHECK(Contains(heap_object));
5381 MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
5382 mark_compact_collector()->MarkObject(heap_object, mark_bit);
5383 } 5388 }
5384 5389
5385 void Heap::TearDown() { 5390 void Heap::TearDown() {
5386 #ifdef VERIFY_HEAP 5391 #ifdef VERIFY_HEAP
5387 if (FLAG_verify_heap) { 5392 if (FLAG_verify_heap) {
5388 Verify(); 5393 Verify();
5389 } 5394 }
5390 #endif 5395 #endif
5391 5396
5392 UpdateMaximumCommitted(); 5397 UpdateMaximumCommitted();
(...skipping 149 matching lines...) Expand 10 before | Expand all | Expand 10 after
5542 DCHECK(callback != NULL); 5547 DCHECK(callback != NULL);
5543 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { 5548 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
5544 if (gc_epilogue_callbacks_[i].callback == callback) { 5549 if (gc_epilogue_callbacks_[i].callback == callback) {
5545 gc_epilogue_callbacks_.Remove(i); 5550 gc_epilogue_callbacks_.Remove(i);
5546 return; 5551 return;
5547 } 5552 }
5548 } 5553 }
5549 UNREACHABLE(); 5554 UNREACHABLE();
5550 } 5555 }
5551 5556
// Removed by this patch (left column only): the old setter stored the tracer
// in Heap's own embedder_heap_tracer_ field, asserting it is set exactly once
// with a non-null tracer. Its replacement (added earlier in the file)
// delegates to the mark-compact collector instead.
5552 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
5553 DCHECK_NOT_NULL(tracer);
5554 CHECK_NULL(embedder_heap_tracer_);
5555 embedder_heap_tracer_ = tracer;
5556 }
5557
5558 // TODO(ishell): Find a better place for this. 5557 // TODO(ishell): Find a better place for this.
5559 void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj, 5558 void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
5560 Handle<DependentCode> dep) { 5559 Handle<DependentCode> dep) {
5561 DCHECK(!InNewSpace(*obj)); 5560 DCHECK(!InNewSpace(*obj));
5562 DCHECK(!InNewSpace(*dep)); 5561 DCHECK(!InNewSpace(*dep));
5563 Handle<WeakHashTable> table(weak_object_to_code_table(), isolate()); 5562 Handle<WeakHashTable> table(weak_object_to_code_table(), isolate());
5564 table = WeakHashTable::Put(table, obj, dep); 5563 table = WeakHashTable::Put(table, obj, dep);
5565 if (*table != weak_object_to_code_table()) 5564 if (*table != weak_object_to_code_table())
5566 set_weak_object_to_code_table(*table); 5565 set_weak_object_to_code_table(*table);
5567 DCHECK_EQ(*dep, LookupWeakObjectToCodeDependency(obj)); 5566 DCHECK_EQ(*dep, LookupWeakObjectToCodeDependency(obj));
(...skipping 874 matching lines...) Expand 10 before | Expand all | Expand 10 after
6442 } 6441 }
6443 6442
6444 6443
// Unchanged by this patch: static convenience wrapper that resolves a map to
// its static visitor id via StaticVisitorBase::GetVisitorId.
6445 // static 6444 // static
6446 int Heap::GetStaticVisitorIdForMap(Map* map) { 6445 int Heap::GetStaticVisitorIdForMap(Map* map) {
6447 return StaticVisitorBase::GetVisitorId(map); 6446 return StaticVisitorBase::GetVisitorId(map);
6448 } 6447 }
6449 6448
6450 } // namespace internal 6449 } // namespace internal
6451 } // namespace v8 6450 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/heap.h ('k') | src/heap/incremental-marking.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698