Index: src/heap/heap.h
diff --git a/src/heap/heap.h b/src/heap/heap.h
index dcf7bdea0ac812972304bd382d52b0a1f4c5eff2..1f573f77c4e1ef313903a913d7452546033b6167 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1452,21 +1452,83 @@ class Heap {
   void TraceObjectStat(const char* name, int count, int size, double time);
   void CheckpointObjectStats();
 
-  // We don't use a LockGuard here since we want to lock the heap
-  // only when FLAG_concurrent_recompilation is true.
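+  // A range of object slots, registered via RegisterStrongRoots, that the
+  // GC treats as additional strong roots. Nodes form a singly linked list.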
+  struct StrongRootsList {
+    Object** start_;
+    Object** end_;
+    StrongRootsList* next_;
+  };
+
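+  // Prepends the caller-owned range [start, end) to the strong-roots list.
+  // Callers must unregister the range before the underlying memory is freed.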
+  void RegisterStrongRoots(Object** start, Object** end) {
+    StrongRootsList* list = new StrongRootsList();
+    list->next_ = strong_roots_list_;
+    list->start_ = start;
+    list->end_ = end;
+    strong_roots_list_ = list;
+  }
+
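+  // Unlinks and deletes every registered entry whose range starts at
+  // |start|, reading each node's successor before the node is freed.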
+  void UnregisterStrongRoots(Object** start) {
+    StrongRootsList* prev = NULL;
+    StrongRootsList* list = strong_roots_list_;
+    while (list != NULL) {
+      StrongRootsList* next = list->next_;
+      if (list->start_ == start) {
+        if (prev) {
+          prev->next_ = next;
+        } else {
+          strong_roots_list_ = next;
+        }
+        delete list;
+      } else {
+        prev = list;
+      }
+      list = next;
+    }
+  }
+
+  // Taking this lock prevents the GC from entering a phase that relocates
+  // object references.
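+  // Typical use: stack-allocate a lock for the duration of a scope that
+  // reads raw object pointers, e.g. Heap::RelocationLock lock(heap);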
   class RelocationLock {
    public:
     explicit RelocationLock(Heap* heap) : heap_(heap) {
       heap_->relocation_mutex_.Lock();
     }
 
-
     ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
 
    private:
     Heap* heap_;
   };
 
+  // An optional version of the above lock that can be used for some critical
+  // sections on the mutator thread; it is safe only because the GC currently
+  // does not do concurrent compaction.
+  class OptionalRelocationLock {
+   public:
+    explicit OptionalRelocationLock(Heap* heap, bool concurrent)
+        : heap_(heap), concurrent_(concurrent) {
+      if (concurrent_) heap_->relocation_mutex_.Lock();
+    }
+
+    ~OptionalRelocationLock() {
+      if (concurrent_) heap_->relocation_mutex_.Unlock();
+    }
+
+   private:
+    Heap* heap_;
+    bool concurrent_;
+  };
+
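+  // Typical mutator-side use, locking only when a concurrent phase may
+  // be running:
+  //   Heap::OptionalRelocationLock lock(heap, FLAG_concurrent_recompilation);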
   void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                      Handle<DependentCode> dep);
 
@@ -2152,6 +2214,9 @@ class Heap {
 
   bool concurrent_sweeping_enabled_;
 
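+  // Head of the linked list maintained by Register/UnregisterStrongRoots.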
+  StrongRootsList* strong_roots_list_;
+
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;