| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #include "incremental-marking.h" | 7 #include "incremental-marking.h" |
| 8 | 8 |
| 9 #include "code-stubs.h" | 9 #include "code-stubs.h" |
| 10 #include "compilation-cache.h" | 10 #include "compilation-cache.h" |
| (...skipping 214 matching lines...) | |
| 225 // We will mark the cache black with a separate pass when we finish marking. | 225 // We will mark the cache black with a separate pass when we finish marking. |
| 226 // Note that GC can happen when the context is not fully initialized, | 226 // Note that GC can happen when the context is not fully initialized, |
| 227 // so the cache can be undefined. | 227 // so the cache can be undefined. |
| 228 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 228 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 229 if (!cache->IsUndefined()) { | 229 if (!cache->IsUndefined()) { |
| 230 MarkObjectGreyDoNotEnqueue(cache); | 230 MarkObjectGreyDoNotEnqueue(cache); |
| 231 } | 231 } |
| 232 VisitNativeContext(map, context); | 232 VisitNativeContext(map, context); |
| 233 } | 233 } |
| 234 | 234 |
| 235 static void VisitWeakCollection(Map* map, HeapObject* object) { | |
| 236 Heap* heap = map->GetHeap(); | |
| 237 VisitPointers(heap, | |
| 238 HeapObject::RawField(object, | |
| 239 JSWeakCollection::kPropertiesOffset), | |
| 240 HeapObject::RawField(object, JSWeakCollection::kSize)); | |
| 241 } | |
| 242 | |
| 243 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 235 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
| 244 Object* obj = *p; | 236 Object* obj = *p; |
| 245 if (obj->IsHeapObject()) { | 237 if (obj->IsHeapObject()) { |
| 246 heap->mark_compact_collector()->RecordSlot(p, p, obj); | 238 heap->mark_compact_collector()->RecordSlot(p, p, obj); |
| 247 MarkObject(heap, obj); | 239 MarkObject(heap, obj); |
| 248 } | 240 } |
| 249 } | 241 } |
| 250 | 242 |
| 251 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { | 243 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
| 252 for (Object** p = start; p < end; p++) { | 244 for (Object** p = start; p < end; p++) { |
| (...skipping 741 matching lines...) | |
| 994 bytes_scanned_ = 0; | 986 bytes_scanned_ = 0; |
| 995 write_barriers_invoked_since_last_step_ = 0; | 987 write_barriers_invoked_since_last_step_ = 0; |
| 996 } | 988 } |
| 997 | 989 |
| 998 | 990 |
| 999 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 991 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
| 1000 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 992 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); |
| 1001 } | 993 } |
| 1002 | 994 |
| 1003 } } // namespace v8::internal | 995 } } // namespace v8::internal |