| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 255 matching lines...) |
| 266 | 266 |
| 267 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { | 267 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { |
| 268 Context* context = Context::cast(object); | 268 Context* context = Context::cast(object); |
| 269 | 269 |
| 270 // We will mark the cache black with a separate pass | 270 // We will mark the cache black with a separate pass |
| 271 // when we finish marking. | 271 // when we finish marking. |
| 272 MarkObjectGreyDoNotEnqueue(context->normalized_map_cache()); | 272 MarkObjectGreyDoNotEnqueue(context->normalized_map_cache()); |
| 273 VisitNativeContext(map, context); | 273 VisitNativeContext(map, context); |
| 274 } | 274 } |
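
Note for readers outside the GC code: `MarkObjectGreyDoNotEnqueue` keeps the normalized map cache alive without pushing it on the marking deque, so its contents are not traced during this pass; as the comment above says, the cache is blackened in a separate pass once marking finishes. A minimal standalone model of that idea (illustrative names and types, not V8's actual implementation):

```cpp
#include <cstdio>
#include <vector>

enum class Color { White, Grey, Black };

struct Obj {
  Color color = Color::White;
  std::vector<Obj*> children;
};

static std::vector<Obj*> worklist;

// Normal marking: turn white objects grey and queue them for tracing.
void MarkGrey(Obj* o) {
  if (o->color == Color::White) {
    o->color = Color::Grey;
    worklist.push_back(o);
  }
}

// "Grey, do not enqueue": the object survives, but its children are not
// traced in this pass; a separate finishing pass deals with it later.
void MarkGreyDoNotEnqueue(Obj* o) {
  if (o->color == Color::White) o->color = Color::Grey;
}

int main() {
  Obj cache, entry;
  cache.children.push_back(&entry);
  MarkGreyDoNotEnqueue(&cache);  // like the normalized map cache above
  while (!worklist.empty()) {    // drain: cache is never popped
    Obj* o = worklist.back();
    worklist.pop_back();
    o->color = Color::Black;
    for (Obj* c : o->children) MarkGrey(c);
  }
  std::printf("entry still white: %d\n", entry.color == Color::White);
}
```
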
| 275 | 275 |
| 276 static void VisitJSWeakMap(Map* map, HeapObject* object) { | 276 static void VisitWeakCollection(Map* map, HeapObject* object) { |
| 277 Heap* heap = map->GetHeap(); | 277 Heap* heap = map->GetHeap(); |
| 278 VisitPointers(heap, | 278 VisitPointers(heap, |
| 279 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset), | 279 HeapObject::RawField(object, |
| 280 HeapObject::RawField(object, JSWeakMap::kSize)); | 280 JSWeakCollection::kPropertiesOffset), |
| 281 HeapObject::RawField(object, JSWeakCollection::kSize)); |
| 281 } | 282 } |
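
The rename from `VisitJSWeakMap` to `VisitWeakCollection` generalizes the visitor to the shared `JSWeakCollection` base, with the visited slot range now expressed via the base class's offsets. The `RawField(..., kPropertiesOffset)` / `RawField(..., kSize)` pair denotes a half-open run of in-object pointer slots. A standalone sketch of that range-visit pattern (model types, not V8 code):

```cpp
#include <cstdio>

// Two adjacent tagged fields, standing in for the slots between
// kPropertiesOffset and kSize.
struct WeakCollectionModel {
  void* properties;
  void* table;
};

using Slot = void**;

// Visit every slot in [start, end), as VisitPointers does above.
void VisitPointers(Slot start, Slot end) {
  for (Slot p = start; p < end; ++p) {
    std::printf("visiting slot %p -> %p\n", static_cast<void*>(p), *p);
  }
}

int main() {
  int a = 0, b = 0;
  WeakCollectionModel o{&a, &b};
  // Mirrors RawField(object, kPropertiesOffset)..RawField(object, kSize):
  // a half-open range whose end is one past the last in-object field.
  VisitPointers(reinterpret_cast<Slot>(&o.properties),
                reinterpret_cast<Slot>(&o.table) + 1);
}
```
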
| 282 | 283 |
| 283 static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {} | 284 static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {} |
| 284 | 285 |
| 285 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 286 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
| 286 Object* obj = *p; | 287 Object* obj = *p; |
| 287 if (obj->NonFailureIsHeapObject()) { | 288 if (obj->NonFailureIsHeapObject()) { |
| 288 heap->mark_compact_collector()->RecordSlot(p, p, obj); | 289 heap->mark_compact_collector()->RecordSlot(p, p, obj); |
| 289 MarkObject(heap, obj); | 290 MarkObject(heap, obj); |
| 290 } | 291 } |
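
`VisitPointer` does two things per slot: it records the slot with the mark-compact collector, so the pointer can be fixed up if its target moves during compaction, and it marks the target live. A self-contained model of that record-then-mark pattern (illustrative names, not V8 code):

```cpp
#include <cstdio>
#include <vector>

struct ObjModel { bool marked = false; };

static std::vector<ObjModel**> recorded_slots;

// Remember where the pointer lives, so compaction can rewrite it later.
void RecordSlot(ObjModel** slot) { recorded_slots.push_back(slot); }
void MarkObject(ObjModel* o) { o->marked = true; }

// Stands in for the VisitPointer above: record the slot, then mark.
void VisitPointer(ObjModel** p) {
  if (ObjModel* obj = *p) {  // stands in for NonFailureIsHeapObject()
    RecordSlot(p);
    MarkObject(obj);
  }
}

int main() {
  ObjModel target;
  ObjModel* field = &target;
  VisitPointer(&field);
  // If compaction moves `target`, every recorded slot is fixed up:
  ObjModel moved = target;
  for (ObjModel** slot : recorded_slots) *slot = &moved;
  std::printf("field follows the move: %d\n", field == &moved);
}
```
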
| (...skipping 741 matching lines...) |
| 1032 bytes_scanned_ = 0; | 1033 bytes_scanned_ = 0; |
| 1033 write_barriers_invoked_since_last_step_ = 0; | 1034 write_barriers_invoked_since_last_step_ = 0; |
| 1034 } | 1035 } |
| 1035 | 1036 |
| 1036 | 1037 |
| 1037 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 1038 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
| 1038 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 1039 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); |
| 1039 } | 1040 } |
| 1040 | 1041 |
| 1041 } } // namespace v8::internal | 1042 } } // namespace v8::internal |