Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(9)

Side by Side Diff: src/mark-compact.cc

Issue 157503002: A64: Synchronize with r18444. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/log.cc ('k') | src/messages.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 968 matching lines...) Expand 10 before | Expand all | Expand 10 after
979 // reachable from the objects on the marking stack, but no longer push them on 979 // reachable from the objects on the marking stack, but no longer push them on
980 // the marking stack. Instead, we mark them as both marked and overflowed. 980 // the marking stack. Instead, we mark them as both marked and overflowed.
981 // When the stack is in the overflowed state, objects marked as overflowed 981 // When the stack is in the overflowed state, objects marked as overflowed
982 // have been reached and marked but their children have not been visited yet. 982 // have been reached and marked but their children have not been visited yet.
983 // After emptying the marking stack, we clear the overflow flag and traverse 983 // After emptying the marking stack, we clear the overflow flag and traverse
984 // the heap looking for objects marked as overflowed, push them on the stack, 984 // the heap looking for objects marked as overflowed, push them on the stack,
985 // and continue with marking. This process repeats until all reachable 985 // and continue with marking. This process repeats until all reachable
986 // objects have been marked. 986 // objects have been marked.
987 987
988 void CodeFlusher::ProcessJSFunctionCandidates() { 988 void CodeFlusher::ProcessJSFunctionCandidates() {
989 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); 989 Code* lazy_compile =
990 isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
990 Object* undefined = isolate_->heap()->undefined_value(); 991 Object* undefined = isolate_->heap()->undefined_value();
991 992
992 JSFunction* candidate = jsfunction_candidates_head_; 993 JSFunction* candidate = jsfunction_candidates_head_;
993 JSFunction* next_candidate; 994 JSFunction* next_candidate;
994 while (candidate != NULL) { 995 while (candidate != NULL) {
995 next_candidate = GetNextCandidate(candidate); 996 next_candidate = GetNextCandidate(candidate);
996 ClearNextCandidate(candidate, undefined); 997 ClearNextCandidate(candidate, undefined);
997 998
998 SharedFunctionInfo* shared = candidate->shared(); 999 SharedFunctionInfo* shared = candidate->shared();
999 1000
(...skipping 24 matching lines...) Expand all
1024 RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot); 1025 RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
1025 1026
1026 candidate = next_candidate; 1027 candidate = next_candidate;
1027 } 1028 }
1028 1029
1029 jsfunction_candidates_head_ = NULL; 1030 jsfunction_candidates_head_ = NULL;
1030 } 1031 }
1031 1032
1032 1033
1033 void CodeFlusher::ProcessSharedFunctionInfoCandidates() { 1034 void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
1034 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); 1035 Code* lazy_compile =
1036 isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
1035 1037
1036 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; 1038 SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
1037 SharedFunctionInfo* next_candidate; 1039 SharedFunctionInfo* next_candidate;
1038 while (candidate != NULL) { 1040 while (candidate != NULL) {
1039 next_candidate = GetNextCandidate(candidate); 1041 next_candidate = GetNextCandidate(candidate);
1040 ClearNextCandidate(candidate); 1042 ClearNextCandidate(candidate);
1041 1043
1042 Code* code = candidate->code(); 1044 Code* code = candidate->code();
1043 MarkBit code_mark = Marking::MarkBitFrom(code); 1045 MarkBit code_mark = Marking::MarkBitFrom(code);
1044 if (!code_mark.Get()) { 1046 if (!code_mark.Get()) {
(...skipping 11 matching lines...) Expand all
1056 RecordSlot(code_slot, code_slot, *code_slot); 1058 RecordSlot(code_slot, code_slot, *code_slot);
1057 1059
1058 candidate = next_candidate; 1060 candidate = next_candidate;
1059 } 1061 }
1060 1062
1061 shared_function_info_candidates_head_ = NULL; 1063 shared_function_info_candidates_head_ = NULL;
1062 } 1064 }
1063 1065
1064 1066
1065 void CodeFlusher::ProcessOptimizedCodeMaps() { 1067 void CodeFlusher::ProcessOptimizedCodeMaps() {
1066 static const int kEntriesStart = SharedFunctionInfo::kEntriesStart; 1068 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
1067 static const int kEntryLength = SharedFunctionInfo::kEntryLength;
1068 static const int kContextOffset = 0;
1069 static const int kCodeOffset = 1;
1070 static const int kLiteralsOffset = 2;
1071 STATIC_ASSERT(kEntryLength == 3);
1072 1069
1073 SharedFunctionInfo* holder = optimized_code_map_holder_head_; 1070 SharedFunctionInfo* holder = optimized_code_map_holder_head_;
1074 SharedFunctionInfo* next_holder; 1071 SharedFunctionInfo* next_holder;
1072
1075 while (holder != NULL) { 1073 while (holder != NULL) {
1076 next_holder = GetNextCodeMap(holder); 1074 next_holder = GetNextCodeMap(holder);
1077 ClearNextCodeMap(holder); 1075 ClearNextCodeMap(holder);
1078 1076
1079 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map()); 1077 FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
1080 int new_length = kEntriesStart; 1078 int new_length = SharedFunctionInfo::kEntriesStart;
1081 int old_length = code_map->length(); 1079 int old_length = code_map->length();
1082 for (int i = kEntriesStart; i < old_length; i += kEntryLength) { 1080 for (int i = SharedFunctionInfo::kEntriesStart;
1083 Code* code = Code::cast(code_map->get(i + kCodeOffset)); 1081 i < old_length;
1084 MarkBit code_mark = Marking::MarkBitFrom(code); 1082 i += SharedFunctionInfo::kEntryLength) {
1085 if (!code_mark.Get()) { 1083 Code* code =
1086 continue; 1084 Code::cast(code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
1085 if (!Marking::MarkBitFrom(code).Get()) continue;
1086
1087 // Move every slot in the entry.
1088 for (int j = 0; j < SharedFunctionInfo::kEntryLength; j++) {
1089 int dst_index = new_length++;
1090 Object** slot = code_map->RawFieldOfElementAt(dst_index);
1091 Object* object = code_map->get(i + j);
1092 code_map->set(dst_index, object);
1093 if (j == SharedFunctionInfo::kOsrAstIdOffset) {
1094 ASSERT(object->IsSmi());
1095 } else {
1096 ASSERT(Marking::IsBlack(
1097 Marking::MarkBitFrom(HeapObject::cast(*slot))));
1098 isolate_->heap()->mark_compact_collector()->
1099 RecordSlot(slot, slot, *slot);
1100 }
1087 } 1101 }
1088
1089 // Update and record the context slot in the optimized code map.
1090 Object** context_slot = HeapObject::RawField(code_map,
1091 FixedArray::OffsetOfElementAt(new_length));
1092 code_map->set(new_length++, code_map->get(i + kContextOffset));
1093 ASSERT(Marking::IsBlack(
1094 Marking::MarkBitFrom(HeapObject::cast(*context_slot))));
1095 isolate_->heap()->mark_compact_collector()->
1096 RecordSlot(context_slot, context_slot, *context_slot);
1097
1098 // Update and record the code slot in the optimized code map.
1099 Object** code_slot = HeapObject::RawField(code_map,
1100 FixedArray::OffsetOfElementAt(new_length));
1101 code_map->set(new_length++, code_map->get(i + kCodeOffset));
1102 ASSERT(Marking::IsBlack(
1103 Marking::MarkBitFrom(HeapObject::cast(*code_slot))));
1104 isolate_->heap()->mark_compact_collector()->
1105 RecordSlot(code_slot, code_slot, *code_slot);
1106
1107 // Update and record the literals slot in the optimized code map.
1108 Object** literals_slot = HeapObject::RawField(code_map,
1109 FixedArray::OffsetOfElementAt(new_length));
1110 code_map->set(new_length++, code_map->get(i + kLiteralsOffset));
1111 ASSERT(Marking::IsBlack(
1112 Marking::MarkBitFrom(HeapObject::cast(*literals_slot))));
1113 isolate_->heap()->mark_compact_collector()->
1114 RecordSlot(literals_slot, literals_slot, *literals_slot);
1115 } 1102 }
1116 1103
1117 // Trim the optimized code map if entries have been removed. 1104 // Trim the optimized code map if entries have been removed.
1118 if (new_length < old_length) { 1105 if (new_length < old_length) {
1119 holder->TrimOptimizedCodeMap(old_length - new_length); 1106 holder->TrimOptimizedCodeMap(old_length - new_length);
1120 } 1107 }
1121 1108
1122 holder = next_holder; 1109 holder = next_holder;
1123 } 1110 }
1124 1111
(...skipping 707 matching lines...) Expand 10 before | Expand all | Expand 10 after
1832 // Mark all the objects reachable from the map and body. May leave 1819 // Mark all the objects reachable from the map and body. May leave
1833 // overflowed objects in the heap. 1820 // overflowed objects in the heap.
1834 collector_->EmptyMarkingDeque(); 1821 collector_->EmptyMarkingDeque();
1835 } 1822 }
1836 1823
1837 MarkCompactCollector* collector_; 1824 MarkCompactCollector* collector_;
1838 }; 1825 };
1839 1826
1840 1827
1841 // Helper class for pruning the string table. 1828 // Helper class for pruning the string table.
1829 template<bool finalize_external_strings>
1842 class StringTableCleaner : public ObjectVisitor { 1830 class StringTableCleaner : public ObjectVisitor {
1843 public: 1831 public:
1844 explicit StringTableCleaner(Heap* heap) 1832 explicit StringTableCleaner(Heap* heap)
1845 : heap_(heap), pointers_removed_(0) { } 1833 : heap_(heap), pointers_removed_(0) { }
1846 1834
1847 virtual void VisitPointers(Object** start, Object** end) { 1835 virtual void VisitPointers(Object** start, Object** end) {
1848 // Visit all HeapObject pointers in [start, end). 1836 // Visit all HeapObject pointers in [start, end).
1849 for (Object** p = start; p < end; p++) { 1837 for (Object** p = start; p < end; p++) {
1850 Object* o = *p; 1838 Object* o = *p;
1851 if (o->IsHeapObject() && 1839 if (o->IsHeapObject() &&
1852 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { 1840 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) {
1853 // Check if the internalized string being pruned is external. We need to 1841 if (finalize_external_strings) {
1854 // delete the associated external data as this string is going away. 1842 ASSERT(o->IsExternalString());
1855
1856 // Since no objects have yet been moved we can safely access the map of
1857 // the object.
1858 if (o->IsExternalString()) {
1859 heap_->FinalizeExternalString(String::cast(*p)); 1843 heap_->FinalizeExternalString(String::cast(*p));
1844 } else {
1845 pointers_removed_++;
1860 } 1846 }
1861 // Set the entry to the_hole_value (as deleted). 1847 // Set the entry to the_hole_value (as deleted).
1862 *p = heap_->the_hole_value(); 1848 *p = heap_->the_hole_value();
1863 pointers_removed_++;
1864 } 1849 }
1865 } 1850 }
1866 } 1851 }
1867 1852
1868 int PointersRemoved() { 1853 int PointersRemoved() {
1854 ASSERT(!finalize_external_strings);
1869 return pointers_removed_; 1855 return pointers_removed_;
1870 } 1856 }
1871 1857
1872 private: 1858 private:
1873 Heap* heap_; 1859 Heap* heap_;
1874 int pointers_removed_; 1860 int pointers_removed_;
1875 }; 1861 };
1876 1862
1877 1863
1864 typedef StringTableCleaner<false> InternalizedStringTableCleaner;
1865 typedef StringTableCleaner<true> ExternalStringTableCleaner;
1866
1867
1878 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects 1868 // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
1879 // are retained. 1869 // are retained.
1880 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { 1870 class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
1881 public: 1871 public:
1882 virtual Object* RetainAs(Object* object) { 1872 virtual Object* RetainAs(Object* object) {
1883 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { 1873 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) {
1884 return object; 1874 return object;
1885 } else if (object->IsAllocationSite() && 1875 } else if (object->IsAllocationSite() &&
1886 !(AllocationSite::cast(object)->IsZombie())) { 1876 !(AllocationSite::cast(object)->IsZombie())) {
1887 // "dead" AllocationSites need to live long enough for a traversal of new 1877 // "dead" AllocationSites need to live long enough for a traversal of new
(...skipping 503 matching lines...) Expand 10 before | Expand all | Expand 10 after
2391 // Object literal map caches reference strings (cache keys) and maps 2381 // Object literal map caches reference strings (cache keys) and maps
2392 // (cache values). At this point still useful maps have already been 2382 // (cache values). At this point still useful maps have already been
2393 // marked. Mark the keys for the alive values before we process the 2383 // marked. Mark the keys for the alive values before we process the
2394 // string table. 2384 // string table.
2395 ProcessMapCaches(); 2385 ProcessMapCaches();
2396 2386
2397 // Prune the string table removing all strings only pointed to by the 2387 // Prune the string table removing all strings only pointed to by the
2398 // string table. Cannot use string_table() here because the string 2388 // string table. Cannot use string_table() here because the string
2399 // table is marked. 2389 // table is marked.
2400 StringTable* string_table = heap()->string_table(); 2390 StringTable* string_table = heap()->string_table();
2401 StringTableCleaner v(heap()); 2391 InternalizedStringTableCleaner internalized_visitor(heap());
2402 string_table->IterateElements(&v); 2392 string_table->IterateElements(&internalized_visitor);
2403 string_table->ElementsRemoved(v.PointersRemoved()); 2393 string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
2404 heap()->external_string_table_.Iterate(&v); 2394
2395 ExternalStringTableCleaner external_visitor(heap());
2396 heap()->external_string_table_.Iterate(&external_visitor);
2405 heap()->external_string_table_.CleanUp(); 2397 heap()->external_string_table_.CleanUp();
2406 2398
2407 // Process the weak references. 2399 // Process the weak references.
2408 MarkCompactWeakObjectRetainer mark_compact_object_retainer; 2400 MarkCompactWeakObjectRetainer mark_compact_object_retainer;
2409 heap()->ProcessWeakReferences(&mark_compact_object_retainer); 2401 heap()->ProcessWeakReferences(&mark_compact_object_retainer);
2410 2402
2411 // Remove object groups after marking phase. 2403 // Remove object groups after marking phase.
2412 heap()->isolate()->global_handles()->RemoveObjectGroups(); 2404 heap()->isolate()->global_handles()->RemoveObjectGroups();
2413 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); 2405 heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
2414 2406
2415 // Flush code from collected candidates. 2407 // Flush code from collected candidates.
2416 if (is_code_flushing_enabled()) { 2408 if (is_code_flushing_enabled()) {
2417 code_flusher_->ProcessCandidates(); 2409 code_flusher_->ProcessCandidates();
2418 // If incremental marker does not support code flushing, we need to 2410 // If incremental marker does not support code flushing, we need to
2419 // disable it before incremental marking steps for next cycle. 2411 // disable it before incremental marking steps for next cycle.
2420 if (FLAG_flush_code && !FLAG_flush_code_incrementally) { 2412 if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
2421 EnableCodeFlushing(false); 2413 EnableCodeFlushing(false);
2422 } 2414 }
2423 } 2415 }
2424 2416
2425 if (!FLAG_watch_ic_patching) {
2426 // Clean up dead objects from the runtime profiler.
2427 heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
2428 }
2429
2430 if (FLAG_track_gc_object_stats) { 2417 if (FLAG_track_gc_object_stats) {
2431 heap()->CheckpointObjectStats(); 2418 heap()->CheckpointObjectStats();
2432 } 2419 }
2433 } 2420 }
2434 2421
2435 2422
2436 void MarkCompactCollector::ProcessMapCaches() { 2423 void MarkCompactCollector::ProcessMapCaches() {
2437 Object* raw_context = heap()->native_contexts_list_; 2424 Object* raw_context = heap()->native_contexts_list_;
2438 while (raw_context != heap()->undefined_value()) { 2425 while (raw_context != heap()->undefined_value()) {
2439 Context* context = reinterpret_cast<Context*>(raw_context); 2426 Context* context = reinterpret_cast<Context*>(raw_context);
(...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after
2599 if (new_number_of_transitions != i) { 2586 if (new_number_of_transitions != i) {
2600 prototype_transitions->set( 2587 prototype_transitions->set(
2601 proto_index, 2588 proto_index,
2602 prototype, 2589 prototype,
2603 UPDATE_WRITE_BARRIER); 2590 UPDATE_WRITE_BARRIER);
2604 prototype_transitions->set( 2591 prototype_transitions->set(
2605 map_index, 2592 map_index,
2606 cached_map, 2593 cached_map,
2607 SKIP_WRITE_BARRIER); 2594 SKIP_WRITE_BARRIER);
2608 } 2595 }
2609 Object** slot = 2596 Object** slot = prototype_transitions->RawFieldOfElementAt(proto_index);
2610 HeapObject::RawField(prototype_transitions,
2611 FixedArray::OffsetOfElementAt(proto_index));
2612 RecordSlot(slot, slot, prototype); 2597 RecordSlot(slot, slot, prototype);
2613 new_number_of_transitions++; 2598 new_number_of_transitions++;
2614 } 2599 }
2615 } 2600 }
2616 2601
2617 if (new_number_of_transitions != number_of_transitions) { 2602 if (new_number_of_transitions != number_of_transitions) {
2618 map->SetNumberOfProtoTransitions(new_number_of_transitions); 2603 map->SetNumberOfProtoTransitions(new_number_of_transitions);
2619 } 2604 }
2620 2605
2621 // Fill slots that became free with undefined value. 2606 // Fill slots that became free with undefined value.
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
2706 while (weak_collection_obj != Smi::FromInt(0)) { 2691 while (weak_collection_obj != Smi::FromInt(0)) {
2707 ASSERT(MarkCompactCollector::IsMarked( 2692 ASSERT(MarkCompactCollector::IsMarked(
2708 HeapObject::cast(weak_collection_obj))); 2693 HeapObject::cast(weak_collection_obj)));
2709 JSWeakCollection* weak_collection = 2694 JSWeakCollection* weak_collection =
2710 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2695 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2711 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); 2696 ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2712 Object** anchor = reinterpret_cast<Object**>(table->address()); 2697 Object** anchor = reinterpret_cast<Object**>(table->address());
2713 for (int i = 0; i < table->Capacity(); i++) { 2698 for (int i = 0; i < table->Capacity(); i++) {
2714 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { 2699 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2715 Object** key_slot = 2700 Object** key_slot =
2716 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( 2701 table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
2717 ObjectHashTable::EntryToIndex(i)));
2718 RecordSlot(anchor, key_slot, *key_slot); 2702 RecordSlot(anchor, key_slot, *key_slot);
2719 Object** value_slot = 2703 Object** value_slot =
2720 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( 2704 table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
2721 ObjectHashTable::EntryToValueIndex(i)));
2722 MarkCompactMarkingVisitor::MarkObjectByPointer( 2705 MarkCompactMarkingVisitor::MarkObjectByPointer(
2723 this, anchor, value_slot); 2706 this, anchor, value_slot);
2724 } 2707 }
2725 } 2708 }
2726 weak_collection_obj = weak_collection->next(); 2709 weak_collection_obj = weak_collection->next();
2727 } 2710 }
2728 } 2711 }
2729 2712
2730 2713
2731 void MarkCompactCollector::ClearWeakCollections() { 2714 void MarkCompactCollector::ClearWeakCollections() {
(...skipping 622 matching lines...) Expand 10 before | Expand all | Expand 10 after
3354 3337
3355 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { 3338 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
3356 Heap::RelocationLock relocation_lock(heap()); 3339 Heap::RelocationLock relocation_lock(heap());
3357 3340
3358 bool code_slots_filtering_required; 3341 bool code_slots_filtering_required;
3359 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); 3342 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
3360 code_slots_filtering_required = MarkInvalidatedCode(); 3343 code_slots_filtering_required = MarkInvalidatedCode();
3361 EvacuateNewSpace(); 3344 EvacuateNewSpace();
3362 } 3345 }
3363 3346
3347 // We have to traverse our allocation sites scratchpad which contains raw
3348 // pointers before we move objects. During new space evacuation we
3349 // gathered pretenuring statistics. The found allocation sites may not be
3350 // valid after compacting old space.
3351 heap()->ProcessPretenuringFeedback();
3352
3353
3364 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES); 3354 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);
3365 EvacuatePages(); 3355 EvacuatePages();
3366 } 3356 }
3367 3357
3368 // Second pass: find pointers to new space and update them. 3358 // Second pass: find pointers to new space and update them.
3369 PointersUpdatingVisitor updating_visitor(heap()); 3359 PointersUpdatingVisitor updating_visitor(heap());
3370 3360
3371 { GCTracer::Scope gc_scope(tracer_, 3361 { GCTracer::Scope gc_scope(tracer_,
3372 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS); 3362 GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
3373 // Update pointers in to space. 3363 // Update pointers in to space.
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after
3507 WeakHashTable* table = 3497 WeakHashTable* table =
3508 WeakHashTable::cast(heap_->weak_object_to_code_table()); 3498 WeakHashTable::cast(heap_->weak_object_to_code_table());
3509 table->Iterate(&updating_visitor); 3499 table->Iterate(&updating_visitor);
3510 table->Rehash(heap_->undefined_value()); 3500 table->Rehash(heap_->undefined_value());
3511 } 3501 }
3512 3502
3513 // Update pointers from external string table. 3503 // Update pointers from external string table.
3514 heap_->UpdateReferencesInExternalStringTable( 3504 heap_->UpdateReferencesInExternalStringTable(
3515 &UpdateReferenceInExternalStringTableEntry); 3505 &UpdateReferenceInExternalStringTableEntry);
3516 3506
3517 if (!FLAG_watch_ic_patching) {
3518 // Update JSFunction pointers from the runtime profiler.
3519 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
3520 &updating_visitor);
3521 }
3522
3523 EvacuationWeakObjectRetainer evacuation_object_retainer; 3507 EvacuationWeakObjectRetainer evacuation_object_retainer;
3524 heap()->ProcessWeakReferences(&evacuation_object_retainer); 3508 heap()->ProcessWeakReferences(&evacuation_object_retainer);
3525 3509
3526 // Visit invalidated code (we ignored all slots on it) and clear mark-bits 3510 // Visit invalidated code (we ignored all slots on it) and clear mark-bits
3527 // under it. 3511 // under it.
3528 ProcessInvalidatedCode(&updating_visitor); 3512 ProcessInvalidatedCode(&updating_visitor);
3529 3513
3530 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); 3514 heap_->isolate()->inner_pointer_to_code_cache()->Flush();
3531 3515
3532 #ifdef VERIFY_HEAP 3516 #ifdef VERIFY_HEAP
(...skipping 834 matching lines...) Expand 10 before | Expand all | Expand 10 after
4367 while (buffer != NULL) { 4351 while (buffer != NULL) {
4368 SlotsBuffer* next_buffer = buffer->next(); 4352 SlotsBuffer* next_buffer = buffer->next();
4369 DeallocateBuffer(buffer); 4353 DeallocateBuffer(buffer);
4370 buffer = next_buffer; 4354 buffer = next_buffer;
4371 } 4355 }
4372 *buffer_address = NULL; 4356 *buffer_address = NULL;
4373 } 4357 }
4374 4358
4375 4359
4376 } } // namespace v8::internal 4360 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/log.cc ('k') | src/messages.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698