Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 50 matching lines...) | |
| 61 #endif | 61 #endif |
| 62 sweep_precisely_(false), | 62 sweep_precisely_(false), |
| 63 reduce_memory_footprint_(false), | 63 reduce_memory_footprint_(false), |
| 64 abort_incremental_marking_(false), | 64 abort_incremental_marking_(false), |
| 65 compacting_(false), | 65 compacting_(false), |
| 66 was_marked_incrementally_(false), | 66 was_marked_incrementally_(false), |
| 67 tracer_(NULL), | 67 tracer_(NULL), |
| 68 migration_slots_buffer_(NULL), | 68 migration_slots_buffer_(NULL), |
| 69 heap_(NULL), | 69 heap_(NULL), |
| 70 code_flusher_(NULL), | 70 code_flusher_(NULL), |
| 71 encountered_weak_maps_(NULL), | 71 encountered_weak_maps_(NULL) { } |
| 72 marker_(this, this) { } | |
| 73 | 72 |
| 74 | 73 |
| 75 #ifdef DEBUG | 74 #ifdef DEBUG |
| 76 class VerifyMarkingVisitor: public ObjectVisitor { | 75 class VerifyMarkingVisitor: public ObjectVisitor { |
| 77 public: | 76 public: |
| 78 void VisitPointers(Object** start, Object** end) { | 77 void VisitPointers(Object** start, Object** end) { |
| 79 for (Object** current = start; current < end; current++) { | 78 for (Object** current = start; current < end; current++) { |
| 80 if ((*current)->IsHeapObject()) { | 79 if ((*current)->IsHeapObject()) { |
| 81 HeapObject* object = HeapObject::cast(*current); | 80 HeapObject* object = HeapObject::cast(*current); |
| 82 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); | 81 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); |
| (...skipping 977 matching lines...) | |
| 1060 if (end - start >= kMinRangeForMarkingRecursion) { | 1059 if (end - start >= kMinRangeForMarkingRecursion) { |
| 1061 if (VisitUnmarkedObjects(heap, start, end)) return; | 1060 if (VisitUnmarkedObjects(heap, start, end)) return; |
| 1062 // We are close to a stack overflow, so just mark the objects. | 1061 // We are close to a stack overflow, so just mark the objects. |
| 1063 } | 1062 } |
| 1064 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1063 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1065 for (Object** p = start; p < end; p++) { | 1064 for (Object** p = start; p < end; p++) { |
| 1066 MarkObjectByPointer(collector, start, p); | 1065 MarkObjectByPointer(collector, start, p); |
| 1067 } | 1066 } |
| 1068 } | 1067 } |
| 1069 | 1068 |
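For context on the hunk above: `VisitPointers` first tries to visit a large range recursively via `VisitUnmarkedObjects`, and when that signals it is close to a stack overflow it falls back to pushing the objects onto the marking deque instead. A minimal, self-contained sketch of that pattern, with an explicit depth budget standing in for V8's real stack-limit check (all types and names below are invented for illustration, not V8's):

```cpp
#include <cstdio>
#include <deque>
#include <vector>

struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

static std::deque<Node*> marking_deque;  // grey worklist

// Mark grey: set the bit and defer body scanning to the drain loop.
static void MarkAndPush(Node* n) {
  if (n->marked) return;
  n->marked = true;
  marking_deque.push_back(n);
}

// Fast path: scan a body recursively while the depth budget (a stand-in
// for a real stack-limit check) lasts; near "overflow", defer instead.
static void ScanBody(Node* n, int depth) {
  for (Node* child : n->children) {
    if (child->marked) continue;
    if (depth > 0) {
      child->marked = true;        // mark black and visit immediately
      ScanBody(child, depth - 1);
    } else {
      MarkAndPush(child);          // too deep: queue for the drain loop
    }
  }
}

int main() {
  Node a, b, c, d;
  a.children = {&b, &c};
  c.children = {&d, &a};           // a cycle is fine: marked nodes are skipped
  a.marked = true;
  ScanBody(&a, /*depth=*/1);       // tiny budget forces the fallback
  while (!marking_deque.empty()) { // iterative drain of deferred nodes
    Node* n = marking_deque.front();
    marking_deque.pop_front();
    ScanBody(n, /*depth=*/0);
  }
  std::printf("%d %d %d %d\n", a.marked, b.marked, c.marked, d.marked);
}
```

The drain loop plays the role of emptying the marking deque: deferred objects are still scanned, just iteratively rather than on the C stack.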
| 1069 // Marks the object black and pushes it on the marking stack. | |
| 1070 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1070 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
| 1071 MarkBit mark = Marking::MarkBitFrom(object); | 1071 MarkBit mark = Marking::MarkBitFrom(object); |
| 1072 heap->mark_compact_collector()->MarkObject(object, mark); | 1072 heap->mark_compact_collector()->MarkObject(object, mark); |
| 1073 } | 1073 } |
| 1074 | 1074 |
| 1075 // Marks the object black without pushing it on the marking stack. | |
| 1076 // Returns true if object needed marking and false otherwise. | |
| 1077 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | |
| 1078 MarkBit mark_bit = Marking::MarkBitFrom(object); | |
| 1079 if (!mark_bit.Get()) { | |
| 1080 heap->mark_compact_collector()->SetMark(object, mark_bit); | |
| 1081 return true; | |
| 1082 } | |
| 1083 return false; | |
| 1084 } | |
Toon Verwaest 2012/09/17 09:06:36
This method seems exactly(?) the same as the metho…

Michael Starzinger 2012/09/17 09:59:36
You are right, it does exactly the same. And I agr…
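For readers of the thread above: the method in question marks an object's bit without queuing it for body scanning, which is how the collector keeps an object alive while treating everything it points to as weak. A hedged sketch of the two primitives side by side, with plain bools in place of V8 mark bits (names invented for the sketch):

```cpp
#include <cstdio>
#include <deque>

struct Obj { bool mark_bit = false; };
static std::deque<Obj*> marking_deque;

// Mark black and push: the object's fields will be scanned later, so
// everything it points to is kept alive (strong treatment).
static void MarkObject(Obj* o) {
  if (o->mark_bit) return;
  o->mark_bit = true;
  marking_deque.push_back(o);
}

// Mark black without pushing: the object survives, but its fields are
// never scanned, so objects reachable only through it stay unmarked
// (weak treatment). Returns true if marking was actually needed.
static bool MarkObjectWithoutPush(Obj* o) {
  if (!o->mark_bit) {
    o->mark_bit = true;
    return true;
  }
  return false;
}

int main() {
  Obj a;
  bool first = MarkObjectWithoutPush(&a);
  bool second = MarkObjectWithoutPush(&a);
  std::printf("first=%d second=%d queued=%zu\n",
              first, second, marking_deque.size());  // first=1 second=0 queued=0
}
```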
| 1085 | |
| 1075 // Mark object pointed to by p. | 1086 // Mark object pointed to by p. |
| 1076 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 1087 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
| 1077 Object** anchor_slot, | 1088 Object** anchor_slot, |
| 1078 Object** p)) { | 1089 Object** p)) { |
| 1079 if (!(*p)->IsHeapObject()) return; | 1090 if (!(*p)->IsHeapObject()) return; |
| 1080 HeapObject* object = ShortCircuitConsString(p); | 1091 HeapObject* object = ShortCircuitConsString(p); |
| 1081 collector->RecordSlot(anchor_slot, p, object); | 1092 collector->RecordSlot(anchor_slot, p, object); |
| 1082 MarkBit mark = Marking::MarkBitFrom(object); | 1093 MarkBit mark = Marking::MarkBitFrom(object); |
| 1083 collector->MarkObject(object, mark); | 1094 collector->MarkObject(object, mark); |
| 1084 } | 1095 } |
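`MarkObjectByPointer` above combines three steps: short-circuiting degenerate cons strings, recording the slot so the compactor can update it, and marking the target. A toy model of just the short-circuit step, which rewrites the slot to skip a wrapper whose second part is empty (the string type here is invented, not V8's):

```cpp
#include <cstdio>

struct Str {
  const char* chars;  // flat contents, or nullptr for a cons node
  Str* first;
  Str* second;
};

static Str empty_str = {"", nullptr, nullptr};

// Rewrite *slot to skip a degenerate cons (empty tail) and return the
// object the GC should actually mark, mirroring the slot update above.
static Str* ShortCircuitCons(Str** slot) {
  Str* s = *slot;
  if (s->chars == nullptr && s->second == &empty_str) {
    *slot = s->first;
    return s->first;
  }
  return s;
}

int main() {
  Str head = {"hello", nullptr, nullptr};
  Str cons = {nullptr, &head, &empty_str};
  Str* slot = &cons;
  Str* target = ShortCircuitCons(&slot);
  std::printf("%s (slot rewritten: %d)\n", target->chars, slot == &head);
}
```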
| (...skipping 786 matching lines...) | |
| 1871 virtual Object* RetainAs(Object* object) { | 1882 virtual Object* RetainAs(Object* object) { |
| 1872 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { | 1883 if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) { |
| 1873 return object; | 1884 return object; |
| 1874 } else { | 1885 } else { |
| 1875 return NULL; | 1886 return NULL; |
| 1876 } | 1887 } |
| 1877 } | 1888 } |
| 1878 }; | 1889 }; |
| 1879 | 1890 |
| 1880 | 1891 |
| 1881 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { | |
| 1882 ASSERT(IsMarked(object)); | |
| 1883 ASSERT(HEAP->Contains(object)); | |
| 1884 if (object->IsMap()) { | |
| 1885 Map* map = Map::cast(object); | |
| 1886 heap_->ClearCacheOnMap(map); | |
| 1887 | |
| 1888 // When map collection is enabled we have to mark through map's transitions | |
| 1889 // in a special way to make transition links weak. Only maps for subclasses | |
| 1890 // of JSReceiver can have transitions. | |
| 1891 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
| 1892 if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | |
| 1893 marker_.MarkMapContents(map); | |
| 1894 } else { | |
| 1895 marking_deque_.PushBlack(map); | |
| 1896 } | |
| 1897 } else { | |
| 1898 marking_deque_.PushBlack(object); | |
| 1899 } | |
| 1900 } | |
| 1901 | |
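`ProcessNewlyMarkedObject` above dispatches on object type: when map collection is enabled, maps for `JSReceiver` subclasses go through `Marker::MarkMapContents` so their transition links become weak, while everything else is pushed black onto the marking deque. A schematic of that dispatch with toy types (hypothetical names, not V8's):

```cpp
#include <cstdio>
#include <deque>

// Toy stand-ins for V8's HeapObject/Map distinction (illustrative only).
struct HeapObj {
  bool is_map;
  bool marked;
};

static std::deque<HeapObj*> marking_deque;
static bool collect_maps = true;  // stands in for FLAG_collect_maps

// Placeholder for the weak path: in V8 this is Marker::MarkMapContents,
// which marks the map's fields but treats its transitions as weak.
static void MarkMapContentsWeakly(HeapObj* map) {
  map->marked = true;
  std::printf("map took the weak-transitions path\n");
}

static void ProcessNewlyMarked(HeapObj* obj) {
  if (obj->is_map && collect_maps) {
    MarkMapContentsWeakly(obj);    // transitions become weak links
  } else {
    marking_deque.push_back(obj);  // ordinary object: scan body later
  }
}

int main() {
  HeapObj map_obj = {true, false};
  HeapObj plain = {false, false};
  ProcessNewlyMarked(&map_obj);
  ProcessNewlyMarked(&plain);
  std::printf("deque size: %zu\n", marking_deque.size());  // 1
}
```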
| 1902 | |
| 1903 // Force instantiation of template instances. | |
| 1904 template void Marker<IncrementalMarking>::MarkMapContents(Map* map); | |
| 1905 template void Marker<MarkCompactCollector>::MarkMapContents(Map* map); | |
| 1906 | |
| 1907 | |
| 1908 template <class T> | |
| 1909 void Marker<T>::MarkMapContents(Map* map) { | |
| 1910 // Make sure that the back pointer stored either in the map itself or inside | |
| 1911 // its transitions array is marked. Treat pointers in the transitions array as | |
| 1912 // weak and also mark that array to prevent visiting it later. | |
| 1913 base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer())); | |
| 1914 | |
| 1915 Object** transitions_slot = | |
| 1916 HeapObject::RawField(map, Map::kTransitionsOrBackPointerOffset); | |
| 1917 Object* transitions = *transitions_slot; | |
| 1918 if (transitions->IsTransitionArray()) { | |
| 1919 MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions)); | |
| 1920 } else { | |
| 1921 // Already marked by marking map->GetBackPointer(). | |
| 1922 ASSERT(transitions->IsMap() || transitions->IsUndefined()); | |
| 1923 } | |
| 1924 | |
| 1925 // Mark the Object* fields of the Map. Since the transitions array has been | |
| 1926 // marked already, it is fine that one of these fields contains a pointer to | |
| 1927 // it. | |
| 1928 Object** start_slot = | |
| 1929 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset); | |
| 1930 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); | |
| 1931 for (Object** slot = start_slot; slot < end_slot; slot++) { | |
| 1932 Object* obj = *slot; | |
| 1933 if (!obj->NonFailureIsHeapObject()) continue; | |
| 1934 mark_compact_collector()->RecordSlot(start_slot, slot, obj); | |
| 1935 base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj)); | |
| 1936 } | |
| 1937 } | |
| 1938 | |
| 1939 | |
| 1940 template <class T> | |
| 1941 void Marker<T>::MarkTransitionArray(TransitionArray* transitions) { | |
| 1942 if (!base_marker()->MarkObjectWithoutPush(transitions)) return; | |
| 1943 Object** transitions_start = transitions->data_start(); | |
| 1944 | |
| 1945 // We don't have to record the descriptors_pointer slot since the cell space | |
| 1946 // is not compacted. | |
| 1947 JSGlobalPropertyCell* descriptors_cell = transitions->descriptors_pointer(); | |
| 1948 base_marker()->MarkObjectAndPush(descriptors_cell); | |
| 1949 | |
| 1950 if (transitions->HasPrototypeTransitions()) { | |
| 1951 // Mark prototype transitions array but don't push it into marking stack. | |
| 1952 // This will make references from it weak. We will clean dead prototype | |
| 1953 // transitions in ClearNonLiveTransitions. | |
| 1954 Object** proto_trans_slot = transitions->GetPrototypeTransitionsSlot(); | |
| 1955 HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot); | |
| 1956 base_marker()->MarkObjectWithoutPush(prototype_transitions); | |
| 1957 mark_compact_collector()->RecordSlot( | |
| 1958 transitions_start, proto_trans_slot, prototype_transitions); | |
| 1959 } | |
| 1960 | |
| 1961 for (int i = 0; i < transitions->number_of_transitions(); ++i) { | |
| 1962 Object** key_slot = transitions->GetKeySlot(i); | |
| 1963 Object* key = *key_slot; | |
| 1964 if (key->IsHeapObject()) { | |
| 1965 base_marker()->MarkObjectAndPush(HeapObject::cast(key)); | |
| 1966 mark_compact_collector()->RecordSlot(transitions_start, key_slot, key); | |
| 1967 } | |
| 1968 } | |
| 1969 } | |
| 1970 | |
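The two `Marker<T>` methods above implement the weak treatment of transitions: the transitions array is marked without being pushed, so its weak edges are never traced, while strong sub-parts (the descriptors cell, transition keys, the back pointer) are marked and pushed, and each slot is recorded for the compactor. A condensed, self-contained model of that strong/weak split (illustrative names; slot recording omitted):

```cpp
#include <cstdio>
#include <deque>
#include <vector>

struct Obj {
  bool marked;
  std::vector<Obj*> strong;  // edges to trace (e.g. transition keys)
  std::vector<Obj*> weak;    // edges left untraced (e.g. target maps)
};

static std::deque<Obj*> marking_deque;

static void MarkAndPush(Obj* o) {
  if (o->marked) return;
  o->marked = true;
  marking_deque.push_back(o);
}

static bool MarkWithoutPush(Obj* o) {
  if (o->marked) return false;
  o->marked = true;
  return true;
}

// Model of MarkTransitionArray: keep the array alive, trace only its
// strong edges, and leave weak edges for a later clear-dead-entries pass.
static void MarkTransitionsWeakly(Obj* transitions) {
  if (!MarkWithoutPush(transitions)) return;  // array itself stays alive
  for (Obj* key : transitions->strong) MarkAndPush(key);
  // transitions->weak is deliberately not traced here.
}

int main() {
  Obj key = {false, {}, {}}, dead_map = {false, {}, {}};
  Obj trans = {false, {&key}, {&dead_map}};
  MarkTransitionsWeakly(&trans);
  std::printf("array=%d key=%d dead_map=%d\n",
              trans.marked, key.marked, dead_map.marked);  // 1 1 0
}
```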
| 1971 | |
| 1972 // Fill the marking stack with overflowed objects returned by the given | 1892 // Fill the marking stack with overflowed objects returned by the given |
| 1973 // iterator. Stop when the marking stack is filled or the end of the space | 1893 // iterator. Stop when the marking stack is filled or the end of the space |
| 1974 // is reached, whichever comes first. | 1894 // is reached, whichever comes first. |
| 1975 template<class T> | 1895 template<class T> |
| 1976 static void DiscoverGreyObjectsWithIterator(Heap* heap, | 1896 static void DiscoverGreyObjectsWithIterator(Heap* heap, |
| 1977 MarkingDeque* marking_deque, | 1897 MarkingDeque* marking_deque, |
| 1978 T* it) { | 1898 T* it) { |
| 1979 // The caller should ensure that the marking stack is initially not full, | 1899 // The caller should ensure that the marking stack is initially not full, |
| 1980 // so that we don't waste effort pointlessly scanning for objects. | 1900 // so that we don't waste effort pointlessly scanning for objects. |
| 1981 ASSERT(!marking_deque->IsFull()); | 1901 ASSERT(!marking_deque->IsFull()); |
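The body of `DiscoverGreyObjectsWithIterator` is largely elided by the review UI below, but its comments state the contract: refill the marking deque from a heap iterator after an overflow, stopping at the end of the space or when the deque is full again, whichever comes first. A hedged sketch of a refill loop matching that contract (the bounded deque and all names are invented for the sketch):

```cpp
#include <cstddef>
#include <cstdio>
#include <deque>
#include <vector>

struct Obj { bool grey; };  // "grey" = marked but not yet scanned

// Bounded worklist standing in for V8's MarkingDeque.
struct BoundedDeque {
  std::deque<Obj*> items;
  std::size_t capacity;
  bool IsFull() const { return items.size() >= capacity; }
  void PushBack(Obj* o) { items.push_back(o); }
};

// Refill the deque with overflowed grey objects from an "iterator"
// (here just a vector). Stops when full or at the end of the space.
static void DiscoverGreyObjects(BoundedDeque* deque, std::vector<Obj*>& space) {
  for (Obj* o : space) {
    if (!o->grey) continue;
    if (deque->IsFull()) return;  // resume from another overflow scan later
    deque->PushBack(o);
    o->grey = false;              // now queued; no longer "overflowed"
  }
}

int main() {
  std::vector<Obj> objs(5, Obj{true});
  std::vector<Obj*> space;
  for (Obj& o : objs) space.push_back(&o);
  BoundedDeque dq{{}, 3};
  DiscoverGreyObjects(&dq, space);
  std::printf("queued %zu of %zu\n", dq.items.size(), space.size());  // 3 of 5
}
```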
| (...skipping 2162 matching lines...) | |
| 4144 while (buffer != NULL) { | 4064 while (buffer != NULL) { |
| 4145 SlotsBuffer* next_buffer = buffer->next(); | 4065 SlotsBuffer* next_buffer = buffer->next(); |
| 4146 DeallocateBuffer(buffer); | 4066 DeallocateBuffer(buffer); |
| 4147 buffer = next_buffer; | 4067 buffer = next_buffer; |
| 4148 } | 4068 } |
| 4149 *buffer_address = NULL; | 4069 *buffer_address = NULL; |
| 4150 } | 4070 } |
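The final hunk above is the standard teardown for a singly linked list: read `next` before releasing the node, then null out the head slot. The same shape in isolation (toy type; V8's real SlotsBuffer goes through DeallocateBuffer rather than `delete`):

```cpp
#include <cstdlib>

struct SlotsBuffer {
  SlotsBuffer* next;
};

// Free an entire chain. Reading `next` before the delete is what keeps
// the traversal valid; *buffer_address = nullptr leaves a clean head.
static void DeallocateChain(SlotsBuffer** buffer_address) {
  SlotsBuffer* buffer = *buffer_address;
  while (buffer != nullptr) {
    SlotsBuffer* next_buffer = buffer->next;
    delete buffer;
    buffer = next_buffer;
  }
  *buffer_address = nullptr;
}

int main() {
  // Build a three-element chain, then tear it down.
  SlotsBuffer* head = new SlotsBuffer{new SlotsBuffer{new SlotsBuffer{nullptr}}};
  DeallocateChain(&head);
  return head == nullptr ? EXIT_SUCCESS : EXIT_FAILURE;
}
```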
| 4151 | 4071 |
| 4152 | 4072 |
| 4153 } } // namespace v8::internal | 4073 } } // namespace v8::internal |