| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 344 matching lines...) |
| 355 | 355 |
| 356 void MarkCompactCollector::CollectGarbage() { | 356 void MarkCompactCollector::CollectGarbage() { |
| 357 // Make sure that Prepare() has been called. The individual steps below will | 357 // Make sure that Prepare() has been called. The individual steps below will |
| 358 // update the state as they proceed. | 358 // update the state as they proceed. |
| 359 DCHECK(state_ == PREPARE_GC); | 359 DCHECK(state_ == PREPARE_GC); |
| 360 | 360 |
| 361 MarkLiveObjects(); | 361 MarkLiveObjects(); |
| 362 | 362 |
| 363 DCHECK(heap_->incremental_marking()->IsStopped()); | 363 DCHECK(heap_->incremental_marking()->IsStopped()); |
| 364 | 364 |
| 365 ProcessWeakReferences(); | 365 ClearNonLiveReferences(); |
| 366 |
| 367 ClearWeakCollections(); |
| 366 | 368 |
| 367 #ifdef VERIFY_HEAP | 369 #ifdef VERIFY_HEAP |
| 368 if (FLAG_verify_heap) { | 370 if (FLAG_verify_heap) { |
| 369 VerifyMarking(heap_); | 371 VerifyMarking(heap_); |
| 370 } | 372 } |
| 371 #endif | 373 #endif |
| 372 | 374 |
| 373 ClearInvalidStoreAndSlotsBufferEntries(); | 375 ClearInvalidStoreAndSlotsBufferEntries(); |
| 374 | 376 |
| 375 #ifdef VERIFY_HEAP | 377 #ifdef VERIFY_HEAP |
| (...skipping 1554 matching lines...) |
| 1930 heap()->number_of_disposed_maps_ = new_number_of_disposed_maps; | 1932 heap()->number_of_disposed_maps_ = new_number_of_disposed_maps; |
| 1931 Object* undefined = heap()->undefined_value(); | 1933 Object* undefined = heap()->undefined_value(); |
| 1932 for (int i = new_length; i < length; i++) { | 1934 for (int i = new_length; i < length; i++) { |
| 1933 retained_maps->Clear(i, undefined); | 1935 retained_maps->Clear(i, undefined); |
| 1934 } | 1936 } |
| 1935 if (new_length != length) retained_maps->SetLength(new_length); | 1937 if (new_length != length) retained_maps->SetLength(new_length); |
| 1936 ProcessMarkingDeque(); | 1938 ProcessMarkingDeque(); |
| 1937 } | 1939 } |
| 1938 | 1940 |
| 1939 | 1941 |
| 1940 DependentCode* MarkCompactCollector::DependentCodeListFromNonLiveMaps() { | |
| 1941 GCTracer::Scope gc_scope(heap()->tracer(), | |
| 1942 GCTracer::Scope::MC_EXTRACT_DEPENDENT_CODE); | |
| 1943 ArrayList* retained_maps = heap()->retained_maps(); | |
| 1944 int length = retained_maps->Length(); | |
| 1945 DependentCode* head = DependentCode::cast(heap()->empty_fixed_array()); | |
| 1946 for (int i = 0; i < length; i += 2) { | |
| 1947 DCHECK(retained_maps->Get(i)->IsWeakCell()); | |
| 1948 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | |
| 1949 DCHECK(!cell->cleared()); | |
| 1950 Map* map = Map::cast(cell->value()); | |
| 1951 MarkBit map_mark = Marking::MarkBitFrom(map); | |
| 1952 if (Marking::IsWhite(map_mark)) { | |
| 1953 DependentCode* candidate = map->dependent_code(); | |
| 1954 // We rely on the fact that the weak code group comes first. | |
| 1955 STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0); | |
| 1956 if (candidate->length() > 0 && | |
| 1957 candidate->group() == DependentCode::kWeakCodeGroup) { | |
| 1958 candidate->set_next_link(head); | |
| 1959 head = candidate; | |
| 1960 } | |
| 1961 } | |
| 1962 } | |
| 1963 return head; | |
| 1964 } | |
| 1965 | |
| 1966 | |
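The extraction pass deleted above survives as inline logic in the new ClearWeakCells further down in this diff: the dependent-code arrays of dead maps are pushed onto an intrusive singly linked list threaded through their next_link slots, with the empty fixed array acting as the terminator. A minimal standalone sketch of that list-building pattern, using hypothetical stand-in types rather than V8's real Map/DependentCode API:

    #include <vector>

    // Hypothetical stand-ins for V8's Map and DependentCode; not the real API.
    struct DependentCode {
      int length = 0;                      // length 0 doubles as the terminator
      DependentCode* next_link = nullptr;  // intrusive list field
    };

    struct Map {
      bool marked = false;                 // live objects are marked
      DependentCode dependent_code;
    };

    // Push the dependent-code array of every dead (unmarked) map onto an
    // intrusive list threaded through next_link; `sentinel` plays the role
    // of V8's empty fixed array terminating the list.
    DependentCode* ExtractDependentCode(const std::vector<Map*>& retained_maps,
                                        DependentCode* sentinel) {
      DependentCode* head = sentinel;
      for (Map* map : retained_maps) {
        if (map->marked) continue;         // live maps keep their code
        DependentCode* candidate = &map->dependent_code;
        if (candidate->length > 0) {
          candidate->next_link = head;     // prepend in O(1)
          head = candidate;
        }
      }
      return head;
    }

Using a zero-length sentinel node instead of a null pointer lets the consumer loop on cur->length > 0 without a separate null check.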
| 1967 void MarkCompactCollector::EnsureMarkingDequeIsReserved() { | 1942 void MarkCompactCollector::EnsureMarkingDequeIsReserved() { |
| 1968 DCHECK(!marking_deque_.in_use()); | 1943 DCHECK(!marking_deque_.in_use()); |
| 1969 if (marking_deque_memory_ == NULL) { | 1944 if (marking_deque_memory_ == NULL) { |
| 1970 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize); | 1945 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize); |
| 1971 marking_deque_memory_committed_ = 0; | 1946 marking_deque_memory_committed_ = 0; |
| 1972 } | 1947 } |
| 1973 if (marking_deque_memory_ == NULL) { | 1948 if (marking_deque_memory_ == NULL) { |
| 1974 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved"); | 1949 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved"); |
| 1975 } | 1950 } |
| 1976 } | 1951 } |
| (...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2263 } | 2238 } |
| 2264 | 2239 |
| 2265 // Trim the optimized code map if entries have been removed. | 2240 // Trim the optimized code map if entries have been removed. |
| 2266 if (new_length < old_length) { | 2241 if (new_length < old_length) { |
| 2267 shared->TrimOptimizedCodeMap(old_length - new_length); | 2242 shared->TrimOptimizedCodeMap(old_length - new_length); |
| 2268 } | 2243 } |
| 2269 } | 2244 } |
| 2270 } | 2245 } |
| 2271 | 2246 |
| 2272 | 2247 |
| 2273 void MarkCompactCollector::ProcessWeakReferences() { | |
| 2274 // This should be done before processing weak cells because it checks | |
| 2275 // mark bits of maps in weak cells. | |
| 2276 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps(); | |
| 2277 | |
| 2278 // Process weak cells before MarkCodeForDeoptimization and | |
| 2279 // ClearNonLiveReferences so that weak cells in dependent code arrays are | |
| 2280 // cleared or contain only live code objects. | |
| 2281 ProcessAndClearWeakCells(); | |
| 2282 | |
| 2283 MarkDependentCodeListForDeoptimization(dependent_code_list); | |
| 2284 | |
| 2285 ClearNonLiveReferences(); | |
| 2286 | |
| 2287 ClearWeakCollections(); | |
| 2288 } | |
| 2289 | |
| 2290 | |
| 2291 void MarkCompactCollector::ClearNonLiveReferences() { | 2248 void MarkCompactCollector::ClearNonLiveReferences() { |
| 2249 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR); |
| 2250 |
| 2251 DependentCode* dependent_code_list; |
| 2252 Object* non_live_map_list; |
| 2253 ClearWeakCells(&non_live_map_list, &dependent_code_list); |
| 2254 |
| 2255 { |
| 2256 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP); |
| 2257 ClearSimpleMapTransitions(non_live_map_list); |
| 2258 ClearFullMapTransitions(); |
| 2259 } |
| 2260 |
| 2261 MarkDependentCodeForDeoptimization(dependent_code_list); |
| 2262 } |
| 2263 |
| 2264 |
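The rewritten ClearNonLiveReferences times its work with nested scopes: MC_CLEAR spans the whole function and MC_CLEAR_MAP just the transition-clearing block. GCTracer::Scope is an RAII guard; a minimal standalone sketch of the same idiom (hypothetical ScopedPhaseTimer that prints durations instead of feeding a tracer):

    #include <chrono>
    #include <cstdio>

    // Minimal RAII phase timer, modeling the GCTracer::Scope usage in the
    // diff (a simplification; the real tracer aggregates by scope id).
    class ScopedPhaseTimer {
     public:
      explicit ScopedPhaseTimer(const char* name)
          : name_(name), start_(std::chrono::steady_clock::now()) {}
      ~ScopedPhaseTimer() {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - start_).count();
        std::printf("%s: %lld us\n", name_, static_cast<long long>(us));
      }
     private:
      const char* name_;
      std::chrono::steady_clock::time_point start_;
    };

    void ClearNonLiveReferencesSketch() {
      ScopedPhaseTimer total("MC_CLEAR");        // covers the whole function
      // ... ClearWeakCells(...) ...
      {
        ScopedPhaseTimer maps("MC_CLEAR_MAP");   // nested sub-phase
        // ... ClearSimpleMapTransitions / ClearFullMapTransitions ...
      }
      // ... MarkDependentCodeForDeoptimization(...) ...
    }

Scopes nest naturally because each destructor fires at its own block's end, which is what makes the per-sub-phase breakdown in the trace add up to the MC_CLEAR total.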
| 2265 void MarkCompactCollector::MarkDependentCodeForDeoptimization( |
| 2266 DependentCode* list_head) { |
| 2292 GCTracer::Scope gc_scope(heap()->tracer(), | 2267 GCTracer::Scope gc_scope(heap()->tracer(), |
| 2293 GCTracer::Scope::MC_NONLIVEREFERENCES); | 2268 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
| 2294 | 2269 |
| 2295 ProcessAndClearTransitionArrays(); | 2270 Isolate* isolate = this->isolate(); |
| 2296 | 2271 DependentCode* current = list_head; |
| 2297 // Iterate over the map space, setting map transitions that go from | 2272 while (current->length() > 0) { |
| 2298 // a marked map to an unmarked map to null transitions. This action | 2273 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( |
| 2299 // is carried out only on maps of JSObjects and related subtypes. | 2274 isolate, DependentCode::kWeakCodeGroup); |
| 2300 HeapObjectIterator map_iterator(heap()->map_space()); | 2275 current = current->next_link(); |
| 2301 for (HeapObject* obj = map_iterator.Next(); obj != NULL; | |
| 2302 obj = map_iterator.Next()) { | |
| 2303 Map* map = Map::cast(obj); | |
| 2304 if (!map->CanTransition()) continue; | |
| 2305 MarkBit map_mark = Marking::MarkBitFrom(map); | |
| 2306 if (Marking::IsWhite(map_mark)) { | |
| 2307 ClearNonLiveMapTransitions(map); | |
| 2308 } | |
| 2309 } | 2276 } |
| 2310 | 2277 |
| 2311 WeakHashTable* table = heap_->weak_object_to_code_table(); | 2278 WeakHashTable* table = heap_->weak_object_to_code_table(); |
| 2312 uint32_t capacity = table->Capacity(); | 2279 uint32_t capacity = table->Capacity(); |
| 2313 for (uint32_t i = 0; i < capacity; i++) { | 2280 for (uint32_t i = 0; i < capacity; i++) { |
| 2314 uint32_t key_index = table->EntryToIndex(i); | 2281 uint32_t key_index = table->EntryToIndex(i); |
| 2315 Object* key = table->get(key_index); | 2282 Object* key = table->get(key_index); |
| 2316 if (!table->IsKey(key)) continue; | 2283 if (!table->IsKey(key)) continue; |
| 2317 uint32_t value_index = table->EntryToValueIndex(i); | 2284 uint32_t value_index = table->EntryToValueIndex(i); |
| 2318 Object* value = table->get(value_index); | 2285 Object* value = table->get(value_index); |
| 2319 DCHECK(key->IsWeakCell()); | 2286 DCHECK(key->IsWeakCell()); |
| 2320 if (WeakCell::cast(key)->cleared()) { | 2287 if (WeakCell::cast(key)->cleared()) { |
| 2321 have_code_to_deoptimize_ |= | 2288 have_code_to_deoptimize_ |= |
| 2322 DependentCode::cast(value)->MarkCodeForDeoptimization( | 2289 DependentCode::cast(value)->MarkCodeForDeoptimization( |
| 2323 isolate(), DependentCode::kWeakCodeGroup); | 2290 isolate, DependentCode::kWeakCodeGroup); |
| 2324 table->set(key_index, heap_->the_hole_value()); | 2291 table->set(key_index, heap_->the_hole_value()); |
| 2325 table->set(value_index, heap_->the_hole_value()); | 2292 table->set(value_index, heap_->the_hole_value()); |
| 2326 table->ElementRemoved(); | 2293 table->ElementRemoved(); |
| 2327 } | 2294 } |
| 2328 } | 2295 } |
| 2329 } | 2296 } |
| 2330 | 2297 |
| 2331 | 2298 |
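MarkDependentCodeForDeoptimization consumes the list that ClearWeakCells builds, stopping at the zero-length sentinel and OR-ing each node's result into have_code_to_deoptimize_. A standalone sketch of that walk (hypothetical types; the real MarkCodeForDeoptimization scans a whole code group):

    // Hypothetical DependentCode stand-in, same shape as the earlier sketch.
    struct DependentCode {
      int length = 0;                    // 0 terminates the list
      bool has_marked_code = false;      // stand-in for real deopt marking
      DependentCode* next_link = nullptr;
      bool MarkCodeForDeoptimization() { return has_marked_code; }
    };

    // Walk until the zero-length sentinel; no null checks are needed
    // because the terminator is a real (empty) node, as in the diff.
    bool MarkListForDeoptimization(DependentCode* head) {
      bool have_code_to_deoptimize = false;
      for (DependentCode* cur = head; cur->length > 0; cur = cur->next_link) {
        have_code_to_deoptimize |= cur->MarkCodeForDeoptimization();
      }
      return have_code_to_deoptimize;
    }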
| 2332 void MarkCompactCollector::MarkDependentCodeListForDeoptimization( | 2299 void MarkCompactCollector::ClearSimpleMapTransitions( |
| 2333 DependentCode* list_head) { | 2300 Object* non_live_map_list) { |
| 2334 GCTracer::Scope gc_scope(heap()->tracer(), | 2301 Object* the_hole_value = heap()->the_hole_value(); |
| 2335 GCTracer::Scope::MC_DEOPT_DEPENDENT_CODE); | 2302 Object* weak_cell_obj = non_live_map_list; |
| 2336 Isolate* isolate = this->isolate(); | 2303 while (weak_cell_obj != Smi::FromInt(0)) { |
| 2337 DependentCode* current = list_head; | 2304 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); |
| 2338 while (current->length() > 0) { | 2305 Map* map = Map::cast(weak_cell->value()); |
| 2339 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( | 2306 DCHECK(Marking::IsWhite(Marking::MarkBitFrom(map))); |
| 2340 isolate, DependentCode::kWeakCodeGroup); | 2307 Object* potential_parent = map->constructor_or_backpointer(); |
| 2341 current = current->next_link(); | 2308 if (potential_parent->IsMap()) { |
| 2309 Map* parent = Map::cast(potential_parent); |
| 2310 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)) && |
| 2311 parent->raw_transitions() == weak_cell) { |
| 2312 ClearSimpleMapTransition(parent, map); |
| 2313 } |
| 2314 } |
| 2315 weak_cell->clear(); |
| 2316 weak_cell_obj = weak_cell->next(); |
| 2317 weak_cell->clear_next(the_hole_value); |
| 2342 } | 2318 } |
| 2343 } | 2319 } |
| 2344 | 2320 |
| 2345 | 2321 |
| 2346 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map) { | 2322 void MarkCompactCollector::ClearSimpleMapTransition(Map* map, |
| 2347 Object* potential_parent = map->GetBackPointer(); | 2323 Map* dead_transition) { |
| 2348 if (!potential_parent->IsMap()) return; | 2324 // A previously existing simple transition (stored in a WeakCell) is going |
| 2349 Map* parent = Map::cast(potential_parent); | 2325 // to be cleared. Clear the useless cell pointer, and take ownership |
| 2350 | 2326 // of the descriptor array. |
| 2351 // Follow back pointer, check whether we are dealing with a map transition | 2327 map->set_raw_transitions(Smi::FromInt(0)); |
| 2352 // from a live map to a dead path and in case clear transitions of parent. | 2328 int number_of_own_descriptors = map->NumberOfOwnDescriptors(); |
| 2353 DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(map))); | 2329 DescriptorArray* descriptors = map->instance_descriptors(); |
| 2354 bool parent_is_alive = Marking::IsBlack(Marking::MarkBitFrom(parent)); | 2330 if (descriptors == dead_transition->instance_descriptors() && |
| 2355 if (parent_is_alive) { | 2331 number_of_own_descriptors > 0) { |
| 2356 ClearMapTransitions(parent, map); | 2332 TrimDescriptorArray(map, descriptors); |
| 2333 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); |
| 2334 map->set_owns_descriptors(true); |
| 2357 } | 2335 } |
| 2358 } | 2336 } |
| 2359 | 2337 |
| 2360 | 2338 |
| 2361 // Clear a possible back pointer in case the transition leads to a dead map. | 2339 void MarkCompactCollector::ClearFullMapTransitions() { |
| 2362 // Return true in case a back pointer has been cleared and false otherwise. | 2340 HeapObject* undefined = heap()->undefined_value(); |
| 2363 bool MarkCompactCollector::ClearMapBackPointer(Map* target) { | 2341 Object* obj = heap()->encountered_transition_arrays(); |
| 2364 DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(target))); | 2342 while (obj != Smi::FromInt(0)) { |
| 2365 if (Marking::IsBlack(Marking::MarkBitFrom(target))) return false; | 2343 TransitionArray* array = TransitionArray::cast(obj); |
| 2366 target->SetBackPointer(heap_->undefined_value(), SKIP_WRITE_BARRIER); | 2344 int num_transitions = array->number_of_entries(); |
| 2367 return true; | 2345 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions); |
| 2346 if (num_transitions > 0) { |
| 2347 Map* map = array->GetTarget(0); |
| 2348 Map* parent = Map::cast(map->constructor_or_backpointer()); |
| 2349 bool parent_is_alive = |
| 2350 Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)); |
| 2351 DescriptorArray* descriptors = |
| 2352 parent_is_alive ? parent->instance_descriptors() : nullptr; |
| 2353 bool descriptors_owner_died = |
| 2354 CompactTransitionArray(parent, array, descriptors); |
| 2355 if (descriptors_owner_died) { |
| 2356 TrimDescriptorArray(parent, descriptors); |
| 2357 } |
| 2358 } |
| 2359 obj = array->next_link(); |
| 2360 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
| 2361 } |
| 2362 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
| 2368 } | 2363 } |
| 2369 | 2364 |
| 2370 | 2365 |
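ClearFullMapTransitions above and the unchanged AbortTransitionArrays further down share one idiom: read next_link before severing it, because clearing the link destroys the iteration state. A sketch of the drain loop with a hypothetical node type (nullptr standing in for both Smi::FromInt(0) and the undefined next_link value):

    struct TransitionArray {
      TransitionArray* next_link = nullptr;  // intrusive list field
    };

    // Detach every node from the encountered list.
    void DrainEncounteredList(TransitionArray*& head) {
      TransitionArray* obj = head;
      while (obj != nullptr) {
        // ... per-array work (compaction, descriptor trimming) goes here ...
        TransitionArray* next = obj->next_link;  // capture before severing
        obj->next_link = nullptr;                // unlink the node
        obj = next;
      }
      head = nullptr;  // reset the list head, as the diff does with Smi 0
    }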
| 2371 void MarkCompactCollector::ClearMapTransitions(Map* map, Map* dead_transition) { | 2366 bool MarkCompactCollector::CompactTransitionArray( |
| 2372 Object* transitions = map->raw_transitions(); | 2367 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) { |
| 2373 int num_transitions = TransitionArray::NumberOfTransitions(transitions); | 2368 int num_transitions = transitions->number_of_entries(); |
| 2374 | 2369 bool descriptors_owner_died = false; |
| 2375 int number_of_own_descriptors = map->NumberOfOwnDescriptors(); | |
| 2376 DescriptorArray* descriptors = map->instance_descriptors(); | |
| 2377 | |
| 2378 // A previously existing simple transition (stored in a WeakCell) may have | |
| 2379 // been cleared. Clear the useless cell pointer, and take ownership | |
| 2380 // of the descriptor array. | |
| 2381 if (transitions->IsWeakCell() && WeakCell::cast(transitions)->cleared()) { | |
| 2382 map->set_raw_transitions(Smi::FromInt(0)); | |
| 2383 } | |
| 2384 if (num_transitions == 0 && | |
| 2385 descriptors == dead_transition->instance_descriptors() && | |
| 2386 number_of_own_descriptors > 0) { | |
| 2387 TrimDescriptorArray(map, descriptors, number_of_own_descriptors); | |
| 2388 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); | |
| 2389 map->set_owns_descriptors(true); | |
| 2390 return; | |
| 2391 } | |
| 2392 | |
| 2393 int transition_index = 0; | 2370 int transition_index = 0; |
| 2394 | 2371 // Compact all live transitions to the left. |
| 2395 bool descriptors_owner_died = false; | |
| 2396 | |
| 2397 // Compact all live descriptors to the left. | |
| 2398 for (int i = 0; i < num_transitions; ++i) { | 2372 for (int i = 0; i < num_transitions; ++i) { |
| 2399 Map* target = TransitionArray::GetTarget(transitions, i); | 2373 Map* target = transitions->GetTarget(i); |
| 2400 if (ClearMapBackPointer(target)) { | 2374 DCHECK_EQ(target->constructor_or_backpointer(), map); |
| 2401 if (target->instance_descriptors() == descriptors) { | 2375 if (Marking::IsWhite(Marking::MarkBitFrom(target))) { |
| 2376 if (descriptors != nullptr && |
| 2377 target->instance_descriptors() == descriptors) { |
| 2402 descriptors_owner_died = true; | 2378 descriptors_owner_died = true; |
| 2403 } | 2379 } |
| 2404 } else { | 2380 } else { |
| 2405 if (i != transition_index) { | 2381 if (i != transition_index) { |
| 2406 DCHECK(TransitionArray::IsFullTransitionArray(transitions)); | 2382 Name* key = transitions->GetKey(i); |
| 2407 TransitionArray* t = TransitionArray::cast(transitions); | 2383 transitions->SetKey(transition_index, key); |
| 2408 Name* key = t->GetKey(i); | 2384 Object** key_slot = transitions->GetKeySlot(transition_index); |
| 2409 t->SetKey(transition_index, key); | 2385 RecordSlot(transitions, key_slot, key); |
| 2410 Object** key_slot = t->GetKeySlot(transition_index); | |
| 2411 RecordSlot(t, key_slot, key); | |
| 2412 // Target slots do not need to be recorded since maps are not compacted. | 2386 // Target slots do not need to be recorded since maps are not compacted. |
| 2413 t->SetTarget(transition_index, t->GetTarget(i)); | 2387 transitions->SetTarget(transition_index, transitions->GetTarget(i)); |
| 2414 } | 2388 } |
| 2415 transition_index++; | 2389 transition_index++; |
| 2416 } | 2390 } |
| 2417 } | 2391 } |
| 2418 | |
| 2419 // If there are no transitions to be cleared, return. | 2392 // If there are no transitions to be cleared, return. |
| 2420 // TODO(verwaest) Should be an assert, otherwise back pointers are not | 2393 if (transition_index == num_transitions) { |
| 2421 // properly cleared. | 2394 DCHECK(!descriptors_owner_died); |
| 2422 if (transition_index == num_transitions) return; | 2395 return false; |
| 2423 | |
| 2424 if (descriptors_owner_died) { | |
| 2425 if (number_of_own_descriptors > 0) { | |
| 2426 TrimDescriptorArray(map, descriptors, number_of_own_descriptors); | |
| 2427 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); | |
| 2428 map->set_owns_descriptors(true); | |
| 2429 } else { | |
| 2430 DCHECK(descriptors == heap_->empty_descriptor_array()); | |
| 2431 } | |
| 2432 } | 2396 } |
| 2433 | |
| 2434 // Note that we never eliminate a transition array, though we might right-trim | 2397 // Note that we never eliminate a transition array, though we might right-trim |
| 2435 // such that number_of_transitions() == 0. If this assumption changes, | 2398 // such that number_of_transitions() == 0. If this assumption changes, |
| 2436 // TransitionArray::Insert() will need to deal with the case that a transition | 2399 // TransitionArray::Insert() will need to deal with the case that a transition |
| 2437 // array disappeared during GC. | 2400 // array disappeared during GC. |
| 2438 int trim = TransitionArray::Capacity(transitions) - transition_index; | 2401 int trim = TransitionArray::Capacity(transitions) - transition_index; |
| 2439 if (trim > 0) { | 2402 if (trim > 0) { |
| 2440 // Non-full-TransitionArray cases can never reach this point. | |
| 2441 DCHECK(TransitionArray::IsFullTransitionArray(transitions)); | |
| 2442 TransitionArray* t = TransitionArray::cast(transitions); | |
| 2443 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( | 2403 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( |
| 2444 t, trim * TransitionArray::kTransitionSize); | 2404 transitions, trim * TransitionArray::kTransitionSize); |
| 2445 t->SetNumberOfTransitions(transition_index); | 2405 transitions->SetNumberOfTransitions(transition_index); |
| 2446 // The map still has a full transition array. | |
| 2447 DCHECK(TransitionArray::IsFullTransitionArray(map->raw_transitions())); | |
| 2448 } | 2406 } |
| 2407 return descriptors_owner_died; |
| 2449 } | 2408 } |
| 2450 | 2409 |
| 2451 | 2410 |
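CompactTransitionArray is a stable in-place filter: live targets shift left, a dead target that owns the parent's descriptors sets descriptors_owner_died, and the tail past transition_index is right-trimmed so the array object itself always survives. A minimal vector-based analogue (hypothetical Entry/Target types; the real code also re-records moved key slots for pointer updating):

    #include <vector>

    struct Target {
      bool live = false;
    };

    struct Entry {
      int key = 0;
      Target* target = nullptr;
    };

    // Returns the new logical length after compacting live entries left
    // and trimming the dead tail, mirroring the diff's transition_index.
    size_t CompactLiveEntries(std::vector<Entry>& entries) {
      size_t transition_index = 0;
      for (size_t i = 0; i < entries.size(); ++i) {
        if (!entries[i].target->live) continue;    // drop dead transitions
        if (i != transition_index) {
          entries[transition_index] = entries[i];  // stable left shift
        }
        ++transition_index;
      }
      // Right-trim rather than freeing: the array survives even with zero
      // entries, matching the invariant noted in the diff's comment.
      entries.resize(transition_index);
      return transition_index;
    }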
| 2452 void MarkCompactCollector::TrimDescriptorArray(Map* map, | 2411 void MarkCompactCollector::TrimDescriptorArray(Map* map, |
| 2453 DescriptorArray* descriptors, | 2412 DescriptorArray* descriptors) { |
| 2454 int number_of_own_descriptors) { | 2413 int number_of_own_descriptors = map->NumberOfOwnDescriptors(); |
| 2414 if (number_of_own_descriptors == 0) { |
| 2415 DCHECK(descriptors == heap_->empty_descriptor_array()); |
| 2416 return; |
| 2417 } |
| 2418 |
| 2455 int number_of_descriptors = descriptors->number_of_descriptors_storage(); | 2419 int number_of_descriptors = descriptors->number_of_descriptors_storage(); |
| 2456 int to_trim = number_of_descriptors - number_of_own_descriptors; | 2420 int to_trim = number_of_descriptors - number_of_own_descriptors; |
| 2457 if (to_trim == 0) return; | 2421 if (to_trim > 0) { |
| 2422 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( |
| 2423 descriptors, to_trim * DescriptorArray::kDescriptorSize); |
| 2424 descriptors->SetNumberOfDescriptors(number_of_own_descriptors); |
| 2458 | 2425 |
| 2459 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( | 2426 if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors); |
| 2460 descriptors, to_trim * DescriptorArray::kDescriptorSize); | 2427 descriptors->Sort(); |
| 2461 descriptors->SetNumberOfDescriptors(number_of_own_descriptors); | |
| 2462 | 2428 |
| 2463 if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors); | 2429 if (FLAG_unbox_double_fields) { |
| 2464 descriptors->Sort(); | 2430 LayoutDescriptor* layout_descriptor = map->layout_descriptor(); |
| 2465 | 2431 layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors, |
| 2466 if (FLAG_unbox_double_fields) { | 2432 number_of_own_descriptors); |
| 2467 LayoutDescriptor* layout_descriptor = map->layout_descriptor(); | 2433 SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true)); |
| 2468 layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors, | 2434 } |
| 2469 number_of_own_descriptors); | |
| 2470 SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true)); | |
| 2471 } | 2435 } |
| 2436 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); |
| 2437 map->set_owns_descriptors(true); |
| 2472 } | 2438 } |
| 2473 | 2439 |
| 2474 | 2440 |
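TrimDescriptorArray now derives number_of_own_descriptors itself and handles the zero-descriptor case, so both call sites shrink. The core operation is a right-trim: shrink the backing store in place, fix the stored count, then repair state derived from the contents (enum cache, sort order, unboxed-field layout). A compact standalone analogue (hypothetical types; V8 trims the heap object in place rather than reallocating):

    #include <algorithm>
    #include <vector>

    struct Descriptor {
      int key = 0;
    };

    // Right-trim `descriptors` down to the owner's count, then restore the
    // invariants that depend on the array's contents (here: sort order).
    void TrimDescriptors(std::vector<Descriptor>& descriptors,
                         int number_of_own_descriptors) {
      if (number_of_own_descriptors == 0) {
        // Mirrors the new early-out: an owner with no descriptors must
        // already reference the canonical empty array.
        return;
      }
      int to_trim =
          static_cast<int>(descriptors.size()) - number_of_own_descriptors;
      if (to_trim > 0) {
        descriptors.resize(number_of_own_descriptors);  // drop the dead tail
        std::sort(descriptors.begin(), descriptors.end(),
                  [](const Descriptor& a, const Descriptor& b) {
                    return a.key < b.key;
                  });  // re-establish order, as descriptors->Sort() does
      }
    }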
| 2475 void MarkCompactCollector::TrimEnumCache(Map* map, | 2441 void MarkCompactCollector::TrimEnumCache(Map* map, |
| 2476 DescriptorArray* descriptors) { | 2442 DescriptorArray* descriptors) { |
| 2477 int live_enum = map->EnumLength(); | 2443 int live_enum = map->EnumLength(); |
| 2478 if (live_enum == kInvalidEnumCacheSentinel) { | 2444 if (live_enum == kInvalidEnumCacheSentinel) { |
| 2479 live_enum = | 2445 live_enum = |
| 2480 map->NumberOfDescribedProperties(OWN_DESCRIPTORS, ENUMERABLE_STRINGS); | 2446 map->NumberOfDescribedProperties(OWN_DESCRIPTORS, ENUMERABLE_STRINGS); |
| 2481 } | 2447 } |
| (...skipping 71 matching lines...) |
| 2553 while (weak_collection_obj != Smi::FromInt(0)) { | 2519 while (weak_collection_obj != Smi::FromInt(0)) { |
| 2554 JSWeakCollection* weak_collection = | 2520 JSWeakCollection* weak_collection = |
| 2555 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); | 2521 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); |
| 2556 weak_collection_obj = weak_collection->next(); | 2522 weak_collection_obj = weak_collection->next(); |
| 2557 weak_collection->set_next(heap()->undefined_value()); | 2523 weak_collection->set_next(heap()->undefined_value()); |
| 2558 } | 2524 } |
| 2559 heap()->set_encountered_weak_collections(Smi::FromInt(0)); | 2525 heap()->set_encountered_weak_collections(Smi::FromInt(0)); |
| 2560 } | 2526 } |
| 2561 | 2527 |
| 2562 | 2528 |
| 2563 void MarkCompactCollector::ProcessAndClearWeakCells() { | 2529 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, |
| 2564 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCELL); | 2530 DependentCode** dependent_code_list) { |
| 2565 Object* weak_cell_obj = heap()->encountered_weak_cells(); | 2531 Heap* heap = this->heap(); |
| 2532 GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL); |
| 2533 Object* weak_cell_obj = heap->encountered_weak_cells(); |
| 2534 Object* the_hole_value = heap->the_hole_value(); |
| 2535 DependentCode* dependent_code_head = |
| 2536 DependentCode::cast(heap->empty_fixed_array()); |
| 2537 Object* non_live_map_head = Smi::FromInt(0); |
| 2566 while (weak_cell_obj != Smi::FromInt(0)) { | 2538 while (weak_cell_obj != Smi::FromInt(0)) { |
| 2567 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 2539 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
| 2540 Object* next_weak_cell = weak_cell->next(); |
| 2541 bool clear_value = true; |
| 2542 bool clear_next = true; |
| 2568 // We do not insert cleared weak cells into the list, so the value | 2543 // We do not insert cleared weak cells into the list, so the value |
| 2569 // cannot be a Smi here. | 2544 // cannot be a Smi here. |
| 2570 HeapObject* value = HeapObject::cast(weak_cell->value()); | 2545 HeapObject* value = HeapObject::cast(weak_cell->value()); |
| 2571 if (!MarkCompactCollector::IsMarked(value)) { | 2546 if (!MarkCompactCollector::IsMarked(value)) { |
| 2572 // Cells for new-space objects embedded in optimized code are wrapped in | 2547 // Cells for new-space objects embedded in optimized code are wrapped in |
| 2573 // WeakCell and put into Heap::weak_object_to_code_table. | 2548 // WeakCell and put into Heap::weak_object_to_code_table. |
| 2574 // Such cells do not have any strong references but we want to keep them | 2549 // Such cells do not have any strong references but we want to keep them |
| 2575 // alive as long as the cell value is alive. | 2550 // alive as long as the cell value is alive. |
| 2576 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. | 2551 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. |
| 2577 if (value->IsCell()) { | 2552 if (value->IsCell()) { |
| 2578 Object* cell_value = Cell::cast(value)->value(); | 2553 Object* cell_value = Cell::cast(value)->value(); |
| 2579 if (cell_value->IsHeapObject() && | 2554 if (cell_value->IsHeapObject() && |
| 2580 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { | 2555 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { |
| 2581 // Resurrect the cell. | 2556 // Resurrect the cell. |
| 2582 MarkBit mark = Marking::MarkBitFrom(value); | 2557 MarkBit mark = Marking::MarkBitFrom(value); |
| 2583 SetMark(value, mark); | 2558 SetMark(value, mark); |
| 2584 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); | 2559 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); |
| 2585 RecordSlot(value, slot, *slot); | 2560 RecordSlot(value, slot, *slot); |
| 2586 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2561 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| 2587 RecordSlot(weak_cell, slot, *slot); | 2562 RecordSlot(weak_cell, slot, *slot); |
| 2588 } else { | 2563 clear_value = false; |
| 2589 weak_cell->clear(); | |
| 2590 } | 2564 } |
| 2591 } else { | 2565 } |
| 2592 weak_cell->clear(); | 2566 if (value->IsMap()) { |
| 2567 // The map is non-live. |
| 2568 Map* map = Map::cast(value); |
| 2569 // Add dependent code to the dependent_code_list. |
| 2570 DependentCode* candidate = map->dependent_code(); |
| 2571 // We rely on the fact that the weak code group comes first. |
| 2572 STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0); |
| 2573 if (candidate->length() > 0 && |
| 2574 candidate->group() == DependentCode::kWeakCodeGroup) { |
| 2575 candidate->set_next_link(dependent_code_head); |
| 2576 dependent_code_head = candidate; |
| 2577 } |
| 2578 // Add the weak cell to the non_live_map list. |
| 2579 weak_cell->set_next(non_live_map_head); |
| 2580 non_live_map_head = weak_cell; |
| 2581 clear_value = false; |
| 2582 clear_next = false; |
| 2593 } | 2583 } |
| 2594 } else { | 2584 } else { |
| 2585 // The value of the weak cell is alive. |
| 2595 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 2586 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| 2596 RecordSlot(weak_cell, slot, *slot); | 2587 RecordSlot(weak_cell, slot, *slot); |
| 2588 clear_value = false; |
| 2597 } | 2589 } |
| 2598 weak_cell_obj = weak_cell->next(); | 2590 if (clear_value) { |
| 2599 weak_cell->clear_next(heap()); | 2591 weak_cell->clear(); |
| 2592 } |
| 2593 if (clear_next) { |
| 2594 weak_cell->clear_next(the_hole_value); |
| 2595 } |
| 2596 weak_cell_obj = next_weak_cell; |
| 2600 } | 2597 } |
| 2601 heap()->set_encountered_weak_cells(Smi::FromInt(0)); | 2598 heap->set_encountered_weak_cells(Smi::FromInt(0)); |
| 2599 *non_live_map_list = non_live_map_head; |
| 2600 *dependent_code_list = dependent_code_head; |
| 2602 } | 2601 } |
| 2603 | 2602 |
| 2604 | 2603 |
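ClearWeakCells classifies each cell in a single pass, deferring mutation behind two flags: clear_value and clear_next start true and are switched off per case (live value, resurrected new-space Cell, dead map rerouted onto non_live_map_list). Note that next is read up front, since a cell joining the map list has its next field rethreaded. A simplified standalone model covering the live-value, dead-map, and dead-other cases (hypothetical types; the Cell resurrection branch is omitted):

    struct HeapObjectM {
      bool marked = false;   // live after the marking phase
      bool is_map = false;
    };

    struct WeakCell {
      HeapObjectM* value = nullptr;  // the weakly held object
      WeakCell* next = nullptr;      // threads the encountered-cells list
      bool cleared = false;
    };

    // One pass over the encountered list; nullptr plays the role of V8's
    // Smi 0 / the-hole sentinels. Returns the non-live-map list head.
    WeakCell* ClassifyWeakCells(WeakCell* encountered_head) {
      WeakCell* non_live_maps = nullptr;
      WeakCell* cell = encountered_head;
      while (cell != nullptr) {
        WeakCell* next = cell->next;   // read first: next may be repurposed
        bool clear_value = true;
        bool clear_next = true;
        if (cell->value->marked) {
          clear_value = false;         // live value: keep it, unlink cell
        } else if (cell->value->is_map) {
          cell->next = non_live_maps;  // dead map: rethread onto map list
          non_live_maps = cell;
          clear_value = false;         // map-clearing phase reads value
          clear_next = false;          // ...and walks this new next link
        }
        if (clear_value) cell->cleared = true;  // dead non-map: clear
        if (clear_next) cell->next = nullptr;   // hole sentinel in V8
        cell = next;
      }
      return non_live_maps;
    }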
| 2605 void MarkCompactCollector::AbortWeakCells() { | 2604 void MarkCompactCollector::AbortWeakCells() { |
| 2605 Object* the_hole_value = heap()->the_hole_value(); |
| 2606 Object* weak_cell_obj = heap()->encountered_weak_cells(); | 2606 Object* weak_cell_obj = heap()->encountered_weak_cells(); |
| 2607 while (weak_cell_obj != Smi::FromInt(0)) { | 2607 while (weak_cell_obj != Smi::FromInt(0)) { |
| 2608 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 2608 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
| 2609 weak_cell_obj = weak_cell->next(); | 2609 weak_cell_obj = weak_cell->next(); |
| 2610 weak_cell->clear_next(heap()); | 2610 weak_cell->clear_next(the_hole_value); |
| 2611 } | 2611 } |
| 2612 heap()->set_encountered_weak_cells(Smi::FromInt(0)); | 2612 heap()->set_encountered_weak_cells(Smi::FromInt(0)); |
| 2613 } | 2613 } |
| 2614 | 2614 |
| 2615 | 2615 |
| 2616 void MarkCompactCollector::ProcessAndClearTransitionArrays() { | |
| 2617 HeapObject* undefined = heap()->undefined_value(); | |
| 2618 Object* obj = heap()->encountered_transition_arrays(); | |
| 2619 while (obj != Smi::FromInt(0)) { | |
| 2620 TransitionArray* array = TransitionArray::cast(obj); | |
| 2621 // TODO(ulan): move logic from ClearMapTransitions here. | |
| 2622 obj = array->next_link(); | |
| 2623 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | |
| 2624 } | |
| 2625 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | |
| 2626 } | |
| 2627 | |
| 2628 | |
| 2629 void MarkCompactCollector::AbortTransitionArrays() { | 2616 void MarkCompactCollector::AbortTransitionArrays() { |
| 2630 HeapObject* undefined = heap()->undefined_value(); | 2617 HeapObject* undefined = heap()->undefined_value(); |
| 2631 Object* obj = heap()->encountered_transition_arrays(); | 2618 Object* obj = heap()->encountered_transition_arrays(); |
| 2632 while (obj != Smi::FromInt(0)) { | 2619 while (obj != Smi::FromInt(0)) { |
| 2633 TransitionArray* array = TransitionArray::cast(obj); | 2620 TransitionArray* array = TransitionArray::cast(obj); |
| 2634 obj = array->next_link(); | 2621 obj = array->next_link(); |
| 2635 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2622 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
| 2636 } | 2623 } |
| 2637 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | 2624 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
| 2638 } | 2625 } |
| (...skipping 1518 matching lines...) |
| 4157 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4144 MarkBit mark_bit = Marking::MarkBitFrom(host); |
| 4158 if (Marking::IsBlack(mark_bit)) { | 4145 if (Marking::IsBlack(mark_bit)) { |
| 4159 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4146 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
| 4160 RecordRelocSlot(&rinfo, target); | 4147 RecordRelocSlot(&rinfo, target); |
| 4161 } | 4148 } |
| 4162 } | 4149 } |
| 4163 } | 4150 } |
| 4164 | 4151 |
| 4165 } // namespace internal | 4152 } // namespace internal |
| 4166 } // namespace v8 | 4153 } // namespace v8 |