Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(7)

Side by Side Diff: src/heap/mark-compact.cc

Issue 1488593003: Optimize clearing of map transitions. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: base Created 5 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 336 matching lines...) Expand 10 before | Expand all | Expand 10 after
347 347
348 void MarkCompactCollector::CollectGarbage() { 348 void MarkCompactCollector::CollectGarbage() {
349 // Make sure that Prepare() has been called. The individual steps below will 349 // Make sure that Prepare() has been called. The individual steps below will
350 // update the state as they proceed. 350 // update the state as they proceed.
351 DCHECK(state_ == PREPARE_GC); 351 DCHECK(state_ == PREPARE_GC);
352 352
353 MarkLiveObjects(); 353 MarkLiveObjects();
354 354
355 DCHECK(heap_->incremental_marking()->IsStopped()); 355 DCHECK(heap_->incremental_marking()->IsStopped());
356 356
357 ProcessWeakReferences(); 357 ClearNonLiveReferences();
358
359 ClearWeakCollections();
358 360
359 #ifdef VERIFY_HEAP 361 #ifdef VERIFY_HEAP
360 if (FLAG_verify_heap) { 362 if (FLAG_verify_heap) {
361 VerifyMarking(heap_); 363 VerifyMarking(heap_);
362 } 364 }
363 #endif 365 #endif
364 366
365 ClearInvalidStoreAndSlotsBufferEntries(); 367 ClearInvalidStoreAndSlotsBufferEntries();
366 368
367 #ifdef VERIFY_HEAP 369 #ifdef VERIFY_HEAP
(...skipping 1607 matching lines...) Expand 10 before | Expand all | Expand 10 after
1975 heap()->number_of_disposed_maps_ = new_number_of_disposed_maps; 1977 heap()->number_of_disposed_maps_ = new_number_of_disposed_maps;
1976 Object* undefined = heap()->undefined_value(); 1978 Object* undefined = heap()->undefined_value();
1977 for (int i = new_length; i < length; i++) { 1979 for (int i = new_length; i < length; i++) {
1978 retained_maps->Clear(i, undefined); 1980 retained_maps->Clear(i, undefined);
1979 } 1981 }
1980 if (new_length != length) retained_maps->SetLength(new_length); 1982 if (new_length != length) retained_maps->SetLength(new_length);
1981 ProcessMarkingDeque(); 1983 ProcessMarkingDeque();
1982 } 1984 }
1983 1985
1984 1986
1985 DependentCode* MarkCompactCollector::DependentCodeListFromNonLiveMaps() {
1986 GCTracer::Scope gc_scope(heap()->tracer(),
1987 GCTracer::Scope::MC_EXTRACT_DEPENDENT_CODE);
1988 ArrayList* retained_maps = heap()->retained_maps();
1989 int length = retained_maps->Length();
1990 DependentCode* head = DependentCode::cast(heap()->empty_fixed_array());
1991 for (int i = 0; i < length; i += 2) {
1992 DCHECK(retained_maps->Get(i)->IsWeakCell());
1993 WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
1994 DCHECK(!cell->cleared());
1995 Map* map = Map::cast(cell->value());
1996 MarkBit map_mark = Marking::MarkBitFrom(map);
1997 if (Marking::IsWhite(map_mark)) {
ulan 2015/12/01 10:13:15 This code moved to ClearWeakCells
1998 DependentCode* candidate = map->dependent_code();
1999 // We rely on the fact that the weak code group comes first.
2000 STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
2001 if (candidate->length() > 0 &&
2002 candidate->group() == DependentCode::kWeakCodeGroup) {
2003 candidate->set_next_link(head);
2004 head = candidate;
2005 }
2006 }
2007 }
2008 return head;
2009 }
2010
2011
2012 void MarkCompactCollector::EnsureMarkingDequeIsReserved() { 1987 void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
2013 DCHECK(!marking_deque_.in_use()); 1988 DCHECK(!marking_deque_.in_use());
2014 if (marking_deque_memory_ == NULL) { 1989 if (marking_deque_memory_ == NULL) {
2015 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize); 1990 marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize);
2016 marking_deque_memory_committed_ = 0; 1991 marking_deque_memory_committed_ = 0;
2017 } 1992 }
2018 if (marking_deque_memory_ == NULL) { 1993 if (marking_deque_memory_ == NULL) {
2019 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved"); 1994 V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved");
2020 } 1995 }
2021 } 1996 }
(...skipping 286 matching lines...) Expand 10 before | Expand all | Expand 10 after
2308 } 2283 }
2309 2284
2310 // Trim the optimized code map if entries have been removed. 2285 // Trim the optimized code map if entries have been removed.
2311 if (new_length < old_length) { 2286 if (new_length < old_length) {
2312 shared->TrimOptimizedCodeMap(old_length - new_length); 2287 shared->TrimOptimizedCodeMap(old_length - new_length);
2313 } 2288 }
2314 } 2289 }
2315 } 2290 }
2316 2291
2317 2292
2318 void MarkCompactCollector::ProcessWeakReferences() {
2319 // This should be done before processing weak cells because it checks
2320 // mark bits of maps in weak cells.
2321 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps();
ulan 2015/12/01 10:13:15 ProcessAndClearWeakCells() and MarkDependentCodeListForDeoptimization()
2322
2323 // Process weak cells before MarkCodeForDeoptimization and
2324 // ClearNonLiveReferences so that weak cells in dependent code arrays are
2325 // cleared or contain only live code objects.
2326 ProcessAndClearWeakCells();
2327
2328 MarkDependentCodeListForDeoptimization(dependent_code_list);
2329
2330 ClearNonLiveReferences();
2331
2332 ClearWeakCollections();
2333 }
2334
2335
2336 void MarkCompactCollector::ClearNonLiveReferences() { 2293 void MarkCompactCollector::ClearNonLiveReferences() {
2294 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
2295
2296 DependentCode* dependent_code_list;
2297 Object* non_live_map_list;
2298 ClearWeakCells(&non_live_map_list, &dependent_code_list);
2299
2300 {
2301 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP);
2302 ClearSimpleMapTransitions(non_live_map_list);
2303 ClearFullMapTransitions();
2304 }
2305
2306 MarkDependentCodeForDeoptimization(dependent_code_list);
2307 }
2308
2309
2310 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
2311 DependentCode* list_head) {
2337 GCTracer::Scope gc_scope(heap()->tracer(), 2312 GCTracer::Scope gc_scope(heap()->tracer(),
2338 GCTracer::Scope::MC_NONLIVEREFERENCES); 2313 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
2339 2314
2340 ProcessAndClearTransitionArrays(); 2315 Isolate* isolate = this->isolate();
2341 2316 DependentCode* current = list_head;
ulan 2015/12/01 10:13:15 This code is from MarkDependentCodeListForDeoptimization.
2342 // Iterate over the map space, setting map transitions that go from 2317 while (current->length() > 0) {
2343 // a marked map to an unmarked map to null transitions. This action 2318 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
2344 // is carried out only on maps of JSObjects and related subtypes. 2319 isolate, DependentCode::kWeakCodeGroup);
2345 HeapObjectIterator map_iterator(heap()->map_space()); 2320 current = current->next_link();
2346 for (HeapObject* obj = map_iterator.Next(); obj != NULL;
ulan 2015/12/01 10:13:15 This is the main change: this loop is replaced by ClearFullMapTransitions.
2347 obj = map_iterator.Next()) {
2348 Map* map = Map::cast(obj);
2349 if (!map->CanTransition()) continue;
2350 MarkBit map_mark = Marking::MarkBitFrom(map);
2351 if (Marking::IsWhite(map_mark)) {
2352 ClearNonLiveMapTransitions(map);
2353 }
2354 } 2321 }
2355 2322
2356 WeakHashTable* table = heap_->weak_object_to_code_table(); 2323 WeakHashTable* table = heap_->weak_object_to_code_table();
2357 uint32_t capacity = table->Capacity(); 2324 uint32_t capacity = table->Capacity();
2358 for (uint32_t i = 0; i < capacity; i++) { 2325 for (uint32_t i = 0; i < capacity; i++) {
2359 uint32_t key_index = table->EntryToIndex(i); 2326 uint32_t key_index = table->EntryToIndex(i);
2360 Object* key = table->get(key_index); 2327 Object* key = table->get(key_index);
2361 if (!table->IsKey(key)) continue; 2328 if (!table->IsKey(key)) continue;
2362 uint32_t value_index = table->EntryToValueIndex(i); 2329 uint32_t value_index = table->EntryToValueIndex(i);
2363 Object* value = table->get(value_index); 2330 Object* value = table->get(value_index);
2364 DCHECK(key->IsWeakCell()); 2331 DCHECK(key->IsWeakCell());
2365 if (WeakCell::cast(key)->cleared()) { 2332 if (WeakCell::cast(key)->cleared()) {
2366 have_code_to_deoptimize_ |= 2333 have_code_to_deoptimize_ |=
2367 DependentCode::cast(value)->MarkCodeForDeoptimization( 2334 DependentCode::cast(value)->MarkCodeForDeoptimization(
2368 isolate(), DependentCode::kWeakCodeGroup); 2335 isolate, DependentCode::kWeakCodeGroup);
2369 table->set(key_index, heap_->the_hole_value()); 2336 table->set(key_index, heap_->the_hole_value());
2370 table->set(value_index, heap_->the_hole_value()); 2337 table->set(value_index, heap_->the_hole_value());
2371 table->ElementRemoved(); 2338 table->ElementRemoved();
2372 } 2339 }
2373 } 2340 }
2374 } 2341 }
2375 2342
2376 2343
2377 void MarkCompactCollector::MarkDependentCodeListForDeoptimization( 2344 void MarkCompactCollector::ClearSimpleMapTransitions(
2378 DependentCode* list_head) { 2345 Object* non_live_map_list) {
2379 GCTracer::Scope gc_scope(heap()->tracer(), 2346 Object* the_hole_value = heap()->the_hole_value();
2380 GCTracer::Scope::MC_DEOPT_DEPENDENT_CODE); 2347 Object* weak_cell_obj = non_live_map_list;
2381 Isolate* isolate = this->isolate(); 2348 while (weak_cell_obj != Smi::FromInt(0)) {
2382 DependentCode* current = list_head; 2349 WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
2383 while (current->length() > 0) { 2350 Map* map = Map::cast(weak_cell->value());
2384 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( 2351 DCHECK(Marking::IsWhite(Marking::MarkBitFrom(map)));
2385 isolate, DependentCode::kWeakCodeGroup); 2352 Object* potential_parent = map->constructor_or_backpointer();
2386 current = current->next_link(); 2353 if (potential_parent->IsMap()) {
2354 Map* parent = Map::cast(potential_parent);
2355 if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)) &&
2356 parent->raw_transitions() == weak_cell) {
2357 ClearSimpleMapTransition(parent, map);
2358 }
2359 }
2360 weak_cell->clear();
2361 weak_cell_obj = weak_cell->next();
2362 weak_cell->clear_next(the_hole_value);
2387 } 2363 }
2388 } 2364 }
2389 2365
2390 2366
2391 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map) { 2367 void MarkCompactCollector::ClearSimpleMapTransition(Map* map,
2392 Object* potential_parent = map->GetBackPointer(); 2368 Map* dead_transition) {
2393 if (!potential_parent->IsMap()) return; 2369 // A previously existing simple transition (stored in a WeakCell) is going
ulan 2015/12/01 10:13:15 This code is from lines 2426 - 2436
2394 Map* parent = Map::cast(potential_parent); 2370 // to be cleared. Clear the useless cell pointer, and take ownership
2395 2371 // of the descriptor array.
2396 // Follow back pointer, check whether we are dealing with a map transition 2372 map->set_raw_transitions(Smi::FromInt(0));
2397 // from a live map to a dead path and in case clear transitions of parent. 2373 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2398 DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(map))); 2374 DescriptorArray* descriptors = map->instance_descriptors();
2399 bool parent_is_alive = Marking::IsBlack(Marking::MarkBitFrom(parent)); 2375 if (descriptors == dead_transition->instance_descriptors() &&
2400 if (parent_is_alive) { 2376 number_of_own_descriptors > 0) {
2401 ClearMapTransitions(parent, map); 2377 TrimDescriptorArray(map, descriptors);
2378 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
2379 map->set_owns_descriptors(true);
2402 } 2380 }
2403 } 2381 }
2404 2382
2405 2383
2406 // Clear a possible back pointer in case the transition leads to a dead map. 2384 void MarkCompactCollector::ClearFullMapTransitions() {
2407 // Return true in case a back pointer has been cleared and false otherwise. 2385 HeapObject* undefined = heap()->undefined_value();
2408 bool MarkCompactCollector::ClearMapBackPointer(Map* target) { 2386 Object* obj = heap()->encountered_transition_arrays();
2409 DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(target))); 2387 while (obj != Smi::FromInt(0)) {
2410 if (Marking::IsBlack(Marking::MarkBitFrom(target))) return false; 2388 TransitionArray* array = TransitionArray::cast(obj);
2411 target->SetBackPointer(heap_->undefined_value(), SKIP_WRITE_BARRIER); 2389 int num_transitions = array->number_of_entries();
2412 return true; 2390 DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions);
2391 if (num_transitions > 0) {
2392 Map* map = array->GetTarget(0);
ulan 2015/12/01 10:13:15 This code does the same as lines 2440 - 2493.
2393 Map* parent = Map::cast(map->constructor_or_backpointer());
2394 bool parent_is_alive =
2395 Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent));
2396 DescriptorArray* descriptors =
2397 parent_is_alive ? parent->instance_descriptors() : nullptr;
2398 bool descriptors_owner_died =
2399 CompactTransitionArray(parent, array, descriptors);
2400 if (descriptors_owner_died) {
2401 TrimDescriptorArray(parent, descriptors);
2402 }
2403 }
2404 obj = array->next_link();
2405 array->set_next_link(undefined, SKIP_WRITE_BARRIER);
2406 }
2407 heap()->set_encountered_transition_arrays(Smi::FromInt(0));
2413 } 2408 }
2414 2409
2415 2410
2416 void MarkCompactCollector::ClearMapTransitions(Map* map, Map* dead_transition) { 2411 bool MarkCompactCollector::CompactTransitionArray(
2417 Object* transitions = map->raw_transitions(); 2412 Map* map, TransitionArray* transitions, DescriptorArray* descriptors) {
2418 int num_transitions = TransitionArray::NumberOfTransitions(transitions); 2413 int num_transitions = transitions->number_of_entries();
2419 2414 bool descriptors_owner_died = false;
2420 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2421 DescriptorArray* descriptors = map->instance_descriptors();
2422
2423 // A previously existing simple transition (stored in a WeakCell) may have
2424 // been cleared. Clear the useless cell pointer, and take ownership
2425 // of the descriptor array.
2426 if (transitions->IsWeakCell() && WeakCell::cast(transitions)->cleared()) {
2427 map->set_raw_transitions(Smi::FromInt(0));
2428 }
2429 if (num_transitions == 0 &&
2430 descriptors == dead_transition->instance_descriptors() &&
2431 number_of_own_descriptors > 0) {
2432 TrimDescriptorArray(map, descriptors, number_of_own_descriptors);
2433 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
2434 map->set_owns_descriptors(true);
2435 return;
2436 }
2437
2438 int transition_index = 0; 2415 int transition_index = 0;
2439 2416 // Compact all live transitions to the left.
2440 bool descriptors_owner_died = false;
2441
2442 // Compact all live descriptors to the left.
2443 for (int i = 0; i < num_transitions; ++i) { 2417 for (int i = 0; i < num_transitions; ++i) {
2444 Map* target = TransitionArray::GetTarget(transitions, i); 2418 Map* target = transitions->GetTarget(i);
2445 if (ClearMapBackPointer(target)) { 2419 DCHECK_EQ(target->constructor_or_backpointer(), map);
2446 if (target->instance_descriptors() == descriptors) { 2420 if (Marking::IsWhite(Marking::MarkBitFrom(target))) {
2421 if (descriptors != nullptr &&
2422 target->instance_descriptors() == descriptors) {
2447 descriptors_owner_died = true; 2423 descriptors_owner_died = true;
2448 } 2424 }
2449 } else { 2425 } else {
2450 if (i != transition_index) { 2426 if (i != transition_index) {
2451 DCHECK(TransitionArray::IsFullTransitionArray(transitions)); 2427 Name* key = transitions->GetKey(i);
2452 TransitionArray* t = TransitionArray::cast(transitions); 2428 transitions->SetKey(transition_index, key);
2453 Name* key = t->GetKey(i); 2429 Object** key_slot = transitions->GetKeySlot(transition_index);
2454 t->SetKey(transition_index, key); 2430 RecordSlot(transitions, key_slot, key);
2455 Object** key_slot = t->GetKeySlot(transition_index);
2456 RecordSlot(t, key_slot, key);
2457 // Target slots do not need to be recorded since maps are not compacted. 2431 // Target slots do not need to be recorded since maps are not compacted.
2458 t->SetTarget(transition_index, t->GetTarget(i)); 2432 transitions->SetTarget(transition_index, transitions->GetTarget(i));
2459 } 2433 }
2460 transition_index++; 2434 transition_index++;
2461 } 2435 }
2462 } 2436 }
2463
2464 // If there are no transitions to be cleared, return. 2437 // If there are no transitions to be cleared, return.
2465 // TODO(verwaest) Should be an assert, otherwise back pointers are not 2438 if (transition_index == num_transitions) {
2466 // properly cleared. 2439 DCHECK(!descriptors_owner_died);
2467 if (transition_index == num_transitions) return; 2440 return false;
2468
2469 if (descriptors_owner_died) {
2470 if (number_of_own_descriptors > 0) {
2471 TrimDescriptorArray(map, descriptors, number_of_own_descriptors);
2472 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
2473 map->set_owns_descriptors(true);
2474 } else {
2475 DCHECK(descriptors == heap_->empty_descriptor_array());
2476 }
2477 } 2441 }
2478
2479 // Note that we never eliminate a transition array, though we might right-trim 2442 // Note that we never eliminate a transition array, though we might right-trim
2480 // such that number_of_transitions() == 0. If this assumption changes, 2443 // such that number_of_transitions() == 0. If this assumption changes,
2481 // TransitionArray::Insert() will need to deal with the case that a transition 2444 // TransitionArray::Insert() will need to deal with the case that a transition
2482 // array disappeared during GC. 2445 // array disappeared during GC.
2483 int trim = TransitionArray::Capacity(transitions) - transition_index; 2446 int trim = TransitionArray::Capacity(transitions) - transition_index;
2484 if (trim > 0) { 2447 if (trim > 0) {
2485 // Non-full-TransitionArray cases can never reach this point.
2486 DCHECK(TransitionArray::IsFullTransitionArray(transitions));
2487 TransitionArray* t = TransitionArray::cast(transitions);
2488 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( 2448 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
2489 t, trim * TransitionArray::kTransitionSize); 2449 transitions, trim * TransitionArray::kTransitionSize);
2490 t->SetNumberOfTransitions(transition_index); 2450 transitions->SetNumberOfTransitions(transition_index);
2491 // The map still has a full transition array.
2492 DCHECK(TransitionArray::IsFullTransitionArray(map->raw_transitions()));
2493 } 2451 }
2452 return descriptors_owner_died;
2494 } 2453 }
2495 2454
2496 2455
2497 void MarkCompactCollector::TrimDescriptorArray(Map* map, 2456 void MarkCompactCollector::TrimDescriptorArray(Map* map,
2498 DescriptorArray* descriptors, 2457 DescriptorArray* descriptors) {
2499 int number_of_own_descriptors) { 2458 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2459 if (number_of_own_descriptors == 0) {
2460 DCHECK(descriptors == heap_->empty_descriptor_array());
2461 return;
2462 }
2463
2500 int number_of_descriptors = descriptors->number_of_descriptors_storage(); 2464 int number_of_descriptors = descriptors->number_of_descriptors_storage();
2501 int to_trim = number_of_descriptors - number_of_own_descriptors; 2465 int to_trim = number_of_descriptors - number_of_own_descriptors;
2502 if (to_trim == 0) return; 2466 if (to_trim > 0) {
2467 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
2468 descriptors, to_trim * DescriptorArray::kDescriptorSize);
2469 descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
2503 2470
2504 heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( 2471 if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors);
2505 descriptors, to_trim * DescriptorArray::kDescriptorSize); 2472 descriptors->Sort();
2506 descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
2507 2473
2508 if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors); 2474 if (FLAG_unbox_double_fields) {
2509 descriptors->Sort(); 2475 LayoutDescriptor* layout_descriptor = map->layout_descriptor();
2510 2476 layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors,
2511 if (FLAG_unbox_double_fields) { 2477 number_of_own_descriptors);
2512 LayoutDescriptor* layout_descriptor = map->layout_descriptor(); 2478 SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true));
2513 layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors, 2479 }
2514 number_of_own_descriptors);
2515 SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true));
2516 } 2480 }
2481 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
2482 map->set_owns_descriptors(true);
2517 } 2483 }
2518 2484
2519 2485
2520 void MarkCompactCollector::TrimEnumCache(Map* map, 2486 void MarkCompactCollector::TrimEnumCache(Map* map,
2521 DescriptorArray* descriptors) { 2487 DescriptorArray* descriptors) {
2522 int live_enum = map->EnumLength(); 2488 int live_enum = map->EnumLength();
2523 if (live_enum == kInvalidEnumCacheSentinel) { 2489 if (live_enum == kInvalidEnumCacheSentinel) {
2524 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM); 2490 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
2525 } 2491 }
2526 if (live_enum == 0) return descriptors->ClearEnumCache(); 2492 if (live_enum == 0) return descriptors->ClearEnumCache();
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after
2597 while (weak_collection_obj != Smi::FromInt(0)) { 2563 while (weak_collection_obj != Smi::FromInt(0)) {
2598 JSWeakCollection* weak_collection = 2564 JSWeakCollection* weak_collection =
2599 reinterpret_cast<JSWeakCollection*>(weak_collection_obj); 2565 reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2600 weak_collection_obj = weak_collection->next(); 2566 weak_collection_obj = weak_collection->next();
2601 weak_collection->set_next(heap()->undefined_value()); 2567 weak_collection->set_next(heap()->undefined_value());
2602 } 2568 }
2603 heap()->set_encountered_weak_collections(Smi::FromInt(0)); 2569 heap()->set_encountered_weak_collections(Smi::FromInt(0));
2604 } 2570 }
2605 2571
2606 2572
2607 void MarkCompactCollector::ProcessAndClearWeakCells() { 2573 void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
2608 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCELL); 2574 DependentCode** dependent_code_list) {
2609 Object* weak_cell_obj = heap()->encountered_weak_cells(); 2575 Heap* heap = this->heap();
2576 GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL);
2577 Object* weak_cell_obj = heap->encountered_weak_cells();
2578 Object* the_hole_value = heap->the_hole_value();
2579 DependentCode* dependent_code_head =
2580 DependentCode::cast(heap->empty_fixed_array());
2581 Object* non_live_map_head = Smi::FromInt(0);
2610 while (weak_cell_obj != Smi::FromInt(0)) { 2582 while (weak_cell_obj != Smi::FromInt(0)) {
2611 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); 2583 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2584 Object* next_weak_cell = weak_cell->next();
2585 bool clear_value = true;
2586 bool clear_next = true;
2612 // We do not insert cleared weak cells into the list, so the value 2587 // We do not insert cleared weak cells into the list, so the value
2613 // cannot be a Smi here. 2588 // cannot be a Smi here.
2614 HeapObject* value = HeapObject::cast(weak_cell->value()); 2589 HeapObject* value = HeapObject::cast(weak_cell->value());
2615 if (!MarkCompactCollector::IsMarked(value)) { 2590 if (!MarkCompactCollector::IsMarked(value)) {
2616 // Cells for new-space objects embedded in optimized code are wrapped in 2591 // Cells for new-space objects embedded in optimized code are wrapped in
2617 // WeakCell and put into Heap::weak_object_to_code_table. 2592 // WeakCell and put into Heap::weak_object_to_code_table.
2618 // Such cells do not have any strong references but we want to keep them 2593 // Such cells do not have any strong references but we want to keep them
2619 // alive as long as the cell value is alive. 2594 // alive as long as the cell value is alive.
2620 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table. 2595 // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
2621 if (value->IsCell()) { 2596 if (value->IsCell()) {
2622 Object* cell_value = Cell::cast(value)->value(); 2597 Object* cell_value = Cell::cast(value)->value();
2623 if (cell_value->IsHeapObject() && 2598 if (cell_value->IsHeapObject() &&
2624 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) { 2599 MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
2625 // Resurrect the cell. 2600 // Resurrect the cell.
2626 MarkBit mark = Marking::MarkBitFrom(value); 2601 MarkBit mark = Marking::MarkBitFrom(value);
2627 SetMark(value, mark); 2602 SetMark(value, mark);
2628 Object** slot = HeapObject::RawField(value, Cell::kValueOffset); 2603 Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
2629 RecordSlot(value, slot, *slot); 2604 RecordSlot(value, slot, *slot);
2630 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); 2605 slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
2631 RecordSlot(weak_cell, slot, *slot); 2606 RecordSlot(weak_cell, slot, *slot);
2632 } else { 2607 clear_value = false;
2633 weak_cell->clear();
2634 } 2608 }
2635 } else { 2609 }
2636 weak_cell->clear(); 2610 if (value->IsMap()) {
2611 // The map is non-live.
2612 Map* map = Map::cast(value);
2613 // Add dependent code to the dependent_code_list.
2614 DependentCode* candidate = map->dependent_code();
2615 // We rely on the fact that the weak code group comes first.
2616 STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
2617 if (candidate->length() > 0 &&
2618 candidate->group() == DependentCode::kWeakCodeGroup) {
2619 candidate->set_next_link(dependent_code_head);
2620 dependent_code_head = candidate;
2621 }
2622 // Add the weak cell to the non_live_map list.
2623 weak_cell->set_next(non_live_map_head);
2624 non_live_map_head = weak_cell;
2625 clear_value = false;
2626 clear_next = false;
2637 } 2627 }
2638 } else { 2628 } else {
2629 // The value of the weak cell is alive.
2639 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); 2630 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
2640 RecordSlot(weak_cell, slot, *slot); 2631 RecordSlot(weak_cell, slot, *slot);
2632 clear_value = false;
2641 } 2633 }
2642 weak_cell_obj = weak_cell->next(); 2634 if (clear_value) {
2643 weak_cell->clear_next(heap()); 2635 weak_cell->clear();
2636 }
2637 if (clear_next) {
2638 weak_cell->clear_next(the_hole_value);
2639 }
2640 weak_cell_obj = next_weak_cell;
2644 } 2641 }
2645 heap()->set_encountered_weak_cells(Smi::FromInt(0)); 2642 heap->set_encountered_weak_cells(Smi::FromInt(0));
2643 *non_live_map_list = non_live_map_head;
2644 *dependent_code_list = dependent_code_head;
2646 } 2645 }
2647 2646
2648 2647
2649 void MarkCompactCollector::AbortWeakCells() { 2648 void MarkCompactCollector::AbortWeakCells() {
2649 Object* the_hole_value = heap()->the_hole_value();
2650 Object* weak_cell_obj = heap()->encountered_weak_cells(); 2650 Object* weak_cell_obj = heap()->encountered_weak_cells();
2651 while (weak_cell_obj != Smi::FromInt(0)) { 2651 while (weak_cell_obj != Smi::FromInt(0)) {
2652 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); 2652 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
2653 weak_cell_obj = weak_cell->next(); 2653 weak_cell_obj = weak_cell->next();
2654 weak_cell->clear_next(heap()); 2654 weak_cell->clear_next(the_hole_value);
2655 } 2655 }
2656 heap()->set_encountered_weak_cells(Smi::FromInt(0)); 2656 heap()->set_encountered_weak_cells(Smi::FromInt(0));
2657 } 2657 }
2658 2658
2659 2659
2660 void MarkCompactCollector::ProcessAndClearTransitionArrays() {
2661 HeapObject* undefined = heap()->undefined_value();
2662 Object* obj = heap()->encountered_transition_arrays();
2663 while (obj != Smi::FromInt(0)) {
2664 TransitionArray* array = TransitionArray::cast(obj);
2665 // TODO(ulan): move logic from ClearMapTransitions here.
2666 obj = array->next_link();
2667 array->set_next_link(undefined, SKIP_WRITE_BARRIER);
2668 }
2669 heap()->set_encountered_transition_arrays(Smi::FromInt(0));
2670 }
2671
2672
2673 void MarkCompactCollector::AbortTransitionArrays() { 2660 void MarkCompactCollector::AbortTransitionArrays() {
2674 HeapObject* undefined = heap()->undefined_value(); 2661 HeapObject* undefined = heap()->undefined_value();
2675 Object* obj = heap()->encountered_transition_arrays(); 2662 Object* obj = heap()->encountered_transition_arrays();
2676 while (obj != Smi::FromInt(0)) { 2663 while (obj != Smi::FromInt(0)) {
2677 TransitionArray* array = TransitionArray::cast(obj); 2664 TransitionArray* array = TransitionArray::cast(obj);
2678 obj = array->next_link(); 2665 obj = array->next_link();
2679 array->set_next_link(undefined, SKIP_WRITE_BARRIER); 2666 array->set_next_link(undefined, SKIP_WRITE_BARRIER);
2680 } 2667 }
2681 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); 2668 heap()->set_encountered_transition_arrays(Smi::FromInt(0));
2682 } 2669 }
(...skipping 1512 matching lines...) Expand 10 before | Expand all | Expand 10 after
4195 MarkBit mark_bit = Marking::MarkBitFrom(host); 4182 MarkBit mark_bit = Marking::MarkBitFrom(host);
4196 if (Marking::IsBlack(mark_bit)) { 4183 if (Marking::IsBlack(mark_bit)) {
4197 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 4184 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
4198 RecordRelocSlot(&rinfo, target); 4185 RecordRelocSlot(&rinfo, target);
4199 } 4186 }
4200 } 4187 }
4201 } 4188 }
4202 4189
4203 } // namespace internal 4190 } // namespace internal
4204 } // namespace v8 4191 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698