Chromium Code Reviews

Unified diff: src/mark-compact.cc

Issue 23477061: Make objects embedded in optimized code weak. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebase and fix weak object verification. Created 7 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 73 matching lines...)
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         HeapObject* object = HeapObject::cast(*current);
         CHECK(heap_->mark_compact_collector()->IsMarked(object));
       }
     }
   }

   void VisitEmbeddedPointer(RelocInfo* rinfo) {
     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
-    if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
-        rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
-        !rinfo->target_object()->IsMap() ||
-        !Map::cast(rinfo->target_object())->CanTransition()) {
+    if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(),
+                                    rinfo->target_object())) {
       VisitPointer(rinfo->target_object_address());
     }
   }

  private:
   Heap* heap_;
 };
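The patch replaces the inline flag-and-type test with a single Code::IsWeakEmbeddedObject(kind, object) predicate. Only the removed condition is visible in this file, so the following is a minimal standalone sketch of the check that predicate presumably encapsulates; the types, the flag parameters, and main() are illustrative stand-ins rather than V8's API, and the broader name of the new helper suggests it may accept more than transitioning maps.

    #include <cstdio>

    // Illustrative stand-ins for V8 types; not the real classes.
    enum class CodeKind { OPTIMIZED_FUNCTION, STUB, BUILTIN };

    struct EmbeddedObject {
      bool is_map;
      bool can_transition;  // Only meaningful when is_map is true.
    };

    // Mirrors the condition deleted from VisitEmbeddedPointer above: an
    // embedded object is treated as weak only when both flags are enabled,
    // the host code is an optimized function, and the object is a map that
    // can still transition.
    bool IsWeakEmbeddedObject(bool flag_weak_embedded_maps_in_optimized_code,
                              bool flag_collect_maps,
                              CodeKind host_kind,
                              const EmbeddedObject& target) {
      return flag_weak_embedded_maps_in_optimized_code &&
             flag_collect_maps &&
             host_kind == CodeKind::OPTIMIZED_FUNCTION &&
             target.is_map &&
             target.can_transition;
    }

    int main() {
      EmbeddedObject transitioning_map{true, true};
      std::printf("weak? %d\n",
                  IsWeakEmbeddedObject(true, true,
                                       CodeKind::OPTIMIZED_FUNCTION,
                                       transitioning_map));  // prints 1
    }

Whether the flag checks moved inside the new helper or are handled elsewhere is not visible in this diff.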


 static void VerifyMarking(Heap* heap, Address bottom, Address top) {
(...skipping 316 matching lines...)

   if (!FLAG_collect_maps) ReattachInitialMaps();

 #ifdef DEBUG
   if (FLAG_verify_native_context_separation) {
     VerifyNativeContextSeparation(heap_);
   }
 #endif

 #ifdef VERIFY_HEAP
-  if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
-      heap()->weak_embedded_maps_verification_enabled()) {
-    VerifyWeakEmbeddedMapsInOptimizedCode();
+  if (heap()->weak_embedded_maps_verification_enabled()) {
+    VerifyWeakEmbeddedMapsAndObjectsInOptimizedCode();
   }
   if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
     VerifyOmittedMapChecks();
   }
 #endif

   Finish();

   if (marking_parity_ == EVEN_MARKING_PARITY) {
     marking_parity_ = ODD_MARKING_PARITY;
(...skipping 40 matching lines...)

   LargeObjectIterator it(heap_->lo_space());
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
     CHECK(Marking::IsWhite(mark_bit));
     CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
   }
 }


-void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
+void MarkCompactCollector::VerifyWeakEmbeddedMapsAndObjectsInOptimizedCode() {
   HeapObjectIterator code_iterator(heap()->code_space());
   for (HeapObject* obj = code_iterator.Next();
        obj != NULL;
        obj = code_iterator.Next()) {
     Code* code = Code::cast(obj);
     if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
     if (WillBeDeoptimized(code)) continue;
-    code->VerifyEmbeddedMapsDependency();
+    code->VerifyEmbeddedObjectsDependency();
   }
 }


 void MarkCompactCollector::VerifyOmittedMapChecks() {
   HeapObjectIterator iterator(heap()->map_space());
   for (HeapObject* obj = iterator.Next();
        obj != NULL;
        obj = iterator.Next()) {
     Map* map = Map::cast(obj);
(...skipping 943 matching lines...)
         JSWeakCollection::kTableOffset);
     BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
         map->GetHeap(),
         object,
         JSWeakCollection::kTableOffset + kPointerSize,
         object_size);

     // Mark the backing hash table without pushing it on the marking stack.
     Object* table_object = weak_collection->table();
     if (!table_object->IsHashTable()) return;
-    ObjectHashTable* table = ObjectHashTable::cast(table_object);
+    WeakHashTable* table = WeakHashTable::cast(table_object);
     Object** table_slot =
         HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
     MarkBit table_mark = Marking::MarkBitFrom(table);
     collector->RecordSlot(table_slot, table_slot, table);
     if (!table_mark.Get()) collector->SetMark(table, table_mark);
     // Recording the map slot can be skipped, because maps are not compacted.
     collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
     ASSERT(MarkCompactCollector::IsMarked(table->map()));
   }

(...skipping 614 matching lines...)


 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Mark the heap roots including global variables, stack variables,
   // etc., and all objects reachable from them.
   heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);

   // Handle the string table specially.
   MarkStringTable(visitor);

+  MarkWeakObjectToCodeTable();
+
   // There may be overflowed objects in the heap. Visit them now.
   while (marking_deque_.overflowed()) {
     RefillMarkingDeque();
     EmptyMarkingDeque();
   }
 }


 void MarkCompactCollector::MarkImplicitRefGroups() {
   List<ImplicitRefGroup*>* ref_groups =
(...skipping 20 matching lines...)
     }

     // Once the entire group has been marked, dispose it because it's
     // not needed anymore.
     delete entry;
   }
   ref_groups->Rewind(last);
 }


+void MarkCompactCollector::MarkWeakObjectToCodeTable() {
+  HeapObject* weak_object_to_code_table =
+      HeapObject::cast(heap()->weak_object_to_code_table());
+  if (!IsMarked(weak_object_to_code_table)) {
+    MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
+    SetMark(weak_object_to_code_table, mark);
+  }
+}
+
+
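The new MarkWeakObjectToCodeTable sets the mark bit on the table object itself but never pushes it onto the marking deque, so the table survives the collection while its keys and values stay unmarked unless something else reaches them; this is the same technique used for the JSWeakCollection backing table earlier in the file. Below is a small self-contained model of that idea, with hypothetical types that only illustrate the behavior and are not V8's API.

    #include <unordered_set>
    #include <vector>

    // Hypothetical model of strong vs. container-only marking.
    struct Obj {
      std::vector<Obj*> strong_children;
    };

    struct Marker {
      std::unordered_set<const Obj*> marked;
      std::vector<Obj*> deque;  // Objects whose children still need visiting.

      // Strong marking: mark the object and queue it so its children are
      // traced as well.
      void MarkStrong(Obj* o) {
        if (marked.insert(o).second) deque.push_back(o);
      }

      // Container-only marking: keep the container alive but do not queue it,
      // so nothing reachable only through the container gets marked.
      void MarkContainerOnly(Obj* o) { marked.insert(o); }

      void Drain() {
        while (!deque.empty()) {
          Obj* o = deque.back();
          deque.pop_back();
          for (Obj* child : o->strong_children) MarkStrong(child);
        }
      }
    };

Entries whose keys turn out to be dead are then cleaned up in ClearNonLiveReferences further down.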
 // Mark all objects reachable from the objects on the marking stack.
 // Before: the marking stack contains zero or more heap object pointers.
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingDeque() {
   while (!marking_deque_.IsEmpty()) {
     HeapObject* object = marking_deque_.Pop();
     ASSERT(object->IsHeapObject());
     ASSERT(heap()->Contains(object));
     ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object)));
(...skipping 347 matching lines...)
       // Since it survived the GC, reattach it now.
       JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map);
     }

     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);

     if (map_mark.Get()) {
       ClearNonLiveDependentCode(map->dependent_code());
     } else {
-      ClearAndDeoptimizeDependentCode(map);
+      ClearAndDeoptimizeDependentCode(map->dependent_code());
+      map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }

   // Iterate over property cell space, removing dependent code that is not
   // otherwise kept alive by strong references.
   HeapObjectIterator cell_iterator(heap_->property_cell_space());
   for (HeapObject* cell = cell_iterator.Next();
        cell != NULL;
        cell = cell_iterator.Next()) {
     if (IsMarked(cell)) {
       ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
     }
   }
+
+  if (heap_->weak_object_to_code_table()->IsHashTable()) {
+    WeakHashTable* table =
+        WeakHashTable::cast(heap_->weak_object_to_code_table());
+    uint32_t capacity = table->Capacity();
+    for (uint32_t i = 0; i < capacity; i++) {
+      uint32_t key_index = table->EntryToIndex(i);
+      Object* key = table->get(key_index);
+      if (!table->IsKey(key)) continue;
+      uint32_t value_index = table->EntryToValueIndex(i);
+      Object* value = table->get(value_index);
+      if (IsMarked(key)) {
+        if (!IsMarked(value)) {
+          HeapObject* obj = HeapObject::cast(value);
+          MarkBit mark = Marking::MarkBitFrom(obj);
+          SetMark(obj, mark);
+        }
+        ClearNonLiveDependentCode(DependentCode::cast(value));
+      } else {
+        ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
+        table->set(key_index, heap_->the_hole_value());
+        table->set(value_index, heap_->the_hole_value());
+      }
+    }
+  }
 }
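The new block at the end of ClearNonLiveReferences walks the weak object-to-code table: a live key keeps its entry (the dependent-code value is revived if it was not yet marked, and only dead code is pruned from it), while a dead key causes every dependent code object to be flagged for deoptimization and the entry to be cleared. The sketch below models that policy with ordinary C++ containers instead of V8's WeakHashTable and DependentCode; every name in it is illustrative, and the revive-the-value step is folded into simply keeping the entry.

    #include <algorithm>
    #include <functional>
    #include <unordered_map>
    #include <vector>

    // Illustrative stand-ins; not V8 types.
    struct CodeObj {
      bool live = true;
      bool marked_for_deoptimization = false;
    };

    using DependentCodeList = std::vector<CodeObj*>;

    struct WeakObjectToCodeTable {
      // Key: an embedded object (modeled as an opaque pointer);
      // value: the code objects that depend on it.
      std::unordered_map<const void*, DependentCodeList> entries;
    };

    void ClearNonLiveEntries(WeakObjectToCodeTable* table,
                             const std::function<bool(const void*)>& is_marked) {
      for (auto it = table->entries.begin(); it != table->entries.end();) {
        DependentCodeList& codes = it->second;
        if (is_marked(it->first)) {
          // Key survived the GC: keep the entry, drop only dead code objects.
          codes.erase(std::remove_if(codes.begin(), codes.end(),
                                     [](CodeObj* c) { return !c->live; }),
                      codes.end());
          ++it;
        } else {
          // Key died: any optimized code that embedded it is now invalid, so
          // flag it for deoptimization and clear the entry.
          for (CodeObj* c : codes) {
            if (c->live) c->marked_for_deoptimization = true;
          }
          it = table->entries.erase(it);
        }
      }
    }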


 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   int number_of_transitions = map->NumberOfProtoTransitions();
   FixedArray* prototype_transitions = map->GetPrototypeTransitions();

   int new_number_of_transitions = 0;
   const int header = Map::kProtoTransitionHeaderSize;
   const int proto_offset = header + Map::kProtoTransitionPrototypeOffset;
(...skipping 45 matching lines...)
   // Follow back pointer, check whether we are dealing with a map transition
   // from a live map to a dead path and in case clear transitions of parent.
   bool current_is_alive = map_mark.Get();
   bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
   if (!current_is_alive && parent_is_alive) {
     parent->ClearNonLiveTransitions(heap());
   }
 }


-void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) {
+void MarkCompactCollector::ClearAndDeoptimizeDependentCode(
+    DependentCode* entries) {
   DisallowHeapAllocation no_allocation;
-  DependentCode* entries = map->dependent_code();
   DependentCode::GroupStartIndexes starts(entries);
   int number_of_entries = starts.number_of_entries();
   if (number_of_entries == 0) return;
   for (int i = 0; i < number_of_entries; i++) {
     // If the entry is compilation info then the map must be alive,
     // and ClearAndDeoptimizeDependentCode shouldn't be called.
     ASSERT(entries->is_code_at(i));
     Code* code = entries->code_at(i);

     if (IsMarked(code) && !code->marked_for_deoptimization()) {
       code->set_marked_for_deoptimization(true);
       have_code_to_deoptimize_ = true;
     }
     entries->clear_at(i);
   }
-  map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
 }


 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
   DisallowHeapAllocation no_allocation;
   DependentCode::GroupStartIndexes starts(entries);
   int number_of_entries = starts.number_of_entries();
   if (number_of_entries == 0) return;
   int new_number_of_entries = 0;
   // Go through all groups, remove dead codes and compact.
(...skipping 817 matching lines...)
        cell = js_global_property_cell_iterator.Next()) {
     if (cell->IsPropertyCell()) {
       PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor);
     }
   }

   // Update the head of the native contexts list in the heap.
   updating_visitor.VisitPointer(heap_->native_contexts_list_address());

   heap_->string_table()->Iterate(&updating_visitor);
+  updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
+  if (heap_->weak_object_to_code_table()->IsHashTable()) {
+    WeakHashTable* table =
+        WeakHashTable::cast(heap_->weak_object_to_code_table());
+    table->Iterate(&updating_visitor);
+    table->Rehash(heap_->undefined_value());
+  }

   // Update pointers from external string table.
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);

   if (!FLAG_watch_ic_patching) {
     // Update JSFunction pointers from the runtime profiler.
     heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
         &updating_visitor);
   }
(...skipping 843 matching lines...)
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }


 } }  // namespace v8::internal
