Chromium Code Reviews

Diff: src/mark-compact.cc

Issue 19638014: Factor out common code from platform-specific deoptimization. Fix Deoptimizer not to need to partit… (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Cleanups post-review. Created 7 years, 5 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 55 matching lines...)
      abort_incremental_marking_(false),
      marking_parity_(ODD_MARKING_PARITY),
      compacting_(false),
      was_marked_incrementally_(false),
      sweeping_pending_(false),
      sequential_sweeping_(false),
      tracer_(NULL),
      migration_slots_buffer_(NULL),
      heap_(NULL),
      code_flusher_(NULL),
-     encountered_weak_collections_(NULL) { }
+     encountered_weak_collections_(NULL),
+     code_to_deoptimize_(NULL) { }

#ifdef VERIFY_HEAP
class VerifyMarkingVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        CHECK(HEAP->mark_compact_collector()->IsMarked(object));
      }
(...skipping 397 matching lines...)
}


void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
  HeapObjectIterator code_iterator(heap()->code_space());
  for (HeapObject* obj = code_iterator.Next();
       obj != NULL;
       obj = code_iterator.Next()) {
    Code* code = Code::cast(obj);
    if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
-   if (code->marked_for_deoptimization()) continue;
+   if (WillBeDeoptimized(code)) continue;
    code->VerifyEmbeddedMapsDependency();
  }
}


void MarkCompactCollector::VerifyOmittedPrototypeChecks() {
  HeapObjectIterator iterator(heap()->map_space());
  for (HeapObject* obj = iterator.Next();
       obj != NULL;
       obj = iterator.Next()) {
(...skipping 432 matching lines...)
  }

#ifdef VERIFY_HEAP
  if (!was_marked_incrementally_ && FLAG_verify_heap) {
    VerifyMarkbitsAreClean();
  }
#endif
}


- class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
-  public:
-   virtual bool TakeFunction(JSFunction* function) {
-     return function->code()->marked_for_deoptimization();
-   }
- };
-
-
void MarkCompactCollector::Finish() {
#ifdef DEBUG
  ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
  state_ = IDLE;
#endif
  // The stub cache is not traversed during GC; clear the cache to
  // force lazy re-initialization of it. This must be done after the
  // GC, because it relies on the new address of certain old space
  // objects (empty string, illegal builtin).
  isolate()->stub_cache()->Clear();

- DeoptimizeMarkedCodeFilter filter;
- Deoptimizer::DeoptimizeAllFunctionsWith(isolate(), &filter);
+ if (code_to_deoptimize_ != Smi::FromInt(0)) {
+   // Convert the linked list of Code objects into a ZoneList.
+   Zone zone(isolate());
+   ZoneList<Code*> codes(4, &zone);
+
+   Object* list = code_to_deoptimize_;
+   while (list->IsCode()) {
+     Code* code = Code::cast(list);
+     list = code->code_to_deoptimize_link();
+     codes.Add(code, &zone);
+     // Destroy the link and don't ever try to deoptimize this code again.
+     code->set_code_to_deoptimize_link(Smi::FromInt(0));
+   }
+   code_to_deoptimize_ = Smi::FromInt(0);
+
+   Deoptimizer::DeoptimizeCodeList(isolate(), &codes);
+ }
}
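[Note] The new Finish() drains an intrusive singly-linked list that is threaded through the Code objects themselves, with Smi 0 serving both as the list terminator and as the "already handled" marker. A minimal, self-contained sketch of the same push/drain pattern, using hypothetical names (FakeCode, deopt_link) in place of the V8 types:

    #include <vector>

    // Hypothetical stand-in for v8::internal::Code; deopt_link plays the
    // role of code_to_deoptimize_link(), and nullptr plays the role of
    // the Smi 0 terminator.
    struct FakeCode {
      FakeCode* deopt_link = nullptr;
    };

    // Prepend code onto the intrusive list headed by *head, as
    // ClearAndDeoptimizeDependentCode does for real Code objects.
    void Push(FakeCode** head, FakeCode* code) {
      code->deopt_link = *head;
      *head = code;
    }

    // Unlink every element and collect it for one batched pass, as the
    // while loop in Finish() does before calling DeoptimizeCodeList.
    std::vector<FakeCode*> Drain(FakeCode** head) {
      std::vector<FakeCode*> codes;
      for (FakeCode* code = *head; code != nullptr;) {
        FakeCode* next = code->deopt_link;
        code->deopt_link = nullptr;  // destroy the link, as Finish() does
        codes.push_back(code);
        code = next;
      }
      *head = nullptr;
      return codes;
    }

Collecting into a flat list first means the deoptimizer runs once over exactly the affected Code objects, instead of filtering every optimized JSFunction as the deleted DeoptimizeMarkedCodeFilter pass did.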


// -------------------------------------------------------------------------
// Phase 1: tracing and marking live objects.
//   before: all objects are in normal state.
//   after: a live object's map pointer is marked as '00'.

// Marking all live objects in the heap as part of mark-sweep or mark-compact
// collection. Before marking, all objects are in their normal state. After
(...skipping 1624 matching lines...)
  DisallowHeapAllocation no_allocation;
  DependentCode* entries = map->dependent_code();
  DependentCode::GroupStartIndexes starts(entries);
  int number_of_entries = starts.number_of_entries();
  if (number_of_entries == 0) return;
  for (int i = 0; i < number_of_entries; i++) {
    // If the entry is compilation info then the map must be alive,
    // and ClearAndDeoptimizeDependentCode shouldn't be called.
    ASSERT(entries->is_code_at(i));
    Code* code = entries->code_at(i);
-   if (IsMarked(code) && !code->marked_for_deoptimization()) {
-     code->set_marked_for_deoptimization(true);
+
+   if (IsMarked(code) && !WillBeDeoptimized(code)) {
+     // Insert the code into the code_to_deoptimize linked list.
+     Object* next = code_to_deoptimize_;
+     if (next != Smi::FromInt(0)) {
+       // Record the slot so that it is updated.
+       Object** slot = code->code_to_deoptimize_link_slot();
+       RecordSlot(slot, slot, next);
+     }
+     code->set_code_to_deoptimize_link(next);
+     code_to_deoptimize_ = code;
    }
    entries->clear_at(i);
  }
  map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
}
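[Note] The RecordSlot call above is needed because the link now lives in a field of a heap object: if the compactor later moves the Code object the field points at, only recorded slots get rewritten to the new address. A rough, self-contained sketch of that slots-buffer idea (hypothetical types, not the actual V8 API):

    #include <vector>

    // Hypothetical slots buffer: remembers the addresses of fields that
    // hold heap pointers so they can be rewritten after objects move.
    struct SlotsBufferSketch {
      std::vector<void**> slots;

      void Record(void** slot) { slots.push_back(slot); }

      // After compaction, rewrite every recorded slot through a
      // forwarding function that maps old addresses to new ones.
      void UpdateAll(void* (*forward)(void*)) {
        for (void** slot : slots) {
          *slot = forward(*slot);
        }
      }
    };

The list head, code_to_deoptimize_, is a collector member rather than a heap field, which is why this patch updates it separately via the pointer-updating visitor further down.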


void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
  DisallowHeapAllocation no_allocation;
  DependentCode::GroupStartIndexes starts(entries);
  int number_of_entries = starts.number_of_entries();
  if (number_of_entries == 0) return;
  int new_number_of_entries = 0;
  // Go through all groups, remove dead codes and compact.
  for (int g = 0; g < DependentCode::kGroupCount; g++) {
    int group_number_of_entries = 0;
    for (int i = starts.at(g); i < starts.at(g + 1); i++) {
      Object* obj = entries->object_at(i);
      ASSERT(obj->IsCode() || IsMarked(obj));
      if (IsMarked(obj) &&
-         (!obj->IsCode() || !Code::cast(obj)->marked_for_deoptimization())) {
+         (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
        if (new_number_of_entries + group_number_of_entries != i) {
          entries->set_object_at(
              new_number_of_entries + group_number_of_entries, obj);
        }
        Object** slot = entries->slot_at(new_number_of_entries +
                                         group_number_of_entries);
        RecordSlot(slot, slot, obj);
        group_number_of_entries++;
      }
    }
(...skipping 617 matching lines...)

    // If the object is white then no slots were recorded on it yet.
    MarkBit mark_bit = Marking::MarkBitFrom(code);
    if (Marking::IsWhite(mark_bit)) return;

    invalidated_code_.Add(code);
  }
}


+ // Return true if the given code is deoptimized or will be deoptimized.
+ bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
+   // We assume the code_to_deoptimize_link is initialized to undefined.
+   // If it is 0, or refers to another Code object, then this code
+   // is already linked, or was already linked into the list.
+   return code->code_to_deoptimize_link() != heap()->undefined_value()
+       || code->marked_for_deoptimization();
+ }
+
+
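[Note] WillBeDeoptimized() folds the link field's possible values into one predicate. A self-contained sketch of that encoding, with an illustrative enum standing in for the real tagged values (not a V8 API):

    // Illustrative model of code_to_deoptimize_link() values: undefined
    // means the code was never queued; Smi 0 means it is the list tail
    // or was already processed; anything else is the next queued Code.
    enum class LinkKind { kUndefined, kSmiZero, kNextCode };

    // Mirrors the predicate above: any state other than the pristine
    // undefined value means the code is, or was, queued for deopt.
    bool WillBeDeoptimizedSketch(LinkKind link, bool marked_for_deopt) {
      return link != LinkKind::kUndefined || marked_for_deopt;
    }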
bool MarkCompactCollector::MarkInvalidatedCode() {
  bool code_marked = false;

  int length = invalidated_code_.length();
  for (int i = 0; i < length; i++) {
    Code* code = invalidated_code_[i];

    if (SetMarkBitsUnderInvalidatedCode(code, true)) {
      code_marked = true;
    }
(...skipping 163 matching lines...)
  HeapObjectIterator js_global_property_cell_iterator(
      heap_->property_cell_space());
  for (HeapObject* cell = js_global_property_cell_iterator.Next();
       cell != NULL;
       cell = js_global_property_cell_iterator.Next()) {
    if (cell->IsPropertyCell()) {
      PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor);
    }
  }

- // Update pointer from the native contexts list.
+ // Update the heads of the native contexts list and the code to
+ // deoptimize list.
  updating_visitor.VisitPointer(heap_->native_contexts_list_address());
+ updating_visitor.VisitPointer(&code_to_deoptimize_);

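[Note] Visiting &code_to_deoptimize_ here keeps the list head valid across compaction: the visitor rewrites any pointer whose target object has moved. A minimal sketch of how such an updating visitor typically works, assuming V8's forwarding-address scheme where a moved object's map word holds its new location (illustrative; not necessarily the exact visitor used in this pass):

    // Rewrites *p if the object it points at has been relocated.
    class UpdatingVisitorSketch : public ObjectVisitor {
     public:
      void VisitPointers(Object** start, Object** end) {
        for (Object** p = start; p < end; p++) VisitPointer(p);
      }
      void VisitPointer(Object** p) {
        if (!(*p)->IsHeapObject()) return;  // Smis never move
        HeapObject* object = HeapObject::cast(*p);
        MapWord map_word = object->map_word();
        if (map_word.IsForwardingAddress()) {
          *p = map_word.ToForwardingAddress();
        }
      }
    };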
  heap_->string_table()->Iterate(&updating_visitor);

  // Update pointers from external string table.
  heap_->UpdateReferencesInExternalStringTable(
      &UpdateReferenceInExternalStringTableEntry);

  if (!FLAG_watch_ic_patching) {
    // Update JSFunction pointers from the runtime profiler.
    heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
(...skipping 843 matching lines...)
  while (buffer != NULL) {
    SlotsBuffer* next_buffer = buffer->next();
    DeallocateBuffer(buffer);
    buffer = next_buffer;
  }
  *buffer_address = NULL;
}


} }  // namespace v8::internal
