OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 431 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
442 if (!FLAG_collect_maps) ReattachInitialMaps(); | 442 if (!FLAG_collect_maps) ReattachInitialMaps(); |
443 | 443 |
444 #ifdef DEBUG | 444 #ifdef DEBUG |
445 if (FLAG_verify_native_context_separation) { | 445 if (FLAG_verify_native_context_separation) { |
446 VerifyNativeContextSeparation(heap_); | 446 VerifyNativeContextSeparation(heap_); |
447 } | 447 } |
448 #endif | 448 #endif |
449 | 449 |
450 #ifdef VERIFY_HEAP | 450 #ifdef VERIFY_HEAP |
451 if (heap()->weak_embedded_objects_verification_enabled()) { | 451 if (heap()->weak_embedded_objects_verification_enabled()) { |
452 VerifyWeakEmbeddedObjectsInOptimizedCode(); | 452 VerifyWeakEmbeddedObjectsInCode(); |
453 } | 453 } |
454 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { | 454 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { |
455 VerifyOmittedMapChecks(); | 455 VerifyOmittedMapChecks(); |
456 } | 456 } |
457 #endif | 457 #endif |
458 | 458 |
459 Finish(); | 459 Finish(); |
460 | 460 |
461 if (marking_parity_ == EVEN_MARKING_PARITY) { | 461 if (marking_parity_ == EVEN_MARKING_PARITY) { |
462 marking_parity_ = ODD_MARKING_PARITY; | 462 marking_parity_ = ODD_MARKING_PARITY; |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
503 | 503 |
504 LargeObjectIterator it(heap_->lo_space()); | 504 LargeObjectIterator it(heap_->lo_space()); |
505 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 505 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
506 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 506 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
507 CHECK(Marking::IsWhite(mark_bit)); | 507 CHECK(Marking::IsWhite(mark_bit)); |
508 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 508 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
509 } | 509 } |
510 } | 510 } |
511 | 511 |
512 | 512 |
513 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() { | 513 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
514 HeapObjectIterator code_iterator(heap()->code_space()); | 514 HeapObjectIterator code_iterator(heap()->code_space()); |
515 for (HeapObject* obj = code_iterator.Next(); | 515 for (HeapObject* obj = code_iterator.Next(); |
516 obj != NULL; | 516 obj != NULL; |
517 obj = code_iterator.Next()) { | 517 obj = code_iterator.Next()) { |
518 Code* code = Code::cast(obj); | 518 Code* code = Code::cast(obj); |
519 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; | 519 if (!code->is_optimized_code() && !code->is_weak_stub()) continue; |
520 if (WillBeDeoptimized(code)) continue; | 520 if (WillBeDeoptimized(code)) continue; |
521 code->VerifyEmbeddedObjectsDependency(); | 521 code->VerifyEmbeddedObjectsDependency(); |
522 } | 522 } |
523 } | 523 } |
524 | 524 |
525 | 525 |
526 void MarkCompactCollector::VerifyOmittedMapChecks() { | 526 void MarkCompactCollector::VerifyOmittedMapChecks() { |
527 HeapObjectIterator iterator(heap()->map_space()); | 527 HeapObjectIterator iterator(heap()->map_space()); |
528 for (HeapObject* obj = iterator.Next(); | 528 for (HeapObject* obj = iterator.Next(); |
529 obj != NULL; | 529 obj != NULL; |
(...skipping 2183 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2713 DisallowHeapAllocation no_allocation; | 2713 DisallowHeapAllocation no_allocation; |
2714 DependentCode::GroupStartIndexes starts(entries); | 2714 DependentCode::GroupStartIndexes starts(entries); |
2715 int number_of_entries = starts.number_of_entries(); | 2715 int number_of_entries = starts.number_of_entries(); |
2716 if (number_of_entries == 0) return; | 2716 if (number_of_entries == 0) return; |
2717 for (int i = 0; i < number_of_entries; i++) { | 2717 for (int i = 0; i < number_of_entries; i++) { |
2718 // If the entry is compilation info then the map must be alive, | 2718 // If the entry is compilation info then the map must be alive, |
2719 // and ClearAndDeoptimizeDependentCode shouldn't be called. | 2719 // and ClearAndDeoptimizeDependentCode shouldn't be called. |
2720 ASSERT(entries->is_code_at(i)); | 2720 ASSERT(entries->is_code_at(i)); |
2721 Code* code = entries->code_at(i); | 2721 Code* code = entries->code_at(i); |
2722 | 2722 |
2723 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2723 if (code->is_weak_stub()) { |
Toon Verwaest 2014/04/03 15:18:36: Also IsMarked(code)?
ulan 2014/04/03 15:40:26: Good catch, done.
| |
2724 IC::InvalidateMapsAndHandlers(code); | |
2725 } else if (IsMarked(code) && !code->marked_for_deoptimization()) { | |
2724 code->set_marked_for_deoptimization(true); | 2726 code->set_marked_for_deoptimization(true); |
2725 code->InvalidateEmbeddedObjects(); | 2727 code->InvalidateEmbeddedObjects(); |
2726 have_code_to_deoptimize_ = true; | 2728 have_code_to_deoptimize_ = true; |
2727 } | 2729 } |
2728 entries->clear_at(i); | 2730 entries->clear_at(i); |
2729 } | 2731 } |
2730 } | 2732 } |
2731 | 2733 |
2732 | 2734 |
2733 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { | 2735 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { |
(...skipping 668 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3402 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3404 MarkBit mark_bit = Marking::MarkBitFrom(code); |
3403 if (Marking::IsWhite(mark_bit)) return; | 3405 if (Marking::IsWhite(mark_bit)) return; |
3404 | 3406 |
3405 invalidated_code_.Add(code); | 3407 invalidated_code_.Add(code); |
3406 } | 3408 } |
3407 } | 3409 } |
3408 | 3410 |
3409 | 3411 |
3410 // Return true if the given code is deoptimized or will be deoptimized. | 3412 // Return true if the given code is deoptimized or will be deoptimized. |
3411 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3413 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3412 return code->marked_for_deoptimization(); | 3414 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3413 } | 3415 } |
3414 | 3416 |
3415 | 3417 |
3416 bool MarkCompactCollector::MarkInvalidatedCode() { | 3418 bool MarkCompactCollector::MarkInvalidatedCode() { |
3417 bool code_marked = false; | 3419 bool code_marked = false; |
3418 | 3420 |
3419 int length = invalidated_code_.length(); | 3421 int length = invalidated_code_.length(); |
3420 for (int i = 0; i < length; i++) { | 3422 for (int i = 0; i < length; i++) { |
3421 Code* code = invalidated_code_[i]; | 3423 Code* code = invalidated_code_[i]; |
3422 | 3424 |
(...skipping 1101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4524 while (buffer != NULL) { | 4526 while (buffer != NULL) { |
4525 SlotsBuffer* next_buffer = buffer->next(); | 4527 SlotsBuffer* next_buffer = buffer->next(); |
4526 DeallocateBuffer(buffer); | 4528 DeallocateBuffer(buffer); |
4527 buffer = next_buffer; | 4529 buffer = next_buffer; |
4528 } | 4530 } |
4529 *buffer_address = NULL; | 4531 *buffer_address = NULL; |
4530 } | 4532 } |
4531 | 4533 |
4532 | 4534 |
4533 } } // namespace v8::internal | 4535 } } // namespace v8::internal |
OLD | NEW |