OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 624 matching lines...) |
635 // Memory is exhausted and we will die. | 635 // Memory is exhausted and we will die. |
636 V8::FatalProcessOutOfMemory("Committing semi space failed."); | 636 V8::FatalProcessOutOfMemory("Committing semi space failed."); |
637 } | 637 } |
638 | 638 |
639 | 639 |
640 void Heap::ClearJSFunctionResultCaches() { | 640 void Heap::ClearJSFunctionResultCaches() { |
641 if (isolate_->bootstrapper()->IsActive()) return; | 641 if (isolate_->bootstrapper()->IsActive()) return; |
642 | 642 |
643 Object* context = global_contexts_list_; | 643 Object* context = global_contexts_list_; |
644 while (!context->IsUndefined()) { | 644 while (!context->IsUndefined()) { |
645 // Get the caches for this context: | 645 // Get the caches for this context. GC can happen when the context |
646 FixedArray* caches = | 646 // is not fully initialized, so the caches can be undefined. |
647 Context::cast(context)->jsfunction_result_caches(); | 647 Object* caches_or_undefined = |
648 // Clear the caches: | 648 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); |
649 int length = caches->length(); | 649 if (!caches_or_undefined->IsUndefined()) { |
650 for (int i = 0; i < length; i++) { | 650 FixedArray* caches = FixedArray::cast(caches_or_undefined); |
651 JSFunctionResultCache::cast(caches->get(i))->Clear(); | 651 // Clear the caches: |
| 652 int length = caches->length(); |
| 653 for (int i = 0; i < length; i++) { |
| 654 JSFunctionResultCache::cast(caches->get(i))->Clear(); |
| 655 } |
652 } | 656 } |
653 // Get the next context: | 657 // Get the next context: |
654 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 658 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
655 } | 659 } |
656 } | 660 } |
657 | 661 |
658 | 662 |
659 | 663 |
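The new side of this hunk replaces the typed jsfunction_result_caches() accessor with a raw read of Context::JSFUNCTION_RESULT_CACHES_INDEX followed by an IsUndefined() check, because a GC can run while a context is still being set up and observe a slot that has not been written yet. A minimal standalone sketch of that guard pattern, using hypothetical stand-in types (ContextStub, CacheStub) rather than the real V8 classes:

// Sketch only: ContextStub and CacheStub are hypothetical stand-ins for the
// real V8 Context/FixedArray/JSFunctionResultCache types; the point is the
// "slot may still be undefined" guard, not the real object layout.
#include <optional>
#include <vector>

struct CacheStub {
  void Clear() { entries.clear(); }
  std::vector<int> entries;
};

struct ContextStub {
  // An empty optional plays the role of the undefined sentinel that a
  // partially initialized context still holds in its caches slot.
  std::optional<std::vector<CacheStub>> jsfunction_result_caches;
};

void ClearResultCaches(ContextStub* context) {
  // A GC can run before the slot is written, so test it before using it.
  if (!context->jsfunction_result_caches.has_value()) return;
  for (CacheStub& cache : *context->jsfunction_result_caches) {
    cache.Clear();
  }
}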
660 void Heap::ClearNormalizedMapCaches() { | 664 void Heap::ClearNormalizedMapCaches() { |
661 if (isolate_->bootstrapper()->IsActive() && | 665 if (isolate_->bootstrapper()->IsActive() && |
662 !incremental_marking()->IsMarking()) { | 666 !incremental_marking()->IsMarking()) { |
663 return; | 667 return; |
664 } | 668 } |
665 | 669 |
666 Object* context = global_contexts_list_; | 670 Object* context = global_contexts_list_; |
667 while (!context->IsUndefined()) { | 671 while (!context->IsUndefined()) { |
668 Context::cast(context)->normalized_map_cache()->Clear(); | 672 // GC can happen when the context is not fully initialized, |
| 673 // so the cache can be undefined. |
| 674 Object* cache = |
| 675 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 676 if (!cache->IsUndefined()) { |
| 677 NormalizedMapCache::cast(cache)->Clear(); |
| 678 } |
669 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 679 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
670 } | 680 } |
671 } | 681 } |
672 | 682 |
673 | 683 |
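Both hunks in this patch share the same traversal: start at global_contexts_list_ and follow the NEXT_CONTEXT_LINK slot of each context until the undefined sentinel ends the list. A minimal sketch of that walk, with hypothetical stand-in types rather than real V8 objects:

// Node and ForEachContext are hypothetical; in the real code the link is the
// NEXT_CONTEXT_LINK slot and the terminating sentinel is the undefined value.
struct Node {
  Node* next = nullptr;  // plays the role of Context::NEXT_CONTEXT_LINK
};

template <typename Visitor>
void ForEachContext(Node* head, Visitor visit) {
  // nullptr stands in for the undefined value that ends the real list.
  for (Node* current = head; current != nullptr; current = current->next) {
    visit(current);
  }
}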
674 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { | 684 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { |
675 double survival_rate = | 685 double survival_rate = |
676 (static_cast<double>(young_survivors_after_last_gc_) * 100) / | 686 (static_cast<double>(young_survivors_after_last_gc_) * 100) / |
677 start_new_space_size; | 687 start_new_space_size; |
678 | 688 |
(...skipping 5912 matching lines...) |
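The fragment of Heap::UpdateSurvivalRateTrend() visible above computes the survival rate as the surviving new-space bytes times 100 divided by the new-space size at the start of the GC. A small worked example with made-up numbers (not taken from a real heap):

#include <cstdio>

int main() {
  // Hypothetical sizes: 512 KB survived out of a 4 MB new space.
  int young_survivors_after_last_gc = 512 * 1024;
  int start_new_space_size = 4 * 1024 * 1024;
  double survival_rate =
      (static_cast<double>(young_survivors_after_last_gc) * 100) /
      start_new_space_size;
  std::printf("survival rate: %.1f%%\n", survival_rate);  // prints 12.5%
}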
6591 isolate_->heap()->store_buffer()->Compact(); | 6601 isolate_->heap()->store_buffer()->Compact(); |
6592 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6602 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6593 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6603 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6594 next = chunk->next_chunk(); | 6604 next = chunk->next_chunk(); |
6595 isolate_->memory_allocator()->Free(chunk); | 6605 isolate_->memory_allocator()->Free(chunk); |
6596 } | 6606 } |
6597 chunks_queued_for_free_ = NULL; | 6607 chunks_queued_for_free_ = NULL; |
6598 } | 6608 } |
6599 | 6609 |
6600 } } // namespace v8::internal | 6610 } } // namespace v8::internal |
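Heap::FreeQueuedChunks() above walks the singly linked queue of chunks, reading next_chunk() before freeing the current chunk so the link is never loaded from released memory, and finally resets the queue head. A standalone sketch of that idiom, with a hypothetical Chunk type in place of MemoryChunk:

struct Chunk {
  Chunk* next = nullptr;
};

void FreeQueuedChunks(Chunk*& queue_head) {
  Chunk* next = nullptr;
  for (Chunk* chunk = queue_head; chunk != nullptr; chunk = next) {
    next = chunk->next;  // capture the link first...
    delete chunk;        // ...then release the node
  }
  queue_head = nullptr;  // the queue is empty once everything is freed
}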