Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 590 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 601 // not matter, so long as we do not specify NEW_SPACE, which would not | 601 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 602 // cause a full GC. | 602 // cause a full GC. |
| 603 // Major GC would invoke weak handle callbacks on weakly reachable | 603 // Major GC would invoke weak handle callbacks on weakly reachable |
| 604 // handles, but won't collect weakly reachable objects until next | 604 // handles, but won't collect weakly reachable objects until next |
| 605 // major GC. Therefore if we collect aggressively and a weak handle callback | 605 // major GC. Therefore if we collect aggressively and a weak handle callback |
| 606 // has been invoked, we rerun major GC to release objects which become | 606 // has been invoked, we rerun major GC to release objects which become |
| 607 // garbage. | 607 // garbage. |
| 608 // Note: as weak callbacks can execute arbitrary code, we cannot | 608 // Note: as weak callbacks can execute arbitrary code, we cannot |
| 609 // hope that eventually there will be no weak callback invocations. | 609 // hope that eventually there will be no weak callback invocations. |
| 610 // Therefore stop recollecting after several attempts. | 610 // Therefore stop recollecting after several attempts. |
| 611 if (FLAG_concurrent_recompilation) { | |
| 612 // The optimizing compiler may be unnecessarily holding onto memory. | |
|
Hannes Payer (out of office)
2013/09/24 13:28:07
on to
| |
| 613 DisallowHeapAllocation no_recursive_gc; | |
| 614 isolate()->optimizing_compiler_thread()->Flush(); | |
| 615 } | |
| 611 mark_compact_collector()->SetFlags(kMakeHeapIterableMask | | 616 mark_compact_collector()->SetFlags(kMakeHeapIterableMask | |
| 612 kReduceMemoryFootprintMask); | 617 kReduceMemoryFootprintMask); |
| 613 isolate_->compilation_cache()->Clear(); | 618 isolate_->compilation_cache()->Clear(); |
| 614 const int kMaxNumberOfAttempts = 7; | 619 const int kMaxNumberOfAttempts = 7; |
| 615 const int kMinNumberOfAttempts = 2; | 620 const int kMinNumberOfAttempts = 2; |
| 616 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 621 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
| 617 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL) && | 622 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL) && |
| 618 attempt + 1 >= kMinNumberOfAttempts) { | 623 attempt + 1 >= kMinNumberOfAttempts) { |
| 619 break; | 624 break; |
| 620 } | 625 } |
| (...skipping 7278 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 7899 if (FLAG_concurrent_recompilation) { | 7904 if (FLAG_concurrent_recompilation) { |
| 7900 heap_->relocation_mutex_->Lock(); | 7905 heap_->relocation_mutex_->Lock(); |
| 7901 #ifdef DEBUG | 7906 #ifdef DEBUG |
| 7902 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7907 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 7903 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7908 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 7904 #endif // DEBUG | 7909 #endif // DEBUG |
| 7905 } | 7910 } |
| 7906 } | 7911 } |
| 7907 | 7912 |
| 7908 } } // namespace v8::internal | 7913 } } // namespace v8::internal |
| OLD | NEW |