Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 660 matching lines...) | |
| 671 } | 671 } |
| 672 | 672 |
| 673 void ThreadState::scheduleV8FollowupGCIfNeeded(V8GCType gcType) | 673 void ThreadState::scheduleV8FollowupGCIfNeeded(V8GCType gcType) |
| 674 { | 674 { |
| 675 ASSERT(checkThread()); | 675 ASSERT(checkThread()); |
| 676 Heap::reportMemoryUsageForTracing(); | 676 Heap::reportMemoryUsageForTracing(); |
| 677 | 677 |
| 678 if (isGCForbidden()) | 678 if (isGCForbidden()) |
| 679 return; | 679 return; |
| 680 | 680 |
| 681 // If V8 has acted on a memory pressure signal and performed a major GC, | |
| 682 // follow up, if needed. | |
| 683 if (gcType == V8MajorGC && shouldForceMemoryPressureGC()) { | |
haraken 2015/09/26 02:04:37
A better form would be:
if (shouldForceMemoryPressureGC()) …

haraken 2015/09/26 02:04:37
Is there any reason we want to limit this to V8MajorGC …

sof 2015/09/26 06:17:22
v8 will force major GCs on detecting/being signalled …

sof 2015/09/26 06:17:22
We've been over that two-step formulation before i…

sof 2015/09/26 06:17:22
Done.
| 684 Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC); | |
| 685 return; | |
| 686 } | |
| 687 | |
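For reference, one plausible reading of haraken's "better form" comment above is to test shouldForceMemoryPressureGC() on its own and let the V8MajorGC restriction (questioned in the second comment) gate only the follow-up collection. This is a sketch of that restructuring, not the change as landed; all identifiers used here already appear in the surrounding diff:

    // Hypothetical restructuring per the review comments above; the committed
    // code may differ. Check the memory-pressure condition first, then decide
    // whether the V8 GC type should gate the conservative follow-up GC.
    if (shouldForceMemoryPressureGC()) {
        if (gcType == V8MajorGC) {
            Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC);
            return;
        }
    }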
| 681 if (isSweepingInProgress()) | 688 if (isSweepingInProgress()) |
| 682 return; | 689 return; |
| 683 ASSERT(!sweepForbidden()); | 690 ASSERT(!sweepForbidden()); |
| 684 | 691 |
| 685 if (shouldScheduleV8FollowupGC()) | 692 if (shouldScheduleV8FollowupGC()) |
| 686 schedulePreciseGC(); | 693 schedulePreciseGC(); |
| 687 else if (gcType == V8MinorGC) | 694 else if (gcType == V8MinorGC) |
| 688 scheduleIdleGC(); | 695 scheduleIdleGC(); |
| 689 } | 696 } |
| 690 | 697 |
| 691 void ThreadState::schedulePageNavigationGCIfNeeded(float estimatedRemovalRatio) | 698 void ThreadState::schedulePageNavigationGCIfNeeded(float estimatedRemovalRatio) |
| 692 { | 699 { |
| 693 ASSERT(checkThread()); | 700 ASSERT(checkThread()); |
| 694 Heap::reportMemoryUsageForTracing(); | 701 Heap::reportMemoryUsageForTracing(); |
| 695 | 702 |
| 696 if (isGCForbidden()) | 703 if (isGCForbidden()) |
| 697 return; | 704 return; |
| 698 | 705 |
| 699 // Finish on-going lazy sweeping. | 706 // Finish on-going lazy sweeping. |
| 700 // TODO(haraken): It might not make sense to force completeSweep() for all | 707 // TODO(haraken): It might not make sense to force completeSweep() for all |
| 701 // page navigations. | 708 // page navigations. |
| 702 completeSweep(); | 709 completeSweep(); |
| 703 ASSERT(!isSweepingInProgress()); | 710 ASSERT(!isSweepingInProgress()); |
| 704 ASSERT(!sweepForbidden()); | 711 ASSERT(!sweepForbidden()); |
| 705 | 712 |
haraken 2015/09/26 02:04:37
It would be better to call shouldForceMemoryPressureGC() …

sof 2015/09/26 06:17:22
Could you explain your underlying reasoning?
(We …

haraken 2015/09/26 12:38:19
My reasoning is just that it would be better to al…
| 706 if (shouldSchedulePageNavigationGC(estimatedRemovalRatio)) | 713 if (shouldSchedulePageNavigationGC(estimatedRemovalRatio)) |
| 707 schedulePageNavigationGC(); | 714 schedulePageNavigationGC(); |
| 708 } | 715 } |
| 709 | 716 |
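The thread above is truncated, but the suggestion appears to be to give schedulePageNavigationGCIfNeeded() the same early shouldForceMemoryPressureGC() check that scheduleGCIfNeeded() performs below. A minimal sketch of that reading (an assumption about the reviewer's intent, not part of this patch):

    void ThreadState::schedulePageNavigationGCIfNeeded(float estimatedRemovalRatio)
    {
        ASSERT(checkThread());
        Heap::reportMemoryUsageForTracing();

        if (isGCForbidden())
            return;

        // Hypothetical: collect conservatively under memory pressure,
        // mirroring the check in scheduleGCIfNeeded().
        if (shouldForceMemoryPressureGC()) {
            Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC);
            return;
        }

        completeSweep();
        ASSERT(!isSweepingInProgress());
        ASSERT(!sweepForbidden());

        if (shouldSchedulePageNavigationGC(estimatedRemovalRatio))
            schedulePageNavigationGC();
    }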
| 710 void ThreadState::schedulePageNavigationGC() | 717 void ThreadState::schedulePageNavigationGC() |
| 711 { | 718 { |
| 712 ASSERT(checkThread()); | 719 ASSERT(checkThread()); |
| 713 ASSERT(!isSweepingInProgress()); | 720 ASSERT(!isSweepingInProgress()); |
| 714 setGCState(PageNavigationGCScheduled); | 721 setGCState(PageNavigationGCScheduled); |
| 715 } | 722 } |
| 716 | 723 |
| 717 void ThreadState::scheduleGCIfNeeded() | 724 void ThreadState::scheduleGCIfNeeded() |
| 718 { | 725 { |
| 719 ASSERT(checkThread()); | 726 ASSERT(checkThread()); |
| 720 Heap::reportMemoryUsageForTracing(); | 727 Heap::reportMemoryUsageForTracing(); |
| 721 | 728 |
| 722 if (isGCForbidden()) | 729 if (isGCForbidden()) |
| 723 return; | 730 return; |
| 724 | 731 |
| 725 // Allocation is allowed during sweeping, but those allocations should not | 732 // Allocation is allowed during sweeping, but those allocations should not |
| 726 // trigger nested GCs. | 733 // trigger nested GCs. |
| 727 if (isSweepingInProgress()) | 734 if (isSweepingInProgress()) |
| 728 return; | 735 return; |
| 729 ASSERT(!sweepForbidden()); | 736 ASSERT(!sweepForbidden()); |
| 730 | 737 |
| 731 if (shouldForceMemoryPressureGC()) { | 738 if (shouldForceMemoryPressureGC()) { |
haraken 2015/09/26 02:04:37
I think we should move this up to before the isSweepingInProgress() check …

sof 2015/09/26 06:17:22
(ftr, such a move on its own is not sufficient to …

haraken 2015/09/26 12:38:19
Yes, my point is just that it will be better in th…
| 732 Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC); | 739 Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC); |
| 733 return; | 740 return; |
| 734 } | 741 } |
| 735 if (shouldForceConservativeGC()) { | 742 if (shouldForceConservativeGC()) { |
| 736 Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC); | 743 Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC); |
| 737 return; | 744 return; |
| 738 } | 745 } |
| 739 if (shouldSchedulePreciseGC()) | 746 if (shouldSchedulePreciseGC()) |
| 740 schedulePreciseGC(); | 747 schedulePreciseGC(); |
| 741 else if (shouldScheduleIdleGC()) | 748 else if (shouldScheduleIdleGC()) |
| (...skipping 863 matching lines...) | |
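haraken's comment above proposes hoisting the memory-pressure check above the isSweepingInProgress() early return in scheduleGCIfNeeded(); sof notes that the move alone would not be sufficient. Purely as an illustration of the proposed control flow (an assumption, not the committed code), the top of the function would then read:

    if (isGCForbidden())
        return;

    // Hypothetical ordering per the review discussion: consider a
    // memory-pressure GC even while lazy sweeping is still in progress.
    // (Per sof, reordering alone is not sufficient; the in-progress sweep
    // would still need extra handling before collecting.)
    if (shouldForceMemoryPressureGC()) {
        Heap::collectGarbage(HeapPointersOnStack, GCWithoutSweep, Heap::ConservativeGC);
        return;
    }

    // Allocation is allowed during sweeping, but those allocations should not
    // trigger nested GCs.
    if (isSweepingInProgress())
        return;
    ASSERT(!sweepForbidden());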
| 1605 json->beginArray(it->key.ascii().data()); | 1612 json->beginArray(it->key.ascii().data()); |
| 1606 for (size_t age = 0; age <= maxHeapObjectAge; ++age) | 1613 for (size_t age = 0; age <= maxHeapObjectAge; ++age) |
| 1607 json->pushInteger(it->value.ages[age]); | 1614 json->pushInteger(it->value.ages[age]); |
| 1608 json->endArray(); | 1615 json->endArray(); |
| 1609 } | 1616 } |
| 1610 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(TRACE_DISABLED_BY_DEFAULT("blink_gc"), statsName, this, json.release()); | 1617 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(TRACE_DISABLED_BY_DEFAULT("blink_gc"), statsName, this, json.release()); |
| 1611 } | 1618 } |
| 1612 #endif | 1619 #endif |
| 1613 | 1620 |
| 1614 } // namespace blink | 1621 } // namespace blink |