Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 9178014: Revert 10413-10416 initial memory use reduction due to (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 11 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 564 matching lines...)
     int map_space_size,
     int cell_space_size,
     int large_object_size) {
   NewSpace* new_space = Heap::new_space();
   PagedSpace* old_pointer_space = Heap::old_pointer_space();
   PagedSpace* old_data_space = Heap::old_data_space();
   PagedSpace* code_space = Heap::code_space();
   PagedSpace* map_space = Heap::map_space();
   PagedSpace* cell_space = Heap::cell_space();
   LargeObjectSpace* lo_space = Heap::lo_space();
-  bool one_old_space_gc_has_been_performed = false;
   bool gc_performed = true;
-  bool old_space_gc_performed;
   while (gc_performed) {
-    old_space_gc_performed = false;
     gc_performed = false;
     if (!new_space->ReserveSpace(new_space_size)) {
       Heap::CollectGarbage(NEW_SPACE);
       gc_performed = true;
     }
     if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
       Heap::CollectGarbage(OLD_POINTER_SPACE);
       gc_performed = true;
-      old_space_gc_performed = true;
     }
     if (!(old_data_space->ReserveSpace(data_space_size))) {
       Heap::CollectGarbage(OLD_DATA_SPACE);
       gc_performed = true;
-      old_space_gc_performed = true;
     }
     if (!(code_space->ReserveSpace(code_space_size))) {
       Heap::CollectGarbage(CODE_SPACE);
       gc_performed = true;
-      old_space_gc_performed = true;
     }
     if (!(map_space->ReserveSpace(map_space_size))) {
       Heap::CollectGarbage(MAP_SPACE);
       gc_performed = true;
-      old_space_gc_performed = true;
     }
     if (!(cell_space->ReserveSpace(cell_space_size))) {
       Heap::CollectGarbage(CELL_SPACE);
       gc_performed = true;
-      old_space_gc_performed = true;
     }
     // We add a slack-factor of 2 in order to have space for a series of
     // large-object allocations that are only just larger than the page size.
     large_object_size *= 2;
     // The ReserveSpace method on the large object space checks how much
     // we can expand the old generation.  This includes expansion caused by
     // allocation in the other spaces.
     large_object_size += cell_space_size + map_space_size + code_space_size +
         data_space_size + pointer_space_size;
-
-    // If we already did one GC in order to make space in old space, there is
-    // no sense in doing another one.  We will attempt to force through the
-    // large object space allocation, which comes directly from the OS,
-    // regardless of any soft limit.
-    if (!one_old_space_gc_has_been_performed &&
-        !(lo_space->ReserveSpace(large_object_size))) {
+    if (!(lo_space->ReserveSpace(large_object_size))) {
       Heap::CollectGarbage(LO_SPACE);
       gc_performed = true;
     }
-    if (old_space_gc_performed) one_old_space_gc_has_been_performed = true;
   }
 }
 
 
 void Heap::EnsureFromSpaceIsCommitted() {
   if (new_space_.CommitFromSpaceIfNeeded()) return;
 
   // Committing memory to from space failed.
   // Try shrinking and try again.
   Shrink();
(...skipping 6020 matching lines...)
   isolate_->heap()->store_buffer()->Compact();
   isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
   for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
     next = chunk->next_chunk();
     isolate_->memory_allocator()->Free(chunk);
   }
   chunks_queued_for_free_ = NULL;
 }
 
 } }  // namespace v8::internal
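
For readers skimming the revert: the restored Heap::ReserveSpace follows a reserve-or-collect pattern. It tries to reserve the requested amount in each space, collects garbage in any space whose reservation fails, and repeats until a full pass completes with no collections. The reverted change had additionally remembered whether an old-space GC already ran in the pass so that a failed large-object reservation would not trigger a second collection. Below is a minimal stand-alone sketch of that retry pattern, not V8 code; Space, CollectGarbage, ReserveAllSpaces, and the sizes are hypothetical stand-ins for illustration only.

    #include <cstdio>

    struct Space {
      int capacity;  // bytes still available for reservation in this space
      bool ReserveSpace(int bytes) { return bytes <= capacity; }
    };

    // Hypothetical collector: pretend a GC freed enough room in the failing space.
    static void CollectGarbage(Space* space, int needed) {
      space->capacity = needed;
    }

    // Reserve sizes[i] bytes in spaces[i]; GC any space that cannot satisfy its
    // reservation and retry until one pass completes with no collections.
    static void ReserveAllSpaces(Space* spaces, const int* sizes, int count) {
      bool gc_performed = true;
      while (gc_performed) {
        gc_performed = false;
        for (int i = 0; i < count; ++i) {
          if (!spaces[i].ReserveSpace(sizes[i])) {
            CollectGarbage(&spaces[i], sizes[i]);
            gc_performed = true;  // a collection ran, so re-check every space
          }
        }
      }
    }

    int main() {
      Space spaces[3] = {{100}, {10}, {50}};
      const int sizes[3] = {80, 40, 40};
      ReserveAllSpaces(spaces, sizes, 3);
      std::printf("all reservations satisfied\n");
      return 0;
    }

As the in-code comments in the diff note, the real function also doubles large_object_size and adds the other spaces' reservations to it before reserving in the large object space, because that space's ReserveSpace checks how much the old generation as a whole can still expand.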