Index: src/heap.cc
===================================================================
--- src/heap.cc (revision 5846)
+++ src/heap.cc (working copy)
@@ -38,7 +38,7 @@
 #include "mark-compact.h"
 #include "natives.h"
 #include "objects-visiting.h"
-#include "scanner.h"
+#include "scanner-base.h"
 #include "scopeinfo.h"
 #include "snapshot.h"
 #include "v8threads.h"
@@ -79,25 +79,34 @@
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
 #if defined(ANDROID)
-int Heap::max_semispace_size_ = 2*MB;
+static const int default_max_semispace_size_ = 2*MB;
 intptr_t Heap::max_old_generation_size_ = 192*MB;
 int Heap::initial_semispace_size_ = 128*KB;
 intptr_t Heap::code_range_size_ = 0;
 intptr_t Heap::max_executable_size_ = max_old_generation_size_;
 #elif defined(V8_TARGET_ARCH_X64)
-int Heap::max_semispace_size_ = 16*MB;
+static const int default_max_semispace_size_ = 16*MB;
 intptr_t Heap::max_old_generation_size_ = 1*GB;
 int Heap::initial_semispace_size_ = 1*MB;
 intptr_t Heap::code_range_size_ = 512*MB;
 intptr_t Heap::max_executable_size_ = 256*MB;
 #else
-int Heap::max_semispace_size_ = 8*MB;
+static const int default_max_semispace_size_ = 8*MB;
 intptr_t Heap::max_old_generation_size_ = 512*MB;
 int Heap::initial_semispace_size_ = 512*KB;
 intptr_t Heap::code_range_size_ = 0;
 intptr_t Heap::max_executable_size_ = 128*MB;
 #endif
 
+// Allow build-time customization of the max semispace size. Building
+// V8 with snapshots and a non-default max semispace size is much
+// easier if you can define it as part of the build environment.
+#if defined(V8_MAX_SEMISPACE_SIZE)
+int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
+#else
+int Heap::max_semispace_size_ = default_max_semispace_size_;
+#endif
+
 // The snapshot semispace size will be the default semispace size if
 // snapshotting is used and will be the requested semispace size as
 // set up by ConfigureHeap otherwise.
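
The new V8_MAX_SEMISPACE_SIZE escape hatch is a plain preprocessor fallback, so the pattern can be tried outside of V8 as well. Below is a minimal standalone sketch of the same idea; only the V8_MAX_SEMISPACE_SIZE macro name comes from the hunk above, everything else (the MB constant, the main() harness) is illustrative.

// Standalone sketch of the override-or-default pattern introduced above.
#include <cstdio>

static const int MB = 1024 * 1024;
static const int default_max_semispace_size = 8 * MB;

#if defined(V8_MAX_SEMISPACE_SIZE)
static const int max_semispace_size = V8_MAX_SEMISPACE_SIZE;
#else
static const int max_semispace_size = default_max_semispace_size;
#endif

int main() {
  // Build with e.g. -DV8_MAX_SEMISPACE_SIZE=2097152 to pick the override;
  // without the define, the platform default above is used.
  std::printf("max semispace size: %d bytes\n", max_semispace_size);
  return 0;
}
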
@@ -395,7 +404,7 @@
   intptr_t total = 0;
   AllSpaces spaces;
   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
-    total += space->Size();
+    total += space->SizeOfObjects();
   }
   return total;
 }
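
Switching the sum from Size() to SizeOfObjects() matters because, judging by the method names and the free-list handling elsewhere in this patch, Size() counts all memory a space has in use while SizeOfObjects() counts only bytes occupied by live objects. The toy types below are illustrative, not V8's; they only show why the two sums diverge once a space carries free-list entries.

// Illustrative toy, not V8 code: a "space" whose allocated memory is
// partly taken up by free-list entries awaiting reuse.
struct ToySpace {
  long allocated_bytes;  // everything the space currently has in use
  long free_list_bytes;  // reclaimed chunks sitting on the free list

  long Size() const { return allocated_bytes; }
  long SizeOfObjects() const { return allocated_bytes - free_list_bytes; }
};

// Summing SizeOfObjects() over all spaces, as the hunk above now does,
// reports live object bytes rather than total allocation.
long TotalObjectBytes(const ToySpace* spaces, int count) {
  long total = 0;
  for (int i = 0; i < count; i++) total += spaces[i].SizeOfObjects();
  return total;
}
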
@@ -3240,7 +3249,8 @@
   const uc32 kMaxSupportedChar = 0xFFFF;
   // Count the number of characters in the UTF-8 string and check if
   // it is an ASCII string.
-  Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
+  Access<ScannerConstants::Utf8Decoder>
+      decoder(ScannerConstants::utf8_decoder());
   decoder->Reset(string.start(), string.length());
   int chars = 0;
   bool is_ascii = true;
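
The surrounding code (only partly visible in this hunk) counts the characters in a UTF-8 buffer and records whether it is pure ASCII before deciding how to allocate the string. For readers who want the idea without V8's Utf8Decoder, here is a rough standalone sketch; it is not V8 code, and it assumes well-formed UTF-8 input.

// Rough standalone sketch (not V8's decoder): count UTF-8 characters and
// detect whether the buffer is pure ASCII. Assumes well-formed input.
#include <cstddef>

static void CountUtf8Chars(const char* data, size_t length,
                           int* chars, bool* is_ascii) {
  *chars = 0;
  *is_ascii = true;
  for (size_t i = 0; i < length; ++(*chars)) {
    unsigned char lead = static_cast<unsigned char>(data[i]);
    if (lead < 0x80) {                         // one-byte (ASCII) sequence
      i += 1;
    } else {
      *is_ascii = false;
      if ((lead & 0xE0) == 0xC0)      i += 2;  // two-byte sequence
      else if ((lead & 0xF0) == 0xE0) i += 3;  // three-byte sequence
      else                            i += 4;  // four-byte sequence
    }
  }
}
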
@@ -4399,13 +4409,10 @@
       MemoryAllocator::Size() + MemoryAllocator::Available();
   *stats->os_error = OS::GetLastError();
   if (take_snapshot) {
-    HeapIterator iterator;
+    HeapIterator iterator(HeapIterator::kPreciseFiltering);
     for (HeapObject* obj = iterator.next();
          obj != NULL;
          obj = iterator.next()) {
-      // Note: snapshot won't be precise because IsFreeListNode returns true
-      // for any bytearray.
-      if (FreeListNode::IsFreeListNode(obj)) continue;
       InstanceType type = obj->map()->instance_type();
       ASSERT(0 <= type && type <= LAST_TYPE);
       stats->objects_per_type[type]++;
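
With this change the stats loop no longer needs to second-guess the iterator: requesting kPreciseFiltering makes the iterator skip free-list nodes itself, which is why the bytearray caveat and the manual FreeListNode::IsFreeListNode() check could be deleted. The loop shape stays the same; the snippet below just restates the usage pattern from this hunk and needs V8's internal headers to compile.

// Usage pattern from the hunk above (requires V8 internals to build).
HeapIterator iterator(HeapIterator::kPreciseFiltering);
for (HeapObject* obj = iterator.next();
     obj != NULL;
     obj = iterator.next()) {
  // Every obj seen here is a real heap object; free list nodes were
  // filtered out by the iterator itself.
}
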
@@ -4760,10 +4767,20 @@
 }
 
 
-SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
+SpaceIterator::SpaceIterator()
+    : current_space_(FIRST_SPACE),
+      iterator_(NULL),
+      size_func_(NULL) {
 }
 
 
+SpaceIterator::SpaceIterator(HeapObjectCallback size_func)
+    : current_space_(FIRST_SPACE),
+      iterator_(NULL),
+      size_func_(size_func) {
+}
+
+
 SpaceIterator::~SpaceIterator() {
   // Delete active iterator if any.
   delete iterator_;
@@ -4798,25 +4815,25 @@
 
   switch (current_space_) {
     case NEW_SPACE:
-      iterator_ = new SemiSpaceIterator(Heap::new_space());
+      iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
       break;
     case OLD_POINTER_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
+      iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
       break;
     case OLD_DATA_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::old_data_space());
+      iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
       break;
     case CODE_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::code_space());
+      iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
       break;
     case MAP_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::map_space());
+      iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
      break;
     case CELL_SPACE:
-      iterator_ = new HeapObjectIterator(Heap::cell_space());
+      iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
       break;
     case LO_SPACE:
-      iterator_ = new LargeObjectIterator(Heap::lo_space());
+      iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
       break;
   }
 
@@ -4826,11 +4843,58 @@
 }
 
 
-HeapIterator::HeapIterator() {
+class FreeListNodesFilter {
+ public:
+  FreeListNodesFilter() {
+    MarkFreeListNodes();
+  }
+
+  inline bool IsFreeListNode(HeapObject* object) {
+    if (object->IsMarked()) {
+      object->ClearMark();
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+ private:
+  void MarkFreeListNodes() {
+    Heap::old_pointer_space()->MarkFreeListNodes();
+    Heap::old_data_space()->MarkFreeListNodes();
+    MarkCodeSpaceFreeListNodes();
+    Heap::map_space()->MarkFreeListNodes();
+    Heap::cell_space()->MarkFreeListNodes();
+  }
+
+  void MarkCodeSpaceFreeListNodes() {
+    // For code space, using FreeListNode::IsFreeListNode is OK.
+    HeapObjectIterator iter(Heap::code_space());
+    for (HeapObject* obj = iter.next_object();
+         obj != NULL;
+         obj = iter.next_object()) {
+      if (FreeListNode::IsFreeListNode(obj)) obj->SetMark();
+    }
+  }
+
+  AssertNoAllocation no_alloc;
+};
+
+
+HeapIterator::HeapIterator()
+    : filtering_(HeapIterator::kNoFiltering),
+      filter_(NULL) {
   Init();
 }
 
 
+HeapIterator::HeapIterator(HeapIterator::FreeListNodesFiltering filtering)
+    : filtering_(filtering),
+      filter_(NULL) {
+  Init();
+}
+
+
 HeapIterator::~HeapIterator() {
   Shutdown();
 }
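
FreeListNodesFilter works in two phases: it marks every free-list node once, up front (with AssertNoAllocation ensuring the set of nodes cannot change), and then IsFreeListNode() consumes a mark the first time the iterator reaches that node, so the heap ends up unmarked again once iteration completes. The self-contained toy below mirrors that test-and-clear discipline with an ordinary set instead of V8's mark bits; all names in it are illustrative, not V8's.

// Illustrative toy (not V8 code): mark unwanted entries up front, then
// clear each mark the first time it is consulted during iteration.
#include <cstdio>
#include <set>

class ToyFilter {
 public:
  // Pretend slots 2 and 5 were found to be "free list nodes" in a marking pass.
  ToyFilter() { marked_.insert(2); marked_.insert(5); }

  // Returns true at most once per marked slot, like IsFreeListNode() above,
  // which clears the mark it finds so nothing stays marked afterwards.
  bool IsFreeSlot(int slot) {
    std::set<int>::iterator it = marked_.find(slot);
    if (it == marked_.end()) return false;
    marked_.erase(it);
    return true;
  }

 private:
  std::set<int> marked_;
};

int main() {
  ToyFilter filter;
  for (int slot = 0; slot < 8; ++slot) {
    if (filter.IsFreeSlot(slot)) continue;  // skip "free list" entries
    std::printf("live slot %d\n", slot);
  }
  return 0;
}
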
@@ -4838,20 +4902,44 @@
 
 void HeapIterator::Init() {
   // Start the iteration.
-  space_iterator_ = new SpaceIterator();
+  if (filtering_ == kPreciseFiltering) {
+    filter_ = new FreeListNodesFilter;
+    space_iterator_ =
+        new SpaceIterator(MarkCompactCollector::SizeOfMarkedObject);
+  } else {
+    space_iterator_ = new SpaceIterator;
+  }
   object_iterator_ = space_iterator_->next();
 }
 
 
 void HeapIterator::Shutdown() {
+#ifdef DEBUG
+  // Assert that in precise mode we have iterated through all
+  // objects. Otherwise, heap will be left in an inconsistent state.
+  if (filtering_ == kPreciseFiltering) {
+    ASSERT(object_iterator_ == NULL);
+  }
+#endif
   // Make sure the last iterator is deallocated.
   delete space_iterator_;
   space_iterator_ = NULL;
   object_iterator_ = NULL;
+  delete filter_;
+  filter_ = NULL;
 }
 
 
 HeapObject* HeapIterator::next() {
+  if (filter_ == NULL) return NextObject();
+
+  HeapObject* obj = NextObject();
+  while (obj != NULL && filter_->IsFreeListNode(obj)) obj = NextObject();
+  return obj;
+}
+
+
+HeapObject* HeapIterator::NextObject() {
   // No iterator means we are done.
   if (object_iterator_ == NULL) return NULL;
 
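
One practical consequence of the DEBUG assert in Shutdown(): a precise-filtering iterator is expected to be driven until next() returns NULL, because abandoning the iteration early would leave the marks set by FreeListNodesFilter on the free-list nodes that were never reached (the "inconsistent state" the comment warns about). A caller therefore looks like the sketch below; it assumes V8's internal headers and is only a usage illustration.

// Sketch of the intended calling convention (requires V8 internals to build).
{
  HeapIterator it(HeapIterator::kPreciseFiltering);
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    // ... inspect obj; avoid breaking out early in precise mode ...
  }
}  // Destructor runs Shutdown(); in DEBUG builds it checks iteration finished.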