Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 1bcd5d09f628a84be179b1dddc8a62be00c53e4a..0e3750664eaa7941824c14b645a7f5a001bc4d71 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -150,7 +150,7 @@ Heap::Heap()
 #ifdef VERIFY_HEAP
       no_weak_object_verification_scope_depth_(0),
 #endif
-      allocation_sites_scratchpad_length(0),
+      allocation_sites_scratchpad_length_(0),
       promotion_queue_(this),
       configured_(false),
       external_string_table_(this),
@@ -516,16 +516,17 @@ void Heap::ProcessPretenuringFeedback() {
     // If the scratchpad overflowed, we have to iterate over the allocation
     // sites list.
     bool use_scratchpad =
-        allocation_sites_scratchpad_length < kAllocationSiteScratchpadSize;
+        allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize;
 
     int i = 0;
     Object* list_element = allocation_sites_list();
     bool trigger_deoptimization = false;
     while (use_scratchpad ?
-           i < allocation_sites_scratchpad_length :
+           i < allocation_sites_scratchpad_length_ :
            list_element->IsAllocationSite()) {
       AllocationSite* site = use_scratchpad ?
-          allocation_sites_scratchpad[i] : AllocationSite::cast(list_element);
+          AllocationSite::cast(allocation_sites_scratchpad()->get(i)) :
+          AllocationSite::cast(list_element);
       allocation_mementos_found += site->memento_found_count();
       if (site->memento_found_count() > 0) {
         active_allocation_sites++;
@@ -546,12 +547,9 @@ void Heap::ProcessPretenuringFeedback() {
 
     if (trigger_deoptimization) isolate_->stack_guard()->DeoptMarkedCode();
 
-    allocation_sites_scratchpad_length = 0;
+    FlushAllocationSitesScratchpad();
 
-    // TODO(mvstanton): Pretenure decisions are only made once for an allocation
-    // site. Find a sane way to decide about revisiting the decision later.
-
-    if (FLAG_trace_track_allocation_sites &&
+    if (FLAG_trace_pretenuring_statistics &&
        (allocation_mementos_found > 0 ||
         tenure_decisions > 0 ||
         dont_tenure_decisions > 0)) {
@@ -739,7 +737,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
   const int kMaxNumberOfAttempts = 7;
   const int kMinNumberOfAttempts = 2;
   for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
-    if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL) &&
+    if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) &&
         attempt + 1 >= kMinNumberOfAttempts) {
       break;
     }
@@ -751,8 +749,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
 }
 
 
-bool Heap::CollectGarbage(AllocationSpace space,
-                          GarbageCollector collector,
+bool Heap::CollectGarbage(GarbageCollector collector,
                           const char* gc_reason,
                           const char* collector_reason,
                           const v8::GCCallbackFlags gc_callback_flags) {
@@ -768,6 +765,18 @@ bool Heap::CollectGarbage(AllocationSpace space,
   allocation_timeout_ = Max(6, FLAG_gc_interval);
 #endif
 
+  // There may be an allocation memento behind every object in new space.
+  // If we evacuate a not full new space or if we are on the last page of
+  // the new space, then there may be uninitialized memory behind the top
+  // pointer of the new space page. We store a filler object there to
+  // identify the unused space.
+  Address from_top = new_space_.top();
+  Address from_limit = new_space_.limit();
+  if (from_top < from_limit) {
+    int remaining_in_page = static_cast<int>(from_limit - from_top);
+    CreateFillerObjectAt(from_top, remaining_in_page);
+  }
+
   if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Scavenge during marking.\n");
@@ -3297,6 +3306,12 @@ bool Heap::CreateInitialObjects() {
   // Handling of script id generation is in Factory::NewScript.
   set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId));
 
+  { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad();
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_allocation_sites_scratchpad(FixedArray::cast(obj));
+  InitializeAllocationSitesScratchpad();
+
   // Initialize keyed lookup cache.
   isolate_->keyed_lookup_cache()->Clear();
 
@@ -3586,6 +3601,39 @@ MaybeObject* Heap::Uint32ToString(uint32_t value,
 }
 
 
+MaybeObject* Heap::AllocateAllocationSitesScratchpad() {
+  MaybeObject* maybe_obj =
+      AllocateFixedArray(kAllocationSiteScratchpadSize, TENURED);
+  return maybe_obj;
+}
+
+
+void Heap::FlushAllocationSitesScratchpad() {
+  for (int i = 0; i < allocation_sites_scratchpad_length_; i++) {
+    allocation_sites_scratchpad()->set_undefined(i);
+  }
+  allocation_sites_scratchpad_length_ = 0;
+}
+
+
+void Heap::InitializeAllocationSitesScratchpad() {
+  ASSERT(allocation_sites_scratchpad()->length() ==
+         kAllocationSiteScratchpadSize);
+  for (int i = 0; i < kAllocationSiteScratchpadSize; i++) {
+    allocation_sites_scratchpad()->set_undefined(i);
+  }
+}
+
+
+void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) {
+  if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) {
+    allocation_sites_scratchpad()->set(
+        allocation_sites_scratchpad_length_, site);
+    allocation_sites_scratchpad_length_++;
+  }
+}
+
+
 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
   return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
 }
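
For readers outside the V8 tree, the following is a minimal, self-contained sketch of the bounded-scratchpad pattern this patch moves to. It is not V8 code; the names (Site, Scratchpad, kScratchpadSize) are invented for illustration. The idea mirrors the hunks above: record sites until the fixed-size buffer is full, fall back to walking the full list when it overflows, and flush the buffer after each feedback pass.

// Illustrative sketch only (invented names), standard C++.
#include <cstdio>
#include <vector>

struct Site { int mementos_found = 0; };

const int kScratchpadSize = 4;  // stand-in for kAllocationSiteScratchpadSize

struct Scratchpad {
  Site* slots[kScratchpadSize] = {nullptr};
  int length = 0;

  // Record a site unless the buffer is already full; overflow is tolerated,
  // consumers then walk the full site list instead.
  void Add(Site* site) {
    if (length < kScratchpadSize) slots[length++] = site;
  }

  // Clear all recorded entries after a feedback pass.
  void Flush() {
    for (int i = 0; i < length; i++) slots[i] = nullptr;
    length = 0;
  }
};

int main() {
  std::vector<Site> all_sites(6);
  for (auto& s : all_sites) s.mementos_found = 1;

  Scratchpad pad;
  for (auto& s : all_sites) pad.Add(&s);  // only the first 4 are recorded

  // Use the scratchpad only if it did not fill up (same test as above).
  bool use_scratchpad = pad.length < kScratchpadSize;
  int found = 0;
  if (use_scratchpad) {
    for (int i = 0; i < pad.length; i++) found += pad.slots[i]->mementos_found;
  } else {
    for (auto& s : all_sites) found += s.mementos_found;  // fall back to list
  }
  pad.Flush();
  std::printf("mementos found: %d\n", found);  // prints 6: overflowed, so list was walked
  return 0;
}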