| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 527 matching lines...) |
| 538 dont_tenure_decisions++; | 538 dont_tenure_decisions++; |
| 539 } | 539 } |
| 540 allocation_sites++; | 540 allocation_sites++; |
| 541 if (use_scratchpad) { | 541 if (use_scratchpad) { |
| 542 i++; | 542 i++; |
| 543 } else { | 543 } else { |
| 544 list_element = site->weak_next(); | 544 list_element = site->weak_next(); |
| 545 } | 545 } |
| 546 } | 546 } |
| 547 | 547 |
| 548 if (trigger_deoptimization) { | 548 if (trigger_deoptimization) isolate_->stack_guard()->DeoptMarkedCode(); |
| 549 isolate_->stack_guard()->DeoptMarkedAllocationSites(); | |
| 550 } | |
| 551 | 549 |
| 552 FlushAllocationSitesScratchpad(); | 550 FlushAllocationSitesScratchpad(); |
| 553 | 551 |
| 554 if (FLAG_trace_pretenuring_statistics && | 552 if (FLAG_trace_pretenuring_statistics && |
| 555 (allocation_mementos_found > 0 || | 553 (allocation_mementos_found > 0 || |
| 556 tenure_decisions > 0 || | 554 tenure_decisions > 0 || |
| 557 dont_tenure_decisions > 0)) { | 555 dont_tenure_decisions > 0)) { |
| 558 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " | 556 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " |
| 559 "#mementos, #tenure decisions, #donttenure decisions) " | 557 "#mementos, #tenure decisions, #donttenure decisions) " |
| 560 "(%s, %d, %d, %d, %d, %d)\n", | 558 "(%s, %d, %d, %d, %d, %d)\n", |
| 561 use_scratchpad ? "use scratchpad" : "use list", | 559 use_scratchpad ? "use scratchpad" : "use list", |
| 562 allocation_sites, | 560 allocation_sites, |
| 563 active_allocation_sites, | 561 active_allocation_sites, |
| 564 allocation_mementos_found, | 562 allocation_mementos_found, |
| 565 tenure_decisions, | 563 tenure_decisions, |
| 566 dont_tenure_decisions); | 564 dont_tenure_decisions); |
| 567 } | 565 } |
| 568 } | 566 } |
| 569 } | 567 } |
| 570 | 568 |
| 571 | 569 |
| 572 void Heap::DeoptMarkedAllocationSites() { | |
| 573 // TODO(hpayer): If iterating over the allocation sites list becomes a | |
| 574 // performance issue, use a cached heap data structure instead (similar to the | |
| 575 // allocation sites scratchpad). | |
| 576 Object* list_element = allocation_sites_list(); | |
| 577 while (list_element->IsAllocationSite()) { | |
| 578 AllocationSite* site = AllocationSite::cast(list_element); | |
| 579 if (site->deopt_dependent_code()) { | |
| 580 site->dependent_code()->MarkCodeForDeoptimization( | |
| 581 isolate_, | |
| 582 DependentCode::kAllocationSiteTenuringChangedGroup); | |
| 583 site->set_deopt_dependent_code(false); | |
| 584 } | |
| 585 list_element = site->weak_next(); | |
| 586 } | |
| 587 Deoptimizer::DeoptimizeMarkedCode(isolate_); | |
| 588 } | |
| 589 | |
| 590 | |
| 591 void Heap::GarbageCollectionEpilogue() { | 570 void Heap::GarbageCollectionEpilogue() { |
| 592 store_buffer()->GCEpilogue(); | 571 store_buffer()->GCEpilogue(); |
| 593 | 572 |
| 594 // In release mode, we only zap the from space under heap verification. | 573 // In release mode, we only zap the from space under heap verification. |
| 595 if (Heap::ShouldZapGarbage()) { | 574 if (Heap::ShouldZapGarbage()) { |
| 596 ZapFromSpace(); | 575 ZapFromSpace(); |
| 597 } | 576 } |
| 598 | 577 |
| 599 // Process pretenuring feedback and update allocation sites. | |
| 600 ProcessPretenuringFeedback(); | |
| 601 | |
| 602 #ifdef VERIFY_HEAP | 578 #ifdef VERIFY_HEAP |
| 603 if (FLAG_verify_heap) { | 579 if (FLAG_verify_heap) { |
| 604 Verify(); | 580 Verify(); |
| 605 } | 581 } |
| 606 #endif | 582 #endif |
| 607 | 583 |
| 608 AllowHeapAllocation for_the_rest_of_the_epilogue; | 584 AllowHeapAllocation for_the_rest_of_the_epilogue; |
| 609 | 585 |
| 610 #ifdef DEBUG | 586 #ifdef DEBUG |
| 611 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); | 587 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); |
| (...skipping 154 matching lines...) |
| 766 break; | 742 break; |
| 767 } | 743 } |
| 768 } | 744 } |
| 769 mark_compact_collector()->SetFlags(kNoGCFlags); | 745 mark_compact_collector()->SetFlags(kNoGCFlags); |
| 770 new_space_.Shrink(); | 746 new_space_.Shrink(); |
| 771 UncommitFromSpace(); | 747 UncommitFromSpace(); |
| 772 incremental_marking()->UncommitMarkingDeque(); | 748 incremental_marking()->UncommitMarkingDeque(); |
| 773 } | 749 } |
| 774 | 750 |
| 775 | 751 |
| 776 void Heap::EnsureFillerObjectAtTop() { | |
| 777 // There may be an allocation memento behind every object in new space. | |
| 778 // If we evacuate a new space that is not full or if we are on the last page of | |
| 779 // the new space, then there may be uninitialized memory behind the top | |
| 780 // pointer of the new space page. We store a filler object there to | |
| 781 // identify the unused space. | |
| 782 Address from_top = new_space_.top(); | |
| 783 Address from_limit = new_space_.limit(); | |
| 784 if (from_top < from_limit) { | |
| 785 int remaining_in_page = static_cast<int>(from_limit - from_top); | |
| 786 CreateFillerObjectAt(from_top, remaining_in_page); | |
| 787 } | |
| 788 } | |
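The helper removed above exists because pretenuring feedback inspects the word directly behind new-space objects; any uninitialized tail between the page's top and limit must therefore be stamped with a filler first. A compilable sketch of the same bounds arithmetic, with `Address` and `CreateFiller` as stand-ins for V8's types:

```cpp
#include <cassert>
#include <cstdint>

using Address = uint8_t*;  // stand-in for v8::internal::Address

// Placeholder for Heap::CreateFillerObjectAt: writes a dummy object header so
// a scanner walking past `at` sees a valid object instead of garbage.
void CreateFiller(Address at, int size_in_bytes) {
  assert(size_in_bytes > 0);
  (void)at;
}

void EnsureFillerObjectAtTop(Address from_top, Address from_limit) {
  if (from_top < from_limit) {
    int remaining_in_page = static_cast<int>(from_limit - from_top);
    CreateFiller(from_top, remaining_in_page);
  }
}

int main() {
  uint8_t page[64];
  EnsureFillerObjectAtTop(page + 16, page + 64);  // the 48-byte tail gets a filler
}
```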
| 789 | |
| 790 | |
| 791 bool Heap::CollectGarbage(GarbageCollector collector, | 752 bool Heap::CollectGarbage(GarbageCollector collector, |
| 792 const char* gc_reason, | 753 const char* gc_reason, |
| 793 const char* collector_reason, | 754 const char* collector_reason, |
| 794 const v8::GCCallbackFlags gc_callback_flags) { | 755 const v8::GCCallbackFlags gc_callback_flags) { |
| 795 // The VM is in the GC state until exiting this function. | 756 // The VM is in the GC state until exiting this function. |
| 796 VMState<GC> state(isolate_); | 757 VMState<GC> state(isolate_); |
| 797 | 758 |
| 798 #ifdef DEBUG | 759 #ifdef DEBUG |
| 799 // Reset the allocation timeout to the GC interval, but make sure to | 760 // Reset the allocation timeout to the GC interval, but make sure to |
| 800 // allow at least a few allocations after a collection. The reason | 761 // allow at least a few allocations after a collection. The reason |
| 801 // for this is that we have a lot of allocation sequences and we | 762 // for this is that we have a lot of allocation sequences and we |
| 802 // assume that a garbage collection will allow the subsequent | 763 // assume that a garbage collection will allow the subsequent |
| 803 // allocation attempts to go through. | 764 // allocation attempts to go through. |
| 804 allocation_timeout_ = Max(6, FLAG_gc_interval); | 765 allocation_timeout_ = Max(6, FLAG_gc_interval); |
| 805 #endif | 766 #endif |
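The `#ifdef DEBUG` block above resets `allocation_timeout_`, the countdown behind `--gc-interval`; the `Max(6, ...)` floor guarantees a handful of allocations succeed before the next forced collection. A simplified, hypothetical model of how such a countdown forces stress-test GCs (not the actual V8 allocation path):

```cpp
#include <algorithm>
#include <cstdio>

struct HeapModel {
  int gc_interval = 10;         // stand-in for FLAG_gc_interval
  int allocation_timeout = 10;

  void CollectGarbage() {
    std::puts("forced GC for stress testing");
    // Reset, but keep a small floor so follow-up allocations can go through.
    allocation_timeout = std::max(6, gc_interval);
  }

  void* AllocateRaw(int size) {
    if (--allocation_timeout <= 0) CollectGarbage();
    return ::operator new(size);
  }
};

int main() {
  HeapModel heap;
  for (int i = 0; i < 25; i++) ::operator delete(heap.AllocateRaw(16));
}
```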
| 806 | 767 |
| 807 EnsureFillerObjectAtTop(); | 768 // There may be an allocation memento behind every object in new space. |
| 769 // If we evacuate a new space that is not full or if we are on the last page of |
| 770 // the new space, then there may be uninitialized memory behind the top |
| 771 // pointer of the new space page. We store a filler object there to |
| 772 // identify the unused space. |
| 773 Address from_top = new_space_.top(); |
| 774 Address from_limit = new_space_.limit(); |
| 775 if (from_top < from_limit) { |
| 776 int remaining_in_page = static_cast<int>(from_limit - from_top); |
| 777 CreateFillerObjectAt(from_top, remaining_in_page); |
| 778 } |
| 808 | 779 |
| 809 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) { | 780 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) { |
| 810 if (FLAG_trace_incremental_marking) { | 781 if (FLAG_trace_incremental_marking) { |
| 811 PrintF("[IncrementalMarking] Scavenge during marking.\n"); | 782 PrintF("[IncrementalMarking] Scavenge during marking.\n"); |
| 812 } | 783 } |
| 813 } | 784 } |
| 814 | 785 |
| 815 if (collector == MARK_COMPACTOR && | 786 if (collector == MARK_COMPACTOR && |
| 816 !mark_compact_collector()->abort_incremental_marking() && | 787 !mark_compact_collector()->abort_incremental_marking() && |
| 817 !incremental_marking()->IsStopped() && | 788 !incremental_marking()->IsStopped() && |
| (...skipping 53 matching lines...) |
| 871 if (isolate()->concurrent_recompilation_enabled()) { | 842 if (isolate()->concurrent_recompilation_enabled()) { |
| 872 // Flush the queued recompilation tasks. | 843 // Flush the queued recompilation tasks. |
| 873 isolate()->optimizing_compiler_thread()->Flush(); | 844 isolate()->optimizing_compiler_thread()->Flush(); |
| 874 } | 845 } |
| 875 flush_monomorphic_ics_ = true; | 846 flush_monomorphic_ics_ = true; |
| 876 AgeInlineCaches(); | 847 AgeInlineCaches(); |
| 877 return ++contexts_disposed_; | 848 return ++contexts_disposed_; |
| 878 } | 849 } |
| 879 | 850 |
| 880 | 851 |
| 852 void Heap::PerformScavenge() { |
| 853 GCTracer tracer(this, NULL, NULL); |
| 854 if (incremental_marking()->IsStopped()) { |
| 855 PerformGarbageCollection(SCAVENGER, &tracer); |
| 856 } else { |
| 857 PerformGarbageCollection(MARK_COMPACTOR, &tracer); |
| 858 } |
| 859 } |
| 860 |
| 861 |
| 881 void Heap::MoveElements(FixedArray* array, | 862 void Heap::MoveElements(FixedArray* array, |
| 882 int dst_index, | 863 int dst_index, |
| 883 int src_index, | 864 int src_index, |
| 884 int len) { | 865 int len) { |
| 885 if (len == 0) return; | 866 if (len == 0) return; |
| 886 | 867 |
| 887 ASSERT(array->map() != fixed_cow_array_map()); | 868 ASSERT(array->map() != fixed_cow_array_map()); |
| 888 Object** dst_objects = array->data_start() + dst_index; | 869 Object** dst_objects = array->data_start() + dst_index; |
| 889 OS::MemMove(dst_objects, | 870 OS::MemMove(dst_objects, |
| 890 array->data_start() + src_index, | 871 array->data_start() + src_index, |
| (...skipping 739 matching lines...) |
| 1630 // Set age mark. | 1611 // Set age mark. |
| 1631 new_space_.set_age_mark(new_space_.top()); | 1612 new_space_.set_age_mark(new_space_.top()); |
| 1632 | 1613 |
| 1633 new_space_.LowerInlineAllocationLimit( | 1614 new_space_.LowerInlineAllocationLimit( |
| 1634 new_space_.inline_allocation_limit_step()); | 1615 new_space_.inline_allocation_limit_step()); |
| 1635 | 1616 |
| 1636 // Update how much has survived scavenge. | 1617 // Update how much has survived scavenge. |
| 1637 IncrementYoungSurvivorsCounter(static_cast<int>( | 1618 IncrementYoungSurvivorsCounter(static_cast<int>( |
| 1638 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size())); | 1619 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size())); |
| 1639 | 1620 |
| 1621 ProcessPretenuringFeedback(); |
| 1622 |
| 1640 LOG(isolate_, ResourceEvent("scavenge", "end")); | 1623 LOG(isolate_, ResourceEvent("scavenge", "end")); |
| 1641 | 1624 |
| 1642 gc_state_ = NOT_IN_GC; | 1625 gc_state_ = NOT_IN_GC; |
| 1643 | 1626 |
| 1644 scavenges_since_last_idle_round_++; | 1627 scavenges_since_last_idle_round_++; |
| 1645 } | 1628 } |
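Reading the survivor arithmetic a few lines up: the bytes that survived the scavenge are whatever was promoted to old space since the watermark, plus everything still sitting in new space afterwards. As an illustrative example (numbers invented): if promoted space held 10 MB at the watermark and holds 12 MB now, and `new_space_.Size()` is 1 MB, the young-survivors counter grows by (12 - 10) + 1 = 3 MB.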
| 1646 | 1629 |
| 1647 | 1630 |
| 1648 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, | 1631 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
| 1649 Object** p) { | 1632 Object** p) { |
| (...skipping 359 matching lines...) |
| 2009 | 1992 |
| 2010 | 1993 |
| 2011 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { | 1994 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { |
| 2012 DisallowHeapAllocation no_allocation_scope; | 1995 DisallowHeapAllocation no_allocation_scope; |
| 2013 Object* cur = allocation_sites_list(); | 1996 Object* cur = allocation_sites_list(); |
| 2014 bool marked = false; | 1997 bool marked = false; |
| 2015 while (cur->IsAllocationSite()) { | 1998 while (cur->IsAllocationSite()) { |
| 2016 AllocationSite* casted = AllocationSite::cast(cur); | 1999 AllocationSite* casted = AllocationSite::cast(cur); |
| 2017 if (casted->GetPretenureMode() == flag) { | 2000 if (casted->GetPretenureMode() == flag) { |
| 2018 casted->ResetPretenureDecision(); | 2001 casted->ResetPretenureDecision(); |
| 2019 casted->set_deopt_dependent_code(true); | 2002 bool got_marked = casted->dependent_code()->MarkCodeForDeoptimization( |
| 2020 marked = true; | 2003 isolate_, |
| 2004 DependentCode::kAllocationSiteTenuringChangedGroup); |
| 2005 if (got_marked) marked = true; |
| 2021 } | 2006 } |
| 2022 cur = casted->weak_next(); | 2007 cur = casted->weak_next(); |
| 2023 } | 2008 } |
| 2024 if (marked) isolate_->stack_guard()->DeoptMarkedAllocationSites(); | 2009 if (marked) isolate_->stack_guard()->DeoptMarkedCode(); |
| 2025 } | 2010 } |
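The right-hand side of this hunk reverts to marking dependent code immediately while walking the site list, arming the stack guard only if anything was actually marked; the left-hand side had used the deferred `set_deopt_dependent_code` flag instead. A compilable sketch of the immediate variant, again with invented stand-in types:

```cpp
// Hypothetical stand-ins, not the real V8 classes.
struct TenuredSiteStub {
  bool mode_matches = false;        // GetPretenureMode() == flag in the real code
  bool has_dependent_code = false;
  TenuredSiteStub* weak_next = nullptr;
};

// Returns whether the caller should arm the stack guard for a deopt pass.
bool ResetAllAllocationSites(TenuredSiteStub* list) {
  bool marked = false;
  for (TenuredSiteStub* cur = list; cur != nullptr; cur = cur->weak_next) {
    if (cur->mode_matches && cur->has_dependent_code) {
      // Real code: dependent_code()->MarkCodeForDeoptimization(...)
      marked = true;                // at least one code object was marked
    }
  }
  return marked;
}
```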
| 2026 | 2011 |
| 2027 | 2012 |
| 2028 void Heap::EvaluateOldSpaceLocalPretenuring( | 2013 void Heap::EvaluateOldSpaceLocalPretenuring( |
| 2029 uint64_t size_of_objects_before_gc) { | 2014 uint64_t size_of_objects_before_gc) { |
| 2030 uint64_t size_of_objects_after_gc = SizeOfObjects(); | 2015 uint64_t size_of_objects_after_gc = SizeOfObjects(); |
| 2031 double old_generation_survival_rate = | 2016 double old_generation_survival_rate = |
| 2032 (static_cast<double>(size_of_objects_after_gc) * 100) / | 2017 (static_cast<double>(size_of_objects_after_gc) * 100) / |
| 2033 static_cast<double>(size_of_objects_before_gc); | 2018 static_cast<double>(size_of_objects_before_gc); |
| 2034 | 2019 |
| (...skipping 642 matching lines...) |
| 2677 return accessors; | 2662 return accessors; |
| 2678 } | 2663 } |
| 2679 | 2664 |
| 2680 | 2665 |
| 2681 MaybeObject* Heap::AllocateTypeFeedbackInfo() { | 2666 MaybeObject* Heap::AllocateTypeFeedbackInfo() { |
| 2682 TypeFeedbackInfo* info; | 2667 TypeFeedbackInfo* info; |
| 2683 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); | 2668 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); |
| 2684 if (!maybe_info->To(&info)) return maybe_info; | 2669 if (!maybe_info->To(&info)) return maybe_info; |
| 2685 } | 2670 } |
| 2686 info->initialize_storage(); | 2671 info->initialize_storage(); |
| 2687 info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER); | 2672 info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()), |
| 2673 SKIP_WRITE_BARRIER); |
| 2688 return info; | 2674 return info; |
| 2689 } | 2675 } |
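These allocator hunks all use the same `MaybeObject` idiom: the inner braces scope a `MaybeObject*` that either converts to the target type via `To()` or is returned verbatim so the caller can retry after a GC. A self-contained sketch of the idiom with invented stand-in types:

```cpp
#include <cstdio>

// Stand-in for V8's MaybeObject: either a value or a failure to propagate.
struct MaybeObjectStub {
  void* value;
  template <typename T>
  bool To(T** out) {
    if (value == nullptr) return false;
    *out = static_cast<T*>(value);
    return true;
  }
};

static int storage;
MaybeObjectStub AllocateStructStub(bool fail) {
  return MaybeObjectStub{fail ? nullptr : &storage};
}

MaybeObjectStub AllocateInfo(bool fail) {
  int* info;
  MaybeObjectStub maybe_info = AllocateStructStub(fail);
  if (!maybe_info.To(&info)) return maybe_info;  // propagate the failure unchanged
  *info = 42;  // initialize fields only after the allocation succeeded
  return maybe_info;
}

int main() {
  if (AllocateInfo(true).value == nullptr)
    std::puts("allocation failed; caller retries after GC");
}
```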
| 2690 | 2676 |
| 2691 | 2677 |
| 2692 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { | 2678 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { |
| 2693 AliasedArgumentsEntry* entry; | 2679 AliasedArgumentsEntry* entry; |
| 2694 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); | 2680 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); |
| 2695 if (!maybe_entry->To(&entry)) return maybe_entry; | 2681 if (!maybe_entry->To(&entry)) return maybe_entry; |
| 2696 } | 2682 } |
| 2697 entry->set_aliased_context_slot(aliased_context_slot); | 2683 entry->set_aliased_context_slot(aliased_context_slot); |
| (...skipping 361 matching lines...) |
| 3059 set_js_construct_entry_code(*stub.GetCode(isolate())); | 3045 set_js_construct_entry_code(*stub.GetCode(isolate())); |
| 3060 } | 3046 } |
| 3061 | 3047 |
| 3062 | 3048 |
| 3063 void Heap::CreateFixedStubs() { | 3049 void Heap::CreateFixedStubs() { |
| 3064 // Here we create roots for fixed stubs. They are needed at GC | 3050 // Here we create roots for fixed stubs. They are needed at GC |
| 3065 // for cooking and uncooking (check out frames.cc). | 3051 // for cooking and uncooking (check out frames.cc). |
| 3066 // This eliminates the need for doing dictionary lookup in the | 3052 // This eliminates the need for doing dictionary lookup in the |
| 3067 // stub cache for these stubs. | 3053 // stub cache for these stubs. |
| 3068 HandleScope scope(isolate()); | 3054 HandleScope scope(isolate()); |
| 3069 | |
| 3070 // Create stubs that should be there, so we don't unexpectedly have to | |
| 3071 // create them if we need them during the creation of another stub. | |
| 3072 // Stub creation mixes raw pointers and handles in an unsafe manner so | |
| 3073 // we cannot create stubs while we are creating stubs. | |
| 3074 CodeStub::GenerateStubsAheadOfTime(isolate()); | |
| 3075 | |
| 3076 // MacroAssembler::Abort calls (usually enabled with --debug-code) depend on | |
| 3077 // CEntryStub, so we need to call GenerateStubsAheadOfTime before JSEntryStub | |
| 3078 // is created. | |
| 3079 | |
| 3080 // gcc-4.4 has a problem generating correct code for the following snippet: | 3055 // gcc-4.4 has a problem generating correct code for the following snippet: |
| 3081 // { JSEntryStub stub; | 3056 // { JSEntryStub stub; |
| 3082 // js_entry_code_ = *stub.GetCode(); | 3057 // js_entry_code_ = *stub.GetCode(); |
| 3083 // } | 3058 // } |
| 3084 // { JSConstructEntryStub stub; | 3059 // { JSConstructEntryStub stub; |
| 3085 // js_construct_entry_code_ = *stub.GetCode(); | 3060 // js_construct_entry_code_ = *stub.GetCode(); |
| 3086 // } | 3061 // } |
| 3087 // To work around the problem, make separate functions without inlining. | 3062 // To work around the problem, make separate functions without inlining. |
| 3088 Heap::CreateJSEntryStub(); | 3063 Heap::CreateJSEntryStub(); |
| 3089 Heap::CreateJSConstructEntryStub(); | 3064 Heap::CreateJSConstructEntryStub(); |
| 3065 |
| 3066 // Create stubs that should be there, so we don't unexpectedly have to |
| 3067 // create them if we need them during the creation of another stub. |
| 3068 // Stub creation mixes raw pointers and handles in an unsafe manner so |
| 3069 // we cannot create stubs while we are creating stubs. |
| 3070 CodeStub::GenerateStubsAheadOfTime(isolate()); |
| 3090 } | 3071 } |
| 3091 | 3072 |
| 3092 | 3073 |
| 3093 bool Heap::CreateInitialObjects() { | 3074 bool Heap::CreateInitialObjects() { |
| 3094 Object* obj; | 3075 Object* obj; |
| 3095 | 3076 |
| 3096 // The -0 value must be set before NumberFromDouble works. | 3077 // The -0 value must be set before NumberFromDouble works. |
| 3097 { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED); | 3078 { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED); |
| 3098 if (!maybe_obj->ToObject(&obj)) return false; | 3079 if (!maybe_obj->ToObject(&obj)) return false; |
| 3099 } | 3080 } |
| (...skipping 181 matching lines...) |
| 3281 | 3262 |
| 3282 // Allocate object to hold object observation state. | 3263 // Allocate object to hold object observation state. |
| 3283 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); | 3264 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); |
| 3284 if (!maybe_obj->ToObject(&obj)) return false; | 3265 if (!maybe_obj->ToObject(&obj)) return false; |
| 3285 } | 3266 } |
| 3286 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); | 3267 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); |
| 3287 if (!maybe_obj->ToObject(&obj)) return false; | 3268 if (!maybe_obj->ToObject(&obj)) return false; |
| 3288 } | 3269 } |
| 3289 set_observation_state(JSObject::cast(obj)); | 3270 set_observation_state(JSObject::cast(obj)); |
| 3290 | 3271 |
| 3291 // Allocate object to hold object microtask state. | |
| 3292 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); | |
| 3293 if (!maybe_obj->ToObject(&obj)) return false; | |
| 3294 } | |
| 3295 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); | |
| 3296 if (!maybe_obj->ToObject(&obj)) return false; | |
| 3297 } | |
| 3298 set_microtask_state(JSObject::cast(obj)); | |
| 3299 | |
| 3300 { MaybeObject* maybe_obj = AllocateSymbol(); | 3272 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3301 if (!maybe_obj->ToObject(&obj)) return false; | 3273 if (!maybe_obj->ToObject(&obj)) return false; |
| 3302 } | 3274 } |
| 3303 Symbol::cast(obj)->set_is_private(true); | 3275 Symbol::cast(obj)->set_is_private(true); |
| 3304 set_frozen_symbol(Symbol::cast(obj)); | 3276 set_frozen_symbol(Symbol::cast(obj)); |
| 3305 | 3277 |
| 3306 { MaybeObject* maybe_obj = AllocateSymbol(); | 3278 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3307 if (!maybe_obj->ToObject(&obj)) return false; | 3279 if (!maybe_obj->ToObject(&obj)) return false; |
| 3308 } | 3280 } |
| 3309 Symbol::cast(obj)->set_is_private(true); | 3281 Symbol::cast(obj)->set_is_private(true); |
| (...skipping 333 matching lines...) |
| 3643 ASSERT(allocation_sites_scratchpad()->length() == | 3615 ASSERT(allocation_sites_scratchpad()->length() == |
| 3644 kAllocationSiteScratchpadSize); | 3616 kAllocationSiteScratchpadSize); |
| 3645 for (int i = 0; i < kAllocationSiteScratchpadSize; i++) { | 3617 for (int i = 0; i < kAllocationSiteScratchpadSize; i++) { |
| 3646 allocation_sites_scratchpad()->set_undefined(i); | 3618 allocation_sites_scratchpad()->set_undefined(i); |
| 3647 } | 3619 } |
| 3648 } | 3620 } |
| 3649 | 3621 |
| 3650 | 3622 |
| 3651 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) { | 3623 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) { |
| 3652 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { | 3624 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { |
| 3653 // We cannot use the normal write-barrier because slots need to be | |
| 3654 // recorded with non-incremental marking as well. We have to explicitly | |
| 3655 // record the slot to take evacuation candidates into account. | |
| 3656 allocation_sites_scratchpad()->set( | 3625 allocation_sites_scratchpad()->set( |
| 3657 allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER); | 3626 allocation_sites_scratchpad_length_, site); |
| 3658 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt( | |
| 3659 allocation_sites_scratchpad_length_); | |
| 3660 mark_compact_collector()->RecordSlot(slot, slot, *slot); | |
| 3661 allocation_sites_scratchpad_length_++; | 3627 allocation_sites_scratchpad_length_++; |
| 3662 } | 3628 } |
| 3663 } | 3629 } |
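Two things happen in this hunk: the append stays bounded by `kAllocationSiteScratchpadSize`, and the left-hand version bypasses the normal write barrier in favour of explicitly recording the slot with the mark-compact collector (its comment explains that evacuation candidates must be tracked even without incremental marking). A sketch of just the bounded-append behaviour, with invented types and a capacity chosen arbitrarily here:

```cpp
#include <array>
#include <cstddef>

// Invented fixed-capacity scratchpad; 256 illustrates the idea of
// kAllocationSiteScratchpadSize, not necessarily its actual value.
template <typename T, std::size_t N = 256>
class ScratchpadSketch {
 public:
  void Add(T* site) {
    if (length_ == N) return;  // full: further sites are silently dropped
    slots_[length_++] = site;  // the real code may also record this slot for GC
  }
  std::size_t length() const { return length_; }

 private:
  std::array<T*, N> slots_{};
  std::size_t length_ = 0;
};
```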
| 3664 | 3630 |
| 3665 | 3631 |
| 3666 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { | 3632 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { |
| 3667 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); | 3633 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); |
| 3668 } | 3634 } |
| 3669 | 3635 |
| 3670 | 3636 |
| (...skipping 126 matching lines...) |
| 3797 | 3763 |
| 3798 return share; | 3764 return share; |
| 3799 } | 3765 } |
| 3800 | 3766 |
| 3801 | 3767 |
| 3802 MaybeObject* Heap::AllocateJSMessageObject(String* type, | 3768 MaybeObject* Heap::AllocateJSMessageObject(String* type, |
| 3803 JSArray* arguments, | 3769 JSArray* arguments, |
| 3804 int start_position, | 3770 int start_position, |
| 3805 int end_position, | 3771 int end_position, |
| 3806 Object* script, | 3772 Object* script, |
| 3773 Object* stack_trace, |
| 3807 Object* stack_frames) { | 3774 Object* stack_frames) { |
| 3808 Object* result; | 3775 Object* result; |
| 3809 { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE); | 3776 { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE); |
| 3810 if (!maybe_result->ToObject(&result)) return maybe_result; | 3777 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3811 } | 3778 } |
| 3812 JSMessageObject* message = JSMessageObject::cast(result); | 3779 JSMessageObject* message = JSMessageObject::cast(result); |
| 3813 message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); | 3780 message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 3814 message->initialize_elements(); | 3781 message->initialize_elements(); |
| 3815 message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); | 3782 message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 3816 message->set_type(type); | 3783 message->set_type(type); |
| 3817 message->set_arguments(arguments); | 3784 message->set_arguments(arguments); |
| 3818 message->set_start_position(start_position); | 3785 message->set_start_position(start_position); |
| 3819 message->set_end_position(end_position); | 3786 message->set_end_position(end_position); |
| 3820 message->set_script(script); | 3787 message->set_script(script); |
| 3788 message->set_stack_trace(stack_trace); |
| 3821 message->set_stack_frames(stack_frames); | 3789 message->set_stack_frames(stack_frames); |
| 3822 return result; | 3790 return result; |
| 3823 } | 3791 } |
| 3824 | 3792 |
| 3825 | 3793 |
| 3826 MaybeObject* Heap::AllocateExternalStringFromAscii( | 3794 MaybeObject* Heap::AllocateExternalStringFromAscii( |
| 3827 const ExternalAsciiString::Resource* resource) { | 3795 const ExternalAsciiString::Resource* resource) { |
| 3828 size_t length = resource->length(); | 3796 size_t length = resource->length(); |
| 3829 if (length > static_cast<size_t>(String::kMaxLength)) { | 3797 if (length > static_cast<size_t>(String::kMaxLength)) { |
| 3830 isolate()->context()->mark_out_of_memory(); | 3798 isolate()->context()->mark_out_of_memory(); |
| (...skipping 2017 matching lines...) |
| 5848 | 5816 |
| 5849 #ifdef VERIFY_HEAP | 5817 #ifdef VERIFY_HEAP |
| 5850 void Heap::Verify() { | 5818 void Heap::Verify() { |
| 5851 CHECK(HasBeenSetUp()); | 5819 CHECK(HasBeenSetUp()); |
| 5852 | 5820 |
| 5853 store_buffer()->Verify(); | 5821 store_buffer()->Verify(); |
| 5854 | 5822 |
| 5855 VerifyPointersVisitor visitor; | 5823 VerifyPointersVisitor visitor; |
| 5856 IterateRoots(&visitor, VISIT_ONLY_STRONG); | 5824 IterateRoots(&visitor, VISIT_ONLY_STRONG); |
| 5857 | 5825 |
| 5858 VerifySmisVisitor smis_visitor; | |
| 5859 IterateSmiRoots(&smis_visitor); | |
| 5860 | |
| 5861 new_space_.Verify(); | 5826 new_space_.Verify(); |
| 5862 | 5827 |
| 5863 old_pointer_space_->Verify(&visitor); | 5828 old_pointer_space_->Verify(&visitor); |
| 5864 map_space_->Verify(&visitor); | 5829 map_space_->Verify(&visitor); |
| 5865 | 5830 |
| 5866 VerifyPointersVisitor no_dirty_regions_visitor; | 5831 VerifyPointersVisitor no_dirty_regions_visitor; |
| 5867 old_data_space_->Verify(&no_dirty_regions_visitor); | 5832 old_data_space_->Verify(&no_dirty_regions_visitor); |
| 5868 code_space_->Verify(&no_dirty_regions_visitor); | 5833 code_space_->Verify(&no_dirty_regions_visitor); |
| 5869 cell_space_->Verify(&no_dirty_regions_visitor); | 5834 cell_space_->Verify(&no_dirty_regions_visitor); |
| 5870 property_cell_space_->Verify(&no_dirty_regions_visitor); | 5835 property_cell_space_->Verify(&no_dirty_regions_visitor); |
| (...skipping 277 matching lines...) |
| 6148 v->Synchronize(VisitorSynchronization::kStringTable); | 6113 v->Synchronize(VisitorSynchronization::kStringTable); |
| 6149 if (mode != VISIT_ALL_IN_SCAVENGE && | 6114 if (mode != VISIT_ALL_IN_SCAVENGE && |
| 6150 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { | 6115 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
| 6151 // Scavenge collections have special processing for this. | 6116 // Scavenge collections have special processing for this. |
| 6152 external_string_table_.Iterate(v); | 6117 external_string_table_.Iterate(v); |
| 6153 } | 6118 } |
| 6154 v->Synchronize(VisitorSynchronization::kExternalStringsTable); | 6119 v->Synchronize(VisitorSynchronization::kExternalStringsTable); |
| 6155 } | 6120 } |
| 6156 | 6121 |
| 6157 | 6122 |
| 6158 void Heap::IterateSmiRoots(ObjectVisitor* v) { | |
| 6159 v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]); | |
| 6160 v->Synchronize(VisitorSynchronization::kSmiRootList); | |
| 6161 } | |
| 6162 | |
| 6163 | |
| 6164 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { | 6123 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { |
| 6165 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); | 6124 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); |
| 6166 v->Synchronize(VisitorSynchronization::kStrongRootList); | 6125 v->Synchronize(VisitorSynchronization::kStrongRootList); |
| 6167 | 6126 |
| 6168 v->VisitPointer(BitCast<Object**>(&hidden_string_)); | 6127 v->VisitPointer(BitCast<Object**>(&hidden_string_)); |
| 6169 v->Synchronize(VisitorSynchronization::kInternalizedString); | 6128 v->Synchronize(VisitorSynchronization::kInternalizedString); |
| 6170 | 6129 |
| 6171 isolate_->bootstrapper()->Iterate(v); | 6130 isolate_->bootstrapper()->Iterate(v); |
| 6172 v->Synchronize(VisitorSynchronization::kBootstrapper); | 6131 v->Synchronize(VisitorSynchronization::kBootstrapper); |
| 6173 isolate_->Iterate(v); | 6132 isolate_->Iterate(v); |
| (...skipping 202 matching lines...) |
| 6376 + old_data_space_->SizeOfObjects() | 6335 + old_data_space_->SizeOfObjects() |
| 6377 + code_space_->SizeOfObjects() | 6336 + code_space_->SizeOfObjects() |
| 6378 + map_space_->SizeOfObjects() | 6337 + map_space_->SizeOfObjects() |
| 6379 + cell_space_->SizeOfObjects() | 6338 + cell_space_->SizeOfObjects() |
| 6380 + property_cell_space_->SizeOfObjects() | 6339 + property_cell_space_->SizeOfObjects() |
| 6381 + lo_space_->SizeOfObjects(); | 6340 + lo_space_->SizeOfObjects(); |
| 6382 } | 6341 } |
| 6383 | 6342 |
| 6384 | 6343 |
| 6385 bool Heap::AdvanceSweepers(int step_size) { | 6344 bool Heap::AdvanceSweepers(int step_size) { |
| 6386 ASSERT(!mark_compact_collector()->AreSweeperThreadsActivated()); | 6345 ASSERT(isolate()->num_sweeper_threads() == 0); |
| 6387 bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size); | 6346 bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size); |
| 6388 sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size); | 6347 sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size); |
| 6389 return sweeping_complete; | 6348 return sweeping_complete; |
| 6390 } | 6349 } |
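`AdvanceSweepers` performs a bounded amount of lazy sweeping on both old spaces per call and reports whether sweeping has finished; both asserts (old and new) guard that concurrent sweeper threads are inactive before sweeping lazily on the main thread. An illustrative model of the step-wise loop, with `LazySpaceModel` invented for the sketch:

```cpp
// Illustrative model of step-wise (lazy) sweeping; not the V8 API.
struct LazySpaceModel {
  int unswept_bytes;
  bool AdvanceSweeper(int step_size) {
    unswept_bytes = unswept_bytes > step_size ? unswept_bytes - step_size : 0;
    return unswept_bytes == 0;  // true once this space is fully swept
  }
};

// Mirrors the &= above: overall completion requires every space to finish.
bool AdvanceSweepers(LazySpaceModel& old_data, LazySpaceModel& old_pointer,
                     int step_size) {
  bool sweeping_complete = old_data.AdvanceSweeper(step_size);
  sweeping_complete &= old_pointer.AdvanceSweeper(step_size);
  return sweeping_complete;
}
```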
| 6391 | 6350 |
| 6392 | 6351 |
| 6393 int64_t Heap::PromotedExternalMemorySize() { | 6352 int64_t Heap::PromotedExternalMemorySize() { |
| 6394 if (amount_of_external_allocated_memory_ | 6353 if (amount_of_external_allocated_memory_ |
| 6395 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; | 6354 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; |
| 6396 return amount_of_external_allocated_memory_ | 6355 return amount_of_external_allocated_memory_ |
| (...skipping 1346 matching lines...) |
| 7743 static_cast<int>(object_sizes_last_time_[index])); | 7702 static_cast<int>(object_sizes_last_time_[index])); |
| 7744 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7703 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7745 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7704 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7746 | 7705 |
| 7747 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7706 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7748 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7707 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7749 ClearObjectStats(); | 7708 ClearObjectStats(); |
| 7750 } | 7709 } |
| 7751 | 7710 |
| 7752 } } // namespace v8::internal | 7711 } } // namespace v8::internal |