Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(32)

Side by Side Diff: src/heap.cc

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after
127 previous_survival_rate_trend_(Heap::STABLE), 127 previous_survival_rate_trend_(Heap::STABLE),
128 survival_rate_trend_(Heap::STABLE), 128 survival_rate_trend_(Heap::STABLE),
129 max_gc_pause_(0.0), 129 max_gc_pause_(0.0),
130 total_gc_time_ms_(0.0), 130 total_gc_time_ms_(0.0),
131 max_alive_after_gc_(0), 131 max_alive_after_gc_(0),
132 min_in_mutator_(kMaxInt), 132 min_in_mutator_(kMaxInt),
133 alive_after_last_gc_(0), 133 alive_after_last_gc_(0),
134 last_gc_end_timestamp_(0.0), 134 last_gc_end_timestamp_(0.0),
135 marking_time_(0.0), 135 marking_time_(0.0),
136 sweeping_time_(0.0), 136 sweeping_time_(0.0),
137 mark_compact_collector_(this),
137 store_buffer_(this), 138 store_buffer_(this),
138 marking_(this), 139 marking_(this),
139 incremental_marking_(this), 140 incremental_marking_(this),
140 number_idle_notifications_(0), 141 number_idle_notifications_(0),
141 last_idle_notification_gc_count_(0), 142 last_idle_notification_gc_count_(0),
142 last_idle_notification_gc_count_init_(false), 143 last_idle_notification_gc_count_init_(false),
143 mark_sweeps_since_idle_round_started_(0), 144 mark_sweeps_since_idle_round_started_(0),
144 gc_count_at_last_idle_gc_(0), 145 gc_count_at_last_idle_gc_(0),
145 scavenges_since_last_idle_round_(kIdleScavengeThreshold), 146 scavenges_since_last_idle_round_(kIdleScavengeThreshold),
146 full_codegen_bytes_generated_(0), 147 full_codegen_bytes_generated_(0),
147 crankshaft_codegen_bytes_generated_(0), 148 crankshaft_codegen_bytes_generated_(0),
148 gcs_since_last_deopt_(0), 149 gcs_since_last_deopt_(0),
149 #ifdef VERIFY_HEAP 150 #ifdef VERIFY_HEAP
150 no_weak_object_verification_scope_depth_(0), 151 no_weak_object_verification_scope_depth_(0),
151 #endif 152 #endif
152 allocation_sites_scratchpad_length(0), 153 allocation_sites_scratchpad_length_(0),
153 promotion_queue_(this), 154 promotion_queue_(this),
154 configured_(false), 155 configured_(false),
156 external_string_table_(this),
155 chunks_queued_for_free_(NULL), 157 chunks_queued_for_free_(NULL),
156 relocation_mutex_(NULL) { 158 relocation_mutex_(NULL) {
157 // Allow build-time customization of the max semispace size. Building 159 // Allow build-time customization of the max semispace size. Building
158 // V8 with snapshots and a non-default max semispace size is much 160 // V8 with snapshots and a non-default max semispace size is much
159 // easier if you can define it as part of the build environment. 161 // easier if you can define it as part of the build environment.
160 #if defined(V8_MAX_SEMISPACE_SIZE) 162 #if defined(V8_MAX_SEMISPACE_SIZE)
161 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; 163 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
162 #endif 164 #endif
163 165
164 // Ensure old_generation_size_ is a multiple of kPageSize. 166 // Ensure old_generation_size_ is a multiple of kPageSize.
165 ASSERT(MB >= Page::kPageSize); 167 ASSERT(MB >= Page::kPageSize);
166 168
167 intptr_t max_virtual = OS::MaxVirtualMemory(); 169 intptr_t max_virtual = OS::MaxVirtualMemory();
168 170
169 if (max_virtual > 0) { 171 if (max_virtual > 0) {
170 if (code_range_size_ > 0) { 172 if (code_range_size_ > 0) {
171 // Reserve no more than 1/8 of the memory for the code range. 173 // Reserve no more than 1/8 of the memory for the code range.
172 code_range_size_ = Min(code_range_size_, max_virtual >> 3); 174 code_range_size_ = Min(code_range_size_, max_virtual >> 3);
173 } 175 }
174 } 176 }
175 177
176 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); 178 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
177 native_contexts_list_ = NULL; 179 native_contexts_list_ = NULL;
178 array_buffers_list_ = Smi::FromInt(0); 180 array_buffers_list_ = Smi::FromInt(0);
179 allocation_sites_list_ = Smi::FromInt(0); 181 allocation_sites_list_ = Smi::FromInt(0);
180 mark_compact_collector_.heap_ = this;
181 external_string_table_.heap_ = this;
182 // Put a dummy entry in the remembered pages so we can find the list in the 182 // Put a dummy entry in the remembered pages so we can find the list in the
183 // minidump even if there are no real unmapped pages. 183 // minidump even if there are no real unmapped pages.
184 RememberUnmappedPage(NULL, false); 184 RememberUnmappedPage(NULL, false);
185 185
186 ClearObjectStats(true); 186 ClearObjectStats(true);
187 } 187 }
188 188
189 189
190 intptr_t Heap::Capacity() { 190 intptr_t Heap::Capacity() {
191 if (!HasBeenSetUp()) return 0; 191 if (!HasBeenSetUp()) return 0;
(...skipping 307 matching lines...) Expand 10 before | Expand all | Expand 10 after
499 PagedSpaces spaces(this); 499 PagedSpaces spaces(this);
500 for (PagedSpace* space = spaces.next(); 500 for (PagedSpace* space = spaces.next();
501 space != NULL; 501 space != NULL;
502 space = spaces.next()) { 502 space = spaces.next()) {
503 space->RepairFreeListsAfterBoot(); 503 space->RepairFreeListsAfterBoot();
504 } 504 }
505 } 505 }
506 506
507 507
508 void Heap::ProcessPretenuringFeedback() { 508 void Heap::ProcessPretenuringFeedback() {
509 if (FLAG_allocation_site_pretenuring && 509 if (FLAG_allocation_site_pretenuring) {
510 new_space_high_promotion_mode_active_) {
511 int tenure_decisions = 0; 510 int tenure_decisions = 0;
512 int dont_tenure_decisions = 0; 511 int dont_tenure_decisions = 0;
513 int allocation_mementos_found = 0; 512 int allocation_mementos_found = 0;
514 int allocation_sites = 0; 513 int allocation_sites = 0;
515 int active_allocation_sites = 0; 514 int active_allocation_sites = 0;
516 515
517 // If the scratchpad overflowed, we have to iterate over the allocation 516 // If the scratchpad overflowed, we have to iterate over the allocation
518 // sites list. 517 // sites list.
519 bool use_scratchpad = 518 bool use_scratchpad =
520 allocation_sites_scratchpad_length < kAllocationSiteScratchpadSize; 519 allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize;
521 520
522 int i = 0; 521 int i = 0;
523 Object* list_element = allocation_sites_list(); 522 Object* list_element = allocation_sites_list();
524 bool trigger_deoptimization = false; 523 bool trigger_deoptimization = false;
525 while (use_scratchpad ? 524 while (use_scratchpad ?
526 i < allocation_sites_scratchpad_length : 525 i < allocation_sites_scratchpad_length_ :
527 list_element->IsAllocationSite()) { 526 list_element->IsAllocationSite()) {
528 AllocationSite* site = use_scratchpad ? 527 AllocationSite* site = use_scratchpad ?
529 allocation_sites_scratchpad[i] : AllocationSite::cast(list_element); 528 AllocationSite::cast(allocation_sites_scratchpad()->get(i)) :
529 AllocationSite::cast(list_element);
530 allocation_mementos_found += site->memento_found_count(); 530 allocation_mementos_found += site->memento_found_count();
531 if (site->memento_found_count() > 0) { 531 if (site->memento_found_count() > 0) {
532 active_allocation_sites++; 532 active_allocation_sites++;
533 } 533 }
534 if (site->DigestPretenuringFeedback()) trigger_deoptimization = true; 534 if (site->DigestPretenuringFeedback()) trigger_deoptimization = true;
535 if (site->GetPretenureMode() == TENURED) { 535 if (site->GetPretenureMode() == TENURED) {
536 tenure_decisions++; 536 tenure_decisions++;
537 } else { 537 } else {
538 dont_tenure_decisions++; 538 dont_tenure_decisions++;
539 } 539 }
540 allocation_sites++; 540 allocation_sites++;
541 if (use_scratchpad) { 541 if (use_scratchpad) {
542 i++; 542 i++;
543 } else { 543 } else {
544 list_element = site->weak_next(); 544 list_element = site->weak_next();
545 } 545 }
546 } 546 }
547 547
548 if (trigger_deoptimization) isolate_->stack_guard()->DeoptMarkedCode(); 548 if (trigger_deoptimization) {
549 isolate_->stack_guard()->DeoptMarkedAllocationSites();
550 }
549 551
550 allocation_sites_scratchpad_length = 0; 552 FlushAllocationSitesScratchpad();
551 553
552 // TODO(mvstanton): Pretenure decisions are only made once for an allocation 554 if (FLAG_trace_pretenuring_statistics &&
553 // site. Find a sane way to decide about revisiting the decision later.
554
555 if (FLAG_trace_track_allocation_sites &&
556 (allocation_mementos_found > 0 || 555 (allocation_mementos_found > 0 ||
557 tenure_decisions > 0 || 556 tenure_decisions > 0 ||
558 dont_tenure_decisions > 0)) { 557 dont_tenure_decisions > 0)) {
559 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " 558 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, "
560 "#mementos, #tenure decisions, #donttenure decisions) " 559 "#mementos, #tenure decisions, #donttenure decisions) "
561 "(%s, %d, %d, %d, %d, %d)\n", 560 "(%s, %d, %d, %d, %d, %d)\n",
562 use_scratchpad ? "use scratchpad" : "use list", 561 use_scratchpad ? "use scratchpad" : "use list",
563 allocation_sites, 562 allocation_sites,
564 active_allocation_sites, 563 active_allocation_sites,
565 allocation_mementos_found, 564 allocation_mementos_found,
566 tenure_decisions, 565 tenure_decisions,
567 dont_tenure_decisions); 566 dont_tenure_decisions);
568 } 567 }
569 } 568 }
570 } 569 }
571 570
572 571
572 void Heap::DeoptMarkedAllocationSites() {
573 // TODO(hpayer): If iterating over the allocation sites list becomes a
574 // performance issue, use a cache heap data structure instead (similar to the
575 // allocation sites scratchpad).
576 Object* list_element = allocation_sites_list();
577 while (list_element->IsAllocationSite()) {
578 AllocationSite* site = AllocationSite::cast(list_element);
579 if (site->deopt_dependent_code()) {
580 site->dependent_code()->MarkCodeForDeoptimization(
581 isolate_,
582 DependentCode::kAllocationSiteTenuringChangedGroup);
583 site->set_deopt_dependent_code(false);
584 }
585 list_element = site->weak_next();
586 }
587 Deoptimizer::DeoptimizeMarkedCode(isolate_);
588 }
589
590
573 void Heap::GarbageCollectionEpilogue() { 591 void Heap::GarbageCollectionEpilogue() {
574 store_buffer()->GCEpilogue(); 592 store_buffer()->GCEpilogue();
575 593
576 // In release mode, we only zap the from space under heap verification. 594 // In release mode, we only zap the from space under heap verification.
577 if (Heap::ShouldZapGarbage()) { 595 if (Heap::ShouldZapGarbage()) {
578 ZapFromSpace(); 596 ZapFromSpace();
579 } 597 }
580 598
599 // Process pretenuring feedback and update allocation sites.
600 ProcessPretenuringFeedback();
601
581 #ifdef VERIFY_HEAP 602 #ifdef VERIFY_HEAP
582 if (FLAG_verify_heap) { 603 if (FLAG_verify_heap) {
583 Verify(); 604 Verify();
584 } 605 }
585 #endif 606 #endif
586 607
587 AllowHeapAllocation for_the_rest_of_the_epilogue; 608 AllowHeapAllocation for_the_rest_of_the_epilogue;
588 609
589 #ifdef DEBUG 610 #ifdef DEBUG
590 if (FLAG_print_global_handles) isolate_->global_handles()->Print(); 611 if (FLAG_print_global_handles) isolate_->global_handles()->Print();
(...skipping 142 matching lines...) Expand 10 before | Expand all | Expand 10 after
733 // The optimizing compiler may be unnecessarily holding on to memory. 754 // The optimizing compiler may be unnecessarily holding on to memory.
734 DisallowHeapAllocation no_recursive_gc; 755 DisallowHeapAllocation no_recursive_gc;
735 isolate()->optimizing_compiler_thread()->Flush(); 756 isolate()->optimizing_compiler_thread()->Flush();
736 } 757 }
737 mark_compact_collector()->SetFlags(kMakeHeapIterableMask | 758 mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
738 kReduceMemoryFootprintMask); 759 kReduceMemoryFootprintMask);
739 isolate_->compilation_cache()->Clear(); 760 isolate_->compilation_cache()->Clear();
740 const int kMaxNumberOfAttempts = 7; 761 const int kMaxNumberOfAttempts = 7;
741 const int kMinNumberOfAttempts = 2; 762 const int kMinNumberOfAttempts = 2;
742 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { 763 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
743 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL) && 764 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) &&
744 attempt + 1 >= kMinNumberOfAttempts) { 765 attempt + 1 >= kMinNumberOfAttempts) {
745 break; 766 break;
746 } 767 }
747 } 768 }
748 mark_compact_collector()->SetFlags(kNoGCFlags); 769 mark_compact_collector()->SetFlags(kNoGCFlags);
749 new_space_.Shrink(); 770 new_space_.Shrink();
750 UncommitFromSpace(); 771 UncommitFromSpace();
751 incremental_marking()->UncommitMarkingDeque(); 772 incremental_marking()->UncommitMarkingDeque();
752 } 773 }
753 774
754 775
755 bool Heap::CollectGarbage(AllocationSpace space, 776 void Heap::EnsureFillerObjectAtTop() {
756 GarbageCollector collector, 777 // There may be an allocation memento behind every object in new space.
236› 778 // If we evacuate a new space that is not full, or if we are on the last page of
779 // the new space, then there may be uninitialized memory behind the top
780 // pointer of the new space page. We store a filler object there to
781 // identify the unused space.
782 Address from_top = new_space_.top();
783 Address from_limit = new_space_.limit();
784 if (from_top < from_limit) {
785 int remaining_in_page = static_cast<int>(from_limit - from_top);
786 CreateFillerObjectAt(from_top, remaining_in_page);
787 }
788 }
789
790
791 bool Heap::CollectGarbage(GarbageCollector collector,
757 const char* gc_reason, 792 const char* gc_reason,
758 const char* collector_reason, 793 const char* collector_reason,
759 const v8::GCCallbackFlags gc_callback_flags) { 794 const v8::GCCallbackFlags gc_callback_flags) {
760 // The VM is in the GC state until exiting this function. 795 // The VM is in the GC state until exiting this function.
761 VMState<GC> state(isolate_); 796 VMState<GC> state(isolate_);
762 797
763 #ifdef DEBUG 798 #ifdef DEBUG
764 // Reset the allocation timeout to the GC interval, but make sure to 799 // Reset the allocation timeout to the GC interval, but make sure to
765 // allow at least a few allocations after a collection. The reason 800 // allow at least a few allocations after a collection. The reason
766 // for this is that we have a lot of allocation sequences and we 801 // for this is that we have a lot of allocation sequences and we
767 // assume that a garbage collection will allow the subsequent 802 // assume that a garbage collection will allow the subsequent
768 // allocation attempts to go through. 803 // allocation attempts to go through.
769 allocation_timeout_ = Max(6, FLAG_gc_interval); 804 allocation_timeout_ = Max(6, FLAG_gc_interval);
770 #endif 805 #endif
771 806
807 EnsureFillerObjectAtTop();
808
772 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) { 809 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
773 if (FLAG_trace_incremental_marking) { 810 if (FLAG_trace_incremental_marking) {
774 PrintF("[IncrementalMarking] Scavenge during marking.\n"); 811 PrintF("[IncrementalMarking] Scavenge during marking.\n");
775 } 812 }
776 } 813 }
777 814
778 if (collector == MARK_COMPACTOR && 815 if (collector == MARK_COMPACTOR &&
779 !mark_compact_collector()->abort_incremental_marking() && 816 !mark_compact_collector()->abort_incremental_marking() &&
780 !incremental_marking()->IsStopped() && 817 !incremental_marking()->IsStopped() &&
781 !incremental_marking()->should_hurry() && 818 !incremental_marking()->should_hurry() &&
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
834 if (isolate()->concurrent_recompilation_enabled()) { 871 if (isolate()->concurrent_recompilation_enabled()) {
835 // Flush the queued recompilation tasks. 872 // Flush the queued recompilation tasks.
836 isolate()->optimizing_compiler_thread()->Flush(); 873 isolate()->optimizing_compiler_thread()->Flush();
837 } 874 }
838 flush_monomorphic_ics_ = true; 875 flush_monomorphic_ics_ = true;
839 AgeInlineCaches(); 876 AgeInlineCaches();
840 return ++contexts_disposed_; 877 return ++contexts_disposed_;
841 } 878 }
842 879
843 880
844 void Heap::PerformScavenge() {
845 GCTracer tracer(this, NULL, NULL);
846 if (incremental_marking()->IsStopped()) {
847 PerformGarbageCollection(SCAVENGER, &tracer);
848 } else {
849 PerformGarbageCollection(MARK_COMPACTOR, &tracer);
850 }
851 }
852
853
854 void Heap::MoveElements(FixedArray* array, 881 void Heap::MoveElements(FixedArray* array,
855 int dst_index, 882 int dst_index,
856 int src_index, 883 int src_index,
857 int len) { 884 int len) {
858 if (len == 0) return; 885 if (len == 0) return;
859 886
860 ASSERT(array->map() != fixed_cow_array_map()); 887 ASSERT(array->map() != fixed_cow_array_map());
861 Object** dst_objects = array->data_start() + dst_index; 888 Object** dst_objects = array->data_start() + dst_index;
862 OS::MemMove(dst_objects, 889 OS::MemMove(dst_objects,
863 array->data_start() + src_index, 890 array->data_start() + src_index,
(...skipping 242 matching lines...) Expand 10 before | Expand all | Expand 10 after
1106 // have to limit maximal capacity of the young generation. 1133 // have to limit maximal capacity of the young generation.
1107 SetNewSpaceHighPromotionModeActive(true); 1134 SetNewSpaceHighPromotionModeActive(true);
1108 if (FLAG_trace_gc) { 1135 if (FLAG_trace_gc) {
1109 PrintPID("Limited new space size due to high promotion rate: %d MB\n", 1136 PrintPID("Limited new space size due to high promotion rate: %d MB\n",
1110 new_space_.InitialCapacity() / MB); 1137 new_space_.InitialCapacity() / MB);
1111 } 1138 }
1112 // The high promotion mode is our indicator to turn on pretenuring. We have 1139 // The high promotion mode is our indicator to turn on pretenuring. We have
1113 // to deoptimize all optimized code in global pretenuring mode and all 1140 // to deoptimize all optimized code in global pretenuring mode and all
1114 // code which should be tenured in local pretenuring mode. 1141 // code which should be tenured in local pretenuring mode.
1115 if (FLAG_pretenuring) { 1142 if (FLAG_pretenuring) {
1116 if (FLAG_allocation_site_pretenuring) { 1143 if (!FLAG_allocation_site_pretenuring) {
1117 ResetAllAllocationSitesDependentCode(NOT_TENURED);
1118 } else {
1119 isolate_->stack_guard()->FullDeopt(); 1144 isolate_->stack_guard()->FullDeopt();
1120 } 1145 }
1121 } 1146 }
1122 } else if (new_space_high_promotion_mode_active_ && 1147 } else if (new_space_high_promotion_mode_active_ &&
1123 IsStableOrDecreasingSurvivalTrend() && 1148 IsStableOrDecreasingSurvivalTrend() &&
1124 IsLowSurvivalRate()) { 1149 IsLowSurvivalRate()) {
1125 // Decreasing low survival rates might indicate that the above high 1150 // Decreasing low survival rates might indicate that the above high
1126 // promotion mode is over and we should allow the young generation 1151 // promotion mode is over and we should allow the young generation
1127 // to grow again. 1152 // to grow again.
1128 SetNewSpaceHighPromotionModeActive(false); 1153 SetNewSpaceHighPromotionModeActive(false);
(...skipping 476 matching lines...) Expand 10 before | Expand all | Expand 10 after
1605 // Set age mark. 1630 // Set age mark.
1606 new_space_.set_age_mark(new_space_.top()); 1631 new_space_.set_age_mark(new_space_.top());
1607 1632
1608 new_space_.LowerInlineAllocationLimit( 1633 new_space_.LowerInlineAllocationLimit(
1609 new_space_.inline_allocation_limit_step()); 1634 new_space_.inline_allocation_limit_step());
1610 1635
1611 // Update how much has survived scavenge. 1636 // Update how much has survived scavenge.
1612 IncrementYoungSurvivorsCounter(static_cast<int>( 1637 IncrementYoungSurvivorsCounter(static_cast<int>(
1613 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size())); 1638 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
1614 1639
1615 ProcessPretenuringFeedback();
1616
1617 LOG(isolate_, ResourceEvent("scavenge", "end")); 1640 LOG(isolate_, ResourceEvent("scavenge", "end"));
1618 1641
1619 gc_state_ = NOT_IN_GC; 1642 gc_state_ = NOT_IN_GC;
1620 1643
1621 scavenges_since_last_idle_round_++; 1644 scavenges_since_last_idle_round_++;
1622 } 1645 }
1623 1646
1624 1647
1625 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, 1648 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1626 Object** p) { 1649 Object** p) {
(...skipping 352 matching lines...) Expand 10 before | Expand all | Expand 10 after
1979 bool record_slots) { 2002 bool record_slots) {
1980 Object* allocation_site_obj = 2003 Object* allocation_site_obj =
1981 VisitWeakList<AllocationSite>(this, 2004 VisitWeakList<AllocationSite>(this,
1982 allocation_sites_list(), 2005 allocation_sites_list(),
1983 retainer, record_slots); 2006 retainer, record_slots);
1984 set_allocation_sites_list(allocation_site_obj); 2007 set_allocation_sites_list(allocation_site_obj);
1985 } 2008 }
1986 2009
1987 2010
1988 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { 2011 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
1989 ASSERT(AllowCodeDependencyChange::IsAllowed());
1990 DisallowHeapAllocation no_allocation_scope; 2012 DisallowHeapAllocation no_allocation_scope;
1991 Object* cur = allocation_sites_list(); 2013 Object* cur = allocation_sites_list();
1992 bool marked = false; 2014 bool marked = false;
1993 while (cur->IsAllocationSite()) { 2015 while (cur->IsAllocationSite()) {
1994 AllocationSite* casted = AllocationSite::cast(cur); 2016 AllocationSite* casted = AllocationSite::cast(cur);
1995 if (casted->GetPretenureMode() == flag) { 2017 if (casted->GetPretenureMode() == flag) {
1996 casted->ResetPretenureDecision(); 2018 casted->ResetPretenureDecision();
1997 bool got_marked = casted->dependent_code()->MarkCodeForDeoptimization( 2019 casted->set_deopt_dependent_code(true);
1998 isolate_, 2020 marked = true;
1999 DependentCode::kAllocationSiteTenuringChangedGroup);
2000 if (got_marked) marked = true;
2001 } 2021 }
2002 cur = casted->weak_next(); 2022 cur = casted->weak_next();
2003 } 2023 }
2004 if (marked) isolate_->stack_guard()->DeoptMarkedCode(); 2024 if (marked) isolate_->stack_guard()->DeoptMarkedAllocationSites();
2005 } 2025 }
2006 2026
2007 2027
2008 void Heap::EvaluateOldSpaceLocalPretenuring( 2028 void Heap::EvaluateOldSpaceLocalPretenuring(
2009 uint64_t size_of_objects_before_gc) { 2029 uint64_t size_of_objects_before_gc) {
2010 uint64_t size_of_objects_after_gc = SizeOfObjects(); 2030 uint64_t size_of_objects_after_gc = SizeOfObjects();
2011 double old_generation_survival_rate = 2031 double old_generation_survival_rate =
2012 (static_cast<double>(size_of_objects_after_gc) * 100) / 2032 (static_cast<double>(size_of_objects_after_gc) * 100) /
2013 static_cast<double>(size_of_objects_before_gc); 2033 static_cast<double>(size_of_objects_before_gc);
2014 2034
(...skipping 261 matching lines...) Expand 10 before | Expand all | Expand 10 after
2276 } 2296 }
2277 } 2297 }
2278 } 2298 }
2279 2299
2280 2300
2281 template<ObjectContents object_contents, int alignment> 2301 template<ObjectContents object_contents, int alignment>
2282 static inline void EvacuateObject(Map* map, 2302 static inline void EvacuateObject(Map* map,
2283 HeapObject** slot, 2303 HeapObject** slot,
2284 HeapObject* object, 2304 HeapObject* object,
2285 int object_size) { 2305 int object_size) {
2286 SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); 2306 SLOW_ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
2287 SLOW_ASSERT(object->Size() == object_size); 2307 SLOW_ASSERT(object->Size() == object_size);
2288 2308
2289 int allocation_size = object_size; 2309 int allocation_size = object_size;
2290 if (alignment != kObjectAlignment) { 2310 if (alignment != kObjectAlignment) {
2291 ASSERT(alignment == kDoubleAlignment); 2311 ASSERT(alignment == kDoubleAlignment);
2292 allocation_size += kPointerSize; 2312 allocation_size += kPointerSize;
2293 } 2313 }
2294 2314
2295 Heap* heap = map->GetHeap(); 2315 Heap* heap = map->GetHeap();
2296 if (heap->ShouldBePromoted(object->address(), object_size)) { 2316 if (heap->ShouldBePromoted(object->address(), object_size)) {
(...skipping 360 matching lines...) Expand 10 before | Expand all | Expand 10 after
2657 return accessors; 2677 return accessors;
2658 } 2678 }
2659 2679
2660 2680
2661 MaybeObject* Heap::AllocateTypeFeedbackInfo() { 2681 MaybeObject* Heap::AllocateTypeFeedbackInfo() {
2662 TypeFeedbackInfo* info; 2682 TypeFeedbackInfo* info;
2663 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE); 2683 { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
2664 if (!maybe_info->To(&info)) return maybe_info; 2684 if (!maybe_info->To(&info)) return maybe_info;
2665 } 2685 }
2666 info->initialize_storage(); 2686 info->initialize_storage();
2667 info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()), 2687 info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER);
2668 SKIP_WRITE_BARRIER);
2669 return info; 2688 return info;
2670 } 2689 }
2671 2690
2672 2691
2673 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) { 2692 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) {
2674 AliasedArgumentsEntry* entry; 2693 AliasedArgumentsEntry* entry;
2675 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE); 2694 { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE);
2676 if (!maybe_entry->To(&entry)) return maybe_entry; 2695 if (!maybe_entry->To(&entry)) return maybe_entry;
2677 } 2696 }
2678 entry->set_aliased_context_slot(aliased_context_slot); 2697 entry->set_aliased_context_slot(aliased_context_slot);
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
2827 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string) 2846 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string)
2828 undetectable_string_map()->set_is_undetectable(); 2847 undetectable_string_map()->set_is_undetectable();
2829 2848
2830 ALLOCATE_VARSIZE_MAP(ASCII_STRING_TYPE, undetectable_ascii_string); 2849 ALLOCATE_VARSIZE_MAP(ASCII_STRING_TYPE, undetectable_ascii_string);
2831 undetectable_ascii_string_map()->set_is_undetectable(); 2850 undetectable_ascii_string_map()->set_is_undetectable();
2832 2851
2833 ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array) 2852 ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
2834 ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array) 2853 ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
2835 ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space) 2854 ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space)
2836 2855
2837 #define ALLOCATE_EXTERNAL_ARRAY_MAP(TYPE, type) \ 2856 #define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size) \
2838 ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \ 2857 ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \
2839 external_##type##_array) 2858 external_##type##_array)
2840 2859
2841 ALLOCATE_EXTERNAL_ARRAY_MAP(PIXEL, pixel) 2860 TYPED_ARRAYS(ALLOCATE_EXTERNAL_ARRAY_MAP)
2842 ALLOCATE_EXTERNAL_ARRAY_MAP(BYTE, byte)
2843 ALLOCATE_EXTERNAL_ARRAY_MAP(UNSIGNED_BYTE, unsigned_byte)
2844 ALLOCATE_EXTERNAL_ARRAY_MAP(SHORT, short) // NOLINT
2845 ALLOCATE_EXTERNAL_ARRAY_MAP(UNSIGNED_SHORT, unsigned_short)
2846 ALLOCATE_EXTERNAL_ARRAY_MAP(INT, int)
2847 ALLOCATE_EXTERNAL_ARRAY_MAP(UNSIGNED_INT, unsigned_int)
2848 ALLOCATE_EXTERNAL_ARRAY_MAP(FLOAT, float)
2849 ALLOCATE_EXTERNAL_ARRAY_MAP(DOUBLE, double)
2850 #undef ALLOCATE_EXTERNAL_ARRAY_MAP 2861 #undef ALLOCATE_EXTERNAL_ARRAY_MAP
2851 2862
2852 ALLOCATE_VARSIZE_MAP(FIXED_UINT8_ARRAY_TYPE, fixed_uint8_array) 2863 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \
2853 ALLOCATE_VARSIZE_MAP(FIXED_UINT8_CLAMPED_ARRAY_TYPE, 2864 ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, \
2854 fixed_uint8_clamped_array) 2865 fixed_##type##_array)
2855 ALLOCATE_VARSIZE_MAP(FIXED_INT8_ARRAY_TYPE, fixed_int8_array) 2866
2856 ALLOCATE_VARSIZE_MAP(FIXED_UINT16_ARRAY_TYPE, fixed_uint16_array) 2867 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP)
2857 ALLOCATE_VARSIZE_MAP(FIXED_INT16_ARRAY_TYPE, fixed_int16_array) 2868 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP
2858 ALLOCATE_VARSIZE_MAP(FIXED_UINT32_ARRAY_TYPE, fixed_uint32_array)
2859 ALLOCATE_VARSIZE_MAP(FIXED_INT32_ARRAY_TYPE, fixed_int32_array)
2860 ALLOCATE_VARSIZE_MAP(FIXED_FLOAT32_ARRAY_TYPE, fixed_float32_array)
2861 ALLOCATE_VARSIZE_MAP(FIXED_FLOAT64_ARRAY_TYPE, fixed_float64_array)
2862 2869
2863 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, non_strict_arguments_elements) 2870 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, non_strict_arguments_elements)
2864 2871
2865 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) 2872 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code)
2866 2873
2867 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) 2874 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
2868 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) 2875 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
2869 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) 2876 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
2870 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) 2877 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
2871 2878
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
2903 #undef ALLOCATE_VARSIZE_MAP 2910 #undef ALLOCATE_VARSIZE_MAP
2904 #undef ALLOCATE_MAP 2911 #undef ALLOCATE_MAP
2905 } 2912 }
2906 2913
2907 { // Empty arrays 2914 { // Empty arrays
2908 { ByteArray* byte_array; 2915 { ByteArray* byte_array;
2909 if (!AllocateByteArray(0, TENURED)->To(&byte_array)) return false; 2916 if (!AllocateByteArray(0, TENURED)->To(&byte_array)) return false;
2910 set_empty_byte_array(byte_array); 2917 set_empty_byte_array(byte_array);
2911 } 2918 }
2912 2919
2913 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type) \ 2920 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \
2914 { ExternalArray* obj; \ 2921 { ExternalArray* obj; \
2915 if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj)) \ 2922 if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj)) \
2916 return false; \ 2923 return false; \
2917 set_empty_external_##type##_array(obj); \ 2924 set_empty_external_##type##_array(obj); \
2918 } 2925 }
2919 2926
2920 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Byte, byte) 2927 TYPED_ARRAYS(ALLOCATE_EMPTY_EXTERNAL_ARRAY)
2921 ALLOCATE_EMPTY_EXTERNAL_ARRAY(UnsignedByte, unsigned_byte)
2922 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Short, short) // NOLINT
2923 ALLOCATE_EMPTY_EXTERNAL_ARRAY(UnsignedShort, unsigned_short)
2924 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Int, int)
2925 ALLOCATE_EMPTY_EXTERNAL_ARRAY(UnsignedInt, unsigned_int)
2926 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Float, float)
2927 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Double, double)
2928 ALLOCATE_EMPTY_EXTERNAL_ARRAY(Pixel, pixel)
2929 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY 2928 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY
2930 } 2929 }
2931 ASSERT(!InNewSpace(empty_fixed_array())); 2930 ASSERT(!InNewSpace(empty_fixed_array()));
2932 return true; 2931 return true;
2933 } 2932 }
2934 2933
2935 2934
2936 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 2935 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2937 // Statically ensure that it is safe to allocate heap numbers in paged 2936 // Statically ensure that it is safe to allocate heap numbers in paged
2938 // spaces. 2937 // spaces.
2939 int size = HeapNumber::kSize; 2938 int size = HeapNumber::kSize;
2940 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); 2939 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
2940
2941 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 2941 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
2942 2942
2943 Object* result; 2943 Object* result;
2944 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 2944 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
2945 if (!maybe_result->ToObject(&result)) return maybe_result; 2945 if (!maybe_result->ToObject(&result)) return maybe_result;
2946 } 2946 }
2947 2947
2948 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); 2948 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2949 HeapNumber::cast(result)->set_value(value); 2949 HeapNumber::cast(result)->set_value(value);
2950 return result; 2950 return result;
2951 } 2951 }
2952 2952
2953 2953
2954 MaybeObject* Heap::AllocateCell(Object* value) { 2954 MaybeObject* Heap::AllocateCell(Object* value) {
2955 int size = Cell::kSize; 2955 int size = Cell::kSize;
2956 STATIC_ASSERT(Cell::kSize <= Page::kNonCodeObjectAreaSize); 2956 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize);
2957 2957
2958 Object* result; 2958 Object* result;
2959 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); 2959 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
2960 if (!maybe_result->ToObject(&result)) return maybe_result; 2960 if (!maybe_result->ToObject(&result)) return maybe_result;
2961 } 2961 }
2962 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); 2962 HeapObject::cast(result)->set_map_no_write_barrier(cell_map());
2963 Cell::cast(result)->set_value(value); 2963 Cell::cast(result)->set_value(value);
2964 return result; 2964 return result;
2965 } 2965 }
2966 2966
2967 2967
2968 MaybeObject* Heap::AllocatePropertyCell() { 2968 MaybeObject* Heap::AllocatePropertyCell() {
2969 int size = PropertyCell::kSize; 2969 int size = PropertyCell::kSize;
2970 STATIC_ASSERT(PropertyCell::kSize <= Page::kNonCodeObjectAreaSize); 2970 STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
2971 2971
2972 Object* result; 2972 Object* result;
2973 MaybeObject* maybe_result = 2973 MaybeObject* maybe_result =
2974 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); 2974 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE);
2975 if (!maybe_result->ToObject(&result)) return maybe_result; 2975 if (!maybe_result->ToObject(&result)) return maybe_result;
2976 2976
2977 HeapObject::cast(result)->set_map_no_write_barrier( 2977 HeapObject::cast(result)->set_map_no_write_barrier(
2978 global_property_cell_map()); 2978 global_property_cell_map());
2979 PropertyCell* cell = PropertyCell::cast(result); 2979 PropertyCell* cell = PropertyCell::cast(result);
2980 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), 2980 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
2981 SKIP_WRITE_BARRIER); 2981 SKIP_WRITE_BARRIER);
2982 cell->set_value(the_hole_value()); 2982 cell->set_value(the_hole_value());
2983 cell->set_type(Type::None()); 2983 cell->set_type(HeapType::None());
2984 return result; 2984 return result;
2985 } 2985 }
2986 2986
2987 2987
2988 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { 2988 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) {
2989 Box* result; 2989 Box* result;
2990 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); 2990 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE);
2991 if (!maybe_result->To(&result)) return maybe_result; 2991 if (!maybe_result->To(&result)) return maybe_result;
2992 result->set_value(value); 2992 result->set_value(value);
2993 return result; 2993 return result;
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
3059 set_js_construct_entry_code(*stub.GetCode(isolate())); 3059 set_js_construct_entry_code(*stub.GetCode(isolate()));
3060 } 3060 }
3061 3061
3062 3062
3063 void Heap::CreateFixedStubs() { 3063 void Heap::CreateFixedStubs() {
3064 // Here we create roots for fixed stubs. They are needed at GC 3064 // Here we create roots for fixed stubs. They are needed at GC
3065 // for cooking and uncooking (check out frames.cc). 3065 // for cooking and uncooking (check out frames.cc).
3066 // The eliminates the need for doing dictionary lookup in the 3066 // The eliminates the need for doing dictionary lookup in the
3067 // stub cache for these stubs. 3067 // stub cache for these stubs.
3068 HandleScope scope(isolate()); 3068 HandleScope scope(isolate());
3069
3070 // Create stubs that should be there, so we don't unexpectedly have to
3071 // create them if we need them during the creation of another stub.
3072 // Stub creation mixes raw pointers and handles in an unsafe manner so
3073 // we cannot create stubs while we are creating stubs.
3074 CodeStub::GenerateStubsAheadOfTime(isolate());
3075
3076 // MacroAssembler::Abort calls (usually enabled with --debug-code) depend on
3077 // CEntryStub, so we need to call GenerateStubsAheadOfTime before JSEntryStub
3078 // is created.
3079
3069 // gcc-4.4 has problem generating correct code of following snippet: 3080 // gcc-4.4 has problem generating correct code of following snippet:
3070 // { JSEntryStub stub; 3081 // { JSEntryStub stub;
3071 // js_entry_code_ = *stub.GetCode(); 3082 // js_entry_code_ = *stub.GetCode();
3072 // } 3083 // }
3073 // { JSConstructEntryStub stub; 3084 // { JSConstructEntryStub stub;
3074 // js_construct_entry_code_ = *stub.GetCode(); 3085 // js_construct_entry_code_ = *stub.GetCode();
3075 // } 3086 // }
3076 // To workaround the problem, make separate functions without inlining. 3087 // To workaround the problem, make separate functions without inlining.
3077 Heap::CreateJSEntryStub(); 3088 Heap::CreateJSEntryStub();
3078 Heap::CreateJSConstructEntryStub(); 3089 Heap::CreateJSConstructEntryStub();
3079
3080 // Create stubs that should be there, so we don't unexpectedly have to
3081 // create them if we need them during the creation of another stub.
3082 // Stub creation mixes raw pointers and handles in an unsafe manner so
3083 // we cannot create stubs while we are creating stubs.
3084 CodeStub::GenerateStubsAheadOfTime(isolate());
3085 } 3090 }
3086 3091
3087 3092
3088 void Heap::CreateStubsRequiringBuiltins() {
3089 HandleScope scope(isolate());
3090 CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(isolate());
3091 }
3092
3093
3094 bool Heap::CreateInitialObjects() { 3093 bool Heap::CreateInitialObjects() {
3095 Object* obj; 3094 Object* obj;
3096 3095
3097 // The -0 value must be set before NumberFromDouble works. 3096 // The -0 value must be set before NumberFromDouble works.
3098 { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED); 3097 { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED);
3099 if (!maybe_obj->ToObject(&obj)) return false; 3098 if (!maybe_obj->ToObject(&obj)) return false;
3100 } 3099 }
3101 set_minus_zero_value(HeapNumber::cast(obj)); 3100 set_minus_zero_value(HeapNumber::cast(obj));
3102 ASSERT(std::signbit(minus_zero_value()->Number()) != 0); 3101 ASSERT(std::signbit(minus_zero_value()->Number()) != 0);
3103 3102
(...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after
3282 3281
3283 // Allocate object to hold object observation state. 3282 // Allocate object to hold object observation state.
3284 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); 3283 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
3285 if (!maybe_obj->ToObject(&obj)) return false; 3284 if (!maybe_obj->ToObject(&obj)) return false;
3286 } 3285 }
3287 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj)); 3286 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
3288 if (!maybe_obj->ToObject(&obj)) return false; 3287 if (!maybe_obj->ToObject(&obj)) return false;
3289 } 3288 }
3290 set_observation_state(JSObject::cast(obj)); 3289 set_observation_state(JSObject::cast(obj));
3291 3290
3291 // Allocate object to hold object microtask state.
3292 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
3293 if (!maybe_obj->ToObject(&obj)) return false;
3294 }
3295 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
3296 if (!maybe_obj->ToObject(&obj)) return false;
3297 }
3298 set_microtask_state(JSObject::cast(obj));
3299
3292 { MaybeObject* maybe_obj = AllocateSymbol(); 3300 { MaybeObject* maybe_obj = AllocateSymbol();
3293 if (!maybe_obj->ToObject(&obj)) return false; 3301 if (!maybe_obj->ToObject(&obj)) return false;
3294 } 3302 }
3295 Symbol::cast(obj)->set_is_private(true); 3303 Symbol::cast(obj)->set_is_private(true);
3296 set_frozen_symbol(Symbol::cast(obj)); 3304 set_frozen_symbol(Symbol::cast(obj));
3297 3305
3298 { MaybeObject* maybe_obj = AllocateSymbol(); 3306 { MaybeObject* maybe_obj = AllocateSymbol();
3299 if (!maybe_obj->ToObject(&obj)) return false; 3307 if (!maybe_obj->ToObject(&obj)) return false;
3300 } 3308 }
3301 Symbol::cast(obj)->set_is_private(true); 3309 Symbol::cast(obj)->set_is_private(true);
3302 set_elements_transition_symbol(Symbol::cast(obj)); 3310 set_elements_transition_symbol(Symbol::cast(obj));
3303 3311
3304 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED); 3312 { MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED);
3305 if (!maybe_obj->ToObject(&obj)) return false; 3313 if (!maybe_obj->ToObject(&obj)) return false;
3306 } 3314 }
3307 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); 3315 SeededNumberDictionary::cast(obj)->set_requires_slow_elements();
3308 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); 3316 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj));
3309 3317
3310 { MaybeObject* maybe_obj = AllocateSymbol(); 3318 { MaybeObject* maybe_obj = AllocateSymbol();
3311 if (!maybe_obj->ToObject(&obj)) return false; 3319 if (!maybe_obj->ToObject(&obj)) return false;
3312 } 3320 }
3313 Symbol::cast(obj)->set_is_private(true); 3321 Symbol::cast(obj)->set_is_private(true);
3314 set_observed_symbol(Symbol::cast(obj)); 3322 set_observed_symbol(Symbol::cast(obj));
3315 3323
3324 { MaybeObject* maybe_obj = AllocateFixedArray(0, TENURED);
3325 if (!maybe_obj->ToObject(&obj)) return false;
3326 }
3327 set_materialized_objects(FixedArray::cast(obj));
3328
3316 // Handling of script id generation is in Factory::NewScript. 3329 // Handling of script id generation is in Factory::NewScript.
3317 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); 3330 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId));
3318 3331
3332 { MaybeObject* maybe_obj = AllocateAllocationSitesScratchpad();
3333 if (!maybe_obj->ToObject(&obj)) return false;
3334 }
3335 set_allocation_sites_scratchpad(FixedArray::cast(obj));
3336 InitializeAllocationSitesScratchpad();
3337
3319 // Initialize keyed lookup cache. 3338 // Initialize keyed lookup cache.
3320 isolate_->keyed_lookup_cache()->Clear(); 3339 isolate_->keyed_lookup_cache()->Clear();
3321 3340
3322 // Initialize context slot cache. 3341 // Initialize context slot cache.
3323 isolate_->context_slot_cache()->Clear(); 3342 isolate_->context_slot_cache()->Clear();
3324 3343
3325 // Initialize descriptor cache. 3344 // Initialize descriptor cache.
3326 isolate_->descriptor_lookup_cache()->Clear(); 3345 isolate_->descriptor_lookup_cache()->Clear();
3327 3346
3328 // Initialize compilation cache. 3347 // Initialize compilation cache.
(...skipping 227 matching lines...) Expand 10 before | Expand all | Expand 10 after
3556 // number string cache. 3575 // number string cache.
3557 AllocateFullSizeNumberStringCache(); 3576 AllocateFullSizeNumberStringCache();
3558 return; 3577 return;
3559 } 3578 }
3560 number_string_cache()->set(hash * 2, number); 3579 number_string_cache()->set(hash * 2, number);
3561 number_string_cache()->set(hash * 2 + 1, string); 3580 number_string_cache()->set(hash * 2 + 1, string);
3562 } 3581 }
3563 3582
3564 3583
3565 MaybeObject* Heap::NumberToString(Object* number, 3584 MaybeObject* Heap::NumberToString(Object* number,
3566 bool check_number_string_cache, 3585 bool check_number_string_cache) {
3567 PretenureFlag pretenure) {
3568 isolate_->counters()->number_to_string_runtime()->Increment(); 3586 isolate_->counters()->number_to_string_runtime()->Increment();
3569 if (check_number_string_cache) { 3587 if (check_number_string_cache) {
3570 Object* cached = GetNumberStringCache(number); 3588 Object* cached = GetNumberStringCache(number);
3571 if (cached != undefined_value()) { 3589 if (cached != undefined_value()) {
3572 return cached; 3590 return cached;
3573 } 3591 }
3574 } 3592 }
3575 3593
3576 char arr[100]; 3594 char arr[100];
3577 Vector<char> buffer(arr, ARRAY_SIZE(arr)); 3595 Vector<char> buffer(arr, ARRAY_SIZE(arr));
3578 const char* str; 3596 const char* str;
3579 if (number->IsSmi()) { 3597 if (number->IsSmi()) {
3580 int num = Smi::cast(number)->value(); 3598 int num = Smi::cast(number)->value();
3581 str = IntToCString(num, buffer); 3599 str = IntToCString(num, buffer);
3582 } else { 3600 } else {
3583 double num = HeapNumber::cast(number)->value(); 3601 double num = HeapNumber::cast(number)->value();
3584 str = DoubleToCString(num, buffer); 3602 str = DoubleToCString(num, buffer);
3585 } 3603 }
3586 3604
3587 Object* js_string; 3605 Object* js_string;
3606
3607 // We tenure the allocated string since it is referenced from the
3608 // number-string cache which lives in the old space.
3588 MaybeObject* maybe_js_string = 3609 MaybeObject* maybe_js_string =
3589 AllocateStringFromOneByte(CStrVector(str), pretenure); 3610 AllocateStringFromOneByte(CStrVector(str), TENURED);
3590 if (maybe_js_string->ToObject(&js_string)) { 3611 if (maybe_js_string->ToObject(&js_string)) {
3591 SetNumberStringCache(number, String::cast(js_string)); 3612 SetNumberStringCache(number, String::cast(js_string));
3592 } 3613 }
3593 return maybe_js_string; 3614 return maybe_js_string;
3594 } 3615 }
3595 3616
3596 3617
3597 MaybeObject* Heap::Uint32ToString(uint32_t value, 3618 MaybeObject* Heap::Uint32ToString(uint32_t value,
3598 bool check_number_string_cache) { 3619 bool check_number_string_cache) {
3599 Object* number; 3620 Object* number;
3600 MaybeObject* maybe = NumberFromUint32(value); 3621 MaybeObject* maybe = NumberFromUint32(value);
3601 if (!maybe->To<Object>(&number)) return maybe; 3622 if (!maybe->To<Object>(&number)) return maybe;
3602 return NumberToString(number, check_number_string_cache); 3623 return NumberToString(number, check_number_string_cache);
3603 } 3624 }
3604 3625
3605 3626
3627 MaybeObject* Heap::AllocateAllocationSitesScratchpad() {
3628 MaybeObject* maybe_obj =
3629 AllocateFixedArray(kAllocationSiteScratchpadSize, TENURED);
3630 return maybe_obj;
3631 }
3632
3633
3634 void Heap::FlushAllocationSitesScratchpad() {
3635 for (int i = 0; i < allocation_sites_scratchpad_length_; i++) {
3636 allocation_sites_scratchpad()->set_undefined(i);
3637 }
3638 allocation_sites_scratchpad_length_ = 0;
3639 }
3640
3641
3642 void Heap::InitializeAllocationSitesScratchpad() {
3643 ASSERT(allocation_sites_scratchpad()->length() ==
3644 kAllocationSiteScratchpadSize);
3645 for (int i = 0; i < kAllocationSiteScratchpadSize; i++) {
3646 allocation_sites_scratchpad()->set_undefined(i);
3647 }
3648 }
3649
3650
3651 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site) {
3652 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) {
3653 // We cannot use the normal write-barrier because slots need to be
3654 // recorded with non-incremental marking as well. We have to explicitly
3655 // record the slot to take evacuation candidates into account.
3656 allocation_sites_scratchpad()->set(
3657 allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER);
3658 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt(
3659 allocation_sites_scratchpad_length_);
3660 mark_compact_collector()->RecordSlot(slot, slot, *slot);
3661 allocation_sites_scratchpad_length_++;
3662 }
3663 }
3664
3665
3606 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { 3666 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
3607 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); 3667 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
3608 } 3668 }
3609 3669
3610 3670
3611 Heap::RootListIndex Heap::RootIndexForExternalArrayType( 3671 Heap::RootListIndex Heap::RootIndexForExternalArrayType(
3612 ExternalArrayType array_type) { 3672 ExternalArrayType array_type) {
3613 switch (array_type) { 3673 switch (array_type) {
3614 case kExternalByteArray: 3674 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3615 return kExternalByteArrayMapRootIndex; 3675 case kExternal##Type##Array: \
3616 case kExternalUnsignedByteArray: 3676 return kExternal##Type##ArrayMapRootIndex;
3617 return kExternalUnsignedByteArrayMapRootIndex; 3677
3618 case kExternalShortArray: 3678 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX)
3619 return kExternalShortArrayMapRootIndex; 3679 #undef ARRAY_TYPE_TO_ROOT_INDEX
3620 case kExternalUnsignedShortArray: 3680
3621 return kExternalUnsignedShortArrayMapRootIndex;
3622 case kExternalIntArray:
3623 return kExternalIntArrayMapRootIndex;
3624 case kExternalUnsignedIntArray:
3625 return kExternalUnsignedIntArrayMapRootIndex;
3626 case kExternalFloatArray:
3627 return kExternalFloatArrayMapRootIndex;
3628 case kExternalDoubleArray:
3629 return kExternalDoubleArrayMapRootIndex;
3630 case kExternalPixelArray:
3631 return kExternalPixelArrayMapRootIndex;
3632 default: 3681 default:
3633 UNREACHABLE(); 3682 UNREACHABLE();
3634 return kUndefinedValueRootIndex; 3683 return kUndefinedValueRootIndex;
3635 } 3684 }
3636 } 3685 }
3637 3686
3638 3687
3639 Map* Heap::MapForFixedTypedArray(ExternalArrayType array_type) { 3688 Map* Heap::MapForFixedTypedArray(ExternalArrayType array_type) {
3640 return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]); 3689 return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]);
3641 } 3690 }
3642 3691
3643 3692
3644 Heap::RootListIndex Heap::RootIndexForFixedTypedArray( 3693 Heap::RootListIndex Heap::RootIndexForFixedTypedArray(
3645 ExternalArrayType array_type) { 3694 ExternalArrayType array_type) {
3646 switch (array_type) { 3695 switch (array_type) {
3647 case kExternalByteArray: 3696 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3648 return kFixedInt8ArrayMapRootIndex; 3697 case kExternal##Type##Array: \
3649 case kExternalUnsignedByteArray: 3698 return kFixed##Type##ArrayMapRootIndex;
3650 return kFixedUint8ArrayMapRootIndex; 3699
3651 case kExternalShortArray: 3700 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX)
3652 return kFixedInt16ArrayMapRootIndex; 3701 #undef ARRAY_TYPE_TO_ROOT_INDEX
3653 case kExternalUnsignedShortArray: 3702
3654 return kFixedUint16ArrayMapRootIndex;
3655 case kExternalIntArray:
3656 return kFixedInt32ArrayMapRootIndex;
3657 case kExternalUnsignedIntArray:
3658 return kFixedUint32ArrayMapRootIndex;
3659 case kExternalFloatArray:
3660 return kFixedFloat32ArrayMapRootIndex;
3661 case kExternalDoubleArray:
3662 return kFixedFloat64ArrayMapRootIndex;
3663 case kExternalPixelArray:
3664 return kFixedUint8ClampedArrayMapRootIndex;
3665 default: 3703 default:
3666 UNREACHABLE(); 3704 UNREACHABLE();
3667 return kUndefinedValueRootIndex; 3705 return kUndefinedValueRootIndex;
3668 } 3706 }
3669 } 3707 }
3670 3708
3671 3709
3672 Heap::RootListIndex Heap::RootIndexForEmptyExternalArray( 3710 Heap::RootListIndex Heap::RootIndexForEmptyExternalArray(
3673 ElementsKind elementsKind) { 3711 ElementsKind elementsKind) {
3674 switch (elementsKind) { 3712 switch (elementsKind) {
3675 case EXTERNAL_BYTE_ELEMENTS: 3713 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3676 return kEmptyExternalByteArrayRootIndex; 3714 case EXTERNAL_##TYPE##_ELEMENTS: \
3677 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3715 return kEmptyExternal##Type##ArrayRootIndex;
3678 return kEmptyExternalUnsignedByteArrayRootIndex; 3716
3679 case EXTERNAL_SHORT_ELEMENTS: 3717 TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX)
3680 return kEmptyExternalShortArrayRootIndex; 3718 #undef ELEMENT_KIND_TO_ROOT_INDEX
3681 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3719
3682 return kEmptyExternalUnsignedShortArrayRootIndex;
3683 case EXTERNAL_INT_ELEMENTS:
3684 return kEmptyExternalIntArrayRootIndex;
3685 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3686 return kEmptyExternalUnsignedIntArrayRootIndex;
3687 case EXTERNAL_FLOAT_ELEMENTS:
3688 return kEmptyExternalFloatArrayRootIndex;
3689 case EXTERNAL_DOUBLE_ELEMENTS:
3690 return kEmptyExternalDoubleArrayRootIndex;
3691 case EXTERNAL_PIXEL_ELEMENTS:
3692 return kEmptyExternalPixelArrayRootIndex;
3693 default: 3720 default:
3694 UNREACHABLE(); 3721 UNREACHABLE();
3695 return kUndefinedValueRootIndex; 3722 return kUndefinedValueRootIndex;
3696 } 3723 }
3697 } 3724 }
3698 3725
3699 3726
3700 ExternalArray* Heap::EmptyExternalArrayForMap(Map* map) { 3727 ExternalArray* Heap::EmptyExternalArrayForMap(Map* map) {
3701 return ExternalArray::cast( 3728 return ExternalArray::cast(
3702 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]); 3729 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]);
(...skipping 13 matching lines...) Expand all
3716 return Smi::FromInt(int_value); 3743 return Smi::FromInt(int_value);
3717 } 3744 }
3718 3745
3719 // Materialize the value in the heap. 3746 // Materialize the value in the heap.
3720 return AllocateHeapNumber(value, pretenure); 3747 return AllocateHeapNumber(value, pretenure);
3721 } 3748 }
3722 3749
3723 3750
3724 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { 3751 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
3725 // Statically ensure that it is safe to allocate foreigns in paged spaces. 3752 // Statically ensure that it is safe to allocate foreigns in paged spaces.
3726 STATIC_ASSERT(Foreign::kSize <= Page::kMaxNonCodeHeapObjectSize); 3753 STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
3727 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 3754 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3728 Foreign* result; 3755 Foreign* result;
3729 MaybeObject* maybe_result = Allocate(foreign_map(), space); 3756 MaybeObject* maybe_result = Allocate(foreign_map(), space);
3730 if (!maybe_result->To(&result)) return maybe_result; 3757 if (!maybe_result->To(&result)) return maybe_result;
3731 result->set_foreign_address(address); 3758 result->set_foreign_address(address);
3732 return result; 3759 return result;
3733 } 3760 }
3734 3761
3735 3762
3736 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { 3763 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
3770 3797
3771 return share; 3798 return share;
3772 } 3799 }
3773 3800
3774 3801
3775 MaybeObject* Heap::AllocateJSMessageObject(String* type, 3802 MaybeObject* Heap::AllocateJSMessageObject(String* type,
3776 JSArray* arguments, 3803 JSArray* arguments,
3777 int start_position, 3804 int start_position,
3778 int end_position, 3805 int end_position,
3779 Object* script, 3806 Object* script,
3780 Object* stack_trace,
3781 Object* stack_frames) { 3807 Object* stack_frames) {
3782 Object* result; 3808 Object* result;
3783 { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE); 3809 { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE);
3784 if (!maybe_result->ToObject(&result)) return maybe_result; 3810 if (!maybe_result->ToObject(&result)) return maybe_result;
3785 } 3811 }
3786 JSMessageObject* message = JSMessageObject::cast(result); 3812 JSMessageObject* message = JSMessageObject::cast(result);
3787 message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); 3813 message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3788 message->initialize_elements(); 3814 message->initialize_elements();
3789 message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER); 3815 message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3790 message->set_type(type); 3816 message->set_type(type);
3791 message->set_arguments(arguments); 3817 message->set_arguments(arguments);
3792 message->set_start_position(start_position); 3818 message->set_start_position(start_position);
3793 message->set_end_position(end_position); 3819 message->set_end_position(end_position);
3794 message->set_script(script); 3820 message->set_script(script);
3795 message->set_stack_trace(stack_trace);
3796 message->set_stack_frames(stack_frames); 3821 message->set_stack_frames(stack_frames);
3797 return result; 3822 return result;
3798 } 3823 }
3799 3824
3800 3825
3801 MaybeObject* Heap::AllocateExternalStringFromAscii( 3826 MaybeObject* Heap::AllocateExternalStringFromAscii(
3802 const ExternalAsciiString::Resource* resource) { 3827 const ExternalAsciiString::Resource* resource) {
3803 size_t length = resource->length(); 3828 size_t length = resource->length();
3804 if (length > static_cast<size_t>(String::kMaxLength)) { 3829 if (length > static_cast<size_t>(String::kMaxLength)) {
3805 isolate()->context()->mark_out_of_memory(); 3830 isolate()->context()->mark_out_of_memory();
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
3924 reinterpret_cast<ExternalArray*>(result)->set_external_pointer( 3949 reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
3925 external_pointer); 3950 external_pointer);
3926 3951
3927 return result; 3952 return result;
3928 } 3953 }
3929 3954
3930 static void ForFixedTypedArray(ExternalArrayType array_type, 3955 static void ForFixedTypedArray(ExternalArrayType array_type,
3931 int* element_size, 3956 int* element_size,
3932 ElementsKind* element_kind) { 3957 ElementsKind* element_kind) {
3933 switch (array_type) { 3958 switch (array_type) {
3934 case kExternalUnsignedByteArray: 3959 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3935 *element_size = 1; 3960 case kExternal##Type##Array: \
3936 *element_kind = UINT8_ELEMENTS; 3961 *element_size = size; \
3962 *element_kind = TYPE##_ELEMENTS; \
3937 return; 3963 return;
3938 case kExternalByteArray: 3964
3939 *element_size = 1; 3965 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3940 *element_kind = INT8_ELEMENTS; 3966 #undef TYPED_ARRAY_CASE
3941 return; 3967
3942 case kExternalUnsignedShortArray:
3943 *element_size = 2;
3944 *element_kind = UINT16_ELEMENTS;
3945 return;
3946 case kExternalShortArray:
3947 *element_size = 2;
3948 *element_kind = INT16_ELEMENTS;
3949 return;
3950 case kExternalUnsignedIntArray:
3951 *element_size = 4;
3952 *element_kind = UINT32_ELEMENTS;
3953 return;
3954 case kExternalIntArray:
3955 *element_size = 4;
3956 *element_kind = INT32_ELEMENTS;
3957 return;
3958 case kExternalFloatArray:
3959 *element_size = 4;
3960 *element_kind = FLOAT32_ELEMENTS;
3961 return;
3962 case kExternalDoubleArray:
3963 *element_size = 8;
3964 *element_kind = FLOAT64_ELEMENTS;
3965 return;
3966 case kExternalPixelArray:
3967 *element_size = 1;
3968 *element_kind = UINT8_CLAMPED_ELEMENTS;
3969 return;
3970 default: 3968 default:
3971 *element_size = 0; // Bogus 3969 *element_size = 0; // Bogus
3972 *element_kind = UINT8_ELEMENTS; // Bogus 3970 *element_kind = UINT8_ELEMENTS; // Bogus
3973 UNREACHABLE(); 3971 UNREACHABLE();
3974 } 3972 }
3975 } 3973 }
3976 3974
3977 3975
3978 MaybeObject* Heap::AllocateFixedTypedArray(int length, 3976 MaybeObject* Heap::AllocateFixedTypedArray(int length,
3979 ExternalArrayType array_type, 3977 ExternalArrayType array_type,
3980 PretenureFlag pretenure) { 3978 PretenureFlag pretenure) {
3981 int element_size; 3979 int element_size;
3982 ElementsKind elements_kind; 3980 ElementsKind elements_kind;
3983 ForFixedTypedArray(array_type, &element_size, &elements_kind); 3981 ForFixedTypedArray(array_type, &element_size, &elements_kind);
3984 int size = OBJECT_POINTER_ALIGN( 3982 int size = OBJECT_POINTER_ALIGN(
3985 length * element_size + FixedTypedArrayBase::kDataOffset); 3983 length * element_size + FixedTypedArrayBase::kDataOffset);
3986 #ifndef V8_HOST_ARCH_64_BIT 3984 #ifndef V8_HOST_ARCH_64_BIT
3987 if (array_type == kExternalDoubleArray) { 3985 if (array_type == kExternalFloat64Array) {
3988 size += kPointerSize; 3986 size += kPointerSize;
3989 } 3987 }
3990 #endif 3988 #endif
3991 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3989 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
3992 3990
3993 HeapObject* object; 3991 HeapObject* object;
3994 MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); 3992 MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
3995 if (!maybe_object->To(&object)) return maybe_object; 3993 if (!maybe_object->To(&object)) return maybe_object;
3996 3994
3997 if (array_type == kExternalDoubleArray) { 3995 if (array_type == kExternalFloat64Array) {
3998 object = EnsureDoubleAligned(this, object, size); 3996 object = EnsureDoubleAligned(this, object, size);
3999 } 3997 }
4000 3998
4001 FixedTypedArrayBase* elements = 3999 FixedTypedArrayBase* elements =
4002 reinterpret_cast<FixedTypedArrayBase*>(object); 4000 reinterpret_cast<FixedTypedArrayBase*>(object);
4003 elements->set_map(MapForFixedTypedArray(array_type)); 4001 elements->set_map(MapForFixedTypedArray(array_type));
4004 elements->set_length(length); 4002 elements->set_length(length);
4005 return elements; 4003 return elements;
4006 } 4004 }
4007 4005
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
4047 // Initialize the object 4045 // Initialize the object
4048 result->set_map_no_write_barrier(code_map()); 4046 result->set_map_no_write_barrier(code_map());
4049 Code* code = Code::cast(result); 4047 Code* code = Code::cast(result);
4050 ASSERT(!isolate_->code_range()->exists() || 4048 ASSERT(!isolate_->code_range()->exists() ||
4051 isolate_->code_range()->contains(code->address())); 4049 isolate_->code_range()->contains(code->address()));
4052 code->set_instruction_size(desc.instr_size); 4050 code->set_instruction_size(desc.instr_size);
4053 code->set_relocation_info(reloc_info); 4051 code->set_relocation_info(reloc_info);
4054 code->set_flags(flags); 4052 code->set_flags(flags);
4055 code->set_raw_kind_specific_flags1(0); 4053 code->set_raw_kind_specific_flags1(0);
4056 code->set_raw_kind_specific_flags2(0); 4054 code->set_raw_kind_specific_flags2(0);
4057 if (code->is_call_stub() || code->is_keyed_call_stub()) {
4058 code->set_check_type(RECEIVER_MAP_CHECK);
4059 }
4060 code->set_is_crankshafted(crankshafted); 4055 code->set_is_crankshafted(crankshafted);
4061 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); 4056 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
4062 code->set_raw_type_feedback_info(undefined_value()); 4057 code->set_raw_type_feedback_info(undefined_value());
4063 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); 4058 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
4064 code->set_gc_metadata(Smi::FromInt(0)); 4059 code->set_gc_metadata(Smi::FromInt(0));
4065 code->set_ic_age(global_ic_age_); 4060 code->set_ic_age(global_ic_age_);
4066 code->set_prologue_offset(prologue_offset); 4061 code->set_prologue_offset(prologue_offset);
4067 if (code->kind() == Code::OPTIMIZED_FUNCTION) { 4062 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
4068 code->set_marked_for_deoptimization(false); 4063 code->set_marked_for_deoptimization(false);
4069 } 4064 }
(...skipping 1270 matching lines...) Expand 10 before | Expand all | Expand 10 after
5340 } 5335 }
5341 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( 5336 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
5342 hash_table_map()); 5337 hash_table_map());
5343 ASSERT(result->IsHashTable()); 5338 ASSERT(result->IsHashTable());
5344 return result; 5339 return result;
5345 } 5340 }
5346 5341
5347 5342
5348 MaybeObject* Heap::AllocateSymbol() { 5343 MaybeObject* Heap::AllocateSymbol() {
5349 // Statically ensure that it is safe to allocate symbols in paged spaces. 5344 // Statically ensure that it is safe to allocate symbols in paged spaces.
5350 STATIC_ASSERT(Symbol::kSize <= Page::kNonCodeObjectAreaSize); 5345 STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);
5351 5346
5352 Object* result; 5347 Object* result;
5353 MaybeObject* maybe = 5348 MaybeObject* maybe =
5354 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); 5349 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
5355 if (!maybe->ToObject(&result)) return maybe; 5350 if (!maybe->ToObject(&result)) return maybe;
5356 5351
5357 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); 5352 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map());
5358 5353
5359 // Generate a random hash value. 5354 // Generate a random hash value.
5360 int hash; 5355 int hash;
(...skipping 251 matching lines...) Expand 10 before | Expand all | Expand 10 after
5612 AdvanceIdleIncrementalMarking(step_size); 5607 AdvanceIdleIncrementalMarking(step_size);
5613 } 5608 }
5614 5609
5615 // After context disposal there is likely a lot of garbage remaining, reset 5610 // After context disposal there is likely a lot of garbage remaining, reset
5616 // the idle notification counters in order to trigger more incremental GCs 5611 // the idle notification counters in order to trigger more incremental GCs
5617 // on subsequent idle notifications. 5612 // on subsequent idle notifications.
5618 StartIdleRound(); 5613 StartIdleRound();
5619 return false; 5614 return false;
5620 } 5615 }
5621 5616
5622 if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) { 5617 if (!FLAG_incremental_marking || Serializer::enabled()) {
5623 return IdleGlobalGC(); 5618 return IdleGlobalGC();
5624 } 5619 }
5625 5620
5626 // By doing small chunks of GC work in each IdleNotification, 5621 // By doing small chunks of GC work in each IdleNotification,
5627 // perform a round of incremental GCs and after that wait until 5622 // perform a round of incremental GCs and after that wait until
5628 // the mutator creates enough garbage to justify a new round. 5623 // the mutator creates enough garbage to justify a new round.
5629 // An incremental GC progresses as follows: 5624 // An incremental GC progresses as follows:
5630 // 1. many incremental marking steps, 5625 // 1. many incremental marking steps,
5631 // 2. one old space mark-sweep-compact, 5626 // 2. one old space mark-sweep-compact,
5632 // 3. many lazy sweep steps. 5627 // 3. many lazy sweep steps.
(...skipping 220 matching lines...) Expand 10 before | Expand all | Expand 10 after
5853 5848
5854 #ifdef VERIFY_HEAP 5849 #ifdef VERIFY_HEAP
5855 void Heap::Verify() { 5850 void Heap::Verify() {
5856 CHECK(HasBeenSetUp()); 5851 CHECK(HasBeenSetUp());
5857 5852
5858 store_buffer()->Verify(); 5853 store_buffer()->Verify();
5859 5854
5860 VerifyPointersVisitor visitor; 5855 VerifyPointersVisitor visitor;
5861 IterateRoots(&visitor, VISIT_ONLY_STRONG); 5856 IterateRoots(&visitor, VISIT_ONLY_STRONG);
5862 5857
5858 VerifySmisVisitor smis_visitor;
5859 IterateSmiRoots(&smis_visitor);
5860
5863 new_space_.Verify(); 5861 new_space_.Verify();
5864 5862
5865 old_pointer_space_->Verify(&visitor); 5863 old_pointer_space_->Verify(&visitor);
5866 map_space_->Verify(&visitor); 5864 map_space_->Verify(&visitor);
5867 5865
5868 VerifyPointersVisitor no_dirty_regions_visitor; 5866 VerifyPointersVisitor no_dirty_regions_visitor;
5869 old_data_space_->Verify(&no_dirty_regions_visitor); 5867 old_data_space_->Verify(&no_dirty_regions_visitor);
5870 code_space_->Verify(&no_dirty_regions_visitor); 5868 code_space_->Verify(&no_dirty_regions_visitor);
5871 cell_space_->Verify(&no_dirty_regions_visitor); 5869 cell_space_->Verify(&no_dirty_regions_visitor);
5872 property_cell_space_->Verify(&no_dirty_regions_visitor); 5870 property_cell_space_->Verify(&no_dirty_regions_visitor);
(...skipping 277 matching lines...) Expand 10 before | Expand all | Expand 10 after
6150 v->Synchronize(VisitorSynchronization::kStringTable); 6148 v->Synchronize(VisitorSynchronization::kStringTable);
6151 if (mode != VISIT_ALL_IN_SCAVENGE && 6149 if (mode != VISIT_ALL_IN_SCAVENGE &&
6152 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { 6150 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
6153 // Scavenge collections have special processing for this. 6151 // Scavenge collections have special processing for this.
6154 external_string_table_.Iterate(v); 6152 external_string_table_.Iterate(v);
6155 } 6153 }
6156 v->Synchronize(VisitorSynchronization::kExternalStringsTable); 6154 v->Synchronize(VisitorSynchronization::kExternalStringsTable);
6157 } 6155 }
6158 6156
6159 6157
6158 void Heap::IterateSmiRoots(ObjectVisitor* v) {
6159 v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]);
6160 v->Synchronize(VisitorSynchronization::kSmiRootList);
6161 }
6162
6163
6160 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { 6164 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
6161 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); 6165 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
6162 v->Synchronize(VisitorSynchronization::kStrongRootList); 6166 v->Synchronize(VisitorSynchronization::kStrongRootList);
6163 6167
6164 v->VisitPointer(BitCast<Object**>(&hidden_string_)); 6168 v->VisitPointer(BitCast<Object**>(&hidden_string_));
6165 v->Synchronize(VisitorSynchronization::kInternalizedString); 6169 v->Synchronize(VisitorSynchronization::kInternalizedString);
6166 6170
6167 isolate_->bootstrapper()->Iterate(v); 6171 isolate_->bootstrapper()->Iterate(v);
6168 v->Synchronize(VisitorSynchronization::kBootstrapper); 6172 v->Synchronize(VisitorSynchronization::kBootstrapper);
6169 isolate_->Iterate(v); 6173 isolate_->Iterate(v);
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
6304 ASSERT(external_allocation_limit_ <= 256 * MB); 6308 ASSERT(external_allocation_limit_ <= 256 * MB);
6305 6309
6306 // The old generation is paged and needs at least one page for each space. 6310 // The old generation is paged and needs at least one page for each space.
6307 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; 6311 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
6308 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count * 6312 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count *
6309 Page::kPageSize), 6313 Page::kPageSize),
6310 RoundUp(max_old_generation_size_, 6314 RoundUp(max_old_generation_size_,
6311 Page::kPageSize)); 6315 Page::kPageSize));
6312 6316
6313 // We rely on being able to allocate new arrays in paged spaces. 6317 // We rely on being able to allocate new arrays in paged spaces.
6314 ASSERT(MaxRegularSpaceAllocationSize() >= 6318 ASSERT(Page::kMaxRegularHeapObjectSize >=
6315 (JSArray::kSize + 6319 (JSArray::kSize +
6316 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + 6320 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) +
6317 AllocationMemento::kSize)); 6321 AllocationMemento::kSize));
6318 6322
6319 configured_ = true; 6323 configured_ = true;
6320 return true; 6324 return true;
6321 } 6325 }
6322 6326
6323 6327
6324 bool Heap::ConfigureHeapDefault() { 6328 bool Heap::ConfigureHeapDefault() {
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
6372 + old_data_space_->SizeOfObjects() 6376 + old_data_space_->SizeOfObjects()
6373 + code_space_->SizeOfObjects() 6377 + code_space_->SizeOfObjects()
6374 + map_space_->SizeOfObjects() 6378 + map_space_->SizeOfObjects()
6375 + cell_space_->SizeOfObjects() 6379 + cell_space_->SizeOfObjects()
6376 + property_cell_space_->SizeOfObjects() 6380 + property_cell_space_->SizeOfObjects()
6377 + lo_space_->SizeOfObjects(); 6381 + lo_space_->SizeOfObjects();
6378 } 6382 }
6379 6383
6380 6384
6381 bool Heap::AdvanceSweepers(int step_size) { 6385 bool Heap::AdvanceSweepers(int step_size) {
6382 ASSERT(isolate()->num_sweeper_threads() == 0); 6386 ASSERT(!mark_compact_collector()->AreSweeperThreadsActivated());
6383 bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size); 6387 bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
6384 sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size); 6388 sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
6385 return sweeping_complete; 6389 return sweeping_complete;
6386 } 6390 }
6387 6391
6388 6392
6389 int64_t Heap::PromotedExternalMemorySize() { 6393 int64_t Heap::PromotedExternalMemorySize() {
6390 if (amount_of_external_allocated_memory_ 6394 if (amount_of_external_allocated_memory_
6391 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; 6395 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
6392 return amount_of_external_allocated_memory_ 6396 return amount_of_external_allocated_memory_
6393 - amount_of_external_allocated_memory_at_last_global_gc_; 6397 - amount_of_external_allocated_memory_at_last_global_gc_;
6394 } 6398 }
6395 6399
6396 6400
6397 void Heap::EnableInlineAllocation() { 6401 void Heap::EnableInlineAllocation() {
6398 ASSERT(inline_allocation_disabled_); 6402 if (!inline_allocation_disabled_) return;
6399 inline_allocation_disabled_ = false; 6403 inline_allocation_disabled_ = false;
6400 6404
6401 // Update inline allocation limit for new space. 6405 // Update inline allocation limit for new space.
6402 new_space()->UpdateInlineAllocationLimit(0); 6406 new_space()->UpdateInlineAllocationLimit(0);
6403 } 6407 }
6404 6408
6405 6409
6406 void Heap::DisableInlineAllocation() { 6410 void Heap::DisableInlineAllocation() {
6407 ASSERT(!inline_allocation_disabled_); 6411 if (inline_allocation_disabled_) return;
6408 inline_allocation_disabled_ = true; 6412 inline_allocation_disabled_ = true;
6409 6413
6410 // Update inline allocation limit for new space. 6414 // Update inline allocation limit for new space.
6411 new_space()->UpdateInlineAllocationLimit(0); 6415 new_space()->UpdateInlineAllocationLimit(0);
6412 6416
6413 // Update inline allocation limit for old spaces. 6417 // Update inline allocation limit for old spaces.
6414 PagedSpaces spaces(this); 6418 PagedSpaces spaces(this);
6415 for (PagedSpace* space = spaces.next(); 6419 for (PagedSpace* space = spaces.next();
6416 space != NULL; 6420 space != NULL;
6417 space = spaces.next()) { 6421 space = spaces.next()) {
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
6524 } else { 6528 } else {
6525 set_hash_seed(Smi::FromInt(FLAG_hash_seed)); 6529 set_hash_seed(Smi::FromInt(FLAG_hash_seed));
6526 } 6530 }
6527 } 6531 }
6528 6532
6529 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); 6533 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6530 LOG(isolate_, IntPtrTEvent("heap-available", Available())); 6534 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6531 6535
6532 store_buffer()->SetUp(); 6536 store_buffer()->SetUp();
6533 6537
6538 mark_compact_collector()->SetUp();
6539
6534 if (FLAG_concurrent_recompilation) relocation_mutex_ = new Mutex; 6540 if (FLAG_concurrent_recompilation) relocation_mutex_ = new Mutex;
6535 6541
6536 return true; 6542 return true;
6537 } 6543 }
6538 6544
6539 6545
6540 bool Heap::CreateHeapObjects() { 6546 bool Heap::CreateHeapObjects() {
6541 // Create initial maps. 6547 // Create initial maps.
6542 if (!CreateInitialMaps()) return false; 6548 if (!CreateInitialMaps()) return false;
6543 if (!CreateApiObjects()) return false; 6549 if (!CreateApiObjects()) return false;
(...skipping 1193 matching lines...) Expand 10 before | Expand all | Expand 10 after
7737 static_cast<int>(object_sizes_last_time_[index])); 7743 static_cast<int>(object_sizes_last_time_[index]));
7738 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) 7744 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
7739 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7745 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7740 7746
7741 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7747 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7742 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7748 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7743 ClearObjectStats(); 7749 ClearObjectStats();
7744 } 7750 }
7745 7751
7746 } } // namespace v8::internal 7752 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698