Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 34 matching lines...)
45 #include "scopeinfo.h" 45 #include "scopeinfo.h"
46 #include "snapshot.h" 46 #include "snapshot.h"
47 #include "store-buffer.h" 47 #include "store-buffer.h"
48 #include "v8threads.h" 48 #include "v8threads.h"
49 #include "vm-state-inl.h" 49 #include "vm-state-inl.h"
50 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP 50 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
51 #include "regexp-macro-assembler.h" 51 #include "regexp-macro-assembler.h"
52 #include "arm/regexp-macro-assembler-arm.h" 52 #include "arm/regexp-macro-assembler-arm.h"
53 #endif 53 #endif
54 54
55
56 namespace v8 { 55 namespace v8 {
57 namespace internal { 56 namespace internal {
58 57
59 58
60 String* Heap::hidden_symbol_;
61 Object* Heap::roots_[Heap::kRootListLength];
62 Object* Heap::global_contexts_list_;
63 StoreBufferRebuilder Heap::store_buffer_rebuilder_;
64
65
66 NewSpace Heap::new_space_;
67 OldSpace* Heap::old_pointer_space_ = NULL;
68 OldSpace* Heap::old_data_space_ = NULL;
69 OldSpace* Heap::code_space_ = NULL;
70 MapSpace* Heap::map_space_ = NULL;
71 CellSpace* Heap::cell_space_ = NULL;
72 LargeObjectSpace* Heap::lo_space_ = NULL;
73
74 static const intptr_t kMinimumPromotionLimit = 2 * MB; 59 static const intptr_t kMinimumPromotionLimit = 2 * MB;
75 static const intptr_t kMinimumAllocationLimit = 8 * MB; 60 static const intptr_t kMinimumAllocationLimit = 8 * MB;
76 61
77 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
78 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
79 62
80 int Heap::old_gen_exhausted_ = false; 63 static Mutex* gc_initializer_mutex = OS::CreateMutex();
81 64
82 int Heap::amount_of_external_allocated_memory_ = 0;
83 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
84 65
66 Heap::Heap()
67 : isolate_(NULL),
85 // semispace_size_ should be a power of 2 and old_generation_size_ should be 68 // semispace_size_ should be a power of 2 and old_generation_size_ should be
86 // a multiple of Page::kPageSize. 69 // a multiple of Page::kPageSize.
87 #if defined(ANDROID) 70 #if defined(ANDROID)
88 static const int default_max_semispace_size_ = 2*MB; 71 reserved_semispace_size_(2*MB),
89 intptr_t Heap::max_old_generation_size_ = 192*MB; 72 max_semispace_size_(2*MB),
90 int Heap::initial_semispace_size_ = 128*KB; 73 initial_semispace_size_(128*KB),
91 intptr_t Heap::code_range_size_ = 0; 74 max_old_generation_size_(192*MB),
92 intptr_t Heap::max_executable_size_ = max_old_generation_size_; 75 max_executable_size_(max_old_generation_size_),
76 code_range_size_(0),
93 #elif defined(V8_TARGET_ARCH_X64) 77 #elif defined(V8_TARGET_ARCH_X64)
94 static const int default_max_semispace_size_ = 16*MB; 78 reserved_semispace_size_(16*MB),
95 intptr_t Heap::max_old_generation_size_ = 1*GB; 79 max_semispace_size_(16*MB),
96 int Heap::initial_semispace_size_ = 1*MB; 80 initial_semispace_size_(1*MB),
97 intptr_t Heap::code_range_size_ = 512*MB; 81 max_old_generation_size_(1*GB),
98 intptr_t Heap::max_executable_size_ = 256*MB; 82 max_executable_size_(256*MB),
83 code_range_size_(512*MB),
99 #else 84 #else
100 static const int default_max_semispace_size_ = 4*MB; 85 reserved_semispace_size_(8*MB),
Erik Corry 2011/04/20 20:07:40 We have until now limited max semispace size to 4m
Vyacheslav Egorov (Chromium) 2011/04/24 11:24:08 ooops. I thought I paid special attention to merge
101 intptr_t Heap::max_old_generation_size_ = 700*MB; 86 max_semispace_size_(8*MB),
102 int Heap::initial_semispace_size_ = 512*KB; 87 initial_semispace_size_(512*KB),
103 intptr_t Heap::code_range_size_ = 0; 88 max_old_generation_size_(512*MB),
104 intptr_t Heap::max_executable_size_ = 128*MB; 89 max_executable_size_(128*MB),
90 code_range_size_(0),
91 #endif
92 // Variables set based on semispace_size_ and old_generation_size_ in
93 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
94 // Will be 4 * reserved_semispace_size_ to ensure that young
95 // generation can be aligned to its size.
96 survived_since_last_expansion_(0),
97 always_allocate_scope_depth_(0),
98 linear_allocation_scope_depth_(0),
99 contexts_disposed_(0),
100 new_space_(this),
101 old_pointer_space_(NULL),
102 old_data_space_(NULL),
103 code_space_(NULL),
104 map_space_(NULL),
105 cell_space_(NULL),
106 lo_space_(NULL),
107 gc_state_(NOT_IN_GC),
108 mc_count_(0),
109 ms_count_(0),
110 gc_count_(0),
111 unflattened_strings_length_(0),
112 #ifdef DEBUG
113 allocation_allowed_(true),
114 allocation_timeout_(0),
115 disallow_allocation_failure_(false),
116 debug_utils_(NULL),
117 #endif // DEBUG
118 old_gen_promotion_limit_(kMinimumPromotionLimit),
119 old_gen_allocation_limit_(kMinimumAllocationLimit),
120 external_allocation_limit_(0),
121 amount_of_external_allocated_memory_(0),
122 amount_of_external_allocated_memory_at_last_global_gc_(0),
123 old_gen_exhausted_(false),
124 store_buffer_rebuilder_(store_buffer()),
125 hidden_symbol_(NULL),
126 global_gc_prologue_callback_(NULL),
127 global_gc_epilogue_callback_(NULL),
128 gc_safe_size_of_old_object_(NULL),
129 tracer_(NULL),
130 young_survivors_after_last_gc_(0),
131 high_survival_rate_period_length_(0),
132 survival_rate_(0),
133 previous_survival_rate_trend_(Heap::STABLE),
134 survival_rate_trend_(Heap::STABLE),
135 max_gc_pause_(0),
136 max_alive_after_gc_(0),
137 min_in_mutator_(kMaxInt),
138 alive_after_last_gc_(0),
139 last_gc_end_timestamp_(0.0),
140 store_buffer_(this),
141 marking_(this),
142 incremental_marking_(this),
143 number_idle_notifications_(0),
144 last_idle_notification_gc_count_(0),
145 last_idle_notification_gc_count_init_(false),
146 configured_(false) {
147 // Allow build-time customization of the max semispace size. Building
148 // V8 with snapshots and a non-default max semispace size is much
149 // easier if you can define it as part of the build environment.
150 #if defined(V8_MAX_SEMISPACE_SIZE)
151 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
105 #endif 152 #endif
106 153
107 // Allow build-time customization of the max semispace size. Building 154 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
108 // V8 with snapshots and a non-default max semispace size is much 155 global_contexts_list_ = NULL;
109 // easier if you can define it as part of the build environment. 156 mark_compact_collector_.heap_ = this;
110 #if defined(V8_MAX_SEMISPACE_SIZE) 157 external_string_table_.heap_ = this;
111 int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE; 158 }
112 #else
113 int Heap::max_semispace_size_ = default_max_semispace_size_;
114 #endif
115 159
116 // The snapshot semispace size will be the default semispace size if
117 // snapshotting is used and will be the requested semispace size as
118 // set up by ConfigureHeap otherwise.
119 int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
120
121 List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
122 List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
123
124 GCCallback Heap::global_gc_prologue_callback_ = NULL;
125 GCCallback Heap::global_gc_epilogue_callback_ = NULL;
126 HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
127
128 // Variables set based on semispace_size_ and old_generation_size_ in
129 // ConfigureHeap.
130
131 // Will be 4 * reserved_semispace_size_ to ensure that young
132 // generation can be aligned to its size.
133 int Heap::survived_since_last_expansion_ = 0;
134 intptr_t Heap::external_allocation_limit_ = 0;
135
136 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
137
138 int Heap::mc_count_ = 0;
139 int Heap::ms_count_ = 0;
140 unsigned int Heap::gc_count_ = 0;
141
142 GCTracer* Heap::tracer_ = NULL;
143
144 int Heap::unflattened_strings_length_ = 0;
145
146 int Heap::always_allocate_scope_depth_ = 0;
147 int Heap::linear_allocation_scope_depth_ = 0;
148 int Heap::contexts_disposed_ = 0;
149
150 int Heap::young_survivors_after_last_gc_ = 0;
151 int Heap::high_survival_rate_period_length_ = 0;
152 double Heap::survival_rate_ = 0;
153 Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
154 Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
155
156 #ifdef DEBUG
157 bool Heap::allocation_allowed_ = true;
158
159 int Heap::allocation_timeout_ = 0;
160 bool Heap::disallow_allocation_failure_ = false;
161 #endif // DEBUG
162
163 intptr_t GCTracer::alive_after_last_gc_ = 0;
164 double GCTracer::last_gc_end_timestamp_ = 0.0;
165 int GCTracer::max_gc_pause_ = 0;
166 intptr_t GCTracer::max_alive_after_gc_ = 0;
167 int GCTracer::min_in_mutator_ = kMaxInt;
168 160
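A note on the block above: the heart of this CL is converting Heap from process-wide statics into per-isolate instance state. Every static definition on the left-hand (old) side becomes an instance field initialized in the constructor initializer list on the right, with a new isolate_ back pointer replacing the implicit global. A minimal sketch of the pattern, using hypothetical names rather than the real V8 classes:

#include <cstddef>

class SketchIsolate;  // stands in for the owning isolate

class SketchHeap {
 public:
  SketchHeap()
      : isolate_(NULL),               // wired up later by the owning isolate
        max_semispace_size_(8 * MB),  // was: static int Heap::max_semispace_size_
        gc_count_(0) {}               // was: static unsigned int Heap::gc_count_

  void set_isolate(SketchIsolate* isolate) { isolate_ = isolate; }

 private:
  static const int MB = 1024 * 1024;
  SketchIsolate* isolate_;
  int max_semispace_size_;
  unsigned int gc_count_;
};

The same move shows up throughout the rest of the diff: formerly static singletons (counters, store buffer, mark-compact collector, caches) are now reached through isolate_, e.g. Counters::gc_compactor_caused_by_request.Increment() becomes isolate_->counters()->gc_compactor_caused_by_request()->Increment().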
169 intptr_t Heap::Capacity() { 161 intptr_t Heap::Capacity() {
170 if (!HasBeenSetup()) return 0; 162 if (!HasBeenSetup()) return 0;
171 163
172 return new_space_.Capacity() + 164 return new_space_.Capacity() +
173 old_pointer_space_->Capacity() + 165 old_pointer_space_->Capacity() +
174 old_data_space_->Capacity() + 166 old_data_space_->Capacity() +
175 code_space_->Capacity() + 167 code_space_->Capacity() +
176 map_space_->Capacity() + 168 map_space_->Capacity() +
177 cell_space_->Capacity(); 169 cell_space_->Capacity();
178 } 170 }
179 171
180 172
181 intptr_t Heap::CommittedMemory() { 173 intptr_t Heap::CommittedMemory() {
182 if (!HasBeenSetup()) return 0; 174 if (!HasBeenSetup()) return 0;
183 175
184 return new_space_.CommittedMemory() + 176 return new_space_.CommittedMemory() +
185 old_pointer_space_->CommittedMemory() + 177 old_pointer_space_->CommittedMemory() +
186 old_data_space_->CommittedMemory() + 178 old_data_space_->CommittedMemory() +
187 code_space_->CommittedMemory() + 179 code_space_->CommittedMemory() +
188 map_space_->CommittedMemory() + 180 map_space_->CommittedMemory() +
189 cell_space_->CommittedMemory() + 181 cell_space_->CommittedMemory() +
190 lo_space_->Size(); 182 lo_space_->Size();
191 } 183 }
192 184
193 intptr_t Heap::CommittedMemoryExecutable() { 185 intptr_t Heap::CommittedMemoryExecutable() {
194 if (!HasBeenSetup()) return 0; 186 if (!HasBeenSetup()) return 0;
195 187
196 return MemoryAllocator::SizeExecutable(); 188 return isolate()->memory_allocator()->SizeExecutable();
197 } 189 }
198 190
199 191
200 intptr_t Heap::Available() { 192 intptr_t Heap::Available() {
201 if (!HasBeenSetup()) return 0; 193 if (!HasBeenSetup()) return 0;
202 194
203 return new_space_.Available() + 195 return new_space_.Available() +
204 old_pointer_space_->Available() + 196 old_pointer_space_->Available() +
205 old_data_space_->Available() + 197 old_data_space_->Available() +
206 code_space_->Available() + 198 code_space_->Available() +
(...skipping 13 matching lines...)
220 212
221 213
222 int Heap::GcSafeSizeOfOldObject(HeapObject* object) { 214 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
223 return object->Size(); 215 return object->Size();
224 } 216 }
225 217
226 218
227 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) { 219 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
228 // Is global GC requested? 220 // Is global GC requested?
229 if (space != NEW_SPACE || FLAG_gc_global) { 221 if (space != NEW_SPACE || FLAG_gc_global) {
230 Counters::gc_compactor_caused_by_request.Increment(); 222 isolate_->counters()->gc_compactor_caused_by_request()->Increment();
231 return MARK_COMPACTOR; 223 return MARK_COMPACTOR;
232 } 224 }
233 225
234 // Is enough data promoted to justify a global GC? 226 // Is enough data promoted to justify a global GC?
235 if (OldGenerationPromotionLimitReached()) { 227 if (OldGenerationPromotionLimitReached()) {
236 Counters::gc_compactor_caused_by_promoted_data.Increment(); 228 isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
237 return MARK_COMPACTOR; 229 return MARK_COMPACTOR;
238 } 230 }
239 231
240 // Have allocation in OLD and LO failed? 232 // Have allocation in OLD and LO failed?
241 if (old_gen_exhausted_) { 233 if (old_gen_exhausted_) {
242 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment(); 234 isolate_->counters()->
235 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
243 return MARK_COMPACTOR; 236 return MARK_COMPACTOR;
244 } 237 }
245 238
246 // Is there enough space left in OLD to guarantee that a scavenge can 239 // Is there enough space left in OLD to guarantee that a scavenge can
247 // succeed? 240 // succeed?
248 // 241 //
249 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available 242 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
250 // for object promotion. It counts only the bytes that the memory 243 // for object promotion. It counts only the bytes that the memory
251 // allocator has not yet allocated from the OS and assigned to any space, 244 // allocator has not yet allocated from the OS and assigned to any space,
252 // and does not count available bytes already in the old space or code 245 // and does not count available bytes already in the old space or code
253 // space. Undercounting is safe---we may get an unrequested full GC when 246 // space. Undercounting is safe---we may get an unrequested full GC when
254 // a scavenge would have succeeded. 247 // a scavenge would have succeeded.
255 if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) { 248 if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
256 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment(); 249 isolate_->counters()->
250 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
257 return MARK_COMPACTOR; 251 return MARK_COMPACTOR;
258 } 252 }
259 253
260 // Default 254 // Default
261 return SCAVENGER; 255 return SCAVENGER;
262 } 256 }
263 257
264 258
265 // TODO(1238405): Combine the infrastructure for --heap-stats and 259 // TODO(1238405): Combine the infrastructure for --heap-stats and
266 // --log-gc to avoid the complicated preprocessor and flag testing. 260 // --log-gc to avoid the complicated preprocessor and flag testing.
(...skipping 24 matching lines...)
291 } 285 }
292 #endif 286 #endif
293 } 287 }
294 288
295 289
296 #if defined(ENABLE_LOGGING_AND_PROFILING) 290 #if defined(ENABLE_LOGGING_AND_PROFILING)
297 void Heap::PrintShortHeapStatistics() { 291 void Heap::PrintShortHeapStatistics() {
298 if (!FLAG_trace_gc_verbose) return; 292 if (!FLAG_trace_gc_verbose) return;
299 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d" 293 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
300 ", available: %8" V8_PTR_PREFIX "d\n", 294 ", available: %8" V8_PTR_PREFIX "d\n",
301 MemoryAllocator::Size(), 295 isolate_->memory_allocator()->Size(),
302 MemoryAllocator::Available()); 296 isolate_->memory_allocator()->Available());
303 PrintF("New space, used: %8" V8_PTR_PREFIX "d" 297 PrintF("New space, used: %8" V8_PTR_PREFIX "d"
304 ", available: %8" V8_PTR_PREFIX "d\n", 298 ", available: %8" V8_PTR_PREFIX "d\n",
305 Heap::new_space_.Size(), 299 Heap::new_space_.Size(),
306 new_space_.Available()); 300 new_space_.Available());
307 PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d" 301 PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
308 ", available: %8" V8_PTR_PREFIX "d" 302 ", available: %8" V8_PTR_PREFIX "d"
309 ", waste: %8" V8_PTR_PREFIX "d\n", 303 ", waste: %8" V8_PTR_PREFIX "d\n",
310 old_pointer_space_->Size(), 304 old_pointer_space_->Size(),
311 old_pointer_space_->Available(), 305 old_pointer_space_->Available(),
312 old_pointer_space_->Waste()); 306 old_pointer_space_->Waste());
(...skipping 44 matching lines...)
357 #elif defined(DEBUG) 351 #elif defined(DEBUG)
358 if (FLAG_heap_stats) ReportHeapStatistics("After GC"); 352 if (FLAG_heap_stats) ReportHeapStatistics("After GC");
359 #elif defined(ENABLE_LOGGING_AND_PROFILING) 353 #elif defined(ENABLE_LOGGING_AND_PROFILING)
360 if (FLAG_log_gc) new_space_.ReportStatistics(); 354 if (FLAG_log_gc) new_space_.ReportStatistics();
361 #endif 355 #endif
362 } 356 }
363 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 357 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
364 358
365 359
366 void Heap::GarbageCollectionPrologue() { 360 void Heap::GarbageCollectionPrologue() {
367 TranscendentalCache::Clear(); 361 isolate_->transcendental_cache()->Clear();
368 ClearJSFunctionResultCaches(); 362 ClearJSFunctionResultCaches();
369 gc_count_++; 363 gc_count_++;
370 unflattened_strings_length_ = 0; 364 unflattened_strings_length_ = 0;
371 #ifdef DEBUG 365 #ifdef DEBUG
372 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 366 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
373 allow_allocation(false); 367 allow_allocation(false);
374 368
375 if (FLAG_verify_heap) { 369 if (FLAG_verify_heap) {
376 Verify(); 370 Verify();
377 } 371 }
(...skipping 20 matching lines...)
398 void Heap::GarbageCollectionEpilogue() { 392 void Heap::GarbageCollectionEpilogue() {
399 LiveObjectList::GCEpilogue(); 393 LiveObjectList::GCEpilogue();
400 #ifdef DEBUG 394 #ifdef DEBUG
401 allow_allocation(true); 395 allow_allocation(true);
402 ZapFromSpace(); 396 ZapFromSpace();
403 397
404 if (FLAG_verify_heap) { 398 if (FLAG_verify_heap) {
405 Verify(); 399 Verify();
406 } 400 }
407 401
408 if (FLAG_print_global_handles) GlobalHandles::Print(); 402 if (FLAG_print_global_handles) isolate_->global_handles()->Print();
409 if (FLAG_print_handles) PrintHandles(); 403 if (FLAG_print_handles) PrintHandles();
410 if (FLAG_gc_verbose) Print(); 404 if (FLAG_gc_verbose) Print();
411 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 405 if (FLAG_code_stats) ReportCodeStatistics("After GC");
412 #endif 406 #endif
413 407
414 Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects())); 408 isolate_->counters()->alive_after_last_gc()->Set(
409 static_cast<int>(SizeOfObjects()));
415 410
416 Counters::symbol_table_capacity.Set(symbol_table()->Capacity()); 411 isolate_->counters()->symbol_table_capacity()->Set(
417 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements()); 412 symbol_table()->Capacity());
413 isolate_->counters()->number_of_symbols()->Set(
414 symbol_table()->NumberOfElements());
418 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 415 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
419 ReportStatisticsAfterGC(); 416 ReportStatisticsAfterGC();
420 #endif 417 #endif
421 #ifdef ENABLE_DEBUGGER_SUPPORT 418 #ifdef ENABLE_DEBUGGER_SUPPORT
422 Debug::AfterGarbageCollection(); 419 isolate_->debug()->AfterGarbageCollection();
423 #endif 420 #endif
424 } 421 }
425 422
426 423
427 void Heap::CollectAllGarbage(int flags) { 424 void Heap::CollectAllGarbage(int flags) {
428 // Since we are ignoring the return value, the exact choice of space does 425 // Since we are ignoring the return value, the exact choice of space does
429 // not matter, so long as we do not specify NEW_SPACE, which would not 426 // not matter, so long as we do not specify NEW_SPACE, which would not
430 // cause a full GC. 427 // cause a full GC.
431 MarkCompactCollector::SetFlags(flags); 428 mark_compact_collector_.SetFlags(flags);
432 CollectGarbage(OLD_POINTER_SPACE); 429 CollectGarbage(OLD_POINTER_SPACE);
433 MarkCompactCollector::SetFlags(kNoGCFlags); 430 mark_compact_collector_.SetFlags(kNoGCFlags);
434 } 431 }
435 432
436 433
437 void Heap::CollectAllAvailableGarbage() { 434 void Heap::CollectAllAvailableGarbage() {
438 // Since we are ignoring the return value, the exact choice of space does 435 // Since we are ignoring the return value, the exact choice of space does
439 // not matter, so long as we do not specify NEW_SPACE, which would not 436 // not matter, so long as we do not specify NEW_SPACE, which would not
440 // cause a full GC. 437 // cause a full GC.
441 MarkCompactCollector::SetFlags(kMakeHeapIterableMask | kForceCompactionMask); 438 mark_compact_collector()->SetFlags(
439 kMakeHeapIterableMask | kForceCompactionMask);
442 440
443 // Major GC would invoke weak handle callbacks on weakly reachable 441 // Major GC would invoke weak handle callbacks on weakly reachable
444 // handles, but won't collect weakly reachable objects until next 442 // handles, but won't collect weakly reachable objects until next
445 // major GC. Therefore if we collect aggressively and weak handle callback 443 // major GC. Therefore if we collect aggressively and weak handle callback
446 // has been invoked, we rerun major GC to release objects which become 444 // has been invoked, we rerun major GC to release objects which become
447 // garbage. 445 // garbage.
448 // Note: as weak callbacks can execute arbitrary code, we cannot 446 // Note: as weak callbacks can execute arbitrary code, we cannot
449 // hope that eventually there will be no weak callback invocations. 447 // hope that eventually there will be no weak callback invocations.
450 // Therefore stop recollecting after several attempts. 448 // Therefore stop recollecting after several attempts.
451 const int kMaxNumberOfAttempts = 7; 449 const int kMaxNumberOfAttempts = 7;
452 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { 450 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
453 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { 451 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
454 break; 452 break;
455 } 453 }
456 } 454 }
457 MarkCompactCollector::SetFlags(kNoGCFlags); 455 mark_compact_collector()->SetFlags(kNoGCFlags);
458 } 456 }
459 457
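The loop above is a bounded retry: each mark-compact pass may run weak handle callbacks, and since those callbacks can execute arbitrary code there is no guarantee the heap ever reaches a state with nothing left to free, so the loop stops either when CollectGarbage reports no further progress or after seven attempts. A standalone sketch of the shape, with CollectOnce as an assumed stand-in for the real collection call:

#include <stdio.h>

static int passes_until_stable = 3;  // toy model: collection converges in 3 passes

// Stand-in for CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR): returns
// true while the next collection is likely to free more memory.
static bool CollectOnce() { return --passes_until_stable > 0; }

int main() {
  const int kMaxNumberOfAttempts = 7;  // same cap as in the code above
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!CollectOnce()) break;  // converged: no point in another pass
  }
  printf("done collecting\n");
  return 0;
}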
460 458
461 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { 459 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
462 // The VM is in the GC state until exiting this function. 460 // The VM is in the GC state until exiting this function.
463 VMState state(GC); 461 VMState state(isolate_, GC);
464 462
465 #ifdef DEBUG 463 #ifdef DEBUG
466 // Reset the allocation timeout to the GC interval, but make sure to 464 // Reset the allocation timeout to the GC interval, but make sure to
467 // allow at least a few allocations after a collection. The reason 465 // allow at least a few allocations after a collection. The reason
468 // for this is that we have a lot of allocation sequences and we 466 // for this is that we have a lot of allocation sequences and we
469 // assume that a garbage collection will allow the subsequent 467 // assume that a garbage collection will allow the subsequent
470 // allocation attempts to go through. 468 // allocation attempts to go through.
471 allocation_timeout_ = Max(6, FLAG_gc_interval); 469 allocation_timeout_ = Max(6, FLAG_gc_interval);
472 #endif 470 #endif
473 471
474 if (collector == SCAVENGER && 472 if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
475 IncrementalMarking::state() != IncrementalMarking::STOPPED) {
476 if (FLAG_trace_incremental_marking) { 473 if (FLAG_trace_incremental_marking) {
477 PrintF("[IncrementalMarking] Scavenge during marking.\n"); 474 PrintF("[IncrementalMarking] Scavenge during marking.\n");
478 } 475 }
479 } 476 }
480 477
481 if (collector == MARK_COMPACTOR && 478 if (collector == MARK_COMPACTOR &&
482 !MarkCompactCollector::PreciseSweepingRequired() && 479 !mark_compact_collector()->PreciseSweepingRequired() &&
483 IncrementalMarking::state() == IncrementalMarking::MARKING && 480 incremental_marking()->IsMarking() &&
484 !IncrementalMarking::should_hurry() && 481 !incremental_marking()->should_hurry() &&
485 FLAG_incremental_marking_steps) { 482 FLAG_incremental_marking_steps) {
486 if (FLAG_trace_incremental_marking) { 483 if (FLAG_trace_incremental_marking) {
487 PrintF("[IncrementalMarking] Delaying MarkSweep.\n"); 484 PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
488 } 485 }
489 collector = SCAVENGER; 486 collector = SCAVENGER;
490 } 487 }
491 488
492 bool next_gc_likely_to_collect_more = false; 489 bool next_gc_likely_to_collect_more = false;
493 490
494 { GCTracer tracer; 491 { GCTracer tracer(this);
495 GarbageCollectionPrologue(); 492 GarbageCollectionPrologue();
496 // The GC count was incremented in the prologue. Tell the tracer about 493 // The GC count was incremented in the prologue. Tell the tracer about
497 // it. 494 // it.
498 tracer.set_gc_count(gc_count_); 495 tracer.set_gc_count(gc_count_);
499 496
500 // Tell the tracer which collector we've selected. 497 // Tell the tracer which collector we've selected.
501 tracer.set_collector(collector); 498 tracer.set_collector(collector);
502 499
503 HistogramTimer* rate = (collector == SCAVENGER) 500 HistogramTimer* rate = (collector == SCAVENGER)
504 ? &Counters::gc_scavenger 501 ? isolate_->counters()->gc_scavenger()
505 : &Counters::gc_compactor; 502 : isolate_->counters()->gc_compactor();
506 rate->Start(); 503 rate->Start();
507 next_gc_likely_to_collect_more = 504 next_gc_likely_to_collect_more =
508 PerformGarbageCollection(collector, &tracer); 505 PerformGarbageCollection(collector, &tracer);
509 rate->Stop(); 506 rate->Stop();
510 507
511 GarbageCollectionEpilogue(); 508 GarbageCollectionEpilogue();
512 } 509 }
513 510
514 511
515 ASSERT(collector == SCAVENGER || IncrementalMarking::IsStopped()); 512 ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
516 if (IncrementalMarking::IsStopped()) { 513 if (incremental_marking()->IsStopped()) {
517 if (IncrementalMarking::WorthActivating() && NextGCIsLikelyToBeFull()) { 514 if (incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) {
518 IncrementalMarking::Start(); 515 incremental_marking()->Start();
519 } 516 }
520 } 517 }
521 518
522 #ifdef ENABLE_LOGGING_AND_PROFILING 519 #ifdef ENABLE_LOGGING_AND_PROFILING
523 if (FLAG_log_gc) HeapProfiler::WriteSample(); 520 if (FLAG_log_gc) HeapProfiler::WriteSample();
524 #endif 521 #endif
525 522
526 return next_gc_likely_to_collect_more; 523 return next_gc_likely_to_collect_more;
527 } 524 }
528 525
529 526
530 void Heap::PerformScavenge() { 527 void Heap::PerformScavenge() {
531 GCTracer tracer; 528 GCTracer tracer(this);
532 if (IncrementalMarking::state() == IncrementalMarking::STOPPED) { 529 if (incremental_marking()->IsStopped()) {
533 PerformGarbageCollection(SCAVENGER, &tracer); 530 PerformGarbageCollection(SCAVENGER, &tracer);
534 } else { 531 } else {
535 PerformGarbageCollection(MARK_COMPACTOR, &tracer); 532 PerformGarbageCollection(MARK_COMPACTOR, &tracer);
536 } 533 }
537 } 534 }
538 535
539 536
540 #ifdef DEBUG 537 #ifdef DEBUG
541 // Helper class for verifying the symbol table. 538 // Helper class for verifying the symbol table.
542 class SymbolTableVerifier : public ObjectVisitor { 539 class SymbolTableVerifier : public ObjectVisitor {
543 public: 540 public:
544 SymbolTableVerifier() { }
545 void VisitPointers(Object** start, Object** end) { 541 void VisitPointers(Object** start, Object** end) {
546 // Visit all HeapObject pointers in [start, end). 542 // Visit all HeapObject pointers in [start, end).
547 for (Object** p = start; p < end; p++) { 543 for (Object** p = start; p < end; p++) {
548 if ((*p)->IsHeapObject()) { 544 if ((*p)->IsHeapObject()) {
549 // Check that the symbol is actually a symbol. 545 // Check that the symbol is actually a symbol.
550 ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol()); 546 ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
551 } 547 }
552 } 548 }
553 } 549 }
554 }; 550 };
555 #endif // DEBUG 551 #endif // DEBUG
556 552
557 553
558 static void VerifySymbolTable() { 554 static void VerifySymbolTable() {
559 #ifdef DEBUG 555 #ifdef DEBUG
560 SymbolTableVerifier verifier; 556 SymbolTableVerifier verifier;
561 Heap::symbol_table()->IterateElements(&verifier); 557 HEAP->symbol_table()->IterateElements(&verifier);
562 #endif // DEBUG 558 #endif // DEBUG
563 } 559 }
564 560
565 561
566 void Heap::ReserveSpace( 562 void Heap::ReserveSpace(
567 int new_space_size, 563 int new_space_size,
568 int pointer_space_size, 564 int pointer_space_size,
569 int data_space_size, 565 int data_space_size,
570 int code_space_size, 566 int code_space_size,
571 int map_space_size, 567 int map_space_size,
(...skipping 57 matching lines...)
629 Shrink(); 625 Shrink();
630 if (new_space_.CommitFromSpaceIfNeeded()) return; 626 if (new_space_.CommitFromSpaceIfNeeded()) return;
631 627
632 // Committing memory to from space failed again. 628 // Committing memory to from space failed again.
633 // Memory is exhausted and we will die. 629 // Memory is exhausted and we will die.
634 V8::FatalProcessOutOfMemory("Committing semi space failed."); 630 V8::FatalProcessOutOfMemory("Committing semi space failed.");
635 } 631 }
636 632
637 633
638 void Heap::ClearJSFunctionResultCaches() { 634 void Heap::ClearJSFunctionResultCaches() {
639 if (Bootstrapper::IsActive()) return; 635 if (isolate_->bootstrapper()->IsActive()) return;
640 636
641 Object* context = global_contexts_list_; 637 Object* context = global_contexts_list_;
642 while (!context->IsUndefined()) { 638 while (!context->IsUndefined()) {
643 // Get the caches for this context: 639 // Get the caches for this context:
644 FixedArray* caches = 640 FixedArray* caches =
645 Context::cast(context)->jsfunction_result_caches(); 641 Context::cast(context)->jsfunction_result_caches();
646 // Clear the caches: 642 // Clear the caches:
647 int length = caches->length(); 643 int length = caches->length();
648 for (int i = 0; i < length; i++) { 644 for (int i = 0; i < length; i++) {
649 JSFunctionResultCache::cast(caches->get(i))->Clear(); 645 JSFunctionResultCache::cast(caches->get(i))->Clear();
650 } 646 }
651 // Get the next context: 647 // Get the next context:
652 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 648 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
653 } 649 }
654 } 650 }
655 651
656 652
653
657 void Heap::ClearNormalizedMapCaches() { 654 void Heap::ClearNormalizedMapCaches() {
658 if (Bootstrapper::IsActive()) return; 655 if (isolate_->bootstrapper()->IsActive()) return;
659 656
660 Object* context = global_contexts_list_; 657 Object* context = global_contexts_list_;
661 while (!context->IsUndefined()) { 658 while (!context->IsUndefined()) {
662 Context::cast(context)->normalized_map_cache()->Clear(); 659 Context::cast(context)->normalized_map_cache()->Clear();
663 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 660 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
664 } 661 }
665 } 662 }
666 663
667 664
668 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { 665 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
(...skipping 18 matching lines...)
687 } 684 }
688 685
689 survival_rate_ = survival_rate; 686 survival_rate_ = survival_rate;
690 } 687 }
691 688
692 bool Heap::PerformGarbageCollection(GarbageCollector collector, 689 bool Heap::PerformGarbageCollection(GarbageCollector collector,
693 GCTracer* tracer) { 690 GCTracer* tracer) {
694 bool next_gc_likely_to_collect_more = false; 691 bool next_gc_likely_to_collect_more = false;
695 692
696 if (collector != SCAVENGER) { 693 if (collector != SCAVENGER) {
697 PROFILE(CodeMovingGCEvent()); 694 PROFILE(isolate_, CodeMovingGCEvent());
698 } 695 }
699 696
700 VerifySymbolTable(); 697 VerifySymbolTable();
701 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { 698 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
702 ASSERT(!allocation_allowed_); 699 ASSERT(!allocation_allowed_);
703 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 700 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
704 global_gc_prologue_callback_(); 701 global_gc_prologue_callback_();
705 } 702 }
706 703
707 GCType gc_type = 704 GCType gc_type =
(...skipping 38 matching lines...)
746 743
747 old_gen_exhausted_ = false; 744 old_gen_exhausted_ = false;
748 } else { 745 } else {
749 tracer_ = tracer; 746 tracer_ = tracer;
750 Scavenge(); 747 Scavenge();
751 tracer_ = NULL; 748 tracer_ = NULL;
752 749
753 UpdateSurvivalRateTrend(start_new_space_size); 750 UpdateSurvivalRateTrend(start_new_space_size);
754 } 751 }
755 752
756 Counters::objs_since_last_young.Set(0); 753 isolate_->counters()->objs_since_last_young()->Set(0);
757 754
758 if (collector == MARK_COMPACTOR) { 755 if (collector == MARK_COMPACTOR) {
759 DisableAssertNoAllocation allow_allocation; 756 DisableAssertNoAllocation allow_allocation;
760 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 757 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
761 next_gc_likely_to_collect_more = 758 next_gc_likely_to_collect_more =
762 GlobalHandles::PostGarbageCollectionProcessing(); 759 isolate_->global_handles()->PostGarbageCollectionProcessing();
763 } 760 }
764 761
765 // Update relocatables. 762 // Update relocatables.
766 Relocatable::PostGarbageCollectionProcessing(); 763 Relocatable::PostGarbageCollectionProcessing();
767 764
768 if (collector == MARK_COMPACTOR) { 765 if (collector == MARK_COMPACTOR) {
769 // Register the amount of external allocated memory. 766 // Register the amount of external allocated memory.
770 amount_of_external_allocated_memory_at_last_global_gc_ = 767 amount_of_external_allocated_memory_at_last_global_gc_ =
771 amount_of_external_allocated_memory_; 768 amount_of_external_allocated_memory_;
772 } 769 }
(...skipping 13 matching lines...)
786 global_gc_epilogue_callback_(); 783 global_gc_epilogue_callback_();
787 } 784 }
788 VerifySymbolTable(); 785 VerifySymbolTable();
789 786
790 return next_gc_likely_to_collect_more; 787 return next_gc_likely_to_collect_more;
791 } 788 }
792 789
793 790
794 void Heap::MarkCompact(GCTracer* tracer) { 791 void Heap::MarkCompact(GCTracer* tracer) {
795 gc_state_ = MARK_COMPACT; 792 gc_state_ = MARK_COMPACT;
796 LOG(ResourceEvent("markcompact", "begin")); 793 LOG(isolate_, ResourceEvent("markcompact", "begin"));
797 794
798 MarkCompactCollector::Prepare(tracer); 795 mark_compact_collector_.Prepare(tracer);
799 796
800 bool is_compacting = MarkCompactCollector::IsCompacting(); 797 bool is_compacting = mark_compact_collector_.IsCompacting();
801 798
802 if (is_compacting) { 799 if (is_compacting) {
803 mc_count_++; 800 mc_count_++;
804 } else { 801 } else {
805 ms_count_++; 802 ms_count_++;
806 } 803 }
807 tracer->set_full_gc_count(mc_count_ + ms_count_); 804 tracer->set_full_gc_count(mc_count_ + ms_count_);
808 805
809 MarkCompactPrologue(is_compacting); 806 MarkCompactPrologue(is_compacting);
810 807
811 MarkCompactCollector::CollectGarbage(); 808 mark_compact_collector_.CollectGarbage();
812 809
813 LOG(ResourceEvent("markcompact", "end")); 810 LOG(isolate_, ResourceEvent("markcompact", "end"));
814 811
815 gc_state_ = NOT_IN_GC; 812 gc_state_ = NOT_IN_GC;
816 813
817 Shrink(); 814 Shrink();
818 815
819 Counters::objs_since_last_full.Set(0); 816 isolate_->counters()->objs_since_last_full()->Set(0);
820 817
821 contexts_disposed_ = 0; 818 contexts_disposed_ = 0;
822 } 819 }
823 820
824 821
825 void Heap::MarkCompactPrologue(bool is_compacting) { 822 void Heap::MarkCompactPrologue(bool is_compacting) {
826 // At any old GC clear the keyed lookup cache to enable collection of unused 823 // At any old GC clear the keyed lookup cache to enable collection of unused
827 // maps. 824 // maps.
828 KeyedLookupCache::Clear(); 825 isolate_->keyed_lookup_cache()->Clear();
829 ContextSlotCache::Clear(); 826 isolate_->context_slot_cache()->Clear();
830 DescriptorLookupCache::Clear(); 827 isolate_->descriptor_lookup_cache()->Clear();
831 828
832 CompilationCache::MarkCompactPrologue(); 829 isolate_->compilation_cache()->MarkCompactPrologue();
833 830
834 CompletelyClearInstanceofCache(); 831 CompletelyClearInstanceofCache();
835 832
836 if (is_compacting) FlushNumberStringCache(); 833 if (is_compacting) FlushNumberStringCache();
837 834
838 ClearNormalizedMapCaches(); 835 ClearNormalizedMapCaches();
839 } 836 }
840 837
841 838
842 Object* Heap::FindCodeObject(Address a) { 839 Object* Heap::FindCodeObject(Address a) {
843 Object* obj = NULL; // Initialization to please compiler. 840 Object* obj = NULL; // Initialization to please compiler.
844 { MaybeObject* maybe_obj = code_space_->FindObject(a); 841 { MaybeObject* maybe_obj = code_space_->FindObject(a);
845 if (!maybe_obj->ToObject(&obj)) { 842 if (!maybe_obj->ToObject(&obj)) {
846 obj = lo_space_->FindObject(a)->ToObjectUnchecked(); 843 obj = lo_space_->FindObject(a)->ToObjectUnchecked();
847 } 844 }
848 } 845 }
849 return obj; 846 return obj;
850 } 847 }
851 848
852 849
853 // Helper class for copying HeapObjects 850 // Helper class for copying HeapObjects
854 class ScavengeVisitor: public ObjectVisitor { 851 class ScavengeVisitor: public ObjectVisitor {
855 public: 852 public:
853 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
856 854
857 void VisitPointer(Object** p) { ScavengePointer(p); } 855 void VisitPointer(Object** p) { ScavengePointer(p); }
858 856
859 void VisitPointers(Object** start, Object** end) { 857 void VisitPointers(Object** start, Object** end) {
860 // Copy all HeapObject pointers in [start, end) 858 // Copy all HeapObject pointers in [start, end)
861 for (Object** p = start; p < end; p++) ScavengePointer(p); 859 for (Object** p = start; p < end; p++) ScavengePointer(p);
862 } 860 }
863 861
864 private: 862 private:
865 void ScavengePointer(Object** p) { 863 void ScavengePointer(Object** p) {
866 Object* object = *p; 864 Object* object = *p;
867 if (!Heap::InNewSpace(object)) return; 865 if (!heap_->InNewSpace(object)) return;
868 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 866 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
869 reinterpret_cast<HeapObject*>(object)); 867 reinterpret_cast<HeapObject*>(object));
870 } 868 }
869
870 Heap* heap_;
871 }; 871 };
872 872
873 873
874 // A queue of objects promoted during scavenge. Each object is accompanied
875 // by its size to avoid dereferencing a map pointer for scanning.
876 class PromotionQueue {
877 public:
878 void Initialize(Address start_address) {
879 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
880 }
881
882 bool is_empty() { return front_ <= rear_; }
883
884 void insert(HeapObject* target, int size) {
885 *(--rear_) = reinterpret_cast<intptr_t>(target);
886 *(--rear_) = size;
887 // Assert no overflow into live objects.
888 ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
889 }
890
891 void remove(HeapObject** target, int* size) {
892 *target = reinterpret_cast<HeapObject*>(*(--front_));
893 *size = static_cast<int>(*(--front_));
894 // Assert no underflow.
895 ASSERT(front_ >= rear_);
896 }
897
898 private:
899 // The front of the queue is higher in memory than the rear.
900 intptr_t* front_;
901 intptr_t* rear_;
902 };
903
904
905 // Shared state read by the scavenge collector and set by ScavengeObject.
906 static PromotionQueue promotion_queue;
907
908
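The queue above stores (object, size) pairs growing downward from the top of to-space: rear_ is the write cursor, front_ trails it as the read cursor, and is_empty() holds once front_ has caught up. The real insert() asserts that rear_ never descends below the to-space allocation top, i.e. queue entries must not overwrite newly copied objects. A self-contained toy with the same discipline (assumed names, plain integers standing in for heap object addresses):

#include <assert.h>
#include <stdint.h>

struct ToyQueue {
  void Initialize(intptr_t* top) { front_ = rear_ = top; }
  bool is_empty() { return front_ <= rear_; }
  void insert(intptr_t target, intptr_t size) {
    *(--rear_) = target;  // entries grow down from the high address
    *(--rear_) = size;
  }
  void remove(intptr_t* target, intptr_t* size) {
    *target = *(--front_);  // front chases rear downward: FIFO order
    *size = *(--front_);
  }
  intptr_t* front_;
  intptr_t* rear_;
};

int main() {
  intptr_t backing[8];
  ToyQueue q;
  q.Initialize(backing + 8);
  q.insert(0x1000, 16);
  q.insert(0x2000, 32);
  intptr_t target, size;
  q.remove(&target, &size);
  assert(target == 0x1000 && size == 16);  // first inserted, first removed
  q.remove(&target, &size);
  assert(target == 0x2000 && size == 32);
  assert(q.is_empty());
  return 0;
}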
909 #ifdef DEBUG 874 #ifdef DEBUG
910 // Visitor class to verify pointers in code or data space do not point into 875 // Visitor class to verify pointers in code or data space do not point into
911 // new space. 876 // new space.
912 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { 877 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
913 public: 878 public:
914 void VisitPointers(Object** start, Object** end) { 879 void VisitPointers(Object** start, Object** end) {
915 for (Object** current = start; current < end; current++) { 880 for (Object** current = start; current < end; current++) {
916 if ((*current)->IsHeapObject()) { 881 if ((*current)->IsHeapObject()) {
917 ASSERT(!Heap::InNewSpace(HeapObject::cast(*current))); 882 ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
918 } 883 }
919 } 884 }
920 } 885 }
921 }; 886 };
922 887
923 888
924 static void VerifyNonPointerSpacePointers() { 889 static void VerifyNonPointerSpacePointers() {
925 // Verify that there are no pointers to new space in spaces where we 890 // Verify that there are no pointers to new space in spaces where we
926 // do not expect them. 891 // do not expect them.
927 VerifyNonPointerSpacePointersVisitor v; 892 VerifyNonPointerSpacePointersVisitor v;
928 HeapObjectIterator code_it(Heap::code_space()); 893 HeapObjectIterator code_it(HEAP->code_space());
929 for (HeapObject* object = code_it.Next(); 894 for (HeapObject* object = code_it.Next();
930 object != NULL; object = code_it.Next()) 895 object != NULL; object = code_it.Next())
931 object->Iterate(&v); 896 object->Iterate(&v);
932 897
933 // The old data space was normally swept conservatively so that the iterator 898 // The old data space was normally swept conservatively so that the iterator
934 // doesn't work, so we normally skip the next bit. 899 // doesn't work, so we normally skip the next bit.
935 if (!Heap::old_data_space()->was_swept_conservatively()) { 900 if (!HEAP->old_data_space()->was_swept_conservatively()) {
936 HeapObjectIterator data_it(Heap::old_data_space()); 901 HeapObjectIterator data_it(HEAP->old_data_space());
937 for (HeapObject* object = data_it.Next(); 902 for (HeapObject* object = data_it.Next();
938 object != NULL; object = data_it.Next()) 903 object != NULL; object = data_it.Next())
939 object->Iterate(&v); 904 object->Iterate(&v);
940 } 905 }
941 } 906 }
942 #endif 907 #endif
943 908
944 909
945 void Heap::CheckNewSpaceExpansionCriteria() { 910 void Heap::CheckNewSpaceExpansionCriteria() {
946 if (new_space_.Capacity() < new_space_.MaximumCapacity() && 911 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
947 survived_since_last_expansion_ > new_space_.Capacity()) { 912 survived_since_last_expansion_ > new_space_.Capacity()) {
948 // Grow the size of new space if there is room to grow and enough 913 // Grow the size of new space if there is room to grow and enough
949 // data has survived scavenge since the last expansion. 914 // data has survived scavenge since the last expansion.
950 new_space_.Grow(); 915 new_space_.Grow();
951 survived_since_last_expansion_ = 0; 916 survived_since_last_expansion_ = 0;
952 } 917 }
953 } 918 }
954 919
955 920
956 void Heap::ScavengeStoreBufferCallback(MemoryChunk* page, 921 void Heap::ScavengeStoreBufferCallback(
957 StoreBufferEvent event) { 922 Heap* heap,
958 store_buffer_rebuilder_.Callback(page, event); 923 MemoryChunk* page,
924 StoreBufferEvent event) {
925 heap->store_buffer_rebuilder_.Callback(page, event);
959 } 926 }
960 927
961 928
962 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) { 929 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
963 if (event == kStoreBufferStartScanningPagesEvent) { 930 if (event == kStoreBufferStartScanningPagesEvent) {
964 start_of_current_page_ = NULL; 931 start_of_current_page_ = NULL;
965 current_page_ = NULL; 932 current_page_ = NULL;
966 } else if (event == kStoreBufferScanningPageEvent) { 933 } else if (event == kStoreBufferScanningPageEvent) {
967 if (current_page_ != NULL) { 934 if (current_page_ != NULL) {
968 // If this page already overflowed the store buffer during this iteration. 935 // If this page already overflowed the store buffer during this iteration.
969 if (current_page_->scan_on_scavenge()) { 936 if (current_page_->scan_on_scavenge()) {
970 // Then we should wipe out the entries that have been added for it. 937 // Then we should wipe out the entries that have been added for it.
971 StoreBuffer::SetTop(start_of_current_page_); 938 store_buffer_->SetTop(start_of_current_page_);
972 } else if (StoreBuffer::Top() - start_of_current_page_ >= 939 } else if (store_buffer_->Top() - start_of_current_page_ >=
973 (StoreBuffer::Limit() - StoreBuffer::Top()) >> 2) { 940 (store_buffer_->Limit() - store_buffer_->Top()) >> 2) {
974 // Did we find too many pointers in the previous page? The heuristic is 941 // Did we find too many pointers in the previous page? The heuristic is
975 // that no page can take more than 1/5 the remaining slots in the store 942 // that no page can take more than 1/5 the remaining slots in the store
976 // buffer. 943 // buffer.
977 current_page_->set_scan_on_scavenge(true); 944 current_page_->set_scan_on_scavenge(true);
978 StoreBuffer::SetTop(start_of_current_page_); 945 store_buffer_->SetTop(start_of_current_page_);
979 } else { 946 } else {
980 // In this case the page we scanned took a reasonable number of slots in 947 // In this case the page we scanned took a reasonable number of slots in
981 // the store buffer. It has now been rehabilitated and is no longer 948 // the store buffer. It has now been rehabilitated and is no longer
982 // marked scan_on_scavenge. 949 // marked scan_on_scavenge.
983 ASSERT(!current_page_->scan_on_scavenge()); 950 ASSERT(!current_page_->scan_on_scavenge());
984 } 951 }
985 } 952 }
986 start_of_current_page_ = StoreBuffer::Top(); 953 start_of_current_page_ = store_buffer_->Top();
987 current_page_ = page; 954 current_page_ = page;
988 } else if (event == kStoreBufferFullEvent) { 955 } else if (event == kStoreBufferFullEvent) {
989 // The current page overflowed the store buffer again. Wipe out its entries 956 // The current page overflowed the store buffer again. Wipe out its entries
990 // in the store buffer and mark it scan-on-scavenge again. This may happen 957 // in the store buffer and mark it scan-on-scavenge again. This may happen
991 // several times while scanning. 958 // several times while scanning.
992 if (current_page_ == NULL) { 959 if (current_page_ == NULL) {
993 // Store Buffer overflowed while scanning promoted objects. These are not 960 // Store Buffer overflowed while scanning promoted objects. These are not
994 // in any particular page, though they are likely to be clustered by the 961 // in any particular page, though they are likely to be clustered by the
995 // allocation routines. 962 // allocation routines.
996 StoreBuffer::HandleFullness(); 963 store_buffer_->HandleFullness();
997 } else { 964 } else {
998 // Store Buffer overflowed while scanning a particular old space page for 965 // Store Buffer overflowed while scanning a particular old space page for
999 // pointers to new space. 966 // pointers to new space.
1000 ASSERT(current_page_ == page); 967 ASSERT(current_page_ == page);
1001 ASSERT(page != NULL); 968 ASSERT(page != NULL);
1002 current_page_->set_scan_on_scavenge(true); 969 current_page_->set_scan_on_scavenge(true);
1003 ASSERT(start_of_current_page_ != StoreBuffer::Top()); 970 ASSERT(start_of_current_page_ != store_buffer_->Top());
1004 StoreBuffer::SetTop(start_of_current_page_); 971 store_buffer_->SetTop(start_of_current_page_);
1005 } 972 }
1006 } else { 973 } else {
1007 UNREACHABLE(); 974 UNREACHABLE();
1008 } 975 }
1009 } 976 }
1010 977
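A quick check of the arithmetic behind the ">> 2" in the page-scanning branch above: if e is the number of entries the current page added to the store buffer and r is the number of slots still free, the coded test is roughly e >= r/4, and e >= r/4 is algebraically the same as e >= (e + r)/5, which is exactly the "no page can take more than 1/5 the remaining slots" rule the comment states. A standalone verification of that equivalence (not V8 code):

#include <assert.h>

int main() {
  // e >= r/4  <=>  4e >= r  <=>  5e >= e + r  <=>  e >= (e + r)/5
  for (int e = 0; e <= 50; e++) {
    for (int r = 0; r <= 50; r++) {
      assert((4 * e >= r) == (5 * e >= e + r));
    }
  }
  return 0;
}

(The shifted form in the code compares against r >> 2, which floors r/4, so for r not divisible by four it trips one entry sooner than the exact bound.)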
1011 978
1012 void Heap::Scavenge() { 979 void Heap::Scavenge() {
1013 #ifdef DEBUG 980 #ifdef DEBUG
1014 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); 981 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
1015 #endif 982 #endif
1016 983
1017 gc_state_ = SCAVENGE; 984 gc_state_ = SCAVENGE;
1018 985
1019 // Implements Cheney's copying algorithm 986 // Implements Cheney's copying algorithm
1020 LOG(ResourceEvent("scavenge", "begin")); 987 LOG(isolate_, ResourceEvent("scavenge", "begin"));
1021 988
1022 // Clear descriptor cache. 989 // Clear descriptor cache.
1023 DescriptorLookupCache::Clear(); 990 isolate_->descriptor_lookup_cache()->Clear();
1024 991
1025 // Used for updating survived_since_last_expansion_ at function end. 992 // Used for updating survived_since_last_expansion_ at function end.
1026 intptr_t survived_watermark = PromotedSpaceSize(); 993 intptr_t survived_watermark = PromotedSpaceSize();
1027 994
1028 CheckNewSpaceExpansionCriteria(); 995 CheckNewSpaceExpansionCriteria();
1029 996
1030 SelectScavengingVisitorsTable(); 997 SelectScavengingVisitorsTable();
1031 998
1032 IncrementalMarking::PrepareForScavenge(); 999 incremental_marking()->PrepareForScavenge();
1033 1000
1034 // Flip the semispaces. After flipping, to space is empty, from space has 1001 // Flip the semispaces. After flipping, to space is empty, from space has
1035 // live objects. 1002 // live objects.
1036 new_space_.Flip(); 1003 new_space_.Flip();
1037 new_space_.ResetAllocationInfo(); 1004 new_space_.ResetAllocationInfo();
1038 1005
1039 // We need to sweep newly copied objects which can be either in the 1006 // We need to sweep newly copied objects which can be either in the
1040 // to space or promoted to the old generation. For to-space 1007 // to space or promoted to the old generation. For to-space
1041 // objects, we treat the bottom of the to space as a queue. Newly 1008 // objects, we treat the bottom of the to space as a queue. Newly
1042 // copied and unswept objects lie between a 'front' mark and the 1009 // copied and unswept objects lie between a 'front' mark and the
1043 // allocation pointer. 1010 // allocation pointer.
1044 // 1011 //
1045 // Promoted objects can go into various old-generation spaces, and 1012 // Promoted objects can go into various old-generation spaces, and
1046 // can be allocated internally in the spaces (from the free list). 1013 // can be allocated internally in the spaces (from the free list).
1047 // We treat the top of the to space as a queue of addresses of 1014 // We treat the top of the to space as a queue of addresses of
1048 // promoted objects. The addresses of newly promoted and unswept 1015 // promoted objects. The addresses of newly promoted and unswept
1049 // objects lie between a 'front' mark and a 'rear' mark that is 1016 // objects lie between a 'front' mark and a 'rear' mark that is
1050 // updated as a side effect of promoting an object. 1017 // updated as a side effect of promoting an object.
1051 // 1018 //
1052 // There is guaranteed to be enough room at the top of the to space 1019 // There is guaranteed to be enough room at the top of the to space
1053 // for the addresses of promoted objects: every object promoted 1020 // for the addresses of promoted objects: every object promoted
1054 // frees up its size in bytes from the top of the new space, and 1021 // frees up its size in bytes from the top of the new space, and
1055 // objects are at least one pointer in size. 1022 // objects are at least one pointer in size.
1056 Address new_space_front = new_space_.ToSpaceLow(); 1023 Address new_space_front = new_space_.ToSpaceLow();
1057 promotion_queue.Initialize(new_space_.ToSpaceHigh()); 1024 promotion_queue_.Initialize(new_space_.ToSpaceHigh());
1058 1025
1059 #ifdef DEBUG 1026 #ifdef DEBUG
1060 StoreBuffer::Clean(); 1027 store_buffer()->Clean();
1061 #endif 1028 #endif
1062 1029
1063 ScavengeVisitor scavenge_visitor; 1030 ScavengeVisitor scavenge_visitor(this);
1064 // Copy roots. 1031 // Copy roots.
1065 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); 1032 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1066 1033
1067 // Copy objects reachable from the old generation. 1034 // Copy objects reachable from the old generation.
1068 { 1035 {
1069 StoreBufferRebuildScope scope(&ScavengeStoreBufferCallback); 1036 StoreBufferRebuildScope scope(this,
1070 StoreBuffer::IteratePointersToNewSpace(&ScavengeObject); 1037 store_buffer(),
1038 &ScavengeStoreBufferCallback);
1039 store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
1071 } 1040 }
1072 1041
1073 // Copy objects reachable from cells by scavenging cell values directly. 1042 // Copy objects reachable from cells by scavenging cell values directly.
1074 HeapObjectIterator cell_iterator(cell_space_); 1043 HeapObjectIterator cell_iterator(cell_space_);
1075 for (HeapObject* cell = cell_iterator.Next(); 1044 for (HeapObject* cell = cell_iterator.Next();
1076 cell != NULL; cell = cell_iterator.Next()) { 1045 cell != NULL; cell = cell_iterator.Next()) {
1077 if (cell->IsJSGlobalPropertyCell()) { 1046 if (cell->IsJSGlobalPropertyCell()) {
1078 Address value_address = 1047 Address value_address =
1079 reinterpret_cast<Address>(cell) + 1048 reinterpret_cast<Address>(cell) +
1080 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); 1049 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
1081 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); 1050 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1082 } 1051 }
1083 } 1052 }
1084 1053
1085 // Scavenge object reachable from the global contexts list directly. 1054 // Scavenge object reachable from the global contexts list directly.
1086 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); 1055 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
1087 1056
1088 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1057 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1089 1058
1090 UpdateNewSpaceReferencesInExternalStringTable( 1059 UpdateNewSpaceReferencesInExternalStringTable(
1091 &UpdateNewSpaceReferenceInExternalStringTableEntry); 1060 &UpdateNewSpaceReferenceInExternalStringTableEntry);
1092 1061
1093 LiveObjectList::UpdateReferencesForScavengeGC(); 1062 LiveObjectList::UpdateReferencesForScavengeGC();
1094 RuntimeProfiler::UpdateSamplesAfterScavenge(); 1063 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
1095 IncrementalMarking::UpdateMarkingStackAfterScavenge(); 1064 incremental_marking()->UpdateMarkingStackAfterScavenge();
1096 1065
1097 ASSERT(new_space_front == new_space_.top()); 1066 ASSERT(new_space_front == new_space_.top());
1098 1067
1099 // Set age mark. 1068 // Set age mark.
1100 new_space_.set_age_mark(new_space_.top()); 1069 new_space_.set_age_mark(new_space_.top());
1101 1070
1102 // Update how much has survived scavenge. 1071 // Update how much has survived scavenge.
1103 IncrementYoungSurvivorsCounter(static_cast<int>( 1072 IncrementYoungSurvivorsCounter(static_cast<int>(
1104 (PromotedSpaceSize() - survived_watermark) + new_space_.Size())); 1073 (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
1105 1074
1106 LOG(ResourceEvent("scavenge", "end")); 1075 LOG(isolate_, ResourceEvent("scavenge", "end"));
1107 1076
1108 gc_state_ = NOT_IN_GC; 1077 gc_state_ = NOT_IN_GC;
1109 } 1078 }
1110 1079
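The long comment near the top of Scavenge describes the two work lists of Cheney's algorithm: newly copied objects between the scan front and the allocation pointer still need their slots visited, and visiting a slot may copy another object, pushing the allocation pointer further ahead until the two meet. A toy model of that loop, with vector indices standing in for heap addresses (an assumed simplification, not V8 code):

#include <assert.h>
#include <vector>

struct ToyObj { std::vector<int> children; };  // indices of referenced objects

int main() {
  // Object 0 is the root; it references 1 and 2, and 1 references 2.
  std::vector<ToyObj> from_space(3);
  from_space[0].children.push_back(1);
  from_space[0].children.push_back(2);
  from_space[1].children.push_back(2);

  std::vector<ToyObj> to_space;
  to_space.reserve(from_space.size());  // keep references stable during scan
  std::vector<int> forwarding(from_space.size(), -1);

  // Copy an object into to-space once, leaving a forwarding index behind.
  auto evacuate = [&](int i) {
    if (forwarding[i] == -1) {
      forwarding[i] = static_cast<int>(to_space.size());
      to_space.push_back(from_space[i]);  // the allocation "top" advances
    }
    return forwarding[i];
  };

  evacuate(0);  // copy the root set
  // Cheney scan: objects in [scan, to_space.size()) are copied but not yet
  // scanned; visiting their slots may evacuate more, moving the top outward.
  for (size_t scan = 0; scan < to_space.size(); scan++) {
    for (size_t j = 0; j < to_space[scan].children.size(); j++) {
      to_space[scan].children[j] = evacuate(to_space[scan].children[j]);
    }
  }
  assert(to_space.size() == 3);  // every object reachable from the root moved
  return 0;
}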
1111 1080
1112 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) { 1081 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1082 Object** p) {
1113 MapWord first_word = HeapObject::cast(*p)->map_word(); 1083 MapWord first_word = HeapObject::cast(*p)->map_word();
1114 1084
1115 if (!first_word.IsForwardingAddress()) { 1085 if (!first_word.IsForwardingAddress()) {
1116 // Unreachable external string can be finalized. 1086 // Unreachable external string can be finalized.
1117 FinalizeExternalString(String::cast(*p)); 1087 heap->FinalizeExternalString(String::cast(*p));
1118 return NULL; 1088 return NULL;
1119 } 1089 }
1120 1090
1121 // String is still reachable. 1091 // String is still reachable.
1122 return String::cast(first_word.ToForwardingAddress()); 1092 return String::cast(first_word.ToForwardingAddress());
1123 } 1093 }
1124 1094
1125 1095
1126 void Heap::UpdateNewSpaceReferencesInExternalStringTable( 1096 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1127 ExternalStringTableUpdaterCallback updater_func) { 1097 ExternalStringTableUpdaterCallback updater_func) {
1128 ExternalStringTable::Verify(); 1098 external_string_table_.Verify();
1129 1099
1130 if (ExternalStringTable::new_space_strings_.is_empty()) return; 1100 if (external_string_table_.new_space_strings_.is_empty()) return;
1131 1101
1132 Object** start = &ExternalStringTable::new_space_strings_[0]; 1102 Object** start = &external_string_table_.new_space_strings_[0];
1133 Object** end = start + ExternalStringTable::new_space_strings_.length(); 1103 Object** end = start + external_string_table_.new_space_strings_.length();
1134 Object** last = start; 1104 Object** last = start;
1135 1105
1136 for (Object** p = start; p < end; ++p) { 1106 for (Object** p = start; p < end; ++p) {
1137 ASSERT(Heap::InFromSpace(*p)); 1107 ASSERT(InFromSpace(*p));
1138 String* target = updater_func(p); 1108 String* target = updater_func(this, p);
1139 1109
1140 if (target == NULL) continue; 1110 if (target == NULL) continue;
1141 1111
1142 ASSERT(target->IsExternalString()); 1112 ASSERT(target->IsExternalString());
1143 1113
1144 if (Heap::InNewSpace(target)) { 1114 if (InNewSpace(target)) {
1145 // String is still in new space. Update the table entry. 1115 // String is still in new space. Update the table entry.
1146 *last = target; 1116 *last = target;
1147 ++last; 1117 ++last;
1148 } else { 1118 } else {
1149 // String got promoted. Move it to the old string list. 1119 // String got promoted. Move it to the old string list.
1150 ExternalStringTable::AddOldString(target); 1120 external_string_table_.AddOldString(target);
1151 } 1121 }
1152 } 1122 }
1153 1123
1154 ASSERT(last <= end); 1124 ASSERT(last <= end);
1155 ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start)); 1125 external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1156 } 1126 }
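
UpdateNewSpaceReferencesInExternalStringTable above is a single-pass compact-and-partition: survivors are written back through the trailing `last` cursor, promoted strings move to the old list, and the array is shrunk to the cursor afterwards. A self-contained sketch of the same loop over plain integers (names are illustrative only):

    #include <cstdio>
    #include <vector>

    int main() {
      std::vector<int> new_list = {1, -2, 3, -4, 5};  // negative: "promoted"
      std::vector<int> old_list;

      size_t last = 0;
      for (size_t i = 0; i < new_list.size(); ++i) {
        int v = new_list[i];
        if (v < 0) {
          old_list.push_back(v);   // promoted: move to the old list
        } else {
          new_list[last++] = v;    // still young: keep, compacting in place
        }
      }
      new_list.resize(last);       // analogous to ShrinkNewStrings()
      printf("young: %zu, old: %zu\n", new_list.size(), old_list.size());
      return 0;
    }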
1157 1127
1158 1128
1159 static Object* ProcessFunctionWeakReferences(Object* function, 1129 static Object* ProcessFunctionWeakReferences(Heap* heap,
1130 Object* function,
1160 WeakObjectRetainer* retainer) { 1131 WeakObjectRetainer* retainer) {
1161 Object* head = Heap::undefined_value(); 1132 Object* head = heap->undefined_value();
1162 JSFunction* tail = NULL; 1133 JSFunction* tail = NULL;
1163 Object* candidate = function; 1134 Object* candidate = function;
1164 while (!candidate->IsUndefined()) { 1135 while (candidate != heap->undefined_value()) {
1165 // Check whether to keep the candidate in the list. 1136 // Check whether to keep the candidate in the list.
1166 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate); 1137 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
1167 Object* retain = retainer->RetainAs(candidate); 1138 Object* retain = retainer->RetainAs(candidate);
1168 if (retain != NULL) { 1139 if (retain != NULL) {
1169 if (head->IsUndefined()) { 1140 if (head == heap->undefined_value()) {
1170 // First element in the list. 1141 // First element in the list.
1171 head = candidate_function; 1142 head = candidate_function;
1172 } else { 1143 } else {
1173 // Subsequent elements in the list. 1144 // Subsequent elements in the list.
1174 ASSERT(tail != NULL); 1145 ASSERT(tail != NULL);
1175 tail->set_next_function_link(candidate_function); 1146 tail->set_next_function_link(candidate_function);
1176 } 1147 }
1177 // Retained function is new tail. 1148 // Retained function is new tail.
1178 tail = candidate_function; 1149 tail = candidate_function;
1179 } 1150 }
1180 // Move to next element in the list. 1151 // Move to next element in the list.
1181 candidate = candidate_function->next_function_link(); 1152 candidate = candidate_function->next_function_link();
1182 } 1153 }
1183 1154
1184 // Terminate the list if there are one or more elements. 1155 // Terminate the list if there are one or more elements.
1185 if (tail != NULL) { 1156 if (tail != NULL) {
1186 tail->set_next_function_link(Heap::undefined_value()); 1157 tail->set_next_function_link(heap->undefined_value());
1187 } 1158 }
1188 1159
1189 return head; 1160 return head;
1190 } 1161 }
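
ProcessFunctionWeakReferences, and ProcessWeakReferences below it, share one shape: walk a weak singly-linked list, ask the retainer whether to keep each node, and rebuild the list from the survivors with head/tail cursors. A standalone sketch of that pattern with hypothetical Node and callback types:

    #include <cstdio>

    struct Node { int id; Node* next; };

    Node* ProcessWeakList(Node* list, bool (*retain)(Node*)) {
      Node* head = nullptr;
      Node* tail = nullptr;
      for (Node* candidate = list; candidate != nullptr;) {
        Node* next = candidate->next;              // read before relinking
        if (retain(candidate)) {
          if (head == nullptr) head = candidate;   // first retained element
          else tail->next = candidate;             // splice after last survivor
          tail = candidate;
        }
        candidate = next;
      }
      if (tail != nullptr) tail->next = nullptr;   // terminate the rebuilt list
      return head;
    }

    int main() {
      Node c = {3, nullptr}, b = {2, &c}, a = {1, &b};
      Node* head = ProcessWeakList(&a, [](Node* n) { return n->id != 2; });
      for (Node* n = head; n != nullptr; n = n->next) printf("%d ", n->id);
      printf("\n");   // prints: 1 3
      return 0;
    }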
1191 1162
1192 1163
1193 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { 1164 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1194 Object* head = undefined_value(); 1165 Object* head = undefined_value();
1195 Context* tail = NULL; 1166 Context* tail = NULL;
1196 Object* candidate = global_contexts_list_; 1167 Object* candidate = global_contexts_list_;
1197 while (!candidate->IsUndefined()) { 1168 while (candidate != undefined_value()) {
1198 // Check whether to keep the candidate in the list. 1169 // Check whether to keep the candidate in the list.
1199 Context* candidate_context = reinterpret_cast<Context*>(candidate); 1170 Context* candidate_context = reinterpret_cast<Context*>(candidate);
1200 Object* retain = retainer->RetainAs(candidate); 1171 Object* retain = retainer->RetainAs(candidate);
1201 if (retain != NULL) { 1172 if (retain != NULL) {
1202 if (head->IsUndefined()) { 1173 if (head == undefined_value()) {
1203 // First element in the list. 1174 // First element in the list.
1204 head = candidate_context; 1175 head = candidate_context;
1205 } else { 1176 } else {
1206 // Subsequent elements in the list. 1177 // Subsequent elements in the list.
1207 ASSERT(tail != NULL); 1178 ASSERT(tail != NULL);
1208 tail->set_unchecked(Context::NEXT_CONTEXT_LINK, 1179 tail->set_unchecked(this,
1180 Context::NEXT_CONTEXT_LINK,
1209 candidate_context, 1181 candidate_context,
1210 UPDATE_WRITE_BARRIER); 1182 UPDATE_WRITE_BARRIER);
1211 } 1183 }
1212 // Retained context is new tail. 1184 // Retained context is new tail.
1213 tail = candidate_context; 1185 tail = candidate_context;
1214 1186
1215 // Process the weak list of optimized functions for the context. 1187 // Process the weak list of optimized functions for the context.
1216 Object* function_list_head = 1188 Object* function_list_head =
1217 ProcessFunctionWeakReferences( 1189 ProcessFunctionWeakReferences(
1190 this,
1218 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST), 1191 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1219 retainer); 1192 retainer);
1220 candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST, 1193 candidate_context->set_unchecked(this,
1194 Context::OPTIMIZED_FUNCTIONS_LIST,
1221 function_list_head, 1195 function_list_head,
1222 UPDATE_WRITE_BARRIER); 1196 UPDATE_WRITE_BARRIER);
1223 } 1197 }
1224 // Move to next element in the list. 1198 // Move to next element in the list.
1225 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK); 1199 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
1226 } 1200 }
1227 1201
1228 // Terminate the list if there are one or more elements. 1202 // Terminate the list if there are one or more elements.
1229 if (tail != NULL) { 1203 if (tail != NULL) {
1230 tail->set_unchecked(Context::NEXT_CONTEXT_LINK, 1204 tail->set_unchecked(this,
1205 Context::NEXT_CONTEXT_LINK,
1231 Heap::undefined_value(), 1206 Heap::undefined_value(),
1232 UPDATE_WRITE_BARRIER); 1207 UPDATE_WRITE_BARRIER);
1233 } 1208 }
1234 1209
1235 // Update the head of the list of contexts. 1210 // Update the head of the list of contexts.
1236 Heap::global_contexts_list_ = head; 1211 global_contexts_list_ = head;
1237 } 1212 }
1238 1213
1239 1214
1240 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> { 1215 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
1241 public: 1216 public:
1242 static inline void VisitPointer(Object** p) { 1217 static inline void VisitPointer(Heap* heap, Object** p) {
1243 Object* object = *p; 1218 Object* object = *p;
1244 if (!Heap::InNewSpace(object)) return; 1219 if (!heap->InNewSpace(object)) return;
1245 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 1220 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1246 reinterpret_cast<HeapObject*>(object)); 1221 reinterpret_cast<HeapObject*>(object));
1247 } 1222 }
1248 }; 1223 };
1249 1224
1250 1225
1251 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, 1226 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
1252 Address new_space_front) { 1227 Address new_space_front) {
1253 do { 1228 do {
1254 ASSERT(new_space_front <= new_space_.top()); 1229 ASSERT(new_space_front <= new_space_.top());
1255 1230
1256 // The addresses new_space_front and new_space_.top() define a 1231 // The addresses new_space_front and new_space_.top() define a
1257 // queue of unprocessed copied objects. Process them until the 1232 // queue of unprocessed copied objects. Process them until the
1258 // queue is empty. 1233 // queue is empty.
1259 while (new_space_front < new_space_.top()) { 1234 while (new_space_front < new_space_.top()) {
1260 HeapObject* object = HeapObject::FromAddress(new_space_front); 1235 HeapObject* object = HeapObject::FromAddress(new_space_front);
1261 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object); 1236 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
1262 } 1237 }
1263 1238
1264 // Promote and process all the to-be-promoted objects. 1239 // Promote and process all the to-be-promoted objects.
1265 { 1240 {
1266 StoreBufferRebuildScope scope(&ScavengeStoreBufferCallback); 1241 StoreBufferRebuildScope scope(this,
1267 while (!promotion_queue.is_empty()) { 1242 store_buffer(),
1243 &ScavengeStoreBufferCallback);
1244 while (!promotion_queue()->is_empty()) {
1268 HeapObject* target; 1245 HeapObject* target;
1269 int size; 1246 int size;
1270 promotion_queue.remove(&target, &size); 1247 promotion_queue()->remove(&target, &size);
1271 1248
1272 // A promoted object might already have been partially visited 1249 // A promoted object might already have been partially visited
1273 // during old space pointer iteration. Thus we search specifically 1250 // during old space pointer iteration. Thus we search specifically
1274 // for pointers into the from semispace instead of looking for pointers 1251 // for pointers into the from semispace instead of looking for pointers
1275 // to new space. 1252 // to new space.
1276 ASSERT(!target->IsMap()); 1253 ASSERT(!target->IsMap());
1277 IterateAndMarkPointersToFromSpace(target->address(), 1254 IterateAndMarkPointersToFromSpace(target->address(),
1278 target->address() + size, 1255 target->address() + size,
1279 &ScavengeObject); 1256 &ScavengeObject);
1280 } 1257 }
1281 } 1258 }
1282 1259
1283 // Take another spin if there are now unswept objects in new space 1260 // Take another spin if there are now unswept objects in new space
1284 // (there are currently no more unswept promoted objects). 1261 // (there are currently no more unswept promoted objects).
1285 } while (new_space_front < new_space_.top()); 1262 } while (new_space_front < new_space_.top());
1286 1263
1287 return new_space_front; 1264 return new_space_front;
1288 } 1265 }
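
DoScavenge is a Cheney-style breadth-first copy: the region between new_space_front and new_space_.top() is itself the work queue, scanning advances the front pointer, and each newly copied object advances the top, until the two meet. A compact sketch of the idea with indices standing in for heap addresses (illustrative only):

    #include <cstdio>
    #include <vector>

    int main() {
      // Each "object" lists the indices of the objects it references.
      std::vector<std::vector<int>> graph = {{1, 2}, {2}, {}};
      std::vector<int> to_space = {0};    // the roots are already copied
      std::vector<bool> copied = {true, false, false};

      size_t front = 0;
      while (front < to_space.size()) {   // front < top
        int obj = to_space[front++];      // scan one copied object
        for (int ref : graph[obj]) {      // visit its outgoing pointers
          if (!copied[ref]) {             // "scavenge" unvisited targets
            copied[ref] = true;
            to_space.push_back(ref);      // copying advances the top
          }
        }
      }
      printf("copied %zu objects\n", to_space.size());   // 3
      return 0;
    }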
1289 1266
1290 1267
1291 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; 1268 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
1292 1269
1293 typedef void (*ScavengingCallback)(Map* map, 1270 typedef void (*ScavengingCallback)(Map* map,
1294 HeapObject** slot, 1271 HeapObject** slot,
1295 HeapObject* object); 1272 HeapObject* object);
1296 1273
1274 // TODO(gc) ISOLATES MERGE: this table can no longer be static!
1297 static VisitorDispatchTable<ScavengingCallback> scavening_visitors_table_; 1275 static VisitorDispatchTable<ScavengingCallback> scavening_visitors_table_;
1298 1276
1299 static inline void DoScavengeObject(Map* map, 1277 static inline void DoScavengeObject(Map* map,
1300 HeapObject** slot, 1278 HeapObject** slot,
1301 HeapObject* obj) { 1279 HeapObject* obj) {
1302 scavening_visitors_table_.GetVisitor(map)(map, slot, obj); 1280 scavening_visitors_table_.GetVisitor(map)(map, slot, obj);
1303 } 1281 }
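
DoScavengeObject above shows the dispatch scheme used throughout the scavenger: each map carries a visitor id, the id indexes a table of callbacks, and the per-object hot path is therefore one load plus one indirect call. A minimal sketch with hypothetical types (not the real VisitorDispatchTable):

    #include <cstdio>

    struct Map { int visitor_id; };
    typedef void (*Callback)(Map* map);

    void VisitDataObject(Map*)    { printf("data object\n"); }
    void VisitPointerObject(Map*) { printf("pointer object\n"); }

    Callback table[] = { VisitDataObject, VisitPointerObject };

    inline void Dispatch(Map* map) {
      table[map->visitor_id](map);   // analogous to GetVisitor(map)(...)
    }

    int main() {
      Map data = {0}, pointers = {1};
      Dispatch(&data);
      Dispatch(&pointers);
      return 0;
    }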
1304 1282
1305 1283
1306 template<MarksHandling marks_handling> 1284 template<MarksHandling marks_handling>
(...skipping 37 matching lines...)
1344 1322
1345 static VisitorDispatchTable<ScavengingCallback>* GetTable() { 1323 static VisitorDispatchTable<ScavengingCallback>* GetTable() {
1346 return &table_; 1324 return &table_;
1347 } 1325 }
1348 1326
1349 private: 1327 private:
1350 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; 1328 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1351 enum SizeRestriction { SMALL, UNKNOWN_SIZE }; 1329 enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1352 1330
1353 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1331 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1354 static void RecordCopiedObject(HeapObject* obj) { 1332 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1355 bool should_record = false; 1333 bool should_record = false;
1356 #ifdef DEBUG 1334 #ifdef DEBUG
1357 should_record = FLAG_heap_stats; 1335 should_record = FLAG_heap_stats;
1358 #endif 1336 #endif
1359 #ifdef ENABLE_LOGGING_AND_PROFILING 1337 #ifdef ENABLE_LOGGING_AND_PROFILING
1360 should_record = should_record || FLAG_log_gc; 1338 should_record = should_record || FLAG_log_gc;
1361 #endif 1339 #endif
1362 if (should_record) { 1340 if (should_record) {
1363 if (Heap::new_space()->Contains(obj)) { 1341 if (heap->new_space()->Contains(obj)) {
1364 Heap::new_space()->RecordAllocation(obj); 1342 heap->new_space()->RecordAllocation(obj);
1365 } else { 1343 } else {
1366 Heap::new_space()->RecordPromotion(obj); 1344 heap->new_space()->RecordPromotion(obj);
1367 } 1345 }
1368 } 1346 }
1369 } 1347 }
1370 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1348 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1371 1349
1372 // Helper function used by CopyObject to copy a source object to an 1350 // Helper function used by CopyObject to copy a source object to an
1373 // allocated target object and update the forwarding pointer in the source 1351 // allocated target object and update the forwarding pointer in the source
1374 // object. Returns the target object. 1352 // object. Returns the target object.
1375 INLINE(static HeapObject* MigrateObject(HeapObject* source, 1353 INLINE(static HeapObject* MigrateObject(Heap* heap,
1354 HeapObject* source,
1376 HeapObject* target, 1355 HeapObject* target,
1377 int size)) { 1356 int size)) {
1378 // Copy the content of source to target. 1357 // Copy the content of source to target.
1379 Heap::CopyBlock(target->address(), source->address(), size); 1358 heap->CopyBlock(target->address(), source->address(), size);
1380 1359
1381 // Set the forwarding address. 1360 // Set the forwarding address.
1382 source->set_map_word(MapWord::FromForwardingAddress(target)); 1361 source->set_map_word(MapWord::FromForwardingAddress(target));
1383 1362
1384 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1363 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1385 // Update NewSpace stats if necessary. 1364 // Update NewSpace stats if necessary.
1386 RecordCopiedObject(target); 1365 RecordCopiedObject(heap, target);
1387 #endif 1366 #endif
1388 HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address())); 1367 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1389 #if defined(ENABLE_LOGGING_AND_PROFILING) 1368 #if defined(ENABLE_LOGGING_AND_PROFILING)
1390 if (Logger::is_logging() || CpuProfiler::is_profiling()) { 1369 Isolate* isolate = heap->isolate();
1370 if (isolate->logger()->is_logging() ||
1371 isolate->cpu_profiler()->is_profiling()) {
1391 if (target->IsSharedFunctionInfo()) { 1372 if (target->IsSharedFunctionInfo()) {
1392 PROFILE(SharedFunctionInfoMoveEvent( 1373 PROFILE(isolate, SharedFunctionInfoMoveEvent(
1393 source->address(), target->address())); 1374 source->address(), target->address()));
1394 } 1375 }
1395 } 1376 }
1396 #endif 1377 #endif
1397 1378
1398 if (marks_handling == TRANSFER_MARKS) TransferMark(source, target); 1379 if (marks_handling == TRANSFER_MARKS) TransferMark(heap, source, target);
1399 1380
1400 return target; 1381 return target;
1401 } 1382 }
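
MigrateObject relies on the classic forwarding-pointer trick: once the payload is copied, the source object's map word is overwritten with the target's address, so any visitor that later reaches the stale copy finds a forwarding address instead of a map. A simplified sketch; the low-bit tag is only an illustrative stand-in for V8's map-word encoding:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    struct Obj {
      uintptr_t header;   // map pointer, or forwarding address tagged with 1
      int payload[4];
    };

    Obj* Migrate(Obj* source, Obj* target) {
      std::memcpy(target, source, sizeof(Obj));       // copy the contents
      source->header =
          reinterpret_cast<uintptr_t>(target) | 1;    // mark source as forwarded
      return target;
    }

    bool IsForwarded(const Obj* o) { return (o->header & 1) != 0; }

    int main() {
      Obj from = {0xABC0, {1, 2, 3, 4}};
      Obj to = {};
      Migrate(&from, &to);
      printf("forwarded: %d, payload copied: %d\n",
             IsForwarded(&from), to.payload[3]);
      return 0;
    }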
1402 1383
1403 1384
1404 INLINE(static void TransferMark(HeapObject* from, HeapObject* to)) { 1385 INLINE(static void TransferMark(Heap* heap,
1405 MarkBit from_mark_bit = Marking::MarkBitFrom(from); 1386 HeapObject* from,
1406 if (IncrementalMarking::IsBlack(from_mark_bit)) { 1387 HeapObject* to)) {
1407 IncrementalMarking::MarkBlack(Marking::MarkBitFrom(to)); 1388 MarkBit from_mark_bit = heap->marking()->MarkBitFrom(from);
1389 if (heap->incremental_marking()->IsBlack(from_mark_bit)) {
1390 heap->incremental_marking()->MarkBlack(heap->marking()->MarkBitFrom(to));
1408 } 1391 }
1409 } 1392 }
1410 1393
1411 template<ObjectContents object_contents, SizeRestriction size_restriction> 1394 template<ObjectContents object_contents, SizeRestriction size_restriction>
1412 static inline void EvacuateObject(Map* map, 1395 static inline void EvacuateObject(Map* map,
1413 HeapObject** slot, 1396 HeapObject** slot,
1414 HeapObject* object, 1397 HeapObject* object,
1415 int object_size) { 1398 int object_size) {
1416 ASSERT((size_restriction != SMALL) || 1399 ASSERT((size_restriction != SMALL) ||
1417 (object_size <= Page::kMaxHeapObjectSize)); 1400 (object_size <= Page::kMaxHeapObjectSize));
1418 ASSERT(object->Size() == object_size); 1401 ASSERT(object->Size() == object_size);
1419 1402
1420 if (Heap::ShouldBePromoted(object->address(), object_size)) { 1403 Heap* heap = map->heap();
1404 if (heap->ShouldBePromoted(object->address(), object_size)) {
1421 MaybeObject* maybe_result; 1405 MaybeObject* maybe_result;
1422 1406
1423 if ((size_restriction != SMALL) && 1407 if ((size_restriction != SMALL) &&
1424 (object_size > Page::kMaxHeapObjectSize)) { 1408 (object_size > Page::kMaxHeapObjectSize)) {
1425 maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size); 1409 maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
1426 } else { 1410 } else {
1427 if (object_contents == DATA_OBJECT) { 1411 if (object_contents == DATA_OBJECT) {
1428 maybe_result = Heap::old_data_space()->AllocateRaw(object_size); 1412 maybe_result = heap->old_data_space()->AllocateRaw(object_size);
1429 } else { 1413 } else {
1430 maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size); 1414 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
1431 } 1415 }
1432 } 1416 }
1433 1417
1434 Object* result = NULL; // Initialization to please compiler. 1418 Object* result = NULL; // Initialization to please compiler.
1435 if (maybe_result->ToObject(&result)) { 1419 if (maybe_result->ToObject(&result)) {
1436 HeapObject* target = HeapObject::cast(result); 1420 HeapObject* target = HeapObject::cast(result);
1437 *slot = MigrateObject(object, target, object_size); 1421 *slot = MigrateObject(heap, object, target, object_size);
1438 1422
1439 if (object_contents == POINTER_OBJECT) { 1423 if (object_contents == POINTER_OBJECT) {
1440 promotion_queue.insert(target, object_size); 1424 heap->promotion_queue()->insert(target, object_size);
1441 } 1425 }
1442 1426
1443 Heap::tracer()->increment_promoted_objects_size(object_size); 1427 heap->tracer()->increment_promoted_objects_size(object_size);
1444 return; 1428 return;
1445 } 1429 }
1446 } 1430 }
1447 Object* result = 1431 Object* result =
1448 Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked(); 1432 heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
1449 *slot = MigrateObject(object, HeapObject::cast(result), object_size); 1433 *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
1450 if (!Heap::InNewSpace(reinterpret_cast<Address>(slot))) { 1434 // TODO(gc) isolates
1451 StoreBuffer::EnterDirectlyIntoStoreBuffer( 1435 if (!HEAP->InNewSpace(reinterpret_cast<Address>(slot))) {
1436 HEAP->store_buffer()->EnterDirectlyIntoStoreBuffer(
1452 reinterpret_cast<Address>(slot)); 1437 reinterpret_cast<Address>(slot));
1453 } 1438 }
1454 return; 1439 return;
1455 } 1440 }
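
EvacuateObject's policy, in brief: an object that has survived long enough is promoted, with oversized objects going to the large-object space and the rest to the data or pointer old space; if old-space allocation fails, the object is copied within new space instead. A sketch of that decision tree with illustrative stand-in names:

    #include <cstdio>

    enum Space { NEW_SPACE, OLD_DATA_SPACE, OLD_POINTER_SPACE, LO_SPACE };

    Space ChooseTarget(bool should_promote, bool is_large, bool has_pointers,
                       bool old_alloc_ok) {
      if (should_promote && old_alloc_ok) {
        if (is_large) return LO_SPACE;
        return has_pointers ? OLD_POINTER_SPACE : OLD_DATA_SPACE;
      }
      return NEW_SPACE;   // stays young and gets another chance to survive
    }

    int main() {
      printf("%d\n", ChooseTarget(true, false, true, true));    // OLD_POINTER_SPACE
      printf("%d\n", ChooseTarget(true, true, true, true));     // LO_SPACE
      printf("%d\n", ChooseTarget(true, false, false, false));  // NEW_SPACE fallback
      return 0;
    }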
1456 1441
1457 1442
1458 static inline void EvacuateFixedArray(Map* map, 1443 static inline void EvacuateFixedArray(Map* map,
1459 HeapObject** slot, 1444 HeapObject** slot,
1460 HeapObject* object) { 1445 HeapObject* object) {
1461 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); 1446 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
(...skipping 33 matching lines...)
1495 static inline bool IsShortcutCandidate(int type) { 1480 static inline bool IsShortcutCandidate(int type) {
1496 return ((type & kShortcutTypeMask) == kShortcutTypeTag); 1481 return ((type & kShortcutTypeMask) == kShortcutTypeTag);
1497 } 1482 }
1498 1483
1499 static inline void EvacuateShortcutCandidate(Map* map, 1484 static inline void EvacuateShortcutCandidate(Map* map,
1500 HeapObject** slot, 1485 HeapObject** slot,
1501 HeapObject* object) { 1486 HeapObject* object) {
1502 ASSERT(IsShortcutCandidate(map->instance_type())); 1487 ASSERT(IsShortcutCandidate(map->instance_type()));
1503 1488
1504 if (marks_handling == IGNORE_MARKS && 1489 if (marks_handling == IGNORE_MARKS &&
1505 ConsString::cast(object)->unchecked_second() == Heap::empty_string()) { 1490 ConsString::cast(object)->unchecked_second() ==
1491 map->heap()->empty_string()) {
1506 HeapObject* first = 1492 HeapObject* first =
1507 HeapObject::cast(ConsString::cast(object)->unchecked_first()); 1493 HeapObject::cast(ConsString::cast(object)->unchecked_first());
1508 1494
1509 *slot = first; 1495 *slot = first;
1510 1496
1511 if (!Heap::InNewSpace(first)) { 1497 if (!map->heap()->InNewSpace(first)) {
1512 object->set_map_word(MapWord::FromForwardingAddress(first)); 1498 object->set_map_word(MapWord::FromForwardingAddress(first));
1513 return; 1499 return;
1514 } 1500 }
1515 1501
1516 MapWord first_word = first->map_word(); 1502 MapWord first_word = first->map_word();
1517 if (first_word.IsForwardingAddress()) { 1503 if (first_word.IsForwardingAddress()) {
1518 HeapObject* target = first_word.ToForwardingAddress(); 1504 HeapObject* target = first_word.ToForwardingAddress();
1519 1505
1520 *slot = target; 1506 *slot = target;
1521 object->set_map_word(MapWord::FromForwardingAddress(target)); 1507 object->set_map_word(MapWord::FromForwardingAddress(target));
(...skipping 30 matching lines...)
1552 static VisitorDispatchTable<ScavengingCallback> table_; 1538 static VisitorDispatchTable<ScavengingCallback> table_;
1553 }; 1539 };
1554 1540
1555 1541
1556 template<MarksHandling marks_handling> 1542 template<MarksHandling marks_handling>
1557 VisitorDispatchTable<ScavengingCallback> 1543 VisitorDispatchTable<ScavengingCallback>
1558 ScavengingVisitor<marks_handling>::table_; 1544 ScavengingVisitor<marks_handling>::table_;
1559 1545
1560 1546
1561 void Heap::SelectScavengingVisitorsTable() { 1547 void Heap::SelectScavengingVisitorsTable() {
1562 if (IncrementalMarking::IsStopped()) { 1548 if (incremental_marking()->IsStopped()) {
1563 scavening_visitors_table_.CopyFrom( 1549 scavening_visitors_table_.CopyFrom(
1564 ScavengingVisitor<IGNORE_MARKS>::GetTable()); 1550 ScavengingVisitor<IGNORE_MARKS>::GetTable());
1565 } else { 1551 } else {
1566 scavening_visitors_table_.CopyFrom( 1552 scavening_visitors_table_.CopyFrom(
1567 ScavengingVisitor<TRANSFER_MARKS>::GetTable()); 1553 ScavengingVisitor<TRANSFER_MARKS>::GetTable());
1568 } 1554 }
1569 } 1555 }
1570 1556
1571 1557
1572 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { 1558 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1573 ASSERT(InFromSpace(object)); 1559 ASSERT(HEAP->InFromSpace(object));
1574 MapWord first_word = object->map_word(); 1560 MapWord first_word = object->map_word();
1575 ASSERT(!first_word.IsForwardingAddress()); 1561 ASSERT(!first_word.IsForwardingAddress());
1576 Map* map = first_word.ToMap(); 1562 Map* map = first_word.ToMap();
1577 DoScavengeObject(map, p, object); 1563 DoScavengeObject(map, p, object);
1578 } 1564 }
1579 1565
1580 1566
1581 void Heap::ScavengePointer(HeapObject** p) {
1582 ScavengeObject(p, *p);
1583 }
1584
1585
1586 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, 1567 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
1587 int instance_size) { 1568 int instance_size) {
1588 Object* result; 1569 Object* result;
1589 { MaybeObject* maybe_result = AllocateRawMap(); 1570 { MaybeObject* maybe_result = AllocateRawMap();
1590 if (!maybe_result->ToObject(&result)) return maybe_result; 1571 if (!maybe_result->ToObject(&result)) return maybe_result;
1591 } 1572 }
1592 1573
1593 // Map::cast cannot be used due to uninitialized map field. 1574 // Map::cast cannot be used due to uninitialized map field.
1594 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); 1575 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1595 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); 1576 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1596 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); 1577 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1597 reinterpret_cast<Map*>(result)-> 1578 reinterpret_cast<Map*>(result)->set_visitor_id(
1598 set_visitor_id( 1579 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1599 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1600 reinterpret_cast<Map*>(result)->set_inobject_properties(0); 1580 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1601 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); 1581 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
1602 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); 1582 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1603 reinterpret_cast<Map*>(result)->set_bit_field(0); 1583 reinterpret_cast<Map*>(result)->set_bit_field(0);
1604 reinterpret_cast<Map*>(result)->set_bit_field2(0); 1584 reinterpret_cast<Map*>(result)->set_bit_field2(0);
1605 return result; 1585 return result;
1606 } 1586 }
1607 1587
1608 1588
1609 MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) { 1589 MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
(...skipping 88 matching lines...)
1698 // Allocate the empty array. 1678 // Allocate the empty array.
1699 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 1679 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1700 if (!maybe_obj->ToObject(&obj)) return false; 1680 if (!maybe_obj->ToObject(&obj)) return false;
1701 } 1681 }
1702 set_empty_fixed_array(FixedArray::cast(obj)); 1682 set_empty_fixed_array(FixedArray::cast(obj));
1703 1683
1704 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE); 1684 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
1705 if (!maybe_obj->ToObject(&obj)) return false; 1685 if (!maybe_obj->ToObject(&obj)) return false;
1706 } 1686 }
1707 set_null_value(obj); 1687 set_null_value(obj);
1688 Oddball::cast(obj)->set_kind(Oddball::kNull);
1708 1689
1709 // Allocate the empty descriptor array. 1690 // Allocate the empty descriptor array.
1710 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 1691 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1711 if (!maybe_obj->ToObject(&obj)) return false; 1692 if (!maybe_obj->ToObject(&obj)) return false;
1712 } 1693 }
1713 set_empty_descriptor_array(DescriptorArray::cast(obj)); 1694 set_empty_descriptor_array(DescriptorArray::cast(obj));
1714 1695
1715 // Fix the instance_descriptors for the existing maps. 1696 // Fix the instance_descriptors for the existing maps.
1716 meta_map()->set_instance_descriptors(empty_descriptor_array()); 1697 meta_map()->set_instance_descriptors(empty_descriptor_array());
1717 meta_map()->set_code_cache(empty_fixed_array()); 1698 meta_map()->set_code_cache(empty_fixed_array());
(...skipping 177 matching lines...)
1895 if (!maybe_obj->ToObject(&obj)) return false; 1876 if (!maybe_obj->ToObject(&obj)) return false;
1896 } 1877 }
1897 set_shared_function_info_map(Map::cast(obj)); 1878 set_shared_function_info_map(Map::cast(obj));
1898 1879
1899 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE, 1880 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE,
1900 JSMessageObject::kSize); 1881 JSMessageObject::kSize);
1901 if (!maybe_obj->ToObject(&obj)) return false; 1882 if (!maybe_obj->ToObject(&obj)) return false;
1902 } 1883 }
1903 set_message_object_map(Map::cast(obj)); 1884 set_message_object_map(Map::cast(obj));
1904 1885
1905 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); 1886 ASSERT(!InNewSpace(empty_fixed_array()));
1906 return true; 1887 return true;
1907 } 1888 }
1908 1889
1909 1890
1910 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 1891 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1911 // Statically ensure that it is safe to allocate heap numbers in paged 1892 // Statically ensure that it is safe to allocate heap numbers in paged
1912 // spaces. 1893 // spaces.
1913 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 1894 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1914 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 1895 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1915 1896
(...skipping 32 matching lines...)
1948 { MaybeObject* maybe_result = AllocateRawCell(); 1929 { MaybeObject* maybe_result = AllocateRawCell();
1949 if (!maybe_result->ToObject(&result)) return maybe_result; 1930 if (!maybe_result->ToObject(&result)) return maybe_result;
1950 } 1931 }
1951 HeapObject::cast(result)->set_map(global_property_cell_map()); 1932 HeapObject::cast(result)->set_map(global_property_cell_map());
1952 JSGlobalPropertyCell::cast(result)->set_value(value); 1933 JSGlobalPropertyCell::cast(result)->set_value(value);
1953 return result; 1934 return result;
1954 } 1935 }
1955 1936
1956 1937
1957 MaybeObject* Heap::CreateOddball(const char* to_string, 1938 MaybeObject* Heap::CreateOddball(const char* to_string,
1958 Object* to_number) { 1939 Object* to_number,
1940 byte kind) {
1959 Object* result; 1941 Object* result;
1960 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); 1942 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE);
1961 if (!maybe_result->ToObject(&result)) return maybe_result; 1943 if (!maybe_result->ToObject(&result)) return maybe_result;
1962 } 1944 }
1963 return Oddball::cast(result)->Initialize(to_string, to_number); 1945 return Oddball::cast(result)->Initialize(to_string, to_number, kind);
1964 } 1946 }
1965 1947
1966 1948
1967 bool Heap::CreateApiObjects() { 1949 bool Heap::CreateApiObjects() {
1968 Object* obj; 1950 Object* obj;
1969 1951
1970 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); 1952 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1971 if (!maybe_obj->ToObject(&obj)) return false; 1953 if (!maybe_obj->ToObject(&obj)) return false;
1972 } 1954 }
1973 set_neander_map(Map::cast(obj)); 1955 set_neander_map(Map::cast(obj));
1974 1956
1975 { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map()); 1957 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
1976 if (!maybe_obj->ToObject(&obj)) return false; 1958 if (!maybe_obj->ToObject(&obj)) return false;
1977 } 1959 }
1978 Object* elements; 1960 Object* elements;
1979 { MaybeObject* maybe_elements = AllocateFixedArray(2); 1961 { MaybeObject* maybe_elements = AllocateFixedArray(2);
1980 if (!maybe_elements->ToObject(&elements)) return false; 1962 if (!maybe_elements->ToObject(&elements)) return false;
1981 } 1963 }
1982 FixedArray::cast(elements)->set(0, Smi::FromInt(0)); 1964 FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1983 JSObject::cast(obj)->set_elements(FixedArray::cast(elements)); 1965 JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1984 set_message_listeners(JSObject::cast(obj)); 1966 set_message_listeners(JSObject::cast(obj));
1985 1967
(...skipping 44 matching lines...)
2030 2012
2031 { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED); 2013 { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED);
2032 if (!maybe_obj->ToObject(&obj)) return false; 2014 if (!maybe_obj->ToObject(&obj)) return false;
2033 } 2015 }
2034 set_nan_value(obj); 2016 set_nan_value(obj);
2035 2017
2036 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE); 2018 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
2037 if (!maybe_obj->ToObject(&obj)) return false; 2019 if (!maybe_obj->ToObject(&obj)) return false;
2038 } 2020 }
2039 set_undefined_value(obj); 2021 set_undefined_value(obj);
2022 Oddball::cast(obj)->set_kind(Oddball::kUndefined);
2040 ASSERT(!InNewSpace(undefined_value())); 2023 ASSERT(!InNewSpace(undefined_value()));
2041 2024
2042 // Allocate initial symbol table. 2025 // Allocate initial symbol table.
2043 { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize); 2026 { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
2044 if (!maybe_obj->ToObject(&obj)) return false; 2027 if (!maybe_obj->ToObject(&obj)) return false;
2045 } 2028 }
2046 // Don't use set_symbol_table() due to asserts. 2029 // Don't use set_symbol_table() due to asserts.
2047 roots_[kSymbolTableRootIndex] = obj; 2030 roots_[kSymbolTableRootIndex] = obj;
2048 2031
2049 // Assign the print strings for oddballs after creating symboltable. 2032 // Assign the print strings for oddballs after creating symboltable.
2050 Object* symbol; 2033 Object* symbol;
2051 { MaybeObject* maybe_symbol = LookupAsciiSymbol("undefined"); 2034 { MaybeObject* maybe_symbol = LookupAsciiSymbol("undefined");
2052 if (!maybe_symbol->ToObject(&symbol)) return false; 2035 if (!maybe_symbol->ToObject(&symbol)) return false;
2053 } 2036 }
2054 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol)); 2037 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
2055 Oddball::cast(undefined_value())->set_to_number(nan_value()); 2038 Oddball::cast(undefined_value())->set_to_number(nan_value());
2056 2039
2057 // Allocate the null_value 2040 // Allocate the null_value
2058 { MaybeObject* maybe_obj = 2041 { MaybeObject* maybe_obj =
2059 Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0)); 2042 Oddball::cast(null_value())->Initialize("null",
2043 Smi::FromInt(0),
2044 Oddball::kNull);
2060 if (!maybe_obj->ToObject(&obj)) return false; 2045 if (!maybe_obj->ToObject(&obj)) return false;
2061 } 2046 }
2062 2047
2063 { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1)); 2048 { MaybeObject* maybe_obj = CreateOddball("true",
2049 Smi::FromInt(1),
2050 Oddball::kTrue);
2064 if (!maybe_obj->ToObject(&obj)) return false; 2051 if (!maybe_obj->ToObject(&obj)) return false;
2065 } 2052 }
2066 set_true_value(obj); 2053 set_true_value(obj);
2067 2054
2068 { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0)); 2055 { MaybeObject* maybe_obj = CreateOddball("false",
2056 Smi::FromInt(0),
2057 Oddball::kFalse);
2069 if (!maybe_obj->ToObject(&obj)) return false; 2058 if (!maybe_obj->ToObject(&obj)) return false;
2070 } 2059 }
2071 set_false_value(obj); 2060 set_false_value(obj);
2072 2061
2073 { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1)); 2062 { MaybeObject* maybe_obj = CreateOddball("hole",
2063 Smi::FromInt(-1),
2064 Oddball::kTheHole);
2074 if (!maybe_obj->ToObject(&obj)) return false; 2065 if (!maybe_obj->ToObject(&obj)) return false;
2075 } 2066 }
2076 set_the_hole_value(obj); 2067 set_the_hole_value(obj);
2077 2068
2078 { MaybeObject* maybe_obj = CreateOddball("arguments_marker", 2069 { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
2079 Smi::FromInt(-4)); 2070 Smi::FromInt(-4),
2071 Oddball::kArgumentMarker);
2080 if (!maybe_obj->ToObject(&obj)) return false; 2072 if (!maybe_obj->ToObject(&obj)) return false;
2081 } 2073 }
2082 set_arguments_marker(obj); 2074 set_arguments_marker(obj);
2083 2075
2084 { MaybeObject* maybe_obj = 2076 { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
2085 CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2)); 2077 Smi::FromInt(-2),
2078 Oddball::kOther);
2086 if (!maybe_obj->ToObject(&obj)) return false; 2079 if (!maybe_obj->ToObject(&obj)) return false;
2087 } 2080 }
2088 set_no_interceptor_result_sentinel(obj); 2081 set_no_interceptor_result_sentinel(obj);
2089 2082
2090 { MaybeObject* maybe_obj = 2083 { MaybeObject* maybe_obj = CreateOddball("termination_exception",
2091 CreateOddball("termination_exception", Smi::FromInt(-3)); 2084 Smi::FromInt(-3),
2085 Oddball::kOther);
2092 if (!maybe_obj->ToObject(&obj)) return false; 2086 if (!maybe_obj->ToObject(&obj)) return false;
2093 } 2087 }
2094 set_termination_exception(obj); 2088 set_termination_exception(obj);
2095 2089
2096 // Allocate the empty string. 2090 // Allocate the empty string.
2097 { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED); 2091 { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
2098 if (!maybe_obj->ToObject(&obj)) return false; 2092 if (!maybe_obj->ToObject(&obj)) return false;
2099 } 2093 }
2100 set_empty_string(String::cast(obj)); 2094 set_empty_string(String::cast(obj));
2101 2095
(...skipping 41 matching lines...)
2143 set_instanceof_cache_function(Smi::FromInt(0)); 2137 set_instanceof_cache_function(Smi::FromInt(0));
2144 set_instanceof_cache_map(Smi::FromInt(0)); 2138 set_instanceof_cache_map(Smi::FromInt(0));
2145 set_instanceof_cache_answer(Smi::FromInt(0)); 2139 set_instanceof_cache_answer(Smi::FromInt(0));
2146 2140
2147 CreateFixedStubs(); 2141 CreateFixedStubs();
2148 2142
2149 // Allocate the dictionary of intrinsic function names. 2143 // Allocate the dictionary of intrinsic function names.
2150 { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions); 2144 { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
2151 if (!maybe_obj->ToObject(&obj)) return false; 2145 if (!maybe_obj->ToObject(&obj)) return false;
2152 } 2146 }
2153 { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj); 2147 { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
2148 obj);
2154 if (!maybe_obj->ToObject(&obj)) return false; 2149 if (!maybe_obj->ToObject(&obj)) return false;
2155 } 2150 }
2156 set_intrinsic_function_names(StringDictionary::cast(obj)); 2151 set_intrinsic_function_names(StringDictionary::cast(obj));
2157 2152
2158 if (InitializeNumberStringCache()->IsFailure()) return false; 2153 if (InitializeNumberStringCache()->IsFailure()) return false;
2159 2154
2160 // Allocate cache for single character ASCII strings. 2155 // Allocate cache for single character ASCII strings.
2161 { MaybeObject* maybe_obj = 2156 { MaybeObject* maybe_obj =
2162 AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED); 2157 AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
2163 if (!maybe_obj->ToObject(&obj)) return false; 2158 if (!maybe_obj->ToObject(&obj)) return false;
2164 } 2159 }
2165 set_single_character_string_cache(FixedArray::cast(obj)); 2160 set_single_character_string_cache(FixedArray::cast(obj));
2166 2161
2167 // Allocate cache for external strings pointing to native source code. 2162 // Allocate cache for external strings pointing to native source code.
2168 { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount()); 2163 { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
2169 if (!maybe_obj->ToObject(&obj)) return false; 2164 if (!maybe_obj->ToObject(&obj)) return false;
2170 } 2165 }
2171 set_natives_source_cache(FixedArray::cast(obj)); 2166 set_natives_source_cache(FixedArray::cast(obj));
2172 2167
2173 // Handling of script id generation is in Factory::NewScript. 2168 // Handling of script id generation is in FACTORY->NewScript.
2174 set_last_script_id(undefined_value()); 2169 set_last_script_id(undefined_value());
2175 2170
2176 // Initialize keyed lookup cache. 2171 // Initialize keyed lookup cache.
2177 KeyedLookupCache::Clear(); 2172 isolate_->keyed_lookup_cache()->Clear();
2178 2173
2179 // Initialize context slot cache. 2174 // Initialize context slot cache.
2180 ContextSlotCache::Clear(); 2175 isolate_->context_slot_cache()->Clear();
2181 2176
2182 // Initialize descriptor cache. 2177 // Initialize descriptor cache.
2183 DescriptorLookupCache::Clear(); 2178 isolate_->descriptor_lookup_cache()->Clear();
2184 2179
2185 // Initialize compilation cache. 2180 // Initialize compilation cache.
2186 CompilationCache::Clear(); 2181 isolate_->compilation_cache()->Clear();
2187 2182
2188 return true; 2183 return true;
2189 } 2184 }
2190 2185
2191 2186
2192 MaybeObject* Heap::InitializeNumberStringCache() { 2187 MaybeObject* Heap::InitializeNumberStringCache() {
2193 // Compute the size of the number string cache based on the max heap size. 2188 // Compute the size of the number string cache based on the max heap size.
2194 // max_semispace_size_ == 512 KB => number_string_cache_size = 32. 2189 // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
2195 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB. 2190 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
2196 int number_string_cache_size = max_semispace_size_ / 512; 2191 int number_string_cache_size = max_semispace_size_ / 512;
2197 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size)); 2192 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
2198 Object* obj; 2193 Object* obj;
2199 MaybeObject* maybe_obj = 2194 MaybeObject* maybe_obj =
2200 AllocateFixedArray(number_string_cache_size * 2, TENURED); 2195 AllocateFixedArray(number_string_cache_size * 2, TENURED);
2201 if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj)); 2196 if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj));
2202 return maybe_obj; 2197 return maybe_obj;
2203 } 2198 }
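
A worked example of the sizing formula in InitializeNumberStringCache, assuming max_semispace_size_ is in bytes: divide by 512, then clamp to between 32 and 16K entries (each logical entry occupies two array slots):

    #include <algorithm>
    #include <cstdio>

    int main() {
      const int KB = 1024;
      const int MB = 1024 * KB;
      int semispace_sizes[] = {16 * KB, 8 * MB, 64 * MB};
      for (int max_semispace : semispace_sizes) {
        int entries = max_semispace / 512;
        entries = std::max(32, std::min(16 * KB, entries));
        printf("semispace %7d KB -> %5d entries (%6d array slots)\n",
               max_semispace / KB, entries, entries * 2);
      }
      return 0;   // 16 KB -> 32, 8 MB -> 16384, 64 MB -> clamped to 16384
    }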
2204 2199
2205 2200
2206 void Heap::FlushNumberStringCache() { 2201 void Heap::FlushNumberStringCache() {
2207 // Flush the number to string cache. 2202 // Flush the number to string cache.
2208 int len = number_string_cache()->length(); 2203 int len = number_string_cache()->length();
2209 for (int i = 0; i < len; i++) { 2204 for (int i = 0; i < len; i++) {
2210 number_string_cache()->set_undefined(i); 2205 number_string_cache()->set_undefined(this, i);
2211 } 2206 }
2212 } 2207 }
2213 2208
2214 2209
2215 static inline int double_get_hash(double d) { 2210 static inline int double_get_hash(double d) {
2216 DoubleRepresentation rep(d); 2211 DoubleRepresentation rep(d);
2217 return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32); 2212 return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
2218 } 2213 }
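
double_get_hash folds the 64-bit representation of the double into an int by xor-ing the two 32-bit halves. The same computation standalone, using memcpy in place of the DoubleRepresentation helper (illustrative only):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static int DoubleGetHash(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof(bits));   // reinterpret the double's bits
      return static_cast<int>(bits) ^ static_cast<int>(bits >> 32);
    }

    int main() {
      printf("%d %d\n", DoubleGetHash(1.5), DoubleGetHash(-1.5));
      return 0;
    }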
2219 2214
2220 2215
(...skipping 31 matching lines...)
2252 } else { 2247 } else {
2253 hash = double_get_hash(number->Number()) & mask; 2248 hash = double_get_hash(number->Number()) & mask;
2254 number_string_cache()->set(hash * 2, number); 2249 number_string_cache()->set(hash * 2, number);
2255 } 2250 }
2256 number_string_cache()->set(hash * 2 + 1, string); 2251 number_string_cache()->set(hash * 2 + 1, string);
2257 } 2252 }
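
SetNumberStringCache keeps each entry in two adjacent slots of a single flat array: slot 2*hash holds the number (the key) and slot 2*hash + 1 holds the cached string. A tiny sketch of that layout (hypothetical names, plain pointers instead of heap objects):

    #include <cstdio>

    int main() {
      const int kEntries = 4;                 // power of two, so '& mask' works
      const void* cache[kEntries * 2] = {};   // entry i = slots 2*i and 2*i + 1
      const int mask = kEntries - 1;

      double number = 42.0;
      const char* string = "42";
      int hash = 42 & mask;
      cache[hash * 2] = &number;              // key slot
      cache[hash * 2 + 1] = string;           // value slot

      printf("%s\n", static_cast<const char*>(cache[hash * 2 + 1]));
      return 0;
    }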
2258 2253
2259 2254
2260 MaybeObject* Heap::NumberToString(Object* number, 2255 MaybeObject* Heap::NumberToString(Object* number,
2261 bool check_number_string_cache) { 2256 bool check_number_string_cache) {
2262 Counters::number_to_string_runtime.Increment(); 2257 isolate_->counters()->number_to_string_runtime()->Increment();
2263 if (check_number_string_cache) { 2258 if (check_number_string_cache) {
2264 Object* cached = GetNumberStringCache(number); 2259 Object* cached = GetNumberStringCache(number);
2265 if (cached != undefined_value()) { 2260 if (cached != undefined_value()) {
2266 return cached; 2261 return cached;
2267 } 2262 }
2268 } 2263 }
2269 2264
2270 char arr[100]; 2265 char arr[100];
2271 Vector<char> buffer(arr, ARRAY_SIZE(arr)); 2266 Vector<char> buffer(arr, ARRAY_SIZE(arr));
2272 const char* str; 2267 const char* str;
(...skipping 82 matching lines...)
2355 2350
2356 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { 2351 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
2357 Object* result; 2352 Object* result;
2358 { MaybeObject* maybe_result = 2353 { MaybeObject* maybe_result =
2359 Allocate(shared_function_info_map(), OLD_POINTER_SPACE); 2354 Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
2360 if (!maybe_result->ToObject(&result)) return maybe_result; 2355 if (!maybe_result->ToObject(&result)) return maybe_result;
2361 } 2356 }
2362 2357
2363 SharedFunctionInfo* share = SharedFunctionInfo::cast(result); 2358 SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
2364 share->set_name(name); 2359 share->set_name(name);
2365 Code* illegal = Builtins::builtin(Builtins::Illegal); 2360 Code* illegal = isolate_->builtins()->builtin(Builtins::Illegal);
2366 share->set_code(illegal); 2361 share->set_code(illegal);
2367 share->set_scope_info(SerializedScopeInfo::Empty()); 2362 share->set_scope_info(SerializedScopeInfo::Empty());
2368 Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric); 2363 Code* construct_stub = isolate_->builtins()->builtin(
2364 Builtins::JSConstructStubGeneric);
2369 share->set_construct_stub(construct_stub); 2365 share->set_construct_stub(construct_stub);
2370 share->set_expected_nof_properties(0); 2366 share->set_expected_nof_properties(0);
2371 share->set_length(0); 2367 share->set_length(0);
2372 share->set_formal_parameter_count(0); 2368 share->set_formal_parameter_count(0);
2373 share->set_instance_class_name(Object_symbol()); 2369 share->set_instance_class_name(Object_symbol());
2374 share->set_function_data(undefined_value()); 2370 share->set_function_data(undefined_value());
2375 share->set_script(undefined_value()); 2371 share->set_script(undefined_value());
2376 share->set_start_position_and_type(0); 2372 share->set_start_position_and_type(0);
2377 share->set_debug_info(undefined_value()); 2373 share->set_debug_info(undefined_value());
2378 share->set_inferred_name(empty_string()); 2374 share->set_inferred_name(empty_string());
(...skipping 37 matching lines...)
2416 2412
2417 2413
2418 // Returns true for a character in a range. Both limits are inclusive. 2414 // Returns true for a character in a range. Both limits are inclusive.
2419 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) { 2415 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
2420 // This makes use of the unsigned wraparound. 2416 // This makes use of the unsigned wraparound.
2421 return character - from <= to - from; 2417 return character - from <= to - from;
2422 } 2418 }
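
A worked example of Between()'s single-comparison trick: in unsigned arithmetic, character - from wraps around to a huge value whenever character < from, so one <= test covers both ends of the range:

    #include <cstdint>
    #include <cstdio>

    static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
      return character - from <= to - from;
    }

    int main() {
      printf("%d\n", Between('5', '0', '9'));  // 1: 5 <= 9
      printf("%d\n", Between('/', '0', '9'));  // 0: '/' - '0' wraps to 0xFFFFFFFF
      printf("%d\n", Between(':', '0', '9'));  // 0: 10 > 9
      return 0;
    }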
2423 2419
2424 2420
2425 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString( 2421 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
2422 Heap* heap,
2426 uint32_t c1, 2423 uint32_t c1,
2427 uint32_t c2) { 2424 uint32_t c2) {
2428 String* symbol; 2425 String* symbol;
2429 // Numeric strings have a different hash algorithm not known by 2426 // Numeric strings have a different hash algorithm not known by
2430 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings. 2427 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
2431 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) && 2428 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
2432 Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) { 2429 heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
2433 return symbol; 2430 return symbol;
2434 // Now that we know the length is 2, we might as well make use of that 2431 // Now that we know the length is 2, we might as well make use of that
2435 // when building the new string. 2432 // when building the new string.
2436 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this 2433 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
2437 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this. 2434 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
2438 Object* result; 2435 Object* result;
2439 { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2); 2436 { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
2440 if (!maybe_result->ToObject(&result)) return maybe_result; 2437 if (!maybe_result->ToObject(&result)) return maybe_result;
2441 } 2438 }
2442 char* dest = SeqAsciiString::cast(result)->GetChars(); 2439 char* dest = SeqAsciiString::cast(result)->GetChars();
2443 dest[0] = c1; 2440 dest[0] = c1;
2444 dest[1] = c2; 2441 dest[1] = c2;
2445 return result; 2442 return result;
2446 } else { 2443 } else {
2447 Object* result; 2444 Object* result;
2448 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2); 2445 { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
2449 if (!maybe_result->ToObject(&result)) return maybe_result; 2446 if (!maybe_result->ToObject(&result)) return maybe_result;
2450 } 2447 }
2451 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2448 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2452 dest[0] = c1; 2449 dest[0] = c1;
2453 dest[1] = c2; 2450 dest[1] = c2;
2454 return result; 2451 return result;
2455 } 2452 }
2456 } 2453 }
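
MakeOrFindTwoCharacterString's fast path consults the symbol table before allocating, so repeated two-character keys come back as the same interned object. A sketch of that check-then-allocate shape, with std::map standing in for the symbol table:

    #include <cstdio>
    #include <map>
    #include <string>
    #include <utility>

    std::map<std::pair<char, char>, std::string> symbol_table;

    const std::string& MakeOrFindTwoCharString(char c1, char c2) {
      auto key = std::make_pair(c1, c2);
      auto it = symbol_table.find(key);
      if (it != symbol_table.end()) return it->second;  // already interned
      return symbol_table.emplace(key, std::string{c1, c2}).first->second;
    }

    int main() {
      const std::string& a = MakeOrFindTwoCharString('a', 'b');
      const std::string& b = MakeOrFindTwoCharString('a', 'b');
      printf("same object: %d\n", &a == &b);   // 1
      return 0;
    }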
2457 2454
2458 2455
2459 MaybeObject* Heap::AllocateConsString(String* first, String* second) { 2456 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
2460 int first_length = first->length(); 2457 int first_length = first->length();
2461 if (first_length == 0) { 2458 if (first_length == 0) {
2462 return second; 2459 return second;
2463 } 2460 }
2464 2461
2465 int second_length = second->length(); 2462 int second_length = second->length();
2466 if (second_length == 0) { 2463 if (second_length == 0) {
2467 return first; 2464 return first;
2468 } 2465 }
2469 2466
2470 int length = first_length + second_length; 2467 int length = first_length + second_length;
2471 2468
2472 // Optimization for 2-byte strings often used as keys in a decompression 2469 // Optimization for 2-byte strings often used as keys in a decompression
2473 // dictionary. Check whether we already have the string in the symbol 2470 // dictionary. Check whether we already have the string in the symbol
2474 // table to prevent creation of many unnecessary strings. 2471 // table to prevent creation of many unnecessary strings.
2475 if (length == 2) { 2472 if (length == 2) {
2476 unsigned c1 = first->Get(0); 2473 unsigned c1 = first->Get(0);
2477 unsigned c2 = second->Get(0); 2474 unsigned c2 = second->Get(0);
2478 return MakeOrFindTwoCharacterString(c1, c2); 2475 return MakeOrFindTwoCharacterString(this, c1, c2);
2479 } 2476 }
2480 2477
2481 bool first_is_ascii = first->IsAsciiRepresentation(); 2478 bool first_is_ascii = first->IsAsciiRepresentation();
2482 bool second_is_ascii = second->IsAsciiRepresentation(); 2479 bool second_is_ascii = second->IsAsciiRepresentation();
2483 bool is_ascii = first_is_ascii && second_is_ascii; 2480 bool is_ascii = first_is_ascii && second_is_ascii;
2484 2481
2485 // Make sure that an out of memory exception is thrown if the length 2482 // Make sure that an out of memory exception is thrown if the length
2486 // of the new cons string is too large. 2483 // of the new cons string is too large.
2487 if (length > String::kMaxLength || length < 0) { 2484 if (length > String::kMaxLength || length < 0) {
2488 Top::context()->mark_out_of_memory(); 2485 isolate()->context()->mark_out_of_memory();
2489 return Failure::OutOfMemoryException(); 2486 return Failure::OutOfMemoryException();
2490 } 2487 }
2491 2488
2492 bool is_ascii_data_in_two_byte_string = false; 2489 bool is_ascii_data_in_two_byte_string = false;
2493 if (!is_ascii) { 2490 if (!is_ascii) {
2494 // At least one of the strings uses two-byte representation so we 2491 // At least one of the strings uses two-byte representation so we
2495 // can't use the fast case code for short ascii strings below, but 2492 // can't use the fast case code for short ascii strings below, but
2496 // we can try to save memory if all chars actually fit in ascii. 2493 // we can try to save memory if all chars actually fit in ascii.
2497 is_ascii_data_in_two_byte_string = 2494 is_ascii_data_in_two_byte_string =
2498 first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars(); 2495 first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
2499 if (is_ascii_data_in_two_byte_string) { 2496 if (is_ascii_data_in_two_byte_string) {
2500 Counters::string_add_runtime_ext_to_ascii.Increment(); 2497 isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2501 } 2498 }
2502 } 2499 }
2503 2500
2504 // If the resulting string is small, make a flat string. 2501 // If the resulting string is small, make a flat string.
2505 if (length < String::kMinNonFlatLength) { 2502 if (length < String::kMinNonFlatLength) {
2506 ASSERT(first->IsFlat()); 2503 ASSERT(first->IsFlat());
2507 ASSERT(second->IsFlat()); 2504 ASSERT(second->IsFlat());
2508 if (is_ascii) { 2505 if (is_ascii) {
2509 Object* result; 2506 Object* result;
2510 { MaybeObject* maybe_result = AllocateRawAsciiString(length); 2507 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
(...skipping 20 matching lines...)
2531 } else { 2528 } else {
2532 if (is_ascii_data_in_two_byte_string) { 2529 if (is_ascii_data_in_two_byte_string) {
2533 Object* result; 2530 Object* result;
2534 { MaybeObject* maybe_result = AllocateRawAsciiString(length); 2531 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2535 if (!maybe_result->ToObject(&result)) return maybe_result; 2532 if (!maybe_result->ToObject(&result)) return maybe_result;
2536 } 2533 }
2537 // Copy the characters into the new object. 2534 // Copy the characters into the new object.
2538 char* dest = SeqAsciiString::cast(result)->GetChars(); 2535 char* dest = SeqAsciiString::cast(result)->GetChars();
2539 String::WriteToFlat(first, dest, 0, first_length); 2536 String::WriteToFlat(first, dest, 0, first_length);
2540 String::WriteToFlat(second, dest + first_length, 0, second_length); 2537 String::WriteToFlat(second, dest + first_length, 0, second_length);
2538 isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2541 return result; 2539 return result;
2542 } 2540 }
2543 2541
2544 Object* result; 2542 Object* result;
2545 { MaybeObject* maybe_result = AllocateRawTwoByteString(length); 2543 { MaybeObject* maybe_result = AllocateRawTwoByteString(length);
2546 if (!maybe_result->ToObject(&result)) return maybe_result; 2544 if (!maybe_result->ToObject(&result)) return maybe_result;
2547 } 2545 }
2548 // Copy the characters into the new object. 2546 // Copy the characters into the new object.
2549 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2547 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2550 String::WriteToFlat(first, dest, 0, first_length); 2548 String::WriteToFlat(first, dest, 0, first_length);
(...skipping 21 matching lines...)
2572 } 2570 }
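
AllocateConsString's overall policy: results shorter than String::kMinNonFlatLength are copied into a flat string immediately, while longer ones get a two-field cons cell that defers the copy until the string is flattened. A sketch of the policy with simplified types; the threshold of 13 is an assumption here, and both inputs are assumed flat:

    #include <cstdio>
    #include <memory>
    #include <string>

    struct Str {
      std::string flat;                    // used when is_flat
      std::shared_ptr<Str> first, second;  // used by cons cells
      bool is_flat;
      size_t length;
    };

    const size_t kMinNonFlatLength = 13;   // assumed threshold

    std::shared_ptr<Str> Concat(std::shared_ptr<Str> a, std::shared_ptr<Str> b) {
      size_t len = a->length + b->length;
      if (len < kMinNonFlatLength) {       // small result: flatten eagerly
        return std::make_shared<Str>(
            Str{a->flat + b->flat, nullptr, nullptr, true, len});
      }
      return std::make_shared<Str>(Str{"", a, b, false, len});  // cons cell
    }

    int main() {
      auto a = std::make_shared<Str>(Str{"hello ", nullptr, nullptr, true, 6});
      auto b = std::make_shared<Str>(Str{"world", nullptr, nullptr, true, 5});
      auto c = Concat(a, b);
      printf("flat: %d, length: %zu\n", c->is_flat, c->length);  // flat: 1, 11
      return 0;
    }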
2573 2571
2574 2572
2575 MaybeObject* Heap::AllocateSubString(String* buffer, 2573 MaybeObject* Heap::AllocateSubString(String* buffer,
2576 int start, 2574 int start,
2577 int end, 2575 int end,
2578 PretenureFlag pretenure) { 2576 PretenureFlag pretenure) {
2579 int length = end - start; 2577 int length = end - start;
2580 2578
2581 if (length == 1) { 2579 if (length == 1) {
2582 return Heap::LookupSingleCharacterStringFromCode( 2580 return LookupSingleCharacterStringFromCode(buffer->Get(start));
2583 buffer->Get(start));
2584 } else if (length == 2) { 2581 } else if (length == 2) {
2585 // Optimization for 2-byte strings often used as keys in a decompression 2582 // Optimization for 2-byte strings often used as keys in a decompression
2586 // dictionary. Check whether we already have the string in the symbol 2583 // dictionary. Check whether we already have the string in the symbol
2587 // table to prevent creation of many unnecessary strings. 2584 // table to prevent creation of many unnecessary strings.
2588 unsigned c1 = buffer->Get(start); 2585 unsigned c1 = buffer->Get(start);
2589 unsigned c2 = buffer->Get(start + 1); 2586 unsigned c2 = buffer->Get(start + 1);
2590 return MakeOrFindTwoCharacterString(c1, c2); 2587 return MakeOrFindTwoCharacterString(this, c1, c2);
2591 } 2588 }
2592 2589
2593 // Make an attempt to flatten the buffer to reduce access time. 2590 // Make an attempt to flatten the buffer to reduce access time.
2594 buffer = buffer->TryFlattenGetString(); 2591 buffer = buffer->TryFlattenGetString();
2595 2592
2596 Object* result; 2593 Object* result;
2597 { MaybeObject* maybe_result = buffer->IsAsciiRepresentation() 2594 { MaybeObject* maybe_result = buffer->IsAsciiRepresentation()
2598 ? AllocateRawAsciiString(length, pretenure) 2595 ? AllocateRawAsciiString(length, pretenure)
2599 : AllocateRawTwoByteString(length, pretenure); 2596 : AllocateRawTwoByteString(length, pretenure);
2600 if (!maybe_result->ToObject(&result)) return maybe_result; 2597 if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 11 matching lines...)
2612 } 2609 }
2613 2610
2614 return result; 2611 return result;
2615 } 2612 }
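
The fast paths in AllocateSubString are worth seeing in isolation: length-1 substrings come from a per-character cache, length-2 substrings are deduplicated through the symbol table, and only longer substrings pay for flattening and a copy. A minimal standalone sketch of that ordering, using std::string and a std::map as a stand-in for the symbol table (illustrative only, not V8 internals):

#include <map>
#include <string>

static std::map<std::string, std::string> two_char_table;  // symbol-table stand-in

std::string SubString(const std::string& buffer, int start, int end) {
  int length = end - start;
  if (length == 1) {
    return std::string(1, buffer[start]);       // single-character cache path
  }
  if (length == 2) {
    std::string key = buffer.substr(start, 2);  // MakeOrFindTwoCharacterString analogue
    std::map<std::string, std::string>::iterator it = two_char_table.find(key);
    if (it != two_char_table.end()) return it->second;
    two_char_table[key] = key;
    return key;
  }
  return buffer.substr(start, length);          // general copy path
}
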
2616 2613
2617 2614
2618 MaybeObject* Heap::AllocateExternalStringFromAscii( 2615 MaybeObject* Heap::AllocateExternalStringFromAscii(
2619 ExternalAsciiString::Resource* resource) { 2616 ExternalAsciiString::Resource* resource) {
2620 size_t length = resource->length(); 2617 size_t length = resource->length();
2621 if (length > static_cast<size_t>(String::kMaxLength)) { 2618 if (length > static_cast<size_t>(String::kMaxLength)) {
2622 Top::context()->mark_out_of_memory(); 2619 isolate()->context()->mark_out_of_memory();
2623 return Failure::OutOfMemoryException(); 2620 return Failure::OutOfMemoryException();
2624 } 2621 }
2625 2622
2626 Map* map = external_ascii_string_map(); 2623 Map* map = external_ascii_string_map();
2627 Object* result; 2624 Object* result;
2628 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE); 2625 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2629 if (!maybe_result->ToObject(&result)) return maybe_result; 2626 if (!maybe_result->ToObject(&result)) return maybe_result;
2630 } 2627 }
2631 2628
2632 ExternalAsciiString* external_string = ExternalAsciiString::cast(result); 2629 ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
2633 external_string->set_length(static_cast<int>(length)); 2630 external_string->set_length(static_cast<int>(length));
2634 external_string->set_hash_field(String::kEmptyHashField); 2631 external_string->set_hash_field(String::kEmptyHashField);
2635 external_string->set_resource(resource); 2632 external_string->set_resource(resource);
2636 2633
2637 return result; 2634 return result;
2638 } 2635 }
2639 2636
2640 2637
2641 MaybeObject* Heap::AllocateExternalStringFromTwoByte( 2638 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
2642 ExternalTwoByteString::Resource* resource) { 2639 ExternalTwoByteString::Resource* resource) {
2643 size_t length = resource->length(); 2640 size_t length = resource->length();
2644 if (length > static_cast<size_t>(String::kMaxLength)) { 2641 if (length > static_cast<size_t>(String::kMaxLength)) {
2645 Top::context()->mark_out_of_memory(); 2642 isolate()->context()->mark_out_of_memory();
2646 return Failure::OutOfMemoryException(); 2643 return Failure::OutOfMemoryException();
2647 } 2644 }
2648 2645
2649 // For small strings we check whether the resource contains only 2646 // For small strings we check whether the resource contains only
2650 // ASCII characters. If so, we use a different string map. 2647 // ASCII characters. If so, we use a different string map.
2651 static const size_t kAsciiCheckLengthLimit = 32; 2648 static const size_t kAsciiCheckLengthLimit = 32;
2652 bool is_ascii = length <= kAsciiCheckLengthLimit && 2649 bool is_ascii = length <= kAsciiCheckLengthLimit &&
2653 String::IsAscii(resource->data(), static_cast<int>(length)); 2650 String::IsAscii(resource->data(), static_cast<int>(length));
2654 Map* map = is_ascii ? 2651 Map* map = is_ascii ?
2655 Heap::external_string_with_ascii_data_map() : Heap::external_string_map(); 2652 external_string_with_ascii_data_map() : external_string_map();
2656 Object* result; 2653 Object* result;
2657 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE); 2654 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2658 if (!maybe_result->ToObject(&result)) return maybe_result; 2655 if (!maybe_result->ToObject(&result)) return maybe_result;
2659 } 2656 }
2660 2657
2661 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result); 2658 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
2662 external_string->set_length(static_cast<int>(length)); 2659 external_string->set_length(static_cast<int>(length));
2663 external_string->set_hash_field(String::kEmptyHashField); 2660 external_string->set_hash_field(String::kEmptyHashField);
2664 external_string->set_resource(resource); 2661 external_string->set_resource(resource);
2665 2662
2666 return result; 2663 return result;
2667 } 2664 }
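
The kAsciiCheckLengthLimit heuristic above bounds the cost of probing an external two-byte resource. A self-contained sketch of the probe, assuming code units <= 0x7F count as ASCII (matching String::kMaxAsciiCharCode):

#include <cstddef>

bool IsSmallAsciiResource(const unsigned short* data, size_t length) {
  static const size_t kAsciiCheckLengthLimit = 32;   // same limit as above
  if (length > kAsciiCheckLengthLimit) return false; // too long: skip the scan
  for (size_t i = 0; i < length; i++) {
    if (data[i] > 0x7F) return false;                // non-ASCII code unit
  }
  return true;                                       // eligible for the ascii-data map
}
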
2668 2665
2669 2666
2670 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) { 2667 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
2671 if (code <= String::kMaxAsciiCharCode) { 2668 if (code <= String::kMaxAsciiCharCode) {
2672 Object* value = Heap::single_character_string_cache()->get(code); 2669 Object* value = single_character_string_cache()->get(code);
2673 if (value != Heap::undefined_value()) return value; 2670 if (value != undefined_value()) return value;
2674 2671
2675 char buffer[1]; 2672 char buffer[1];
2676 buffer[0] = static_cast<char>(code); 2673 buffer[0] = static_cast<char>(code);
2677 Object* result; 2674 Object* result;
2678 MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1)); 2675 MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
2679 2676
2680 if (!maybe_result->ToObject(&result)) return maybe_result; 2677 if (!maybe_result->ToObject(&result)) return maybe_result;
2681 Heap::single_character_string_cache()->set(code, result); 2678 single_character_string_cache()->set(code, result);
2682 return result; 2679 return result;
2683 } 2680 }
2684 2681
2685 Object* result; 2682 Object* result;
2686 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1); 2683 { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
2687 if (!maybe_result->ToObject(&result)) return maybe_result; 2684 if (!maybe_result->ToObject(&result)) return maybe_result;
2688 } 2685 }
2689 String* answer = String::cast(result); 2686 String* answer = String::cast(result);
2690 answer->Set(0, code); 2687 answer->Set(0, code);
2691 return answer; 2688 return answer;
2692 } 2689 }
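
LookupSingleCharacterStringFromCode is a lazily filled, fixed-size cache keyed by character code. A hedged standalone sketch of the same pattern, with NULL entries playing the role of undefined_value() and std::wstring standing in for a two-byte string:

#include <cstddef>
#include <string>

static std::wstring* single_char_cache[128];  // NULL entries mean "not cached yet"

std::wstring LookupSingleCharacterString(unsigned short code) {
  if (code < 128) {  // mirrors the code <= String::kMaxAsciiCharCode test
    if (single_char_cache[code] == NULL) {
      single_char_cache[code] = new std::wstring(1, static_cast<wchar_t>(code));
    }
    return *single_char_cache[code];  // shared, deduplicated instance
  }
  return std::wstring(1, static_cast<wchar_t>(code));  // uncached two-byte string
}
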
2693 2690
2694 2691
2695 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 2692 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
2696 if (length < 0 || length > ByteArray::kMaxLength) { 2693 if (length < 0 || length > ByteArray::kMaxLength) {
(...skipping 93 matching lines...)
2790 } else { 2787 } else {
2791 maybe_result = code_space_->AllocateRaw(obj_size); 2788 maybe_result = code_space_->AllocateRaw(obj_size);
2792 } 2789 }
2793 2790
2794 Object* result; 2791 Object* result;
2795 if (!maybe_result->ToObject(&result)) return maybe_result; 2792 if (!maybe_result->ToObject(&result)) return maybe_result;
2796 2793
2797 // Initialize the object. 2794 // Initialize the object.
2798 HeapObject::cast(result)->set_map(code_map()); 2795 HeapObject::cast(result)->set_map(code_map());
2799 Code* code = Code::cast(result); 2796 Code* code = Code::cast(result);
2800 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2797 ASSERT(!isolate_->code_range()->exists() ||
2798 isolate_->code_range()->contains(code->address()));
2801 code->set_instruction_size(desc.instr_size); 2799 code->set_instruction_size(desc.instr_size);
2802 code->set_relocation_info(ByteArray::cast(reloc_info)); 2800 code->set_relocation_info(ByteArray::cast(reloc_info));
2803 code->set_flags(flags); 2801 code->set_flags(flags);
2804 if (code->is_call_stub() || code->is_keyed_call_stub()) { 2802 if (code->is_call_stub() || code->is_keyed_call_stub()) {
2805 code->set_check_type(RECEIVER_MAP_CHECK); 2803 code->set_check_type(RECEIVER_MAP_CHECK);
2806 } 2804 }
2807 code->set_deoptimization_data(empty_fixed_array()); 2805 code->set_deoptimization_data(empty_fixed_array());
2808 // Allow self references to the created code object by patching the handle to 2806 // Allow self references to the created code object by patching the handle to
2809 // point to the newly allocated Code object. 2807 // point to the newly allocated Code object.
2810 if (!self_reference.is_null()) { 2808 if (!self_reference.is_null()) {
(...skipping 25 matching lines...)
2836 2834
2837 Object* result; 2835 Object* result;
2838 if (!maybe_result->ToObject(&result)) return maybe_result; 2836 if (!maybe_result->ToObject(&result)) return maybe_result;
2839 2837
2840 // Copy code object. 2838 // Copy code object.
2841 Address old_addr = code->address(); 2839 Address old_addr = code->address();
2842 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 2840 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2843 CopyBlock(new_addr, old_addr, obj_size); 2841 CopyBlock(new_addr, old_addr, obj_size);
2844 // Relocate the copy. 2842 // Relocate the copy.
2845 Code* new_code = Code::cast(result); 2843 Code* new_code = Code::cast(result);
2846 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2844 ASSERT(!isolate_->code_range()->exists() ||
2845 isolate_->code_range()->contains(code->address()));
2847 new_code->Relocate(new_addr - old_addr); 2846 new_code->Relocate(new_addr - old_addr);
2848 return new_code; 2847 return new_code;
2849 } 2848 }
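
The copy path above is a plain block copy followed by a relocation pass that shifts embedded addresses by the move delta. A toy model of that step (a sketch under invented types, not the real Code object layout):

#include <cstddef>
#include <vector>

struct Blob {
  std::vector<unsigned char> bytes;
  std::vector<long> internal_targets;  // stand-in for the relocation info
};

Blob CopyAndRelocate(const Blob& old_blob, long delta) {
  Blob copy = old_blob;  // CopyBlock analogue: a verbatim copy
  for (size_t i = 0; i < copy.internal_targets.size(); i++) {
    copy.internal_targets[i] += delta;  // Relocate(new_addr - old_addr)
  }
  return copy;
}
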
2850 2849
2851 2850
2852 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { 2851 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
2853 // Allocate ByteArray before the Code object, so that we do not risk 2852 // Allocate ByteArray before the Code object, so that we do not risk
2854 // leaving an uninitialized Code object (and breaking the heap). 2853 // leaving an uninitialized Code object (and breaking the heap).
2855 Object* reloc_info_array; 2854 Object* reloc_info_array;
2856 { MaybeObject* maybe_reloc_info_array = 2855 { MaybeObject* maybe_reloc_info_array =
(...skipping 28 matching lines...)
2885 // Copy header and instructions. 2884 // Copy header and instructions.
2886 memcpy(new_addr, old_addr, relocation_offset); 2885 memcpy(new_addr, old_addr, relocation_offset);
2887 2886
2888 Code* new_code = Code::cast(result); 2887 Code* new_code = Code::cast(result);
2889 new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); 2888 new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
2890 2889
2891 // Copy patched rinfo. 2890 // Copy patched rinfo.
2892 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); 2891 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
2893 2892
2894 // Relocate the copy. 2893 // Relocate the copy.
2895 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2894 ASSERT(!isolate_->code_range()->exists() ||
2895 isolate_->code_range()->contains(code->address()));
2896 new_code->Relocate(new_addr - old_addr); 2896 new_code->Relocate(new_addr - old_addr);
2897 2897
2898 #ifdef DEBUG 2898 #ifdef DEBUG
2899 code->Verify(); 2899 code->Verify();
2900 #endif 2900 #endif
2901 return new_code; 2901 return new_code;
2902 } 2902 }
2903 2903
2904 2904
2905 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { 2905 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
2906 ASSERT(gc_state_ == NOT_IN_GC); 2906 ASSERT(gc_state_ == NOT_IN_GC);
2907 ASSERT(map->instance_type() != MAP_TYPE); 2907 ASSERT(map->instance_type() != MAP_TYPE);
2908 // If allocation failures are disallowed, we may allocate in a different 2908 // If allocation failures are disallowed, we may allocate in a different
2909 // space when new space is full and the object is not a large object. 2909 // space when new space is full and the object is not a large object.
2910 AllocationSpace retry_space = 2910 AllocationSpace retry_space =
2911 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); 2911 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
2912 Object* result; 2912 Object* result;
2913 { MaybeObject* maybe_result = 2913 { MaybeObject* maybe_result =
2914 AllocateRaw(map->instance_size(), space, retry_space); 2914 AllocateRaw(map->instance_size(), space, retry_space);
2915 if (!maybe_result->ToObject(&result)) return maybe_result; 2915 if (!maybe_result->ToObject(&result)) return maybe_result;
2916 } 2916 }
2917 HeapObject::cast(result)->set_map(map); 2917 HeapObject::cast(result)->set_map(map);
2918 #ifdef ENABLE_LOGGING_AND_PROFILING 2918 #ifdef ENABLE_LOGGING_AND_PROFILING
2919 ProducerHeapProfile::RecordJSObjectAllocation(result); 2919 isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
2920 #endif 2920 #endif
2921 return result; 2921 return result;
2922 } 2922 }
2923 2923
2924 2924
2925 MaybeObject* Heap::InitializeFunction(JSFunction* function, 2925 MaybeObject* Heap::InitializeFunction(JSFunction* function,
2926 SharedFunctionInfo* shared, 2926 SharedFunctionInfo* shared,
2927 Object* prototype) { 2927 Object* prototype) {
2928 ASSERT(!prototype->IsMap()); 2928 ASSERT(!prototype->IsMap());
2929 function->initialize_properties(); 2929 function->initialize_properties();
(...skipping 47 matching lines...)
2977 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { 2977 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
2978 // To get fast allocation and map sharing for arguments objects we 2978 // To get fast allocation and map sharing for arguments objects we
2979 // allocate them based on an arguments boilerplate. 2979 // allocate them based on an arguments boilerplate.
2980 2980
2981 JSObject* boilerplate; 2981 JSObject* boilerplate;
2982 int arguments_object_size; 2982 int arguments_object_size;
2983 bool strict_mode_callee = callee->IsJSFunction() && 2983 bool strict_mode_callee = callee->IsJSFunction() &&
2984 JSFunction::cast(callee)->shared()->strict_mode(); 2984 JSFunction::cast(callee)->shared()->strict_mode();
2985 if (strict_mode_callee) { 2985 if (strict_mode_callee) {
2986 boilerplate = 2986 boilerplate =
2987 Top::context()->global_context()->strict_mode_arguments_boilerplate(); 2987 isolate()->context()->global_context()->
2988 strict_mode_arguments_boilerplate();
2988 arguments_object_size = kArgumentsObjectSizeStrict; 2989 arguments_object_size = kArgumentsObjectSizeStrict;
2989 } else { 2990 } else {
2990 boilerplate = Top::context()->global_context()->arguments_boilerplate(); 2991 boilerplate =
2992 isolate()->context()->global_context()->arguments_boilerplate();
2991 arguments_object_size = kArgumentsObjectSize; 2993 arguments_object_size = kArgumentsObjectSize;
2992 } 2994 }
2993 2995
2994 // This calls Copy directly rather than using Heap::AllocateRaw so we 2996 // This calls Copy directly rather than using Heap::AllocateRaw so we
2995 // duplicate the check here. 2997 // duplicate the check here.
2996 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 2998 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2997 2999
2998 // Check that the size of the boilerplate matches our 3000 // Check that the size of the boilerplate matches our
2999 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 3001 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
3000 // on the size being a known constant. 3002 // on the size being a known constant.
(...skipping 46 matching lines...)
3047 3049
3048 3050
3049 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) { 3051 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
3050 ASSERT(!fun->has_initial_map()); 3052 ASSERT(!fun->has_initial_map());
3051 3053
3052 // First create a new map with the size and number of in-object properties 3054 // First create a new map with the size and number of in-object properties
3053 // suggested by the function. 3055 // suggested by the function.
3054 int instance_size = fun->shared()->CalculateInstanceSize(); 3056 int instance_size = fun->shared()->CalculateInstanceSize();
3055 int in_object_properties = fun->shared()->CalculateInObjectProperties(); 3057 int in_object_properties = fun->shared()->CalculateInObjectProperties();
3056 Object* map_obj; 3058 Object* map_obj;
3057 { MaybeObject* maybe_map_obj = 3059 { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
3058 Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
3059 if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj; 3060 if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
3060 } 3061 }
3061 3062
3062 // Fetch or allocate prototype. 3063 // Fetch or allocate prototype.
3063 Object* prototype; 3064 Object* prototype;
3064 if (fun->has_instance_prototype()) { 3065 if (fun->has_instance_prototype()) {
3065 prototype = fun->instance_prototype(); 3066 prototype = fun->instance_prototype();
3066 } else { 3067 } else {
3067 { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun); 3068 { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
3068 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype; 3069 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
(...skipping 175 matching lines...)
3244 3245
3245 // The global object might be created from an object template with accessors. 3246 // The global object might be created from an object template with accessors.
3246 // Fill these accessors into the dictionary. 3247 // Fill these accessors into the dictionary.
3247 DescriptorArray* descs = map->instance_descriptors(); 3248 DescriptorArray* descs = map->instance_descriptors();
3248 for (int i = 0; i < descs->number_of_descriptors(); i++) { 3249 for (int i = 0; i < descs->number_of_descriptors(); i++) {
3249 PropertyDetails details = descs->GetDetails(i); 3250 PropertyDetails details = descs->GetDetails(i);
3250 ASSERT(details.type() == CALLBACKS); // Only accessors are expected. 3251 ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
3251 PropertyDetails d = 3252 PropertyDetails d =
3252 PropertyDetails(details.attributes(), CALLBACKS, details.index()); 3253 PropertyDetails(details.attributes(), CALLBACKS, details.index());
3253 Object* value = descs->GetCallbacksObject(i); 3254 Object* value = descs->GetCallbacksObject(i);
3254 { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value); 3255 { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
3255 if (!maybe_value->ToObject(&value)) return maybe_value; 3256 if (!maybe_value->ToObject(&value)) return maybe_value;
3256 } 3257 }
3257 3258
3258 Object* result; 3259 Object* result;
3259 { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d); 3260 { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
3260 if (!maybe_result->ToObject(&result)) return maybe_result; 3261 if (!maybe_result->ToObject(&result)) return maybe_result;
3261 } 3262 }
3262 dictionary = StringDictionary::cast(result); 3263 dictionary = StringDictionary::cast(result);
3263 } 3264 }
3264 3265
3265 // Allocate the global object and initialize it with the backing store. 3266 // Allocate the global object and initialize it with the backing store.
3266 { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE); 3267 { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
3267 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3268 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3268 } 3269 }
3269 JSObject* global = JSObject::cast(obj); 3270 JSObject* global = JSObject::cast(obj);
3270 InitializeJSObjectFromMap(global, dictionary, map); 3271 InitializeJSObjectFromMap(global, dictionary, map);
3271 3272
3272 // Create a new map for the global object. 3273 // Create a new map for the global object.
3273 { MaybeObject* maybe_obj = map->CopyDropDescriptors(); 3274 { MaybeObject* maybe_obj = map->CopyDropDescriptors();
3274 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3275 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3275 } 3276 }
3276 Map* new_map = Map::cast(obj); 3277 Map* new_map = Map::cast(obj);
3277 3278
3278 // Set up the global object as a normalized object. 3279 // Set up the global object as a normalized object.
3279 global->set_map(new_map); 3280 global->set_map(new_map);
3280 global->map()->set_instance_descriptors(Heap::empty_descriptor_array()); 3281 global->map()->set_instance_descriptors(empty_descriptor_array());
3281 global->set_properties(dictionary); 3282 global->set_properties(dictionary);
3282 3283
3283 // Make sure result is a global object with properties in dictionary. 3284 // Make sure result is a global object with properties in dictionary.
3284 ASSERT(global->IsGlobalObject()); 3285 ASSERT(global->IsGlobalObject());
3285 ASSERT(!global->HasFastProperties()); 3286 ASSERT(!global->HasFastProperties());
3286 return global; 3287 return global;
3287 } 3288 }
3288 3289
3289 3290
3290 MaybeObject* Heap::CopyJSObject(JSObject* source) { 3291 MaybeObject* Heap::CopyJSObject(JSObject* source) {
(...skipping 18 matching lines...)
3309 source->address(), 3310 source->address(),
3310 object_size); 3311 object_size);
3311 // Update write barrier for all fields that lie beyond the header. 3312 // Update write barrier for all fields that lie beyond the header.
3312 RecordWrites(clone_address, 3313 RecordWrites(clone_address,
3313 JSObject::kHeaderSize, 3314 JSObject::kHeaderSize,
3314 (object_size - JSObject::kHeaderSize) / kPointerSize); 3315 (object_size - JSObject::kHeaderSize) / kPointerSize);
3315 } else { 3316 } else {
3316 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); 3317 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
3317 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 3318 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3318 } 3319 }
3319 ASSERT(Heap::InNewSpace(clone)); 3320 ASSERT(InNewSpace(clone));
3320 // Since we know the clone is allocated in new space, we can copy 3321 // Since we know the clone is allocated in new space, we can copy
3321 // the contents without worrying about updating the write barrier. 3322 // the contents without worrying about updating the write barrier.
3322 CopyBlock(HeapObject::cast(clone)->address(), 3323 CopyBlock(HeapObject::cast(clone)->address(),
3323 source->address(), 3324 source->address(),
3324 object_size); 3325 object_size);
3325 } 3326 }
3326 3327
3327 FixedArray* elements = FixedArray::cast(source->elements()); 3328 FixedArray* elements = FixedArray::cast(source->elements());
3328 FixedArray* properties = FixedArray::cast(source->properties()); 3329 FixedArray* properties = FixedArray::cast(source->properties());
3329 // Update elements if necessary. 3330 // Update elements if necessary.
3330 if (elements->length() > 0) { 3331 if (elements->length() > 0) {
3331 Object* elem; 3332 Object* elem;
3332 { MaybeObject* maybe_elem = 3333 { MaybeObject* maybe_elem =
3333 (elements->map() == fixed_cow_array_map()) ? 3334 (elements->map() == fixed_cow_array_map()) ?
3334 elements : CopyFixedArray(elements); 3335 elements : CopyFixedArray(elements);
3335 if (!maybe_elem->ToObject(&elem)) return maybe_elem; 3336 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
3336 } 3337 }
3337 JSObject::cast(clone)->set_elements(FixedArray::cast(elem)); 3338 JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
3338 } 3339 }
3339 // Update properties if necessary. 3340 // Update properties if necessary.
3340 if (properties->length() > 0) { 3341 if (properties->length() > 0) {
3341 Object* prop; 3342 Object* prop;
3342 { MaybeObject* maybe_prop = CopyFixedArray(properties); 3343 { MaybeObject* maybe_prop = CopyFixedArray(properties);
3343 if (!maybe_prop->ToObject(&prop)) return maybe_prop; 3344 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
3344 } 3345 }
3345 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); 3346 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
3346 } 3347 }
3347 // Return the new clone. 3348 // Return the new clone.
3348 #ifdef ENABLE_LOGGING_AND_PROFILING 3349 #ifdef ENABLE_LOGGING_AND_PROFILING
3349 ProducerHeapProfile::RecordJSObjectAllocation(clone); 3350 isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
3350 #endif 3351 #endif
3351 return clone; 3352 return clone;
3352 } 3353 }
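
CopyJSObject takes the two branches above because of the generational write barrier: a clone in old space must record its outgoing fields (RecordWrites), while a clone in new space can be copied wholesale. A toy write barrier that makes the distinction concrete, with invented types in place of HeapObject and the store buffer:

#include <cstddef>
#include <set>

struct Obj {
  bool in_new_space;
  Obj* field;
};

static std::set<Obj**> remembered_slots;  // stand-in for the store buffer

void WriteField(Obj* holder, Obj* value) {
  holder->field = value;
  // Old-to-new pointers must be remembered; stores into new-space holders
  // need no record, which is what lets the new-space branch skip RecordWrites.
  if (!holder->in_new_space && value != NULL && value->in_new_space) {
    remembered_slots.insert(&holder->field);  // RecordWrites analogue
  }
}
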
3353 3354
3354 3355
3355 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, 3356 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
3356 JSGlobalProxy* object) { 3357 JSGlobalProxy* object) {
3357 ASSERT(constructor->has_initial_map()); 3358 ASSERT(constructor->has_initial_map());
3358 Map* map = constructor->initial_map(); 3359 Map* map = constructor->initial_map();
3359 3360
(...skipping 35 matching lines...)
3395 } 3396 }
3396 3397
3397 3398
3398 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, 3399 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
3399 PretenureFlag pretenure) { 3400 PretenureFlag pretenure) {
3400 // V8 only supports characters in the Basic Multilingual Plane. 3401 // V8 only supports characters in the Basic Multilingual Plane.
3401 const uc32 kMaxSupportedChar = 0xFFFF; 3402 const uc32 kMaxSupportedChar = 0xFFFF;
3402 // Count the number of characters in the UTF-8 string and check if 3403 // Count the number of characters in the UTF-8 string and check if
3403 // it is an ASCII string. 3404 // it is an ASCII string.
3404 Access<ScannerConstants::Utf8Decoder> 3405 Access<ScannerConstants::Utf8Decoder>
3405 decoder(ScannerConstants::utf8_decoder()); 3406 decoder(isolate_->scanner_constants()->utf8_decoder());
3406 decoder->Reset(string.start(), string.length()); 3407 decoder->Reset(string.start(), string.length());
3407 int chars = 0; 3408 int chars = 0;
3408 while (decoder->has_more()) { 3409 while (decoder->has_more()) {
3409 decoder->GetNext(); 3410 decoder->GetNext();
3410 chars++; 3411 chars++;
3411 } 3412 }
3412 3413
3413 Object* result; 3414 Object* result;
3414 { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure); 3415 { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
3415 if (!maybe_result->ToObject(&result)) return maybe_result; 3416 if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 32 matching lines...)
3448 return result; 3449 return result;
3449 } 3450 }
3450 3451
3451 3452
3452 Map* Heap::SymbolMapForString(String* string) { 3453 Map* Heap::SymbolMapForString(String* string) {
3453 // If the string is in new space it cannot be used as a symbol. 3454 // If the string is in new space it cannot be used as a symbol.
3454 if (InNewSpace(string)) return NULL; 3455 if (InNewSpace(string)) return NULL;
3455 3456
3456 // Find the corresponding symbol map for strings. 3457 // Find the corresponding symbol map for strings.
3457 Map* map = string->map(); 3458 Map* map = string->map();
3458 if (map == ascii_string_map()) return ascii_symbol_map(); 3459 if (map == ascii_string_map()) {
3459 if (map == string_map()) return symbol_map(); 3460 return ascii_symbol_map();
3460 if (map == cons_string_map()) return cons_symbol_map(); 3461 }
3461 if (map == cons_ascii_string_map()) return cons_ascii_symbol_map(); 3462 if (map == string_map()) {
3462 if (map == external_string_map()) return external_symbol_map(); 3463 return symbol_map();
3463 if (map == external_ascii_string_map()) return external_ascii_symbol_map(); 3464 }
3465 if (map == cons_string_map()) {
3466 return cons_symbol_map();
3467 }
3468 if (map == cons_ascii_string_map()) {
3469 return cons_ascii_symbol_map();
3470 }
3471 if (map == external_string_map()) {
3472 return external_symbol_map();
3473 }
3474 if (map == external_ascii_string_map()) {
3475 return external_ascii_symbol_map();
3476 }
3464 if (map == external_string_with_ascii_data_map()) { 3477 if (map == external_string_with_ascii_data_map()) {
3465 return external_symbol_with_ascii_data_map(); 3478 return external_symbol_with_ascii_data_map();
3466 } 3479 }
3467 3480
3468 // No match found. 3481 // No match found.
3469 return NULL; 3482 return NULL;
3470 } 3483 }
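
SymbolMapForString is a closed translation from string maps to their symbol twins, and the chain of comparisons above is equivalent to a table lookup. A table-driven sketch with hypothetical enum identifiers in place of the real Map* pointers:

#include <cstddef>

enum StringMapId { kAsciiStr, kTwoByteStr, kConsStr, kExternalStr };
enum SymbolMapId { kAsciiSym, kTwoByteSym, kConsSym, kExternalSym, kNoSymbolMap };

SymbolMapId SymbolMapFor(StringMapId m) {
  static const struct { StringMapId from; SymbolMapId to; } kTable[] = {
    { kAsciiStr,    kAsciiSym    },
    { kTwoByteStr,  kTwoByteSym  },
    { kConsStr,     kConsSym     },
    { kExternalStr, kExternalSym },
  };
  for (size_t i = 0; i < sizeof(kTable) / sizeof(kTable[0]); i++) {
    if (kTable[i].from == m) return kTable[i].to;
  }
  return kNoSymbolMap;  // like the NULL return above: caller must copy instead
}
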
3471 3484
3472 3485
3473 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer, 3486 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
(...skipping 153 matching lines...)
3627 : lo_space_->AllocateRawFixedArray(size); 3640 : lo_space_->AllocateRawFixedArray(size);
3628 } 3641 }
3629 3642
3630 3643
3631 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 3644 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
3632 int len = src->length(); 3645 int len = src->length();
3633 Object* obj; 3646 Object* obj;
3634 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); 3647 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
3635 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3648 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3636 } 3649 }
3637 if (Heap::InNewSpace(obj)) { 3650 if (InNewSpace(obj)) {
3638 HeapObject* dst = HeapObject::cast(obj); 3651 HeapObject* dst = HeapObject::cast(obj);
3639 dst->set_map(map); 3652 dst->set_map(map);
3640 CopyBlock(dst->address() + kPointerSize, 3653 CopyBlock(dst->address() + kPointerSize,
3641 src->address() + kPointerSize, 3654 src->address() + kPointerSize,
3642 FixedArray::SizeFor(len) - kPointerSize); 3655 FixedArray::SizeFor(len) - kPointerSize);
3643 return obj; 3656 return obj;
3644 } 3657 }
3645 HeapObject::cast(obj)->set_map(map); 3658 HeapObject::cast(obj)->set_map(map);
3646 FixedArray* result = FixedArray::cast(obj); 3659 FixedArray* result = FixedArray::cast(obj);
3647 result->set_length(len); 3660 result->set_length(len);
(...skipping 11 matching lines...)
3659 if (length == 0) return empty_fixed_array(); 3672 if (length == 0) return empty_fixed_array();
3660 Object* result; 3673 Object* result;
3661 { MaybeObject* maybe_result = AllocateRawFixedArray(length); 3674 { MaybeObject* maybe_result = AllocateRawFixedArray(length);
3662 if (!maybe_result->ToObject(&result)) return maybe_result; 3675 if (!maybe_result->ToObject(&result)) return maybe_result;
3663 } 3676 }
3664 // Initialize header. 3677 // Initialize header.
3665 FixedArray* array = reinterpret_cast<FixedArray*>(result); 3678 FixedArray* array = reinterpret_cast<FixedArray*>(result);
3666 array->set_map(fixed_array_map()); 3679 array->set_map(fixed_array_map());
3667 array->set_length(length); 3680 array->set_length(length);
3668 // Initialize body. 3681 // Initialize body.
3669 ASSERT(!Heap::InNewSpace(undefined_value())); 3682 ASSERT(!InNewSpace(undefined_value()));
3670 MemsetPointer(array->data_start(), undefined_value(), length); 3683 MemsetPointer(array->data_start(), undefined_value(), length);
3671 return result; 3684 return result;
3672 } 3685 }
3673 3686
3674 3687
3675 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { 3688 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
3676 if (length < 0 || length > FixedArray::kMaxLength) { 3689 if (length < 0 || length > FixedArray::kMaxLength) {
3677 return Failure::OutOfMemoryException(); 3690 return Failure::OutOfMemoryException();
3678 } 3691 }
3679 3692
(...skipping 10 matching lines...)
3690 } 3703 }
3691 3704
3692 AllocationSpace retry_space = 3705 AllocationSpace retry_space =
3693 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE; 3706 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
3694 3707
3695 return AllocateRaw(size, space, retry_space); 3708 return AllocateRaw(size, space, retry_space);
3696 } 3709 }
3697 3710
3698 3711
3699 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( 3712 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
3713 Heap* heap,
3700 int length, 3714 int length,
3701 PretenureFlag pretenure, 3715 PretenureFlag pretenure,
3702 Object* filler) { 3716 Object* filler) {
3703 ASSERT(length >= 0); 3717 ASSERT(length >= 0);
3704 ASSERT(Heap::empty_fixed_array()->IsFixedArray()); 3718 ASSERT(heap->empty_fixed_array()->IsFixedArray());
3705 if (length == 0) return Heap::empty_fixed_array(); 3719 if (length == 0) return heap->empty_fixed_array();
3706 3720
3707 ASSERT(!Heap::InNewSpace(filler)); 3721 ASSERT(!heap->InNewSpace(filler));
3708 Object* result; 3722 Object* result;
3709 { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure); 3723 { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
3710 if (!maybe_result->ToObject(&result)) return maybe_result; 3724 if (!maybe_result->ToObject(&result)) return maybe_result;
3711 } 3725 }
3712 3726
3713 HeapObject::cast(result)->set_map(Heap::fixed_array_map()); 3727 HeapObject::cast(result)->set_map(heap->fixed_array_map());
3714 FixedArray* array = FixedArray::cast(result); 3728 FixedArray* array = FixedArray::cast(result);
3715 array->set_length(length); 3729 array->set_length(length);
3716 MemsetPointer(array->data_start(), filler, length); 3730 MemsetPointer(array->data_start(), filler, length);
3717 return array; 3731 return array;
3718 } 3732 }
3719 3733
3720 3734
3721 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { 3735 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
3722 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); 3736 return AllocateFixedArrayWithFiller(this,
3737 length,
3738 pretenure,
3739 undefined_value());
3723 } 3740 }
3724 3741
3725 3742
3726 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length, 3743 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
3727 PretenureFlag pretenure) { 3744 PretenureFlag pretenure) {
3728 return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value()); 3745 return AllocateFixedArrayWithFiller(this,
3746 length,
3747 pretenure,
3748 the_hole_value());
3729 } 3749 }
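
AllocateFixedArray and AllocateFixedArrayWithHoles share one body and differ only in the filler passed to AllocateFixedArrayWithFiller. The same parameterization in miniature, with 0 standing in for undefined_value() and -1 for the_hole_value():

#include <cstddef>
#include <vector>

// One allocator body, two entry points that differ only in the fill value.
std::vector<int> AllocateWithFiller(int length, int filler) {
  return std::vector<int>(static_cast<size_t>(length), filler);  // MemsetPointer analogue
}

std::vector<int> AllocateFixedArrayToy(int length)          { return AllocateWithFiller(length, 0); }
std::vector<int> AllocateFixedArrayWithHolesToy(int length) { return AllocateWithFiller(length, -1); }
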
3730 3750
3731 3751
3732 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) { 3752 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
3733 if (length == 0) return empty_fixed_array(); 3753 if (length == 0) return empty_fixed_array();
3734 3754
3735 Object* obj; 3755 Object* obj;
3736 { MaybeObject* maybe_obj = AllocateRawFixedArray(length); 3756 { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
3737 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3757 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3738 } 3758 }
3739 3759
3740 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map()); 3760 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
3741 FixedArray::cast(obj)->set_length(length); 3761 FixedArray::cast(obj)->set_length(length);
3742 return obj; 3762 return obj;
3743 } 3763 }
3744 3764
3745 3765
3746 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { 3766 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
3747 Object* result; 3767 Object* result;
3748 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure); 3768 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
3749 if (!maybe_result->ToObject(&result)) return maybe_result; 3769 if (!maybe_result->ToObject(&result)) return maybe_result;
3750 } 3770 }
3751 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map()); 3771 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
3752 ASSERT(result->IsHashTable()); 3772 ASSERT(result->IsHashTable());
3753 return result; 3773 return result;
3754 } 3774 }
3755 3775
3756 3776
3757 MaybeObject* Heap::AllocateGlobalContext() { 3777 MaybeObject* Heap::AllocateGlobalContext() {
3758 Object* result; 3778 Object* result;
3759 { MaybeObject* maybe_result = 3779 { MaybeObject* maybe_result =
3760 Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); 3780 AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
3761 if (!maybe_result->ToObject(&result)) return maybe_result; 3781 if (!maybe_result->ToObject(&result)) return maybe_result;
3762 } 3782 }
3763 Context* context = reinterpret_cast<Context*>(result); 3783 Context* context = reinterpret_cast<Context*>(result);
3764 context->set_map(global_context_map()); 3784 context->set_map(global_context_map());
3765 ASSERT(context->IsGlobalContext()); 3785 ASSERT(context->IsGlobalContext());
3766 ASSERT(result->IsContext()); 3786 ASSERT(result->IsContext());
3767 return result; 3787 return result;
3768 } 3788 }
3769 3789
3770 3790
3771 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) { 3791 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
3772 ASSERT(length >= Context::MIN_CONTEXT_SLOTS); 3792 ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
3773 Object* result; 3793 Object* result;
3774 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length); 3794 { MaybeObject* maybe_result = AllocateFixedArray(length);
3775 if (!maybe_result->ToObject(&result)) return maybe_result; 3795 if (!maybe_result->ToObject(&result)) return maybe_result;
3776 } 3796 }
3777 Context* context = reinterpret_cast<Context*>(result); 3797 Context* context = reinterpret_cast<Context*>(result);
3778 context->set_map(context_map()); 3798 context->set_map(context_map());
3779 context->set_closure(function); 3799 context->set_closure(function);
3780 context->set_fcontext(context); 3800 context->set_fcontext(context);
3781 context->set_previous(NULL); 3801 context->set_previous(NULL);
3782 context->set_extension(NULL); 3802 context->set_extension(NULL);
3783 context->set_global(function->context()->global()); 3803 context->set_global(function->context()->global());
3784 ASSERT(!context->IsGlobalContext()); 3804 ASSERT(!context->IsGlobalContext());
3785 ASSERT(context->is_function_context()); 3805 ASSERT(context->is_function_context());
3786 ASSERT(result->IsContext()); 3806 ASSERT(result->IsContext());
3787 return result; 3807 return result;
3788 } 3808 }
3789 3809
3790 3810
3791 MaybeObject* Heap::AllocateWithContext(Context* previous, 3811 MaybeObject* Heap::AllocateWithContext(Context* previous,
3792 JSObject* extension, 3812 JSObject* extension,
3793 bool is_catch_context) { 3813 bool is_catch_context) {
3794 Object* result; 3814 Object* result;
3795 { MaybeObject* maybe_result = 3815 { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3796 Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3797 if (!maybe_result->ToObject(&result)) return maybe_result; 3816 if (!maybe_result->ToObject(&result)) return maybe_result;
3798 } 3817 }
3799 Context* context = reinterpret_cast<Context*>(result); 3818 Context* context = reinterpret_cast<Context*>(result);
3800 context->set_map(is_catch_context ? catch_context_map() : context_map()); 3819 context->set_map(is_catch_context ? catch_context_map() :
3820 context_map());
3801 context->set_closure(previous->closure()); 3821 context->set_closure(previous->closure());
3802 context->set_fcontext(previous->fcontext()); 3822 context->set_fcontext(previous->fcontext());
3803 context->set_previous(previous); 3823 context->set_previous(previous);
3804 context->set_extension(extension); 3824 context->set_extension(extension);
3805 context->set_global(previous->global()); 3825 context->set_global(previous->global());
3806 ASSERT(!context->IsGlobalContext()); 3826 ASSERT(!context->IsGlobalContext());
3807 ASSERT(!context->is_function_context()); 3827 ASSERT(!context->is_function_context());
3808 ASSERT(result->IsContext()); 3828 ASSERT(result->IsContext());
3809 return result; 3829 return result;
3810 } 3830 }
3811 3831
3812 3832
3813 MaybeObject* Heap::AllocateStruct(InstanceType type) { 3833 MaybeObject* Heap::AllocateStruct(InstanceType type) {
3814 Map* map; 3834 Map* map;
3815 switch (type) { 3835 switch (type) {
3816 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break; 3836 #define MAKE_CASE(NAME, Name, name) \
3837 case NAME##_TYPE: map = name##_map(); break;
3817 STRUCT_LIST(MAKE_CASE) 3838 STRUCT_LIST(MAKE_CASE)
3818 #undef MAKE_CASE 3839 #undef MAKE_CASE
3819 default: 3840 default:
3820 UNREACHABLE(); 3841 UNREACHABLE();
3821 return Failure::InternalError(); 3842 return Failure::InternalError();
3822 } 3843 }
3823 int size = map->instance_size(); 3844 int size = map->instance_size();
3824 AllocationSpace space = 3845 AllocationSpace space =
3825 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; 3846 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
3826 Object* result; 3847 Object* result;
3827 { MaybeObject* maybe_result = Heap::Allocate(map, space); 3848 { MaybeObject* maybe_result = Allocate(map, space);
3828 if (!maybe_result->ToObject(&result)) return maybe_result; 3849 if (!maybe_result->ToObject(&result)) return maybe_result;
3829 } 3850 }
3830 Struct::cast(result)->InitializeBody(size); 3851 Struct::cast(result)->InitializeBody(size);
3831 return result; 3852 return result;
3832 } 3853 }
3833 3854
3834 3855
3835 void Heap::EnsureHeapIsIterable() { 3856 void Heap::EnsureHeapIsIterable() {
3836 ASSERT(IsAllocationAllowed()); 3857 ASSERT(IsAllocationAllowed());
3837 if (old_pointer_space()->was_swept_conservatively() || 3858 if (old_pointer_space()->was_swept_conservatively() ||
3838 old_data_space()->was_swept_conservatively()) { 3859 old_data_space()->was_swept_conservatively()) {
3839 CollectAllGarbage(kMakeHeapIterableMask); 3860 CollectAllGarbage(kMakeHeapIterableMask);
3840 } 3861 }
3841 ASSERT(!old_pointer_space()->was_swept_conservatively()); 3862 ASSERT(!old_pointer_space()->was_swept_conservatively());
3842 ASSERT(!old_data_space()->was_swept_conservatively()); 3863 ASSERT(!old_data_space()->was_swept_conservatively());
3843 } 3864 }
3844 3865
3845 3866
3846 bool Heap::IdleNotification() { 3867 bool Heap::IdleNotification() {
3847 static const int kIdlesBeforeScavenge = 4; 3868 static const int kIdlesBeforeScavenge = 4;
3848 static const int kIdlesBeforeMarkSweep = 7; 3869 static const int kIdlesBeforeMarkSweep = 7;
3849 static const int kIdlesBeforeMarkCompact = 8; 3870 static const int kIdlesBeforeMarkCompact = 8;
3850 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1; 3871 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
3851 static const unsigned int kGCsBetweenCleanup = 4; 3872 static const unsigned int kGCsBetweenCleanup = 4;
3852 static int number_idle_notifications = 0; 3873
3853 static unsigned int last_gc_count = gc_count_; 3874 if (!last_idle_notification_gc_count_init_) {
3875 last_idle_notification_gc_count_ = gc_count_;
3876 last_idle_notification_gc_count_init_ = true;
3877 }
3854 3878
3855 bool uncommit = true; 3879 bool uncommit = true;
3856 bool finished = false; 3880 bool finished = false;
3857 3881
3858 // Reset the number of idle notifications received when a number of 3882 // Reset the number of idle notifications received when a number of
3859 // GCs have taken place. This allows another round of cleanup based 3883 // GCs have taken place. This allows another round of cleanup based
3860 // on idle notifications if enough work has been carried out to 3884 // on idle notifications if enough work has been carried out to
3861 // provoke a number of garbage collections. 3885 // provoke a number of garbage collections.
3862 if (gc_count_ - last_gc_count < kGCsBetweenCleanup) { 3886 if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
3863 number_idle_notifications = 3887 number_idle_notifications_ =
3864 Min(number_idle_notifications + 1, kMaxIdleCount); 3888 Min(number_idle_notifications_ + 1, kMaxIdleCount);
3865 } else { 3889 } else {
3866 number_idle_notifications = 0; 3890 number_idle_notifications_ = 0;
3867 last_gc_count = gc_count_; 3891 last_idle_notification_gc_count_ = gc_count_;
3868 } 3892 }
3869 3893
3870 if (number_idle_notifications == kIdlesBeforeScavenge) { 3894 if (number_idle_notifications_ == kIdlesBeforeScavenge) {
3871 if (contexts_disposed_ > 0) { 3895 if (contexts_disposed_ > 0) {
3872 HistogramTimerScope scope(&Counters::gc_context); 3896 HistogramTimerScope scope(isolate_->counters()->gc_context());
3873 CollectAllGarbage(kNoGCFlags); 3897 CollectAllGarbage(kNoGCFlags);
3874 } else { 3898 } else {
3875 CollectGarbage(NEW_SPACE); 3899 CollectGarbage(NEW_SPACE);
3876 } 3900 }
3877 new_space_.Shrink(); 3901 new_space_.Shrink();
3878 last_gc_count = gc_count_; 3902 last_idle_notification_gc_count_ = gc_count_;
3879 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { 3903 } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
3880 // Before doing the mark-sweep collections we clear the 3904 // Before doing the mark-sweep collections we clear the
3881 // compilation cache to avoid hanging on to source code and 3905 // compilation cache to avoid hanging on to source code and
3882 // generated code for cached functions. 3906 // generated code for cached functions.
3883 CompilationCache::Clear(); 3907 isolate_->compilation_cache()->Clear();
3884 3908
3885 CollectAllGarbage(kNoGCFlags); 3909 CollectAllGarbage(kNoGCFlags);
3886 new_space_.Shrink(); 3910 new_space_.Shrink();
3887 last_gc_count = gc_count_; 3911 last_idle_notification_gc_count_ = gc_count_;
3888 3912
3889 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) { 3913 } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
3890 CollectAllGarbage(kForceCompactionMask); 3914 CollectAllGarbage(kForceCompactionMask);
3891 new_space_.Shrink(); 3915 new_space_.Shrink();
3892 last_gc_count = gc_count_; 3916 last_idle_notification_gc_count_ = gc_count_;
3917 number_idle_notifications_ = 0;
3893 finished = true; 3918 finished = true;
3894
3895 } else if (contexts_disposed_ > 0) { 3919 } else if (contexts_disposed_ > 0) {
3896 if (FLAG_expose_gc) { 3920 if (FLAG_expose_gc) {
3897 contexts_disposed_ = 0; 3921 contexts_disposed_ = 0;
3898 } else { 3922 } else {
3899 HistogramTimerScope scope(&Counters::gc_context); 3923 HistogramTimerScope scope(isolate_->counters()->gc_context());
3900 CollectAllGarbage(kNoGCFlags); 3924 CollectAllGarbage(kNoGCFlags);
3901 last_gc_count = gc_count_; 3925 last_idle_notification_gc_count_ = gc_count_;
3902 } 3926 }
3903 // If this is the first idle notification, we reset the 3927 // If this is the first idle notification, we reset the
3904 // notification count to avoid letting idle notifications for 3928 // notification count to avoid letting idle notifications for
3905 // context disposal garbage collections start a potentially too 3929 // context disposal garbage collections start a potentially too
3906 // aggressive idle GC cycle. 3930 // aggressive idle GC cycle.
3907 if (number_idle_notifications <= 1) { 3931 if (number_idle_notifications_ <= 1) {
3908 number_idle_notifications = 0; 3932 number_idle_notifications_ = 0;
3909 uncommit = false; 3933 uncommit = false;
3910 } 3934 }
3911 } else if (number_idle_notifications > kIdlesBeforeMarkCompact) { 3935 } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
3912 // If we have received more than kIdlesBeforeMarkCompact idle 3936 // If we have received more than kIdlesBeforeMarkCompact idle
3913 // notifications we do not perform any cleanup because we don't 3937 // notifications we do not perform any cleanup because we don't
3914 // expect to gain much by doing so. 3938 // expect to gain much by doing so.
3915 finished = true; 3939 finished = true;
3916 } 3940 }
3917 3941
3918 // Make sure that we have no pending context disposals and 3942 // Make sure that we have no pending context disposals and
3919 // conditionally uncommit from space. 3943 // conditionally uncommit from space.
3920 ASSERT(contexts_disposed_ == 0); 3944 ASSERT(contexts_disposed_ == 0);
3921 if (uncommit) Heap::UncommitFromSpace(); 3945 if (uncommit) UncommitFromSpace();
3922 return finished; 3946 return finished;
3923 } 3947 }
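
The escalation schedule above (scavenge on the 4th idle notification, mark-sweep on the 7th, forced compaction on the 8th) is easier to see laid out as a table. A sketch that maps a notification count to the action taken, ignoring the context-disposal paths:

#include <cstdio>

// Thresholds copied from IdleNotification above.
const char* IdleAction(int notifications) {
  if (notifications == 4) return "scavenge new space, shrink";          // kIdlesBeforeScavenge
  if (notifications == 7) return "clear compilation cache, full GC";    // kIdlesBeforeMarkSweep
  if (notifications == 8) return "force compaction, shrink, finished";  // kIdlesBeforeMarkCompact
  if (notifications > 8)  return "finished, no further cleanup";
  return "no GC on this notification";
}

int main() {
  for (int n = 1; n <= 9; n++) printf("idle #%d: %s\n", n, IdleAction(n));
  return 0;
}
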
3924 3948
3925 3949
3926 #ifdef DEBUG 3950 #ifdef DEBUG
3927 3951
3928 void Heap::Print() { 3952 void Heap::Print() {
3929 if (!HasBeenSetup()) return; 3953 if (!HasBeenSetup()) return;
3930 Top::PrintStack(); 3954 isolate()->PrintStack();
3931 AllSpaces spaces; 3955 AllSpaces spaces;
3932 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) 3956 for (Space* space = spaces.next(); space != NULL; space = spaces.next())
3933 space->Print(); 3957 space->Print();
3934 } 3958 }
3935 3959
3936 3960
3937 void Heap::ReportCodeStatistics(const char* title) { 3961 void Heap::ReportCodeStatistics(const char* title) {
3938 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title); 3962 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
3939 PagedSpace::ResetCodeStatistics(); 3963 PagedSpace::ResetCodeStatistics();
3940 // We do not look for code in new space, map space, or old space. If code 3964 // We do not look for code in new space, map space, or old space. If code
(...skipping 12 matching lines...) Expand all
3953 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", 3977 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
3954 title, gc_count_); 3978 title, gc_count_);
3955 PrintF("mark-compact GC : %d\n", mc_count_); 3979 PrintF("mark-compact GC : %d\n", mc_count_);
3956 PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n", 3980 PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
3957 old_gen_promotion_limit_); 3981 old_gen_promotion_limit_);
3958 PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n", 3982 PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
3959 old_gen_allocation_limit_); 3983 old_gen_allocation_limit_);
3960 3984
3961 PrintF("\n"); 3985 PrintF("\n");
3962 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles()); 3986 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
3963 GlobalHandles::PrintStats(); 3987 isolate_->global_handles()->PrintStats();
3964 PrintF("\n"); 3988 PrintF("\n");
3965 3989
3966 PrintF("Heap statistics : "); 3990 PrintF("Heap statistics : ");
3967 MemoryAllocator::ReportStatistics(); 3991 isolate_->memory_allocator()->ReportStatistics();
3968 PrintF("To space : "); 3992 PrintF("To space : ");
3969 new_space_.ReportStatistics(); 3993 new_space_.ReportStatistics();
3970 PrintF("Old pointer space : "); 3994 PrintF("Old pointer space : ");
3971 old_pointer_space_->ReportStatistics(); 3995 old_pointer_space_->ReportStatistics();
3972 PrintF("Old data space : "); 3996 PrintF("Old data space : ");
3973 old_data_space_->ReportStatistics(); 3997 old_data_space_->ReportStatistics();
3974 PrintF("Code space : "); 3998 PrintF("Code space : ");
3975 code_space_->ReportStatistics(); 3999 code_space_->ReportStatistics();
3976 PrintF("Map space : "); 4000 PrintF("Map space : ");
3977 map_space_->ReportStatistics(); 4001 map_space_->ReportStatistics();
(...skipping 52 matching lines...)
4030 4054
4031 return false; 4055 return false;
4032 } 4056 }
4033 4057
4034 4058
4035 #ifdef DEBUG 4059 #ifdef DEBUG
4036 static void DummyScavengePointer(HeapObject** p, HeapObject* o) { 4060 static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
4037 // When we are not in GC the Heap::InNewSpace() predicate 4061 // When we are not in GC the Heap::InNewSpace() predicate
4038 // checks that pointers which satisfy the predicate point into 4062 // checks that pointers which satisfy the predicate point into
4039 // the active semispace. 4063 // the active semispace.
4040 Heap::InNewSpace(*p); 4064 // TODO(gc) ISOLATES MERGE
4065 HEAP->InNewSpace(*p);
4041 } 4066 }
4042 4067
4043 4068
4044 static void VerifyPointers( 4069 static void VerifyPointers(
4045 PagedSpace* space, 4070 PagedSpace* space,
4046 PointerRegionCallback visit_pointer_region) { 4071 PointerRegionCallback visit_pointer_region) {
4047 PageIterator it(space); 4072 PageIterator it(space);
4048 4073
4049 while (it.has_next()) { 4074 while (it.has_next()) {
4050 Page* page = it.next(); 4075 Page* page = it.next();
4051 Heap::IteratePointersOnPage( 4076 HEAP->IteratePointersOnPage(reinterpret_cast<PagedSpace*>(page->owner()),
4052 reinterpret_cast<PagedSpace*>(page->owner()), 4077 &Heap::IteratePointersToNewSpace,
4053 &Heap::IteratePointersToNewSpace, 4078 &DummyScavengePointer,
4054 &DummyScavengePointer, 4079 page);
4055 page);
4056 } 4080 }
4057 } 4081 }
4058 4082
4059 4083
4060 static void VerifyPointers(LargeObjectSpace* space) { 4084 static void VerifyPointers(LargeObjectSpace* space) {
4061 LargeObjectIterator it(space); 4085 LargeObjectIterator it(space);
4062 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { 4086 for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
4063 if (object->IsFixedArray()) { 4087 if (object->IsFixedArray()) {
4064 Address slot_address = object->address(); 4088 Address slot_address = object->address();
4065 Address end = object->address() + object->Size(); 4089 Address end = object->address() + object->Size();
4066 4090
4067 while (slot_address < end) { 4091 while (slot_address < end) {
4068 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); 4092 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
4069 // When we are not in GC the Heap::InNewSpace() predicate 4093 // When we are not in GC the Heap::InNewSpace() predicate
4070 // checks that pointers which satisfy the predicate point into 4094 // checks that pointers which satisfy the predicate point into
4071 // the active semispace. 4095 // the active semispace.
4072 Heap::InNewSpace(*slot); 4096 HEAP->InNewSpace(*slot);
4073 slot_address += kPointerSize; 4097 slot_address += kPointerSize;
4074 } 4098 }
4075 } 4099 }
4076 } 4100 }
4077 } 4101 }
4078 4102
4079 4103
4080 void Heap::Verify() { 4104 void Heap::Verify() {
4081 ASSERT(HasBeenSetup()); 4105 ASSERT(HasBeenSetup());
4082 4106
4083 StoreBuffer::Verify(); 4107 store_buffer()->Verify();
4084 4108
4085 VerifyPointersVisitor visitor; 4109 VerifyPointersVisitor visitor;
4086 IterateRoots(&visitor, VISIT_ONLY_STRONG); 4110 IterateRoots(&visitor, VISIT_ONLY_STRONG);
4087 4111
4088 new_space_.Verify(); 4112 new_space_.Verify();
4089 4113
4090 old_pointer_space_->Verify(&visitor); 4114 old_pointer_space_->Verify(&visitor);
4091 map_space_->Verify(&visitor); 4115 map_space_->Verify(&visitor);
4092 4116
4093 VerifyPointers(old_pointer_space_, &IteratePointersToNewSpace); 4117 VerifyPointers(old_pointer_space_, &IteratePointersToNewSpace);
(...skipping 85 matching lines...)
4179 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure()); 4203 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure());
4180 for (Address a = new_space_.FromSpaceLow(); 4204 for (Address a = new_space_.FromSpaceLow();
4181 a < new_space_.FromSpaceHigh(); 4205 a < new_space_.FromSpaceHigh();
4182 a += kPointerSize) { 4206 a += kPointerSize) {
4183 Memory::Address_at(a) = kFromSpaceZapValue; 4207 Memory::Address_at(a) = kFromSpaceZapValue;
4184 } 4208 }
4185 } 4209 }
4186 #endif // DEBUG 4210 #endif // DEBUG
4187 4211
4188 4212
4189 void Heap::IteratePointersToNewSpace(Address start, 4213 void Heap::IteratePointersToNewSpace(Heap* heap,
4214 Address start,
4190 Address end, 4215 Address end,
4191 ObjectSlotCallback copy_object_func) { 4216 ObjectSlotCallback copy_object_func) {
4192 for (Address slot_address = start; 4217 for (Address slot_address = start;
4193 slot_address < end; 4218 slot_address < end;
4194 slot_address += kPointerSize) { 4219 slot_address += kPointerSize) {
4195 Object** slot = reinterpret_cast<Object**>(slot_address); 4220 Object** slot = reinterpret_cast<Object**>(slot_address);
4196 if (Heap::InNewSpace(*slot)) { 4221 if (heap->InNewSpace(*slot)) {
4197 HeapObject* object = reinterpret_cast<HeapObject*>(*slot); 4222 HeapObject* object = reinterpret_cast<HeapObject*>(*slot);
4198 ASSERT(object->IsHeapObject()); 4223 ASSERT(object->IsHeapObject());
4199 copy_object_func(reinterpret_cast<HeapObject**>(slot), object); 4224 copy_object_func(reinterpret_cast<HeapObject**>(slot), object);
4200 } 4225 }
4201 } 4226 }
4202 } 4227 }
4203 4228
4204 4229
4205 // Compute the start address of the first map following the given addr. 4230 // Compute the start address of the first map following the given addr.
4206 static inline Address MapStartAlign(Address addr) { 4231 static inline Address MapStartAlign(Address addr) {
4207 Address page = Page::FromAddress(addr)->ObjectAreaStart(); 4232 Address page = Page::FromAddress(addr)->ObjectAreaStart();
4208 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize); 4233 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
4209 } 4234 }
4210 4235
4211 4236
4212 // Compute the end address of the first map preceding the given addr. 4237 // Compute the end address of the first map preceding the given addr.
4213 static inline Address MapEndAlign(Address addr) { 4238 static inline Address MapEndAlign(Address addr) {
4214 Address page = Page::FromAllocationTop(addr)->ObjectAreaStart(); 4239 Address page = Page::FromAllocationTop(addr)->ObjectAreaStart();
4215 return page + ((addr - page) / Map::kSize * Map::kSize); 4240 return page + ((addr - page) / Map::kSize * Map::kSize);
4216 } 4241 }
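
MapStartAlign and MapEndAlign above are plain round-up/round-down to Map::kSize boundaries, measured from the page's object area start. A minimal standalone sketch of the arithmetic, using a hypothetical kMapSize in place of Map::kSize and offset 0 for the page start:

    #include <cassert>
    #include <cstdint>

    static const uintptr_t kMapSize = 88;  // hypothetical stand-in for Map::kSize

    // Round an offset up to the next kMapSize boundary (the MapStartAlign shape).
    uintptr_t RoundUpToMap(uintptr_t offset) {
      return (offset + kMapSize - 1) / kMapSize * kMapSize;
    }

    // Round an offset down to the previous kMapSize boundary (the MapEndAlign shape).
    uintptr_t RoundDownToMap(uintptr_t offset) {
      return offset / kMapSize * kMapSize;
    }

    int main() {
      assert(RoundUpToMap(0) == 0);
      assert(RoundUpToMap(1) == kMapSize);              // first boundary after offset 1
      assert(RoundDownToMap(kMapSize + 5) == kMapSize);
      return 0;
    }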
4217 4242
4218 4243
4219 static void IteratePointersToNewSpaceInMaps( 4244 static void IteratePointersToNewSpaceInMaps(
4245 Heap* heap,
4220 Address start, 4246 Address start,
4221 Address end, 4247 Address end,
4222 ObjectSlotCallback copy_object_func) { 4248 ObjectSlotCallback copy_object_func) {
4223 ASSERT(MapStartAlign(start) == start); 4249 ASSERT(MapStartAlign(start) == start);
4224 ASSERT(MapEndAlign(end) == end); 4250 ASSERT(MapEndAlign(end) == end);
4225 4251
4226 Address map_address = start; 4252 Address map_address = start;
4227
4228 while (map_address < end) { 4253 while (map_address < end) {
4229 ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address))); 4254 ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
4230 ASSERT(Memory::Object_at(map_address)->IsMap()); 4255 ASSERT(Memory::Object_at(map_address)->IsMap());
4231 4256
4232 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; 4257 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
4233 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; 4258 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
4234 4259
4235 Heap::IteratePointersToNewSpace(pointer_fields_start, 4260 Heap::IteratePointersToNewSpace(heap,
4261 pointer_fields_start,
4236 pointer_fields_end, 4262 pointer_fields_end,
4237 copy_object_func); 4263 copy_object_func);
4238 map_address += Map::kSize; 4264 map_address += Map::kSize;
4239 } 4265 }
4240 } 4266 }
4241 4267
4242 4268
4243 void Heap::IteratePointersFromMapsToNewSpace( 4269 void Heap::IteratePointersFromMapsToNewSpace(
4270 Heap* heap,
4244 Address start, 4271 Address start,
4245 Address end, 4272 Address end,
4246 ObjectSlotCallback copy_object_func) { 4273 ObjectSlotCallback copy_object_func) {
4247 Address map_aligned_start = MapStartAlign(start); 4274 Address map_aligned_start = MapStartAlign(start);
4248 Address map_aligned_end = MapEndAlign(end); 4275 Address map_aligned_end = MapEndAlign(end);
4249 4276
4250 ASSERT(map_aligned_start == start); 4277 ASSERT(map_aligned_start == start);
4251 ASSERT(map_aligned_end == end); 4278 ASSERT(map_aligned_end == end);
4252 4279
4253 IteratePointersToNewSpaceInMaps(map_aligned_start, 4280 IteratePointersToNewSpaceInMaps(heap,
4281 map_aligned_start,
4254 map_aligned_end, 4282 map_aligned_end,
4255 copy_object_func); 4283 copy_object_func);
4256 } 4284 }
4257 4285
4258 4286
4259 void Heap::IterateAndMarkPointersToFromSpace(Address start, 4287 void Heap::IterateAndMarkPointersToFromSpace(Address start,
4260 Address end, 4288 Address end,
4261 ObjectSlotCallback callback) { 4289 ObjectSlotCallback callback) {
4262 Address slot_address = start; 4290 Address slot_address = start;
4263 while (slot_address < end) { 4291 while (slot_address < end) {
4264 Object** slot = reinterpret_cast<Object**>(slot_address); 4292 Object** slot = reinterpret_cast<Object**>(slot_address);
4265 Object* object = *slot; 4293 Object* object = *slot;
4266 // If the store buffer becomes overfull we mark pages as being exempt from 4294 // If the store buffer becomes overfull we mark pages as being exempt from
4267 // the store buffer. These pages are scanned to find pointers that point 4295 // the store buffer. These pages are scanned to find pointers that point
4268 // to the new space. In that case we may hit newly promoted objects and 4296 // to the new space. In that case we may hit newly promoted objects and
4269 // fix the pointers before the promotion queue gets to them. Thus the 'if'. 4297 // fix the pointers before the promotion queue gets to them. Thus the 'if'.
4270 if (Heap::InFromSpace(object)) { 4298 if (Heap::InFromSpace(object)) {
4271 callback(reinterpret_cast<HeapObject**>(slot), HeapObject::cast(object)); 4299 callback(reinterpret_cast<HeapObject**>(slot), HeapObject::cast(object));
4272 if (Heap::InNewSpace(*slot)) { 4300 if (InNewSpace(*slot)) {
4273 ASSERT(Heap::InToSpace(*slot)); 4301 ASSERT(Heap::InToSpace(*slot));
4274 ASSERT((*slot)->IsHeapObject()); 4302 ASSERT((*slot)->IsHeapObject());
4275 } 4303 }
4276 } 4304 }
4277 slot_address += kPointerSize; 4305 slot_address += kPointerSize;
4278 } 4306 }
4279 } 4307 }
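
The loop above is the generic slot walk: visit every pointer-sized slot in [start, end) and hand matching ones to the callback. A self-contained sketch of the pattern with a placeholder predicate and callback (illustrative types, not V8's):

    #include <cstdint>
    #include <cstdio>

    typedef void (*SlotCallback)(uintptr_t* slot);

    // Walk pointer-sized slots in [start, end) and invoke the callback on each
    // slot whose value satisfies the predicate (odd values stand in for
    // "points into from space" here).
    void WalkSlots(uintptr_t* start, uintptr_t* end,
                   bool (*predicate)(uintptr_t), SlotCallback callback) {
      for (uintptr_t* slot = start; slot < end; slot++) {
        if (predicate(*slot)) callback(slot);
      }
    }

    static bool IsOdd(uintptr_t value) { return (value & 1) != 0; }
    static void Report(uintptr_t* slot) {
      printf("interesting slot %p\n", reinterpret_cast<void*>(slot));
    }

    int main() {
      uintptr_t area[4] = {2, 3, 4, 5};
      WalkSlots(area, area + 4, IsOdd, Report);  // reports the slots holding 3 and 5
      return 0;
    }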
4280 4308
4281 4309
4282 #ifdef DEBUG 4310 #ifdef DEBUG
(...skipping 17 matching lines...)
4300 Object** limit, 4328 Object** limit,
4301 Object**** store_buffer_position, 4329 Object**** store_buffer_position,
4302 Object*** store_buffer_top, 4330 Object*** store_buffer_top,
4303 CheckStoreBufferFilter filter, 4331 CheckStoreBufferFilter filter,
4304 Address special_garbage_start, 4332 Address special_garbage_start,
4305 Address special_garbage_end) { 4333 Address special_garbage_end) {
4306 for ( ; current < limit; current++) { 4334 for ( ; current < limit; current++) {
4307 Object* o = *current; 4335 Object* o = *current;
4308 Address current_address = reinterpret_cast<Address>(current); 4336 Address current_address = reinterpret_cast<Address>(current);
4309 // Skip free space. 4337 // Skip free space.
4310 if (o == Heap::free_space_map()) { 4338 // TODO(gc) ISOLATES MERGE
4339 if (o == HEAP->free_space_map()) {
4311 Address current_address = reinterpret_cast<Address>(current); 4340 Address current_address = reinterpret_cast<Address>(current);
4312 FreeSpace* free_space = 4341 FreeSpace* free_space =
4313 FreeSpace::cast(HeapObject::FromAddress(current_address)); 4342 FreeSpace::cast(HeapObject::FromAddress(current_address));
4314 int skip = free_space->Size(); 4343 int skip = free_space->Size();
4315 ASSERT(current_address + skip <= reinterpret_cast<Address>(limit)); 4344 ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
4316 ASSERT(skip > 0); 4345 ASSERT(skip > 0);
4317 current_address += skip - kPointerSize; 4346 current_address += skip - kPointerSize;
4318 current = reinterpret_cast<Object**>(current_address); 4347 current = reinterpret_cast<Object**>(current_address);
4319 continue; 4348 continue;
4320 } 4349 }
4321 // Skip the current linear allocation space between top and limit, which is 4350 // Skip the current linear allocation space between top and limit, which is
4322 // not marked with the free space map but can contain junk. 4351 // not marked with the free space map but can contain junk.
4323 if (current_address == special_garbage_start && 4352 if (current_address == special_garbage_start &&
4324 special_garbage_end != special_garbage_start) { 4353 special_garbage_end != special_garbage_start) {
4325 current_address = special_garbage_end - kPointerSize; 4354 current_address = special_garbage_end - kPointerSize;
4326 current = reinterpret_cast<Object**>(current_address); 4355 current = reinterpret_cast<Object**>(current_address);
4327 continue; 4356 continue;
4328 } 4357 }
4329 if (!(*filter)(current)) continue; 4358 if (!(*filter)(current)) continue;
4330 ASSERT(current_address < special_garbage_start || 4359 ASSERT(current_address < special_garbage_start ||
4331 current_address >= special_garbage_end); 4360 current_address >= special_garbage_end);
4332 ASSERT(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue); 4361 ASSERT(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
4333 // We have to check that the pointer does not point into new space 4362 // We have to check that the pointer does not point into new space
4334 // without trying to cast it to a heap object since the hash field of 4363 // without trying to cast it to a heap object since the hash field of
4335 // a string can contain values like 1 and 3 which are tagged null 4364 // a string can contain values like 1 and 3 which are tagged null
4336 // pointers. 4365 // pointers.
4337 if (!Heap::InNewSpace(o)) continue; 4366 // TODO(gc) ISOLATES MERGE
4367 if (!HEAP->InNewSpace(o)) continue;
4338 while (**store_buffer_position < current && 4368 while (**store_buffer_position < current &&
4339 *store_buffer_position < store_buffer_top) { 4369 *store_buffer_position < store_buffer_top) {
4340 (*store_buffer_position)++; 4370 (*store_buffer_position)++;
4341 } 4371 }
4342 if (**store_buffer_position != current || 4372 if (**store_buffer_position != current ||
4343 *store_buffer_position == store_buffer_top) { 4373 *store_buffer_position == store_buffer_top) {
4344 Object** obj_start = current; 4374 Object** obj_start = current;
4345 while (!(*obj_start)->IsMap()) obj_start--; 4375 while (!(*obj_start)->IsMap()) obj_start--;
4346 UNREACHABLE(); 4376 UNREACHABLE();
4347 } 4377 }
4348 } 4378 }
4349 } 4379 }
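
The hash-field comment above leans on V8-style pointer tagging: a raw word such as 1 or 3 can look like a tagged value, so the new-space test must compare the word against address ranges without ever casting it to an object. A tiny sketch of tag-and-range testing on raw words (the tag constants mirror V8's scheme; everything else is illustrative):

    #include <cassert>
    #include <cstdint>

    static const uintptr_t kHeapObjectTag = 1;      // low bits 01 mark a heap pointer
    static const uintptr_t kHeapObjectTagMask = 3;

    // True if the raw word carries the heap-object tag; never dereferences it.
    bool LooksLikeHeapObject(uintptr_t word) {
      return (word & kHeapObjectTagMask) == kHeapObjectTag;
    }

    // Address-range containment that is safe on arbitrary words: a pure compare.
    bool InRange(uintptr_t word, uintptr_t low, uintptr_t high) {
      return word >= low && word < high;
    }

    int main() {
      assert(LooksLikeHeapObject(1));          // a "tagged null" hash-field value
      assert(!LooksLikeHeapObject(2));
      assert(!InRange(1, 0x1000, 0x2000));     // junk word rejected by the range test
      return 0;
    }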
4350 4380
4351 4381
4352 // Check that the store buffer contains all intergenerational pointers by 4382 // Check that the store buffer contains all intergenerational pointers by
4353 // scanning a page and ensuring that all pointers to young space are in the 4383 // scanning a page and ensuring that all pointers to young space are in the
4354 // store buffer. 4384 // store buffer.
4355 void Heap::OldPointerSpaceCheckStoreBuffer() { 4385 void Heap::OldPointerSpaceCheckStoreBuffer() {
4356 OldSpace* space = old_pointer_space(); 4386 OldSpace* space = old_pointer_space();
4357 PageIterator pages(space); 4387 PageIterator pages(space);
4358 4388
4359 StoreBuffer::SortUniq(); 4389 store_buffer()->SortUniq();
4360 4390
4361 while (pages.has_next()) { 4391 while (pages.has_next()) {
4362 Page* page = pages.next(); 4392 Page* page = pages.next();
4363 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); 4393 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4364 4394
4365 Address end = page->ObjectAreaEnd(); 4395 Address end = page->ObjectAreaEnd();
4366 4396
4367 Object*** store_buffer_position = StoreBuffer::Start(); 4397 Object*** store_buffer_position = store_buffer()->Start();
4368 Object*** store_buffer_top = StoreBuffer::Top(); 4398 Object*** store_buffer_top = store_buffer()->Top();
4369 4399
4370 Object** limit = reinterpret_cast<Object**>(end); 4400 Object** limit = reinterpret_cast<Object**>(end);
4371 CheckStoreBuffer(current, 4401 CheckStoreBuffer(current,
4372 limit, 4402 limit,
4373 &store_buffer_position, 4403 &store_buffer_position,
4374 store_buffer_top, 4404 store_buffer_top,
4375 &EverythingsAPointer, 4405 &EverythingsAPointer,
4376 space->top(), 4406 space->top(),
4377 space->limit()); 4407 space->limit());
4378 } 4408 }
4379 } 4409 }
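
CheckStoreBuffer is essentially a merge walk: the page scan and the SortUniq'ed store buffer both advance in ascending address order, and a new-space pointer whose slot is missing from the buffer trips UNREACHABLE(). A sketch of the same merge check over plain sorted integers (illustrative, not the V8 structures):

    #include <cassert>
    #include <cstddef>

    // Returns true if every element of found[0..nf) appears in the sorted,
    // deduplicated array recorded[0..nr). Both arrays must be ascending.
    bool AllRecorded(const int* found, size_t nf,
                     const int* recorded, size_t nr) {
      size_t pos = 0;
      for (size_t i = 0; i < nf; i++) {
        while (pos < nr && recorded[pos] < found[i]) pos++;       // advance cursor
        if (pos == nr || recorded[pos] != found[i]) return false; // missing entry
      }
      return true;
    }

    int main() {
      const int recorded[] = {10, 20, 30};
      const int complete[] = {10, 30};
      const int missing[] = {10, 25};
      assert(AllRecorded(complete, 2, recorded, 3));
      assert(!AllRecorded(missing, 2, recorded, 3));
      return 0;
    }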
4380 4410
4381 4411
4382 void Heap::MapSpaceCheckStoreBuffer() { 4412 void Heap::MapSpaceCheckStoreBuffer() {
4383 MapSpace* space = map_space(); 4413 MapSpace* space = map_space();
4384 PageIterator pages(space); 4414 PageIterator pages(space);
4385 4415
4386 StoreBuffer::SortUniq(); 4416 store_buffer()->SortUniq();
4387 4417
4388 while (pages.has_next()) { 4418 while (pages.has_next()) {
4389 Page* page = pages.next(); 4419 Page* page = pages.next();
4390 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); 4420 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4391 4421
4392 Address end = page->ObjectAreaEnd(); 4422 Address end = page->ObjectAreaEnd();
4393 4423
4394 Object*** store_buffer_position = StoreBuffer::Start(); 4424 Object*** store_buffer_position = store_buffer()->Start();
4395 Object*** store_buffer_top = StoreBuffer::Top(); 4425 Object*** store_buffer_top = store_buffer()->Top();
4396 4426
4397 Object** limit = reinterpret_cast<Object**>(end); 4427 Object** limit = reinterpret_cast<Object**>(end);
4398 CheckStoreBuffer(current, 4428 CheckStoreBuffer(current,
4399 limit, 4429 limit,
4400 &store_buffer_position, 4430 &store_buffer_position,
4401 store_buffer_top, 4431 store_buffer_top,
4402 &IsAMapPointerAddress, 4432 &IsAMapPointerAddress,
4403 space->top(), 4433 space->top(),
4404 space->limit()); 4434 space->limit());
4405 } 4435 }
4406 } 4436 }
4407 4437
4408 4438
4409 void Heap::LargeObjectSpaceCheckStoreBuffer() { 4439 void Heap::LargeObjectSpaceCheckStoreBuffer() {
4410 LargeObjectIterator it(lo_space()); 4440 LargeObjectIterator it(lo_space());
4411 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { 4441 for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
4412 // We only have code, sequential strings, or fixed arrays in large 4442 // We only have code, sequential strings, or fixed arrays in large
4413 // object space, and only fixed arrays can possibly contain pointers to 4443 // object space, and only fixed arrays can possibly contain pointers to
4414 // the young generation. 4444 // the young generation.
4415 if (object->IsFixedArray()) { 4445 if (object->IsFixedArray()) {
4416 Object*** store_buffer_position = StoreBuffer::Start(); 4446 Object*** store_buffer_position = store_buffer()->Start();
4417 Object*** store_buffer_top = StoreBuffer::Top(); 4447 Object*** store_buffer_top = store_buffer()->Top();
4418 Object** current = reinterpret_cast<Object**>(object->address()); 4448 Object** current = reinterpret_cast<Object**>(object->address());
4419 Object** limit = 4449 Object** limit =
4420 reinterpret_cast<Object**>(object->address() + object->Size()); 4450 reinterpret_cast<Object**>(object->address() + object->Size());
4421 CheckStoreBuffer(current, 4451 CheckStoreBuffer(current,
4422 limit, 4452 limit,
4423 &store_buffer_position, 4453 &store_buffer_position,
4424 store_buffer_top, 4454 store_buffer_top,
4425 &EverythingsAPointer, 4455 &EverythingsAPointer,
4426 NULL, 4456 NULL,
4427 NULL); 4457 NULL);
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
4464 void Heap::IteratePointersOnPage( 4494 void Heap::IteratePointersOnPage(
4465 PagedSpace* space, 4495 PagedSpace* space,
4466 PointerRegionCallback visit_pointer_region, 4496 PointerRegionCallback visit_pointer_region,
4467 ObjectSlotCallback copy_object_func, 4497 ObjectSlotCallback copy_object_func,
4468 Page* page) { 4498 Page* page) {
4469 Address visitable_start = page->ObjectAreaStart(); 4499 Address visitable_start = page->ObjectAreaStart();
4470 Address end_of_page = page->ObjectAreaEnd(); 4500 Address end_of_page = page->ObjectAreaEnd();
4471 4501
4472 Address visitable_end = visitable_start; 4502 Address visitable_end = visitable_start;
4473 4503
4474 Object* free_space_map = Heap::free_space_map(); 4504 // TODO(gc) ISOLATES
4475 Object* two_pointer_filler_map = Heap::two_pointer_filler_map(); 4505 Object* free_space_map = HEAP->free_space_map();
4506 Object* two_pointer_filler_map = HEAP->two_pointer_filler_map();
4476 4507
4477 while (visitable_end < end_of_page) { 4508 while (visitable_end < end_of_page) {
4478 Object* o = *reinterpret_cast<Object**>(visitable_end); 4509 Object* o = *reinterpret_cast<Object**>(visitable_end);
4479 // Skip fillers but not things that look like fillers in the special 4510 // Skip fillers but not things that look like fillers in the special
4480 // garbage section which can contain anything. 4511 // garbage section which can contain anything.
4481 if (o == free_space_map || 4512 if (o == free_space_map ||
4482 o == two_pointer_filler_map || 4513 o == two_pointer_filler_map ||
4483 visitable_end == space->top()) { 4514 visitable_end == space->top()) {
4484 if (visitable_start != visitable_end) { 4515 if (visitable_start != visitable_end) {
4485 // After calling this the special garbage section may have moved. 4516 // After calling this the special garbage section may have moved.
4486 visit_pointer_region(visitable_start, visitable_end, copy_object_func); 4517 visit_pointer_region(HEAP,
4518 visitable_start,
4519 visitable_end,
4520 copy_object_func);
4487 if (visitable_end >= space->top() && visitable_end < space->limit()) { 4521 if (visitable_end >= space->top() && visitable_end < space->limit()) {
4488 visitable_end = space->limit(); 4522 visitable_end = space->limit();
4489 visitable_start = visitable_end; 4523 visitable_start = visitable_end;
4490 continue; 4524 continue;
4491 } 4525 }
4492 } 4526 }
4493 if (visitable_end == space->top() && visitable_end != space->limit()) { 4527 if (visitable_end == space->top() && visitable_end != space->limit()) {
4494 visitable_start = visitable_end = space->limit(); 4528 visitable_start = visitable_end = space->limit();
4495 } else { 4529 } else {
4496 // At this point we are either at the start of a filler or we are at 4530 // At this point we are either at the start of a filler or we are at
4497 // the point where the space->top() used to be before the 4531 // the point where the space->top() used to be before the
4498 // visit_pointer_region call above. Either way we can skip the 4532 // visit_pointer_region call above. Either way we can skip the
4499 // object at the current spot: We don't promise to visit objects 4533 // object at the current spot: We don't promise to visit objects
4500 // allocated during heap traversal, and if space->top() moved then it 4534 // allocated during heap traversal, and if space->top() moved then it
4501 // must be because an object was allocated at this point. 4535 // must be because an object was allocated at this point.
4502 visitable_start = 4536 visitable_start =
4503 visitable_end + HeapObject::FromAddress(visitable_end)->Size(); 4537 visitable_end + HeapObject::FromAddress(visitable_end)->Size();
4504 visitable_end = visitable_start; 4538 visitable_end = visitable_start;
4505 } 4539 }
4506 } else { 4540 } else {
4507 ASSERT(o != free_space_map); 4541 ASSERT(o != free_space_map);
4508 ASSERT(o != two_pointer_filler_map); 4542 ASSERT(o != two_pointer_filler_map);
4509 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); 4543 ASSERT(visitable_end < space->top() || visitable_end >= space->limit());
4510 visitable_end += kPointerSize; 4544 visitable_end += kPointerSize;
4511 } 4545 }
4512 } 4546 }
4513 ASSERT(visitable_end == end_of_page); 4547 ASSERT(visitable_end == end_of_page);
4514 if (visitable_start != visitable_end) { 4548 if (visitable_start != visitable_end) {
4515 visit_pointer_region(visitable_start, visitable_end, copy_object_func); 4549 visit_pointer_region(HEAP,
4550 visitable_start,
4551 visitable_end,
4552 copy_object_func);
4516 } 4553 }
4517 } 4554 }
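
IteratePointersOnPage batches contiguous visitable words and flushes each run to visit_pointer_region when it reaches a filler or the linear allocation area. A compact sketch of that accumulate-and-flush shape, with zero words standing in for fillers (illustrative, not V8's filler maps):

    #include <cstdio>

    // Flush one contiguous run [start, end) of visitable words.
    static void VisitRun(const int* start, const int* end) {
      printf("run of %d words\n", static_cast<int>(end - start));
    }

    // Walk words, treating zero as a one-word filler, and visit the runs in
    // between, the same shape as the page walk above.
    void WalkPage(const int* begin, const int* end) {
      const int* run_start = begin;
      for (const int* p = begin; p < end; p++) {
        if (*p == 0) {                       // hit a filler: flush the pending run
          if (run_start != p) VisitRun(run_start, p);
          run_start = p + 1;                 // skip the filler word itself
        }
      }
      if (run_start != end) VisitRun(run_start, end);  // trailing run
    }

    int main() {
      int page[] = {1, 2, 0, 3, 4, 5, 0, 6};
      WalkPage(page, page + 8);              // prints runs of 2, 3, and 1 words
      return 0;
    }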
4518 4555
4519 4556
4520 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { 4557 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
4521 IterateStrongRoots(v, mode); 4558 IterateStrongRoots(v, mode);
4522 IterateWeakRoots(v, mode); 4559 IterateWeakRoots(v, mode);
4523 } 4560 }
4524 4561
4525 4562
4526 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { 4563 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
4527 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); 4564 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
4528 v->Synchronize("symbol_table"); 4565 v->Synchronize("symbol_table");
4529 if (mode != VISIT_ALL_IN_SCAVENGE) { 4566 if (mode != VISIT_ALL_IN_SCAVENGE) {
4530 // Scavenge collections have special processing for this. 4567 // Scavenge collections have special processing for this.
4531 ExternalStringTable::Iterate(v); 4568 external_string_table_.Iterate(v);
4532 } 4569 }
4533 v->Synchronize("external_string_table"); 4570 v->Synchronize("external_string_table");
4534 } 4571 }
4535 4572
4536 4573
4537 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { 4574 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
4538 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); 4575 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
4539 v->Synchronize("strong_root_list"); 4576 v->Synchronize("strong_root_list");
4540 4577
4541 v->VisitPointer(BitCast<Object**>(&hidden_symbol_)); 4578 v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
4542 v->Synchronize("symbol"); 4579 v->Synchronize("symbol");
4543 4580
4544 Bootstrapper::Iterate(v); 4581 isolate_->bootstrapper()->Iterate(v);
4545 v->Synchronize("bootstrapper"); 4582 v->Synchronize("bootstrapper");
4546 Top::Iterate(v); 4583 isolate_->Iterate(v);
4547 v->Synchronize("top"); 4584 v->Synchronize("top");
4548 Relocatable::Iterate(v); 4585 Relocatable::Iterate(v);
4549 v->Synchronize("relocatable"); 4586 v->Synchronize("relocatable");
4550 4587
4551 #ifdef ENABLE_DEBUGGER_SUPPORT 4588 #ifdef ENABLE_DEBUGGER_SUPPORT
4552 Debug::Iterate(v); 4589 isolate_->debug()->Iterate(v);
4553 #endif 4590 #endif
4554 v->Synchronize("debug"); 4591 v->Synchronize("debug");
4555 CompilationCache::Iterate(v); 4592 isolate_->compilation_cache()->Iterate(v);
4556 v->Synchronize("compilationcache"); 4593 v->Synchronize("compilationcache");
4557 4594
4558 // Iterate over local handles in handle scopes. 4595 // Iterate over local handles in handle scopes.
4559 HandleScopeImplementer::Iterate(v); 4596 isolate_->handle_scope_implementer()->Iterate(v);
4560 v->Synchronize("handlescope"); 4597 v->Synchronize("handlescope");
4561 4598
4562 // Iterate over the builtin code objects and code stubs in the 4599 // Iterate over the builtin code objects and code stubs in the
4563 // heap. Note that it is not necessary to iterate over code objects 4600 // heap. Note that it is not necessary to iterate over code objects
4564 // on scavenge collections. 4601 // on scavenge collections.
4565 if (mode != VISIT_ALL_IN_SCAVENGE) { 4602 if (mode != VISIT_ALL_IN_SCAVENGE) {
4566 Builtins::IterateBuiltins(v); 4603 isolate_->builtins()->IterateBuiltins(v);
4567 } 4604 }
4568 v->Synchronize("builtins"); 4605 v->Synchronize("builtins");
4569 4606
4570 // Iterate over global handles. 4607 // Iterate over global handles.
4571 if (mode == VISIT_ONLY_STRONG) { 4608 if (mode == VISIT_ONLY_STRONG) {
4572 GlobalHandles::IterateStrongRoots(v); 4609 isolate_->global_handles()->IterateStrongRoots(v);
4573 } else { 4610 } else {
4574 GlobalHandles::IterateAllRoots(v); 4611 isolate_->global_handles()->IterateAllRoots(v);
4575 } 4612 }
4576 v->Synchronize("globalhandles"); 4613 v->Synchronize("globalhandles");
4577 4614
4578 // Iterate over pointers being held by inactive threads. 4615 // Iterate over pointers being held by inactive threads.
4579 ThreadManager::Iterate(v); 4616 isolate_->thread_manager()->Iterate(v);
4580 v->Synchronize("threadmanager"); 4617 v->Synchronize("threadmanager");
4581 4618
4582 // Iterate over the pointers the Serialization/Deserialization code is 4619 // Iterate over the pointers the Serialization/Deserialization code is
4583 // holding. 4620 // holding.
4584 // During garbage collection this keeps the partial snapshot cache alive. 4621 // During garbage collection this keeps the partial snapshot cache alive.
4585 // During deserialization of the startup snapshot this creates the partial 4622 // During deserialization of the startup snapshot this creates the partial
4586 // snapshot cache and deserializes the objects it refers to. During 4623 // snapshot cache and deserializes the objects it refers to. During
4587 // serialization this does nothing, since the partial snapshot cache is 4624 // serialization this does nothing, since the partial snapshot cache is
4588 // empty. However the next thing we do is create the partial snapshot, 4625 // empty. However the next thing we do is create the partial snapshot,
4589 // filling up the partial snapshot cache with objects it needs as we go. 4626 // filling up the partial snapshot cache with objects it needs as we go.
4590 SerializerDeserializer::Iterate(v); 4627 SerializerDeserializer::Iterate(v);
4591 // We don't do a v->Synchronize call here, because in debug mode that will 4628 // We don't do a v->Synchronize call here, because in debug mode that will
4592 // output a flag to the snapshot. However at this point the serializer and 4629 // output a flag to the snapshot. However at this point the serializer and
4593 // deserializer are deliberately a little unsynchronized (see above) so the 4630 // deserializer are deliberately a little unsynchronized (see above) so the
4594 // checking of the sync flag in the snapshot would fail. 4631 // checking of the sync flag in the snapshot would fail.
4595 } 4632 }
4596 4633
4597 4634
4598 // Flag is set when the heap has been configured. The heap can be repeatedly
4599 // configured through the API until it is set up.
4600 static bool heap_configured = false;
4601
4602 // TODO(1236194): Since the heap size is configurable on the command line 4635 // TODO(1236194): Since the heap size is configurable on the command line
4603 // and through the API, we should gracefully handle the case that the heap 4636 // and through the API, we should gracefully handle the case that the heap
4604 // size is not big enough to fit all the initial objects. 4637 // size is not big enough to fit all the initial objects.
4605 bool Heap::ConfigureHeap(intptr_t max_semispace_size, 4638 bool Heap::ConfigureHeap(intptr_t max_semispace_size,
4606 intptr_t max_old_gen_size, 4639 intptr_t max_old_gen_size,
4607 intptr_t max_executable_size) { 4640 intptr_t max_executable_size) {
4608 if (HasBeenSetup()) return false; 4641 if (HasBeenSetup()) return false;
4609 4642
4610 if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size; 4643 if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size;
4611 4644
(...skipping 26 matching lines...)
4638 // The new space size must be a power of two to support single-bit testing 4671 // The new space size must be a power of two to support single-bit testing
4639 // for containment. 4672 // for containment.
4640 max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_); 4673 max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
4641 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_); 4674 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
4642 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_); 4675 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
4643 external_allocation_limit_ = 10 * max_semispace_size_; 4676 external_allocation_limit_ = 10 * max_semispace_size_;
4644 4677
4645 // The old generation is paged. 4678 // The old generation is paged.
4646 max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize); 4679 max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
4647 4680
4648 heap_configured = true; 4681 configured_ = true;
4649 return true; 4682 return true;
4650 } 4683 }
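
Rounding the semispace size up to a power of two is what makes new-space containment a single mask-and-compare: with a power-of-two size and a size-aligned base, (addr & ~(size - 1)) == base. A standalone sketch of both halves (hypothetical values, not the V8 helpers):

    #include <cassert>
    #include <cstdint>

    // Round v up to the next power of two (v > 0; overflow not handled).
    uintptr_t RoundUpToPow2(uintptr_t v) {
      uintptr_t p = 1;
      while (p < v) p <<= 1;
      return p;
    }

    // Single-mask containment test; valid only when size is a power of two
    // and base is size-aligned.
    bool Contains(uintptr_t base, uintptr_t size, uintptr_t addr) {
      return (addr & ~(size - 1)) == base;
    }

    int main() {
      uintptr_t size = RoundUpToPow2(3u * 1024 * 1024);  // rounds up to 4 MB
      assert(size == 4u * 1024 * 1024);
      uintptr_t base = 16u * 1024 * 1024;                // size-aligned base
      assert(Contains(base, size, base + 123));
      assert(!Contains(base, size, base + size));        // one past the end
      return 0;
    }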
4651 4684
4652 4685
4653 bool Heap::ConfigureHeapDefault() { 4686 bool Heap::ConfigureHeapDefault() {
4654 return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB, 4687 return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
4655 static_cast<intptr_t>(FLAG_max_old_space_size) * MB, 4688 static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
4656 static_cast<intptr_t>(FLAG_max_executable_size) * MB); 4689 static_cast<intptr_t>(FLAG_max_executable_size) * MB);
4657 } 4690 }
4658 4691
4659 4692
4660 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { 4693 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
4661 *stats->start_marker = HeapStats::kStartMarker; 4694 *stats->start_marker = HeapStats::kStartMarker;
4662 *stats->end_marker = HeapStats::kEndMarker; 4695 *stats->end_marker = HeapStats::kEndMarker;
4663 *stats->new_space_size = new_space_.SizeAsInt(); 4696 *stats->new_space_size = new_space_.SizeAsInt();
4664 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); 4697 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
4665 *stats->old_pointer_space_size = old_pointer_space_->Size(); 4698 *stats->old_pointer_space_size = old_pointer_space_->Size();
4666 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity(); 4699 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
4667 *stats->old_data_space_size = old_data_space_->Size(); 4700 *stats->old_data_space_size = old_data_space_->Size();
4668 *stats->old_data_space_capacity = old_data_space_->Capacity(); 4701 *stats->old_data_space_capacity = old_data_space_->Capacity();
4669 *stats->code_space_size = code_space_->Size(); 4702 *stats->code_space_size = code_space_->Size();
4670 *stats->code_space_capacity = code_space_->Capacity(); 4703 *stats->code_space_capacity = code_space_->Capacity();
4671 *stats->map_space_size = map_space_->Size(); 4704 *stats->map_space_size = map_space_->Size();
4672 *stats->map_space_capacity = map_space_->Capacity(); 4705 *stats->map_space_capacity = map_space_->Capacity();
4673 *stats->cell_space_size = cell_space_->Size(); 4706 *stats->cell_space_size = cell_space_->Size();
4674 *stats->cell_space_capacity = cell_space_->Capacity(); 4707 *stats->cell_space_capacity = cell_space_->Capacity();
4675 *stats->lo_space_size = lo_space_->Size(); 4708 *stats->lo_space_size = lo_space_->Size();
4676 GlobalHandles::RecordStats(stats); 4709 isolate_->global_handles()->RecordStats(stats);
4677 *stats->memory_allocator_size = MemoryAllocator::Size(); 4710 *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
4678 *stats->memory_allocator_capacity = 4711 *stats->memory_allocator_capacity =
4679 MemoryAllocator::Size() + MemoryAllocator::Available(); 4712 isolate()->memory_allocator()->Size() +
4713 isolate()->memory_allocator()->Available();
4680 *stats->os_error = OS::GetLastError(); 4714 *stats->os_error = OS::GetLastError();
4681 if (take_snapshot) { 4716 if (take_snapshot) {
4682 HeapIterator iterator; 4717 HeapIterator iterator;
4683 for (HeapObject* obj = iterator.Next(); 4718 for (HeapObject* obj = iterator.Next();
4684 obj != NULL; 4719 obj != NULL;
4685 obj = iterator.Next()) { 4720 obj = iterator.Next()) {
4686 InstanceType type = obj->map()->instance_type(); 4721 InstanceType type = obj->map()->instance_type();
4687 ASSERT(0 <= type && type <= LAST_TYPE); 4722 ASSERT(0 <= type && type <= LAST_TYPE);
4688 stats->objects_per_type[type]++; 4723 stats->objects_per_type[type]++;
4689 stats->size_per_type[type] += obj->Size(); 4724 stats->size_per_type[type] += obj->Size();
4690 } 4725 }
(...skipping 11 matching lines...)
4702 } 4737 }
4703 4738
4704 4739
4705 int Heap::PromotedExternalMemorySize() { 4740 int Heap::PromotedExternalMemorySize() {
4706 if (amount_of_external_allocated_memory_ 4741 if (amount_of_external_allocated_memory_
4707 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; 4742 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
4708 return amount_of_external_allocated_memory_ 4743 return amount_of_external_allocated_memory_
4709 - amount_of_external_allocated_memory_at_last_global_gc_; 4744 - amount_of_external_allocated_memory_at_last_global_gc_;
4710 } 4745 }
4711 4746
4747 #ifdef DEBUG
4748
4749 // Tags 0, 1, and 3 are used. Use 2 for marking a visited HeapObject.
4750 static const int kMarkTag = 2;
4751
4752
4753 class HeapDebugUtils {
4754 public:
4755 explicit HeapDebugUtils(Heap* heap)
4756 : search_for_any_global_(false),
4757 search_target_(NULL),
4758 found_target_(false),
4759 object_stack_(20),
4760 heap_(heap) {
4761 }
4762
4763 class MarkObjectVisitor : public ObjectVisitor {
4764 public:
4765 explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4766
4767 void VisitPointers(Object** start, Object** end) {
4768 // Mark all HeapObject pointers in [start, end)
4769 for (Object** p = start; p < end; p++) {
4770 if ((*p)->IsHeapObject())
4771 utils_->MarkObjectRecursively(p);
4772 }
4773 }
4774
4775 HeapDebugUtils* utils_;
4776 };
4777
4778 void MarkObjectRecursively(Object** p) {
4779 if (!(*p)->IsHeapObject()) return;
4780
4781 HeapObject* obj = HeapObject::cast(*p);
4782
4783 Object* map = obj->map();
4784
4785 if (!map->IsHeapObject()) return; // visited before
4786
4787 if (found_target_) return; // stop if target found
4788 object_stack_.Add(obj);
4789 if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
4790 (!search_for_any_global_ && (obj == search_target_))) {
4791 found_target_ = true;
4792 return;
4793 }
4794
4795 // not visited yet
4796 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
4797
4798 Address map_addr = map_p->address();
4799
4800 obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
4801
4802 MarkObjectRecursively(&map);
4803
4804 MarkObjectVisitor mark_visitor(this);
4805
4806 obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
4807 &mark_visitor);
4808
4809 if (!found_target_) // don't pop if found the target
4810 object_stack_.RemoveLast();
4811 }
4812
4813
4814 class UnmarkObjectVisitor : public ObjectVisitor {
4815 public:
4816 explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4817
4818 void VisitPointers(Object** start, Object** end) {
4819 // Unmark all HeapObject pointers in [start, end)
4820 for (Object** p = start; p < end; p++) {
4821 if ((*p)->IsHeapObject())
4822 utils_->UnmarkObjectRecursively(p);
4823 }
4824 }
4825
4826 HeapDebugUtils* utils_;
4827 };
4828
4829
4830 void UnmarkObjectRecursively(Object** p) {
4831 if (!(*p)->IsHeapObject()) return;
4832
4833 HeapObject* obj = HeapObject::cast(*p);
4834
4835 Object* map = obj->map();
4836
4837 if (map->IsHeapObject()) return; // unmarked already
4838
4839 Address map_addr = reinterpret_cast<Address>(map);
4840
4841 map_addr -= kMarkTag;
4842
4843 ASSERT_TAG_ALIGNED(map_addr);
4844
4845 HeapObject* map_p = HeapObject::FromAddress(map_addr);
4846
4847 obj->set_map(reinterpret_cast<Map*>(map_p));
4848
4849 UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
4850
4851 UnmarkObjectVisitor unmark_visitor(this);
4852
4853 obj->IterateBody(Map::cast(map_p)->instance_type(),
4854 obj->SizeFromMap(Map::cast(map_p)),
4855 &unmark_visitor);
4856 }
4857
4858
4859 void MarkRootObjectRecursively(Object** root) {
4860 if (search_for_any_global_) {
4861 ASSERT(search_target_ == NULL);
4862 } else {
4863 ASSERT(search_target_->IsHeapObject());
4864 }
4865 found_target_ = false;
4866 object_stack_.Clear();
4867
4868 MarkObjectRecursively(root);
4869 UnmarkObjectRecursively(root);
4870
4871 if (found_target_) {
4872 PrintF("=====================================\n");
4873 PrintF("==== Path to object ====\n");
4874 PrintF("=====================================\n\n");
4875
4876 ASSERT(!object_stack_.is_empty());
4877 for (int i = 0; i < object_stack_.length(); i++) {
4878 if (i > 0) PrintF("\n |\n |\n V\n\n");
4879 Object* obj = object_stack_[i];
4880 obj->Print();
4881 }
4882 PrintF("=====================================\n");
4883 }
4884 }
4885
4886 // Helper class for visiting HeapObjects recursively.
4887 class MarkRootVisitor: public ObjectVisitor {
4888 public:
4889 explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4890
4891 void VisitPointers(Object** start, Object** end) {
4892 // Visit all HeapObject pointers in [start, end)
4893 for (Object** p = start; p < end; p++) {
4894 if ((*p)->IsHeapObject())
4895 utils_->MarkRootObjectRecursively(p);
4896 }
4897 }
4898
4899 HeapDebugUtils* utils_;
4900 };
4901
4902 bool search_for_any_global_;
4903 Object* search_target_;
4904 bool found_target_;
4905 List<Object*> object_stack_;
4906 Heap* heap_;
4907
4908 friend class Heap;
4909 };
4910
4911 #endif
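
HeapDebugUtils marks a visited object by folding kMarkTag into its map word (tags 0, 1, and 3 are already taken by the pointer-tagging scheme, so 2 is free) and restores the pointer on the unmark pass. A minimal sketch of the mark-in-the-pointer round trip (simplified untagged pointers, not V8's):

    #include <cassert>
    #include <cstdint>

    static const uintptr_t kMarkTag = 2;   // a bit pattern aligned pointers never use

    struct Node { uintptr_t map; };        // stand-in for a HeapObject's map word

    bool IsMarked(const Node* n) { return (n->map & kMarkTag) != 0; }
    void Mark(Node* n)   { n->map += kMarkTag; }   // fold the tag into the pointer
    void Unmark(Node* n) { n->map -= kMarkTag; }   // restore the original pointer

    int main() {
      Node map_object = {0};
      Node n = {reinterpret_cast<uintptr_t>(&map_object)};  // word-aligned address
      uintptr_t original = n.map;
      Mark(&n);
      assert(IsMarked(&n));
      Unmark(&n);
      assert(n.map == original);   // the map pointer survives the round trip
      return 0;
    }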
4712 4912
4713 bool Heap::Setup(bool create_heap_objects) { 4913 bool Heap::Setup(bool create_heap_objects) {
4914 #ifdef DEBUG
4915 debug_utils_ = new HeapDebugUtils(this);
4916 #endif
4917
4714 // Initialize heap spaces and initial maps and objects. Whenever something 4918 // Initialize heap spaces and initial maps and objects. Whenever something
4715 // goes wrong, just return false. The caller should check the results and 4919 // goes wrong, just return false. The caller should check the results and
4716 // call Heap::TearDown() to release allocated memory. 4920 // call Heap::TearDown() to release allocated memory.
4717 // 4921 //
4718 // If the heap is not yet configured (e.g. through the API), configure it. 4922 // If the heap is not yet configured (e.g. through the API), configure it.
4719 // Configuration is based on the flags new-space-size (really the semispace 4923 // Configuration is based on the flags new-space-size (really the semispace
4720 // size) and old-space-size if set, or on the initial values of 4924 // size) and old-space-size if set, or on the initial values of
4721 // semispace_size_ and old_generation_size_ otherwise. 4925 // semispace_size_ and old_generation_size_ otherwise.
4722 if (!heap_configured) { 4926 if (!configured_) {
4723 if (!ConfigureHeapDefault()) return false; 4927 if (!ConfigureHeapDefault()) return false;
4724 } 4928 }
4725 4929
4726 ScavengingVisitor<TRANSFER_MARKS>::Initialize(); 4930 gc_initializer_mutex->Lock();
4727 ScavengingVisitor<IGNORE_MARKS>::Initialize(); 4931 static bool initialized_gc = false;
4728 NewSpaceScavenger::Initialize(); 4932 if (!initialized_gc) {
4729 MarkCompactCollector::Initialize(); 4933 initialized_gc = true;
4934 ScavengingVisitor<TRANSFER_MARKS>::Initialize();
4935 ScavengingVisitor<IGNORE_MARKS>::Initialize();
4936 NewSpaceScavenger::Initialize();
4937 MarkCompactCollector::Initialize();
4938 }
4939 gc_initializer_mutex->Unlock();
4730 4940
4731 MarkMapPointersAsEncoded(false); 4941 MarkMapPointersAsEncoded(false);
4732 4942
4733 // Setup memory allocator. 4943 // Setup memory allocator.
4734 if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false; 4944 if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
4945 return false;
4735 4946
4736 // Setup new space. 4947 // Setup new space.
4737 if (!new_space_.Setup(reserved_semispace_size_)) { 4948 if (!new_space_.Setup(reserved_semispace_size_)) {
4738 return false; 4949 return false;
4739 } 4950 }
4740 4951
4741 // Initialize old pointer space. 4952 // Initialize old pointer space.
4742 old_pointer_space_ = 4953 old_pointer_space_ =
4743 new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE); 4954 new OldSpace(this,
4955 max_old_generation_size_,
4956 OLD_POINTER_SPACE,
4957 NOT_EXECUTABLE);
4744 if (old_pointer_space_ == NULL) return false; 4958 if (old_pointer_space_ == NULL) return false;
4745 if (!old_pointer_space_->Setup()) return false; 4959 if (!old_pointer_space_->Setup()) return false;
4746 4960
4747 // Initialize old data space. 4961 // Initialize old data space.
4748 old_data_space_ = 4962 old_data_space_ =
4749 new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE); 4963 new OldSpace(this,
4964 max_old_generation_size_,
4965 OLD_DATA_SPACE,
4966 NOT_EXECUTABLE);
4750 if (old_data_space_ == NULL) return false; 4967 if (old_data_space_ == NULL) return false;
4751 if (!old_data_space_->Setup()) return false; 4968 if (!old_data_space_->Setup()) return false;
4752 4969
4753 // Initialize the code space, set its maximum capacity to the old 4970 // Initialize the code space, set its maximum capacity to the old
4754 // generation size. It needs executable memory. 4971 // generation size. It needs executable memory.
4755 // On 64-bit platform(s), we put all code objects in a 2 GB range of 4972 // On 64-bit platform(s), we put all code objects in a 2 GB range of
4756 // virtual address space, so that they can call each other with near calls. 4973 // virtual address space, so that they can call each other with near calls.
4757 if (code_range_size_ > 0) { 4974 if (code_range_size_ > 0) {
4758 if (!CodeRange::Setup(code_range_size_)) { 4975 if (!isolate_->code_range()->Setup(code_range_size_)) {
4759 return false; 4976 return false;
4760 } 4977 }
4761 } 4978 }
4762 4979
4763 code_space_ = 4980 code_space_ =
4764 new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE); 4981 new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
4765 if (code_space_ == NULL) return false; 4982 if (code_space_ == NULL) return false;
4766 if (!code_space_->Setup()) return false; 4983 if (!code_space_->Setup()) return false;
4767 4984
4768 // Initialize map space. 4985 // Initialize map space.
4769 map_space_ = new MapSpace(max_old_generation_size_, 4986 map_space_ = new MapSpace(this,
4987 max_old_generation_size_,
4770 FLAG_max_map_space_pages, 4988 FLAG_max_map_space_pages,
4771 MAP_SPACE); 4989 MAP_SPACE);
4772 if (map_space_ == NULL) return false; 4990 if (map_space_ == NULL) return false;
4773 if (!map_space_->Setup()) return false; 4991 if (!map_space_->Setup()) return false;
4774 4992
4775 // Initialize global property cell space. 4993 // Initialize global property cell space.
4776 cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE); 4994 cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
4777 if (cell_space_ == NULL) return false; 4995 if (cell_space_ == NULL) return false;
4778 if (!cell_space_->Setup()) return false; 4996 if (!cell_space_->Setup()) return false;
4779 4997
4780 // The large object code space may contain code or data. We set the memory 4998 // The large object code space may contain code or data. We set the memory
4781 // to be non-executable here for safety, but this means we need to enable it 4999 // to be non-executable here for safety, but this means we need to enable it
4782 // explicitly when allocating large code objects. 5000 // explicitly when allocating large code objects.
4783 lo_space_ = new LargeObjectSpace(LO_SPACE); 5001 lo_space_ = new LargeObjectSpace(this, LO_SPACE);
4784 if (lo_space_ == NULL) return false; 5002 if (lo_space_ == NULL) return false;
4785 if (!lo_space_->Setup()) return false; 5003 if (!lo_space_->Setup()) return false;
4786 5004
4787 if (create_heap_objects) { 5005 if (create_heap_objects) {
4788 // Create initial maps. 5006 // Create initial maps.
4789 if (!CreateInitialMaps()) return false; 5007 if (!CreateInitialMaps()) return false;
4790 if (!CreateApiObjects()) return false; 5008 if (!CreateApiObjects()) return false;
4791 5009
4792 // Create initial objects 5010 // Create initial objects
4793 if (!CreateInitialObjects()) return false; 5011 if (!CreateInitialObjects()) return false;
4794 5012
4795 global_contexts_list_ = undefined_value(); 5013 global_contexts_list_ = undefined_value();
4796 } 5014 }
4797 5015
4798 LOG(IntPtrTEvent("heap-capacity", Capacity())); 5016 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
4799 LOG(IntPtrTEvent("heap-available", Available())); 5017 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
4800 5018
4801 #ifdef ENABLE_LOGGING_AND_PROFILING 5019 #ifdef ENABLE_LOGGING_AND_PROFILING
4802 // This should be called only after initial objects have been created. 5020 // This should be called only after initial objects have been created.
4803 ProducerHeapProfile::Setup(); 5021 isolate_->producer_heap_profile()->Setup();
4804 #endif 5022 #endif
4805 5023
4806 if (!Marking::Setup()) return false; 5024 if (!marking()->Setup()) return false;
5025
5026 store_buffer()->Setup();
4807 5027
4808 return true; 5028 return true;
4809 } 5029 }
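
The gc_initializer_mutex block in Setup above is a once-only guard: the scavenging and mark-compact visitor tables are process-global, so only the first heap to come up may initialize them, however many isolates race here. In portable modern C++ the same shape collapses to std::call_once; a minimal sketch (not what this pre-C++11 branch can use):

    #include <mutex>
    #include <cstdio>

    static std::once_flag gc_tables_once;

    // Process-global setup that must run exactly once across all heaps.
    static void InitializeGCTables() { printf("GC tables initialized\n"); }

    void SetupHeap() {
      std::call_once(gc_tables_once, InitializeGCTables);
      // ... per-heap setup continues here ...
    }

    int main() {
      SetupHeap();
      SetupHeap();   // the second call does not re-run InitializeGCTables
      return 0;
    }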
4810 5030
4811 5031
4812 void Heap::SetStackLimits() { 5032 void Heap::SetStackLimits() {
5033 ASSERT(isolate_ != NULL);
5034 ASSERT(isolate_ == isolate());
4813 // On 64-bit machines, pointers are generally out of range of Smis. We write 5035 // On 64-bit machines, pointers are generally out of range of Smis. We write
4814 // something that looks like an out-of-range Smi to the GC. 5036 // something that looks like an out-of-range Smi to the GC.
4815 5037
4816 // Set up the special root array entries containing the stack limits. 5038 // Set up the special root array entries containing the stack limits.
4817 // These are actually addresses, but the tag makes the GC ignore it. 5039 // These are actually addresses, but the tag makes the GC ignore it.
4818 roots_[kStackLimitRootIndex] = 5040 roots_[kStackLimitRootIndex] =
4819 reinterpret_cast<Object*>( 5041 reinterpret_cast<Object*>(
4820 (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag); 5042 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
4821 roots_[kRealStackLimitRootIndex] = 5043 roots_[kRealStackLimitRootIndex] =
4822 reinterpret_cast<Object*>( 5044 reinterpret_cast<Object*>(
4823 (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag); 5045 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
4824 } 5046 }
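
The stack-limit roots are raw addresses disguised as Smis: clearing the tag bits and or-ing in kSmiTag makes a tagged-pointer scanner treat the word as an integer and skip it. A sketch of the masking with 32-bit-style tag constants (values illustrative):

    #include <cassert>
    #include <cstdint>

    static const uintptr_t kSmiTag = 0;      // Smis carry a zero low bit
    static const uintptr_t kSmiTagMask = 1;

    // Disguise an address as a Smi-tagged word so the GC ignores it. The low
    // bit is sacrificed, which is fine for a conservative stack limit.
    uintptr_t DisguiseAsSmi(uintptr_t address) {
      return (address & ~kSmiTagMask) | kSmiTag;
    }

    int main() {
      uintptr_t limit = 0x7fff1235;             // an odd (tag-looking) address
      uintptr_t word = DisguiseAsSmi(limit);
      assert((word & kSmiTagMask) == kSmiTag);  // scanner sees a Smi, not a pointer
      assert(word == 0x7fff1234);               // rounded down to an even word
      return 0;
    }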
4825 5047
4826 5048
4827 void Heap::TearDown() { 5049 void Heap::TearDown() {
4828 if (FLAG_print_cumulative_gc_stat) { 5050 if (FLAG_print_cumulative_gc_stat) {
4829 PrintF("\n\n"); 5051 PrintF("\n\n");
4830 PrintF("gc_count=%d ", gc_count_); 5052 PrintF("gc_count=%d ", gc_count_);
4831 PrintF("mark_sweep_count=%d ", ms_count_); 5053 PrintF("mark_sweep_count=%d ", ms_count_);
4832 PrintF("mark_compact_count=%d ", mc_count_); 5054 PrintF("mark_compact_count=%d ", mc_count_);
4833 PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause()); 5055 PrintF("max_gc_pause=%d ", get_max_gc_pause());
4834 PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator()); 5056 PrintF("min_in_mutator=%d ", get_min_in_mutator());
4835 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", 5057 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
4836 GCTracer::get_max_alive_after_gc()); 5058 get_max_alive_after_gc());
4837 PrintF("\n\n"); 5059 PrintF("\n\n");
4838 } 5060 }
4839 5061
4840 GlobalHandles::TearDown(); 5062 isolate_->global_handles()->TearDown();
4841 5063
4842 ExternalStringTable::TearDown(); 5064 external_string_table_.TearDown();
4843 5065
4844 new_space_.TearDown(); 5066 new_space_.TearDown();
4845 5067
4846 if (old_pointer_space_ != NULL) { 5068 if (old_pointer_space_ != NULL) {
4847 old_pointer_space_->TearDown(); 5069 old_pointer_space_->TearDown();
4848 delete old_pointer_space_; 5070 delete old_pointer_space_;
4849 old_pointer_space_ = NULL; 5071 old_pointer_space_ = NULL;
4850 } 5072 }
4851 5073
4852 if (old_data_space_ != NULL) { 5074 if (old_data_space_ != NULL) {
(...skipping 19 matching lines...)
4872 delete cell_space_; 5094 delete cell_space_;
4873 cell_space_ = NULL; 5095 cell_space_ = NULL;
4874 } 5096 }
4875 5097
4876 if (lo_space_ != NULL) { 5098 if (lo_space_ != NULL) {
4877 lo_space_->TearDown(); 5099 lo_space_->TearDown();
4878 delete lo_space_; 5100 delete lo_space_;
4879 lo_space_ = NULL; 5101 lo_space_ = NULL;
4880 } 5102 }
4881 5103
4882 Marking::TearDown(); 5104 marking()->TearDown();
5105 store_buffer()->TearDown();
4883 5106
4884 MemoryAllocator::TearDown(); 5107 isolate_->memory_allocator()->TearDown();
5108
5109 #ifdef DEBUG
5110 delete debug_utils_;
5111 debug_utils_ = NULL;
5112 #endif
4885 } 5113 }
4886 5114
4887 5115
4888 void Heap::Shrink() { 5116 void Heap::Shrink() {
4889 // Try to shrink all paged spaces. 5117 // Try to shrink all paged spaces.
4890 PagedSpaces spaces; 5118 PagedSpaces spaces;
4891 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) 5119 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
4892 space->Shrink(); 5120 space->Shrink();
4893 } 5121 }
4894 5122
(...skipping 68 matching lines...)
4963 for (Object** p = start; p < end; p++) 5191 for (Object** p = start; p < end; p++)
4964 PrintF(" handle %p to %p\n", 5192 PrintF(" handle %p to %p\n",
4965 reinterpret_cast<void*>(p), 5193 reinterpret_cast<void*>(p),
4966 reinterpret_cast<void*>(*p)); 5194 reinterpret_cast<void*>(*p));
4967 } 5195 }
4968 }; 5196 };
4969 5197
4970 void Heap::PrintHandles() { 5198 void Heap::PrintHandles() {
4971 PrintF("Handles:\n"); 5199 PrintF("Handles:\n");
4972 PrintHandleVisitor v; 5200 PrintHandleVisitor v;
4973 HandleScopeImplementer::Iterate(&v); 5201 isolate_->handle_scope_implementer()->Iterate(&v);
4974 } 5202 }
4975 5203
4976 #endif 5204 #endif
4977 5205
4978 5206
4979 Space* AllSpaces::next() { 5207 Space* AllSpaces::next() {
4980 switch (counter_++) { 5208 switch (counter_++) {
4981 case NEW_SPACE: 5209 case NEW_SPACE:
4982 return Heap::new_space(); 5210 return HEAP->new_space();
4983 case OLD_POINTER_SPACE: 5211 case OLD_POINTER_SPACE:
4984 return Heap::old_pointer_space(); 5212 return HEAP->old_pointer_space();
4985 case OLD_DATA_SPACE: 5213 case OLD_DATA_SPACE:
4986 return Heap::old_data_space(); 5214 return HEAP->old_data_space();
4987 case CODE_SPACE: 5215 case CODE_SPACE:
4988 return Heap::code_space(); 5216 return HEAP->code_space();
4989 case MAP_SPACE: 5217 case MAP_SPACE:
4990 return Heap::map_space(); 5218 return HEAP->map_space();
4991 case CELL_SPACE: 5219 case CELL_SPACE:
4992 return Heap::cell_space(); 5220 return HEAP->cell_space();
4993 case LO_SPACE: 5221 case LO_SPACE:
4994 return Heap::lo_space(); 5222 return HEAP->lo_space();
4995 default: 5223 default:
4996 return NULL; 5224 return NULL;
4997 } 5225 }
4998 } 5226 }
4999 5227
5000 5228
5001 PagedSpace* PagedSpaces::next() { 5229 PagedSpace* PagedSpaces::next() {
5002 switch (counter_++) { 5230 switch (counter_++) {
5003 case OLD_POINTER_SPACE: 5231 case OLD_POINTER_SPACE:
5004 return Heap::old_pointer_space(); 5232 return HEAP->old_pointer_space();
5005 case OLD_DATA_SPACE: 5233 case OLD_DATA_SPACE:
5006 return Heap::old_data_space(); 5234 return HEAP->old_data_space();
5007 case CODE_SPACE: 5235 case CODE_SPACE:
5008 return Heap::code_space(); 5236 return HEAP->code_space();
5009 case MAP_SPACE: 5237 case MAP_SPACE:
5010 return Heap::map_space(); 5238 return HEAP->map_space();
5011 case CELL_SPACE: 5239 case CELL_SPACE:
5012 return Heap::cell_space(); 5240 return HEAP->cell_space();
5013 default: 5241 default:
5014 return NULL; 5242 return NULL;
5015 } 5243 }
5016 } 5244 }
5017 5245
5018 5246
5019 5247
5020 OldSpace* OldSpaces::next() { 5248 OldSpace* OldSpaces::next() {
5021 switch (counter_++) { 5249 switch (counter_++) {
5022 case OLD_POINTER_SPACE: 5250 case OLD_POINTER_SPACE:
5023 return Heap::old_pointer_space(); 5251 return HEAP->old_pointer_space();
5024 case OLD_DATA_SPACE: 5252 case OLD_DATA_SPACE:
5025 return Heap::old_data_space(); 5253 return HEAP->old_data_space();
5026 case CODE_SPACE: 5254 case CODE_SPACE:
5027 return Heap::code_space(); 5255 return HEAP->code_space();
5028 default: 5256 default:
5029 return NULL; 5257 return NULL;
5030 } 5258 }
5031 } 5259 }
5032 5260
5033 5261
5034 SpaceIterator::SpaceIterator() 5262 SpaceIterator::SpaceIterator()
5035 : current_space_(FIRST_SPACE), 5263 : current_space_(FIRST_SPACE),
5036 iterator_(NULL), 5264 iterator_(NULL),
5037 size_func_(NULL) { 5265 size_func_(NULL) {
(...skipping 34 matching lines...)
5072 return CreateIterator(); 5300 return CreateIterator();
5073 } 5301 }
5074 5302
5075 5303
5076 // Create an iterator for the space to iterate. 5304 // Create an iterator for the space to iterate.
5077 ObjectIterator* SpaceIterator::CreateIterator() { 5305 ObjectIterator* SpaceIterator::CreateIterator() {
5078 ASSERT(iterator_ == NULL); 5306 ASSERT(iterator_ == NULL);
5079 5307
5080 switch (current_space_) { 5308 switch (current_space_) {
5081 case NEW_SPACE: 5309 case NEW_SPACE:
5082 iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_); 5310 iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
5083 break; 5311 break;
5084 case OLD_POINTER_SPACE: 5312 case OLD_POINTER_SPACE:
5085 iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_); 5313 iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
5086 break; 5314 break;
5087 case OLD_DATA_SPACE: 5315 case OLD_DATA_SPACE:
5088 iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_); 5316 iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
5089 break; 5317 break;
5090 case CODE_SPACE: 5318 case CODE_SPACE:
5091 iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_); 5319 iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
5092 break; 5320 break;
5093 case MAP_SPACE: 5321 case MAP_SPACE:
5094 iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_); 5322 iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
5095 break; 5323 break;
5096 case CELL_SPACE: 5324 case CELL_SPACE:
5097 iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_); 5325 iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
5098 break; 5326 break;
5099 case LO_SPACE: 5327 case LO_SPACE:
5100 iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_); 5328 iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
5101 break; 5329 break;
5102 } 5330 }
5103 5331
5104 // Return the newly allocated iterator. 5332 // Return the newly allocated iterator.
5105 ASSERT(iterator_ != NULL); 5333 ASSERT(iterator_ != NULL);
5106 return iterator_; 5334 return iterator_;
5107 } 5335 }
5108 5336
5109 5337
5110 class HeapObjectsFilter { 5338 class HeapObjectsFilter {
(...skipping 46 matching lines...)
5157 }; 5385 };
5158 5386
5159 void MarkUnreachableObjects() { 5387 void MarkUnreachableObjects() {
5160 HeapIterator iterator; 5388 HeapIterator iterator;
5161 for (HeapObject* obj = iterator.Next(); 5389 for (HeapObject* obj = iterator.Next();
5162 obj != NULL; 5390 obj != NULL;
5163 obj = iterator.Next()) { 5391 obj = iterator.Next()) {
5164 IntrusiveMarking::SetMark(obj); 5392 IntrusiveMarking::SetMark(obj);
5165 } 5393 }
5166 UnmarkingVisitor visitor; 5394 UnmarkingVisitor visitor;
5167 Heap::IterateRoots(&visitor, VISIT_ALL); 5395 HEAP->IterateRoots(&visitor, VISIT_ALL);
5168 while (visitor.can_process()) 5396 while (visitor.can_process())
5169 visitor.ProcessNext(); 5397 visitor.ProcessNext();
5170 } 5398 }
5171 5399
5172 AssertNoAllocation no_alloc; 5400 AssertNoAllocation no_alloc;
5173 }; 5401 };
5174 5402
5175 5403
5176 HeapIterator::HeapIterator() { 5404 HeapIterator::HeapIterator() {
5177 Init(); 5405 Init();
5178 } 5406 }
5179 5407
5180 5408
5181 HeapIterator::~HeapIterator() { 5409 HeapIterator::~HeapIterator() {
5182 Shutdown(); 5410 Shutdown();
5183 } 5411 }
5184 5412
5185 5413
5186 void HeapIterator::Init() { 5414 void HeapIterator::Init() {
5187 // Start the iteration. 5415 // Start the iteration.
5188 Heap::EnsureHeapIsIterable(); 5416 HEAP->EnsureHeapIsIterable();
5189 space_iterator_ = new SpaceIterator(); 5417 space_iterator_ = new SpaceIterator();
5190 object_iterator_ = space_iterator_->next(); 5418 object_iterator_ = space_iterator_->next();
5191 } 5419 }
5192 5420
5193 5421
5194 void HeapIterator::Shutdown() { 5422 void HeapIterator::Shutdown() {
5195 // Make sure the last iterator is deallocated. 5423 // Make sure the last iterator is deallocated.
5196 delete space_iterator_; 5424 delete space_iterator_;
5197 space_iterator_ = NULL; 5425 space_iterator_ = NULL;
5198 object_iterator_ = NULL; 5426 object_iterator_ = NULL;
(...skipping 224 matching lines...)
5423 OldSpaces spaces; 5651 OldSpaces spaces;
5424 for (OldSpace* space = spaces.next(); 5652 for (OldSpace* space = spaces.next();
5425 space != NULL; 5653 space != NULL;
5426 space = spaces.next()) { 5654 space = spaces.next()) {
5427 holes_size += space->Waste() + space->Available(); 5655 holes_size += space->Waste() + space->Available();
5428 } 5656 }
5429 return holes_size; 5657 return holes_size;
5430 } 5658 }
5431 5659
5432 5660
5433 GCTracer::GCTracer() 5661 GCTracer::GCTracer(Heap* heap)
5434 : start_time_(0.0), 5662 : start_time_(0.0),
5435 start_size_(0), 5663 start_size_(0),
5436 gc_count_(0), 5664 gc_count_(0),
5437 full_gc_count_(0), 5665 full_gc_count_(0),
5438 is_compacting_(false), 5666 is_compacting_(false),
5439 marked_count_(0), 5667 marked_count_(0),
5440 allocated_since_last_gc_(0), 5668 allocated_since_last_gc_(0),
5441 spent_in_mutator_(0), 5669 spent_in_mutator_(0),
5442 promoted_objects_size_(0) { 5670 promoted_objects_size_(0),
5443 // These two fields reflect the state of the previous full collection. 5671 heap_(heap) {
5444 // Set them before they are changed by the collector.
5445 previous_has_compacted_ = MarkCompactCollector::HasCompacted();
5446 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return; 5672 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
5447 start_time_ = OS::TimeCurrentMillis(); 5673 start_time_ = OS::TimeCurrentMillis();
5448 start_size_ = Heap::SizeOfObjects(); 5674 start_size_ = heap_->SizeOfObjects();
5449 5675
5450 for (int i = 0; i < Scope::kNumberOfScopes; i++) { 5676 for (int i = 0; i < Scope::kNumberOfScopes; i++) {
5451 scopes_[i] = 0; 5677 scopes_[i] = 0;
5452 } 5678 }
5453 5679
5454 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize(); 5680 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
5455 5681
5456 allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_; 5682 allocated_since_last_gc_ =
5683 heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
5457 5684
5458 if (last_gc_end_timestamp_ > 0) { 5685 if (heap_->last_gc_end_timestamp_ > 0) {
5459 spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0); 5686 spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
5460 } 5687 }
5461 5688
5462 steps_count_ = IncrementalMarking::steps_count(); 5689 steps_count_ = heap_->incremental_marking()->steps_count();
5463 steps_took_ = IncrementalMarking::steps_took(); 5690 steps_took_ = heap_->incremental_marking()->steps_took();
5464 } 5691 }
5465 5692
5466 5693
5467 GCTracer::~GCTracer() { 5694 GCTracer::~GCTracer() {
5468 // Printf ONE line iff flag is set. 5695 // Printf ONE line iff flag is set.
5469 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return; 5696 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
5470 5697
5471 bool first_gc = (last_gc_end_timestamp_ == 0); 5698 bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
5472 5699
5473 alive_after_last_gc_ = Heap::SizeOfObjects(); 5700 heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
5474 last_gc_end_timestamp_ = OS::TimeCurrentMillis(); 5701 heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
5475 5702
5476 int time = static_cast<int>(last_gc_end_timestamp_ - start_time_); 5703 int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
5477 5704
5478 // Update cumulative GC statistics if required. 5705 // Update cumulative GC statistics if required.
5479 if (FLAG_print_cumulative_gc_stat) { 5706 if (FLAG_print_cumulative_gc_stat) {
5480 max_gc_pause_ = Max(max_gc_pause_, time); 5707 heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
5481 max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_); 5708 heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
5709 heap_->alive_after_last_gc_);
5482 if (!first_gc) { 5710 if (!first_gc) {
5483 min_in_mutator_ = Min(min_in_mutator_, 5711 heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
5484 static_cast<int>(spent_in_mutator_)); 5712 static_cast<int>(spent_in_mutator_));
5485 } 5713 }
5486 } 5714 }
5487 5715
5488 if (!FLAG_trace_gc_nvp) { 5716 if (!FLAG_trace_gc_nvp) {
5489 int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]); 5717 int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
5490 5718
5491 PrintF("%s %.1f -> %.1f MB, ", 5719 PrintF("%s %.1f -> %.1f MB, ",
5492 CollectorString(), 5720 CollectorString(),
5493 static_cast<double>(start_size_) / MB, 5721 static_cast<double>(start_size_) / MB,
5494 SizeOfHeapObjects()); 5722 SizeOfHeapObjects());
(...skipping 10 matching lines...)
5505 PrintF("pause=%d ", time); 5733 PrintF("pause=%d ", time);
5506 PrintF("mutator=%d ", 5734 PrintF("mutator=%d ",
5507 static_cast<int>(spent_in_mutator_)); 5735 static_cast<int>(spent_in_mutator_));
5508 5736
5509 PrintF("gc="); 5737 PrintF("gc=");
5510 switch (collector_) { 5738 switch (collector_) {
5511 case SCAVENGER: 5739 case SCAVENGER:
5512 PrintF("s"); 5740 PrintF("s");
5513 break; 5741 break;
5514 case MARK_COMPACTOR: 5742 case MARK_COMPACTOR:
5515 PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms"); 5743 PrintF("%s",
5744 heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
5516 break; 5745 break;
5517 default: 5746 default:
5518 UNREACHABLE(); 5747 UNREACHABLE();
5519 } 5748 }
5520 PrintF(" "); 5749 PrintF(" ");
5521 5750
5522 PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL])); 5751 PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
5523 PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK])); 5752 PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
5524 PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP])); 5753 PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
5525 PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE])); 5754 PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
5526 PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT])); 5755 PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
5527 5756
5528 PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_); 5757 PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
5529 PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects()); 5758 PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
5530 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ", 5759 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
5531 in_free_list_or_wasted_before_gc_); 5760 in_free_list_or_wasted_before_gc_);
5532 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize()); 5761 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
5533 5762
5534 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_); 5763 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
5535 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_); 5764 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
5536 PrintF("stepscount=%d ", steps_count_); 5765 PrintF("stepscount=%d ", steps_count_);
5537 PrintF("stepstook=%d ", static_cast<int>(steps_took_)); 5766 PrintF("stepstook=%d ", static_cast<int>(steps_took_));
5538 5767
5539 PrintF("\n"); 5768 PrintF("\n");
5540 } 5769 }
5541 5770
5542 #if defined(ENABLE_LOGGING_AND_PROFILING) 5771 #if defined(ENABLE_LOGGING_AND_PROFILING)
5543 Heap::PrintShortHeapStatistics(); 5772 heap_->PrintShortHeapStatistics();
5544 #endif 5773 #endif
5545 } 5774 }
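
Taken together, the constructor and destructor make the tracer a scoped measurement: construction samples the start time and size (and derives mutator time from the previous GC's end timestamp), destruction prints one line and folds the pause into the cumulative stats. A minimal standalone sketch of that RAII shape, with invented names and only the timing logic kept:

    #include <chrono>
    #include <cstdio>

    // RAII tracer: measures the scope it lives in, prints on destruction.
    // Mirrors only the shape of GCTracer; names and members are invented.
    class ScopedTracer {
     public:
      explicit ScopedTracer(double* last_end_ms) : last_end_ms_(last_end_ms) {
        start_ms_ = NowMs();
        // Time since the previous trace ended is "mutator" time.
        mutator_ms_ = (*last_end_ms_ > 0) ? start_ms_ - *last_end_ms_ : 0.0;
      }
      ~ScopedTracer() {
        double end_ms = NowMs();
        std::printf("pause=%.1fms mutator=%.1fms\n",
                    end_ms - start_ms_, mutator_ms_);
        *last_end_ms_ = end_ms;  // the next tracer measures from here
      }
     private:
      static double NowMs() {
        using namespace std::chrono;
        return duration<double, std::milli>(
            steady_clock::now().time_since_epoch()).count();
      }
      double* last_end_ms_;
      double start_ms_;
      double mutator_ms_;
    };

    int main() {
      double last_end = 0;
      { ScopedTracer t(&last_end); /* ... collection work ... */ }
      { ScopedTracer t(&last_end); /* ... collection work ... */ }
      return 0;
    }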
5546 5775
5547 5776
5548 const char* GCTracer::CollectorString() { 5777 const char* GCTracer::CollectorString() {
5549 switch (collector_) { 5778 switch (collector_) {
5550 case SCAVENGER: 5779 case SCAVENGER:
5551 return "Scavenge"; 5780 return "Scavenge";
5552 case MARK_COMPACTOR: 5781 case MARK_COMPACTOR:
5553 return MarkCompactCollector::HasCompacted() ? "Mark-compact" 5782 return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
5554 : "Mark-sweep"; 5783 : "Mark-sweep";
5555 } 5784 }
5556 return "Unknown GC"; 5785 return "Unknown GC";
5557 } 5786 }
5558 5787
5559 5788
5560 int KeyedLookupCache::Hash(Map* map, String* name) { 5789 int KeyedLookupCache::Hash(Map* map, String* name) {
5561 // Uses only lower 32 bits if pointers are larger. 5790 // Uses only lower 32 bits if pointers are larger.
5562 uintptr_t addr_hash = 5791 uintptr_t addr_hash =
5563 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift; 5792 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
5564 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask); 5793 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
5565 } 5794 }
5566 5795
5567 5796
5568 int KeyedLookupCache::Lookup(Map* map, String* name) { 5797 int KeyedLookupCache::Lookup(Map* map, String* name) {
5569 int index = Hash(map, name); 5798 int index = Hash(map, name);
5570 Key& key = keys_[index]; 5799 Key& key = keys_[index];
5571 if ((key.map == map) && key.name->Equals(name)) { 5800 if ((key.map == map) && key.name->Equals(name)) {
5572 return field_offsets_[index]; 5801 return field_offsets_[index];
5573 } 5802 }
5574 return -1; 5803 return kNotFound;
5575 } 5804 }
5576 5805
5577 5806
5578 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) { 5807 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
5579 String* symbol; 5808 String* symbol;
5580 if (Heap::LookupSymbolIfExists(name, &symbol)) { 5809 if (HEAP->LookupSymbolIfExists(name, &symbol)) {
5581 int index = Hash(map, symbol); 5810 int index = Hash(map, symbol);
5582 Key& key = keys_[index]; 5811 Key& key = keys_[index];
5583 key.map = map; 5812 key.map = map;
5584 key.name = symbol; 5813 key.name = symbol;
5585 field_offsets_[index] = field_offset; 5814 field_offsets_[index] = field_offset;
5586 } 5815 }
5587 } 5816 }
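
Hash folds the map's address bits into the symbol's hash and masks to the table capacity, so Lookup is a single probe that either hits the cached field offset or reports kNotFound, and Update resolves collisions by plain overwrite. A compilable sketch of such a direct-mapped cache, with void*/std::string standing in for the real Map*/String* keys:

    #include <cstdint>
    #include <functional>
    #include <string>

    // Direct-mapped, single-probe cache from (pointer, name) to an offset.
    // Simplified stand-in for KeyedLookupCache; kNotFound mirrors heap.cc.
    class LookupCache {
     public:
      static const int kLength = 64;  // must stay a power of two
      static const int kNotFound = -1;

      int Lookup(const void* map, const std::string& name) const {
        const Entry& e = entries_[Hash(map, name)];
        if (e.map == map && e.name == name) return e.offset;
        return kNotFound;             // single probe; no chaining
      }

      void Update(const void* map, const std::string& name, int offset) {
        Entry& e = entries_[Hash(map, name)];  // collisions just overwrite
        e.map = map;
        e.name = name;
        e.offset = offset;
      }

     private:
      struct Entry {
        const void* map = nullptr;
        std::string name;
        int offset = -1;
      };

      static int Hash(const void* map, const std::string& name) {
        // Fold the pointer bits into the name hash, then mask to capacity.
        uintptr_t addr = reinterpret_cast<uintptr_t>(map) >> 3;
        return static_cast<int>((addr ^ std::hash<std::string>()(name)) &
                                (kLength - 1));
      }

      Entry entries_[kLength];
    };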
5588 5817
5589 5818
5590 void KeyedLookupCache::Clear() { 5819 void KeyedLookupCache::Clear() {
5591 for (int index = 0; index < kLength; index++) keys_[index].map = NULL; 5820 for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
5592 } 5821 }
5593 5822
5594 5823
5595 KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
5596
5597
5598 int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
5599
5600
5601 void DescriptorLookupCache::Clear() { 5824 void DescriptorLookupCache::Clear() {
5602 for (int index = 0; index < kLength; index++) keys_[index].array = NULL; 5825 for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
5603 } 5826 }
5604 5827
5605 5828
5606 DescriptorLookupCache::Key
5607 DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
5608
5609 int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
5610
5611
5612 #ifdef DEBUG 5829 #ifdef DEBUG
5613 void Heap::GarbageCollectionGreedyCheck() { 5830 void Heap::GarbageCollectionGreedyCheck() {
5614 ASSERT(FLAG_gc_greedy); 5831 ASSERT(FLAG_gc_greedy);
5615 if (Bootstrapper::IsActive()) return; 5832 if (isolate_->bootstrapper()->IsActive()) return;
5616 if (disallow_allocation_failure()) return; 5833 if (disallow_allocation_failure()) return;
5617 CollectGarbage(NEW_SPACE); 5834 CollectGarbage(NEW_SPACE);
5618 } 5835 }
5619 #endif 5836 #endif
5620 5837
5621 5838
5622 TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t) 5839 TranscendentalCache::SubCache::SubCache(Type t)
5623 : type_(t) { 5840 : type_(t),
5841 isolate_(Isolate::Current()) {
5624 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't 5842 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
5625 uint32_t in1 = 0xffffffffu; // generated by the FPU. 5843 uint32_t in1 = 0xffffffffu; // generated by the FPU.
5626 for (int i = 0; i < kCacheSize; i++) { 5844 for (int i = 0; i < kCacheSize; i++) {
5627 elements_[i].in[0] = in0; 5845 elements_[i].in[0] = in0;
5628 elements_[i].in[1] = in1; 5846 elements_[i].in[1] = in1;
5629 elements_[i].output = NULL; 5847 elements_[i].output = NULL;
5630 } 5848 }
5631 } 5849 }
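
The 0xffffffff/0xffffffff initializer is a sentinel: the cache compares the two 32-bit halves of the argument's bit pattern against each slot, and this particular NaN encoding is not one the FPU produces, so an empty slot can never match a real input. A self-contained illustration of that bit-pattern keying (single slot, invented names; not the actual cache):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // One slot of a transcendental-style cache, keyed by the raw bits of
    // the double argument. A NaN pattern the FPU never emits marks "empty".
    struct Slot {
      uint32_t in0 = 0xffffffffu;  // sentinel halves; no real input matches
      uint32_t in1 = 0xffffffffu;
      double output = 0.0;
    };

    static void SplitBits(double v, uint32_t* lo, uint32_t* hi) {
      uint64_t bits;
      std::memcpy(&bits, &v, sizeof(bits));  // type-pun safely via memcpy
      *lo = static_cast<uint32_t>(bits);
      *hi = static_cast<uint32_t>(bits >> 32);
    }

    int main() {
      Slot slot;
      double x = 0.5;
      uint32_t lo, hi;
      SplitBits(x, &lo, &hi);
      if (slot.in0 == lo && slot.in1 == hi) {
        std::printf("hit: %g\n", slot.output);
      } else {
        slot.in0 = lo; slot.in1 = hi;
        slot.output = std::sin(x);   // compute and fill the slot
        std::printf("miss, computed %g\n", slot.output);
      }
      return 0;
    }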
5632 5850
5633 5851
5634 TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
5635
5636
5637 void TranscendentalCache::Clear() { 5852 void TranscendentalCache::Clear() {
5638 for (int i = 0; i < kNumberOfCaches; i++) { 5853 for (int i = 0; i < kNumberOfCaches; i++) {
5639 if (caches_[i] != NULL) { 5854 if (caches_[i] != NULL) {
5640 delete caches_[i]; 5855 delete caches_[i];
5641 caches_[i] = NULL; 5856 caches_[i] = NULL;
5642 } 5857 }
5643 } 5858 }
5644 } 5859 }
5645 5860
5646 5861
5647 void ExternalStringTable::CleanUp() { 5862 void ExternalStringTable::CleanUp() {
5648 int last = 0; 5863 int last = 0;
5649 for (int i = 0; i < new_space_strings_.length(); ++i) { 5864 for (int i = 0; i < new_space_strings_.length(); ++i) {
5650 if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue; 5865 if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
5651 if (Heap::InNewSpace(new_space_strings_[i])) { 5866 if (heap_->InNewSpace(new_space_strings_[i])) {
5652 new_space_strings_[last++] = new_space_strings_[i]; 5867 new_space_strings_[last++] = new_space_strings_[i];
5653 } else { 5868 } else {
5654 old_space_strings_.Add(new_space_strings_[i]); 5869 old_space_strings_.Add(new_space_strings_[i]);
5655 } 5870 }
5656 } 5871 }
5657 new_space_strings_.Rewind(last); 5872 new_space_strings_.Rewind(last);
5658 last = 0; 5873 last = 0;
5659 for (int i = 0; i < old_space_strings_.length(); ++i) { 5874 for (int i = 0; i < old_space_strings_.length(); ++i) {
5660 if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue; 5875 if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
5661 ASSERT(!Heap::InNewSpace(old_space_strings_[i])); 5876 ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
5662 old_space_strings_[last++] = old_space_strings_[i]; 5877 old_space_strings_[last++] = old_space_strings_[i];
5663 } 5878 }
5664 old_space_strings_.Rewind(last); 5879 old_space_strings_.Rewind(last);
5665 Verify(); 5880 Verify();
5666 } 5881 }
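
CleanUp is a two-index, in-place compaction: the write cursor `last` trails the read cursor, survivors are copied down (with promoted new-space strings rerouted to the old-space list), and Rewind truncates the dead tail. The same pattern in standalone form, with std::vector::resize playing the role of Rewind (a sketch, not the V8 List API):

    #include <vector>

    // Two-index, in-place compaction: keep survivors, drop the rest, then
    // truncate. Mirrors the shape of ExternalStringTable::CleanUp.
    template <typename T, typename Pred>
    void CompactInPlace(std::vector<T>* items, Pred keep) {
      size_t last = 0;                       // write cursor trails read cursor
      for (size_t i = 0; i < items->size(); ++i) {
        if (keep((*items)[i])) (*items)[last++] = (*items)[i];
      }
      items->resize(last);                   // the Rewind(last) step
    }

    int main() {
      std::vector<int> v = {1, 0, 2, 0, 3};  // 0 plays the role of null_value
      CompactInPlace(&v, [](int x) { return x != 0; });
      return static_cast<int>(v.size());     // 3 survivors remain
    }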
5667 5882
5668 5883
5669 void ExternalStringTable::TearDown() { 5884 void ExternalStringTable::TearDown() {
5670 new_space_strings_.Free(); 5885 new_space_strings_.Free();
5671 old_space_strings_.Free(); 5886 old_space_strings_.Free();
5672 } 5887 }
5673 5888
5674 5889
5675 List<Object*> ExternalStringTable::new_space_strings_;
5676 List<Object*> ExternalStringTable::old_space_strings_;
5677
5678 } } // namespace v8::internal 5890 } } // namespace v8::internal