Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
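Reviewer note on the overall shape of this file's changes: Heap's static class members become instance fields initialized in a new Heap constructor, and formerly global state is reached through the owning Isolate. A minimal before/after sketch (simplified, hypothetical names; not code from the patch):

// Before the isolates merge: one process-wide heap, all state static.
class Heap {
 public:
  static intptr_t Capacity();
 private:
  static NewSpace new_space_;
};

// After: one Heap per Isolate. State lives in members, and callers go
// through isolate()->heap(), or the HEAP macro where no isolate is at hand.
class Heap {
 public:
  intptr_t Capacity();
  Isolate* isolate() const { return isolate_; }
 private:
  Isolate* isolate_;
  NewSpace new_space_;
};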
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 32 matching lines...)
43 #include "scanner-base.h" 43 #include "scanner-base.h"
44 #include "scopeinfo.h" 44 #include "scopeinfo.h"
45 #include "snapshot.h" 45 #include "snapshot.h"
46 #include "v8threads.h" 46 #include "v8threads.h"
47 #include "vm-state-inl.h" 47 #include "vm-state-inl.h"
48 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP 48 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
49 #include "regexp-macro-assembler.h" 49 #include "regexp-macro-assembler.h"
50 #include "arm/regexp-macro-assembler-arm.h" 50 #include "arm/regexp-macro-assembler-arm.h"
51 #endif 51 #endif
52 52
53
54 namespace v8 { 53 namespace v8 {
55 namespace internal { 54 namespace internal {
56 55
57 56
58 String* Heap::hidden_symbol_;
59 Object* Heap::roots_[Heap::kRootListLength];
60 Object* Heap::global_contexts_list_;
61
62
63 NewSpace Heap::new_space_;
64 OldSpace* Heap::old_pointer_space_ = NULL;
65 OldSpace* Heap::old_data_space_ = NULL;
66 OldSpace* Heap::code_space_ = NULL;
67 MapSpace* Heap::map_space_ = NULL;
68 CellSpace* Heap::cell_space_ = NULL;
69 LargeObjectSpace* Heap::lo_space_ = NULL;
70
71 static const intptr_t kMinimumPromotionLimit = 2 * MB; 57 static const intptr_t kMinimumPromotionLimit = 2 * MB;
72 static const intptr_t kMinimumAllocationLimit = 8 * MB; 58 static const intptr_t kMinimumAllocationLimit = 8 * MB;
73 59
74 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
75 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
76 60
77 int Heap::old_gen_exhausted_ = false; 61 static Mutex* gc_initializer_mutex = OS::CreateMutex();
78 62
79 int Heap::amount_of_external_allocated_memory_ = 0;
80 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
81 63
64 Heap::Heap()
65 : isolate_(NULL),
82 // semispace_size_ should be a power of 2 and old_generation_size_ should be 66 // semispace_size_ should be a power of 2 and old_generation_size_ should be
83 // a multiple of Page::kPageSize. 67 // a multiple of Page::kPageSize.
84 #if defined(ANDROID) 68 #if defined(ANDROID)
85 static const int default_max_semispace_size_ = 2*MB; 69 reserved_semispace_size_(2*MB),
86 intptr_t Heap::max_old_generation_size_ = 192*MB; 70 max_semispace_size_(2*MB),
87 int Heap::initial_semispace_size_ = 128*KB; 71 initial_semispace_size_(128*KB),
88 intptr_t Heap::code_range_size_ = 0; 72 max_old_generation_size_(192*MB),
89 intptr_t Heap::max_executable_size_ = max_old_generation_size_; 73 max_executable_size_(max_old_generation_size_),
74 code_range_size_(0),
90 #elif defined(V8_TARGET_ARCH_X64) 75 #elif defined(V8_TARGET_ARCH_X64)
91 static const int default_max_semispace_size_ = 16*MB; 76 reserved_semispace_size_(16*MB),
92 intptr_t Heap::max_old_generation_size_ = 1*GB; 77 max_semispace_size_(16*MB),
93 int Heap::initial_semispace_size_ = 1*MB; 78 initial_semispace_size_(1*MB),
94 intptr_t Heap::code_range_size_ = 512*MB; 79 max_old_generation_size_(1*GB),
95 intptr_t Heap::max_executable_size_ = 256*MB; 80 max_executable_size_(256*MB),
81 code_range_size_(512*MB),
96 #else 82 #else
97 static const int default_max_semispace_size_ = 8*MB; 83 reserved_semispace_size_(8*MB),
98 intptr_t Heap::max_old_generation_size_ = 512*MB; 84 max_semispace_size_(8*MB),
99 int Heap::initial_semispace_size_ = 512*KB; 85 initial_semispace_size_(512*KB),
100 intptr_t Heap::code_range_size_ = 0; 86 max_old_generation_size_(512*MB),
101 intptr_t Heap::max_executable_size_ = 128*MB; 87 max_executable_size_(128*MB),
88 code_range_size_(0),
89 #endif
90 // Variables set based on semispace_size_ and old_generation_size_ in
91 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
92 // Will be 4 * reserved_semispace_size_ to ensure that young
93 // generation can be aligned to its size.
94 survived_since_last_expansion_(0),
95 always_allocate_scope_depth_(0),
96 linear_allocation_scope_depth_(0),
97 contexts_disposed_(0),
98 new_space_(this),
99 old_pointer_space_(NULL),
100 old_data_space_(NULL),
101 code_space_(NULL),
102 map_space_(NULL),
103 cell_space_(NULL),
104 lo_space_(NULL),
105 gc_state_(NOT_IN_GC),
106 mc_count_(0),
107 ms_count_(0),
108 gc_count_(0),
109 unflattened_strings_length_(0),
110 #ifdef DEBUG
111 allocation_allowed_(true),
112 allocation_timeout_(0),
113 disallow_allocation_failure_(false),
114 debug_utils_(NULL),
115 #endif // DEBUG
116 old_gen_promotion_limit_(kMinimumPromotionLimit),
117 old_gen_allocation_limit_(kMinimumAllocationLimit),
118 external_allocation_limit_(0),
119 amount_of_external_allocated_memory_(0),
120 amount_of_external_allocated_memory_at_last_global_gc_(0),
121 old_gen_exhausted_(false),
122 hidden_symbol_(NULL),
123 global_gc_prologue_callback_(NULL),
124 global_gc_epilogue_callback_(NULL),
125 gc_safe_size_of_old_object_(NULL),
126 tracer_(NULL),
127 young_survivors_after_last_gc_(0),
128 high_survival_rate_period_length_(0),
129 survival_rate_(0),
130 previous_survival_rate_trend_(Heap::STABLE),
131 survival_rate_trend_(Heap::STABLE),
132 max_gc_pause_(0),
133 max_alive_after_gc_(0),
134 min_in_mutator_(kMaxInt),
135 alive_after_last_gc_(0),
136 last_gc_end_timestamp_(0.0),
137 page_watermark_invalidated_mark_(1 << Page::WATERMARK_INVALIDATED),
138 number_idle_notifications_(0),
139 last_idle_notification_gc_count_(0),
140 last_idle_notification_gc_count_init_(false),
141 configured_(false),
142 is_safe_to_read_maps_(true) {
143 // Allow build-time customization of the max semispace size. Building
144 // V8 with snapshots and a non-default max semispace size is much
145 // easier if you can define it as part of the build environment.
146 #if defined(V8_MAX_SEMISPACE_SIZE)
147 max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
102 #endif 148 #endif
103 149
104 // Allow build-time customization of the max semispace size. Building 150 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
105 // V8 with snapshots and a non-default max semispace size is much 151 global_contexts_list_ = NULL;
106 // easier if you can define it as part of the build environment. 152 mark_compact_collector_.heap_ = this;
107 #if defined(V8_MAX_SEMISPACE_SIZE) 153 external_string_table_.heap_ = this;
108 int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE; 154 }
109 #else
110 int Heap::max_semispace_size_ = default_max_semispace_size_;
111 #endif
112 155
113 // The snapshot semispace size will be the default semispace size if
114 // snapshotting is used and will be the requested semispace size as
115 // set up by ConfigureHeap otherwise.
116 int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
117
118 List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
119 List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
120
121 GCCallback Heap::global_gc_prologue_callback_ = NULL;
122 GCCallback Heap::global_gc_epilogue_callback_ = NULL;
123 HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
124
125 // Variables set based on semispace_size_ and old_generation_size_ in
126 // ConfigureHeap.
127
128 // Will be 4 * reserved_semispace_size_ to ensure that young
129 // generation can be aligned to its size.
130 int Heap::survived_since_last_expansion_ = 0;
131 intptr_t Heap::external_allocation_limit_ = 0;
132
133 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
134
135 int Heap::mc_count_ = 0;
136 int Heap::ms_count_ = 0;
137 unsigned int Heap::gc_count_ = 0;
138
139 GCTracer* Heap::tracer_ = NULL;
140
141 int Heap::unflattened_strings_length_ = 0;
142
143 int Heap::always_allocate_scope_depth_ = 0;
144 int Heap::linear_allocation_scope_depth_ = 0;
145 int Heap::contexts_disposed_ = 0;
146
147 int Heap::young_survivors_after_last_gc_ = 0;
148 int Heap::high_survival_rate_period_length_ = 0;
149 double Heap::survival_rate_ = 0;
150 Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
151 Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
152
153 #ifdef DEBUG
154 bool Heap::allocation_allowed_ = true;
155
156 int Heap::allocation_timeout_ = 0;
157 bool Heap::disallow_allocation_failure_ = false;
158 #endif // DEBUG
159
160 intptr_t GCTracer::alive_after_last_gc_ = 0;
161 double GCTracer::last_gc_end_timestamp_ = 0.0;
162 int GCTracer::max_gc_pause_ = 0;
163 intptr_t GCTracer::max_alive_after_gc_ = 0;
164 int GCTracer::min_in_mutator_ = kMaxInt;
165 156
166 intptr_t Heap::Capacity() { 157 intptr_t Heap::Capacity() {
167 if (!HasBeenSetup()) return 0; 158 if (!HasBeenSetup()) return 0;
168 159
169 return new_space_.Capacity() + 160 return new_space_.Capacity() +
170 old_pointer_space_->Capacity() + 161 old_pointer_space_->Capacity() +
171 old_data_space_->Capacity() + 162 old_data_space_->Capacity() +
172 code_space_->Capacity() + 163 code_space_->Capacity() +
173 map_space_->Capacity() + 164 map_space_->Capacity() +
174 cell_space_->Capacity(); 165 cell_space_->Capacity();
175 } 166 }
176 167
177 168
178 intptr_t Heap::CommittedMemory() { 169 intptr_t Heap::CommittedMemory() {
179 if (!HasBeenSetup()) return 0; 170 if (!HasBeenSetup()) return 0;
180 171
181 return new_space_.CommittedMemory() + 172 return new_space_.CommittedMemory() +
182 old_pointer_space_->CommittedMemory() + 173 old_pointer_space_->CommittedMemory() +
183 old_data_space_->CommittedMemory() + 174 old_data_space_->CommittedMemory() +
184 code_space_->CommittedMemory() + 175 code_space_->CommittedMemory() +
185 map_space_->CommittedMemory() + 176 map_space_->CommittedMemory() +
186 cell_space_->CommittedMemory() + 177 cell_space_->CommittedMemory() +
187 lo_space_->Size(); 178 lo_space_->Size();
188 } 179 }
189 180
190 intptr_t Heap::CommittedMemoryExecutable() { 181 intptr_t Heap::CommittedMemoryExecutable() {
191 if (!HasBeenSetup()) return 0; 182 if (!HasBeenSetup()) return 0;
192 183
193 return MemoryAllocator::SizeExecutable(); 184 return isolate()->memory_allocator()->SizeExecutable();
194 } 185 }
195 186
196 187
197 intptr_t Heap::Available() { 188 intptr_t Heap::Available() {
198 if (!HasBeenSetup()) return 0; 189 if (!HasBeenSetup()) return 0;
199 190
200 return new_space_.Available() + 191 return new_space_.Available() +
201 old_pointer_space_->Available() + 192 old_pointer_space_->Available() +
202 old_data_space_->Available() + 193 old_data_space_->Available() +
203 code_space_->Available() + 194 code_space_->Available() +
204 map_space_->Available() + 195 map_space_->Available() +
205 cell_space_->Available(); 196 cell_space_->Available();
206 } 197 }
207 198
208 199
209 bool Heap::HasBeenSetup() { 200 bool Heap::HasBeenSetup() {
210 return old_pointer_space_ != NULL && 201 return old_pointer_space_ != NULL &&
211 old_data_space_ != NULL && 202 old_data_space_ != NULL &&
212 code_space_ != NULL && 203 code_space_ != NULL &&
213 map_space_ != NULL && 204 map_space_ != NULL &&
214 cell_space_ != NULL && 205 cell_space_ != NULL &&
215 lo_space_ != NULL; 206 lo_space_ != NULL;
216 } 207 }
217 208
218 209
219 int Heap::GcSafeSizeOfOldObject(HeapObject* object) { 210 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
220 ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects. 211 ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
221 ASSERT(!MarkCompactCollector::are_map_pointers_encoded()); 212 ASSERT(!HEAP->mark_compact_collector()->are_map_pointers_encoded());
222 MapWord map_word = object->map_word(); 213 MapWord map_word = object->map_word();
223 map_word.ClearMark(); 214 map_word.ClearMark();
224 map_word.ClearOverflow(); 215 map_word.ClearOverflow();
225 return object->SizeFromMap(map_word.ToMap()); 216 return object->SizeFromMap(map_word.ToMap());
226 } 217 }
227 218
228 219
229 int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) { 220 int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
230 ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects. 221 ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
231 ASSERT(MarkCompactCollector::are_map_pointers_encoded()); 222 ASSERT(HEAP->mark_compact_collector()->are_map_pointers_encoded());
232 uint32_t marker = Memory::uint32_at(object->address()); 223 uint32_t marker = Memory::uint32_at(object->address());
233 if (marker == MarkCompactCollector::kSingleFreeEncoding) { 224 if (marker == MarkCompactCollector::kSingleFreeEncoding) {
234 return kIntSize; 225 return kIntSize;
235 } else if (marker == MarkCompactCollector::kMultiFreeEncoding) { 226 } else if (marker == MarkCompactCollector::kMultiFreeEncoding) {
236 return Memory::int_at(object->address() + kIntSize); 227 return Memory::int_at(object->address() + kIntSize);
237 } else { 228 } else {
238 MapWord map_word = object->map_word(); 229 MapWord map_word = object->map_word();
239 Address map_address = map_word.DecodeMapAddress(Heap::map_space()); 230 Address map_address = map_word.DecodeMapAddress(HEAP->map_space());
240 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address)); 231 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address));
241 return object->SizeFromMap(map); 232 return object->SizeFromMap(map);
242 } 233 }
243 } 234 }
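GcSafeSizeOfOldObject and its encoded-map variant remain static, so on the new side they reach the current heap through the HEAP macro rather than an isolate_ member. A hedged sketch of what that macro is assumed to expand to at this stage of the isolates work:

// Assumed definition (isolate.h): the heap of the isolate bound to the
// current thread, looked up via thread-local storage.
#define HEAP (v8::internal::Isolate::Current()->heap())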
244 235
245 236
246 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) { 237 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
247 // Is global GC requested? 238 // Is global GC requested?
248 if (space != NEW_SPACE || FLAG_gc_global) { 239 if (space != NEW_SPACE || FLAG_gc_global) {
249 Counters::gc_compactor_caused_by_request.Increment(); 240 isolate_->counters()->gc_compactor_caused_by_request()->Increment();
250 return MARK_COMPACTOR; 241 return MARK_COMPACTOR;
251 } 242 }
252 243
253 // Is enough data promoted to justify a global GC? 244 // Is enough data promoted to justify a global GC?
254 if (OldGenerationPromotionLimitReached()) { 245 if (OldGenerationPromotionLimitReached()) {
255 Counters::gc_compactor_caused_by_promoted_data.Increment(); 246 isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
256 return MARK_COMPACTOR; 247 return MARK_COMPACTOR;
257 } 248 }
258 249
259 // Have allocation in OLD and LO failed? 250 // Have allocation in OLD and LO failed?
260 if (old_gen_exhausted_) { 251 if (old_gen_exhausted_) {
261 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment(); 252 isolate_->counters()->
253 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
262 return MARK_COMPACTOR; 254 return MARK_COMPACTOR;
263 } 255 }
264 256
265 // Is there enough space left in OLD to guarantee that a scavenge can 257 // Is there enough space left in OLD to guarantee that a scavenge can
266 // succeed? 258 // succeed?
267 // 259 //
268 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available 260 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
269 // for object promotion. It counts only the bytes that the memory 261 // for object promotion. It counts only the bytes that the memory
270 // allocator has not yet allocated from the OS and assigned to any space, 262 // allocator has not yet allocated from the OS and assigned to any space,
271 // and does not count available bytes already in the old space or code 263 // and does not count available bytes already in the old space or code
272 // space. Undercounting is safe---we may get an unrequested full GC when 264 // space. Undercounting is safe---we may get an unrequested full GC when
273 // a scavenge would have succeeded. 265 // a scavenge would have succeeded.
274 if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) { 266 if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
275 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment(); 267 isolate_->counters()->
268 gc_compactor_caused_by_oldspace_exhaustion()->Increment();
276 return MARK_COMPACTOR; 269 return MARK_COMPACTOR;
277 } 270 }
278 271
279 // Default 272 // Default
280 return SCAVENGER; 273 return SCAVENGER;
281 } 274 }
282 275
283 276
284 // TODO(1238405): Combine the infrastructure for --heap-stats and 277 // TODO(1238405): Combine the infrastructure for --heap-stats and
285 // --log-gc to avoid the complicated preprocessor and flag testing. 278 // --log-gc to avoid the complicated preprocessor and flag testing.
(...skipping 24 matching lines...)
310 } 303 }
311 #endif 304 #endif
312 } 305 }
313 306
314 307
315 #if defined(ENABLE_LOGGING_AND_PROFILING) 308 #if defined(ENABLE_LOGGING_AND_PROFILING)
316 void Heap::PrintShortHeapStatistics() { 309 void Heap::PrintShortHeapStatistics() {
317 if (!FLAG_trace_gc_verbose) return; 310 if (!FLAG_trace_gc_verbose) return;
318 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d" 311 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
319 ", available: %8" V8_PTR_PREFIX "d\n", 312 ", available: %8" V8_PTR_PREFIX "d\n",
320 MemoryAllocator::Size(), 313 isolate_->memory_allocator()->Size(),
321 MemoryAllocator::Available()); 314 isolate_->memory_allocator()->Available());
322 PrintF("New space, used: %8" V8_PTR_PREFIX "d" 315 PrintF("New space, used: %8" V8_PTR_PREFIX "d"
323 ", available: %8" V8_PTR_PREFIX "d\n", 316 ", available: %8" V8_PTR_PREFIX "d\n",
324 Heap::new_space_.Size(), 317 Heap::new_space_.Size(),
325 new_space_.Available()); 318 new_space_.Available());
326 PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d" 319 PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
327 ", available: %8" V8_PTR_PREFIX "d" 320 ", available: %8" V8_PTR_PREFIX "d"
328 ", waste: %8" V8_PTR_PREFIX "d\n", 321 ", waste: %8" V8_PTR_PREFIX "d\n",
329 old_pointer_space_->Size(), 322 old_pointer_space_->Size(),
330 old_pointer_space_->Available(), 323 old_pointer_space_->Available(),
331 old_pointer_space_->Waste()); 324 old_pointer_space_->Waste());
(...skipping 44 matching lines...)
376 #elif defined(DEBUG) 369 #elif defined(DEBUG)
377 if (FLAG_heap_stats) ReportHeapStatistics("After GC"); 370 if (FLAG_heap_stats) ReportHeapStatistics("After GC");
378 #elif defined(ENABLE_LOGGING_AND_PROFILING) 371 #elif defined(ENABLE_LOGGING_AND_PROFILING)
379 if (FLAG_log_gc) new_space_.ReportStatistics(); 372 if (FLAG_log_gc) new_space_.ReportStatistics();
380 #endif 373 #endif
381 } 374 }
382 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 375 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
383 376
384 377
385 void Heap::GarbageCollectionPrologue() { 378 void Heap::GarbageCollectionPrologue() {
386 TranscendentalCache::Clear(); 379 isolate_->transcendental_cache()->Clear();
387 ClearJSFunctionResultCaches(); 380 ClearJSFunctionResultCaches();
388 gc_count_++; 381 gc_count_++;
389 unflattened_strings_length_ = 0; 382 unflattened_strings_length_ = 0;
390 #ifdef DEBUG 383 #ifdef DEBUG
391 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 384 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
392 allow_allocation(false); 385 allow_allocation(false);
393 386
394 if (FLAG_verify_heap) { 387 if (FLAG_verify_heap) {
395 Verify(); 388 Verify();
396 } 389 }
(...skipping 20 matching lines...)
417 void Heap::GarbageCollectionEpilogue() { 410 void Heap::GarbageCollectionEpilogue() {
418 LiveObjectList::GCEpilogue(); 411 LiveObjectList::GCEpilogue();
419 #ifdef DEBUG 412 #ifdef DEBUG
420 allow_allocation(true); 413 allow_allocation(true);
421 ZapFromSpace(); 414 ZapFromSpace();
422 415
423 if (FLAG_verify_heap) { 416 if (FLAG_verify_heap) {
424 Verify(); 417 Verify();
425 } 418 }
426 419
427 if (FLAG_print_global_handles) GlobalHandles::Print(); 420 if (FLAG_print_global_handles) isolate_->global_handles()->Print();
428 if (FLAG_print_handles) PrintHandles(); 421 if (FLAG_print_handles) PrintHandles();
429 if (FLAG_gc_verbose) Print(); 422 if (FLAG_gc_verbose) Print();
430 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 423 if (FLAG_code_stats) ReportCodeStatistics("After GC");
431 #endif 424 #endif
432 425
433 Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects())); 426 isolate_->counters()->alive_after_last_gc()->Set(
427 static_cast<int>(SizeOfObjects()));
434 428
435 Counters::symbol_table_capacity.Set(symbol_table()->Capacity()); 429 isolate_->counters()->symbol_table_capacity()->Set(
436 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements()); 430 symbol_table()->Capacity());
431 isolate_->counters()->number_of_symbols()->Set(
432 symbol_table()->NumberOfElements());
437 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 433 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
438 ReportStatisticsAfterGC(); 434 ReportStatisticsAfterGC();
439 #endif 435 #endif
440 #ifdef ENABLE_DEBUGGER_SUPPORT 436 #ifdef ENABLE_DEBUGGER_SUPPORT
441 Debug::AfterGarbageCollection(); 437 isolate_->debug()->AfterGarbageCollection();
442 #endif 438 #endif
443 } 439 }
444 440
445 441
446 void Heap::CollectAllGarbage(bool force_compaction) { 442 void Heap::CollectAllGarbage(bool force_compaction) {
447 // Since we are ignoring the return value, the exact choice of space does 443 // Since we are ignoring the return value, the exact choice of space does
448 // not matter, so long as we do not specify NEW_SPACE, which would not 444 // not matter, so long as we do not specify NEW_SPACE, which would not
449 // cause a full GC. 445 // cause a full GC.
450 MarkCompactCollector::SetForceCompaction(force_compaction); 446 mark_compact_collector_.SetForceCompaction(force_compaction);
451 CollectGarbage(OLD_POINTER_SPACE); 447 CollectGarbage(OLD_POINTER_SPACE);
452 MarkCompactCollector::SetForceCompaction(false); 448 mark_compact_collector_.SetForceCompaction(false);
453 } 449 }
454 450
455 451
456 void Heap::CollectAllAvailableGarbage() { 452 void Heap::CollectAllAvailableGarbage() {
457 // Since we are ignoring the return value, the exact choice of space does 453 // Since we are ignoring the return value, the exact choice of space does
458 // not matter, so long as we do not specify NEW_SPACE, which would not 454 // not matter, so long as we do not specify NEW_SPACE, which would not
459 // cause a full GC. 455 // cause a full GC.
460 MarkCompactCollector::SetForceCompaction(true); 456 mark_compact_collector()->SetForceCompaction(true);
461 457
462 // Major GC would invoke weak handle callbacks on weakly reachable 458 // Major GC would invoke weak handle callbacks on weakly reachable
463 // handles, but won't collect weakly reachable objects until next 459 // handles, but won't collect weakly reachable objects until next
464 // major GC. Therefore if we collect aggressively and weak handle callback 460 // major GC. Therefore if we collect aggressively and weak handle callback
465 // has been invoked, we rerun major GC to release objects which become 461 // has been invoked, we rerun major GC to release objects which become
466 // garbage. 462 // garbage.
467 // Note: as weak callbacks can execute arbitrary code, we cannot 463 // Note: as weak callbacks can execute arbitrary code, we cannot
468 // hope that eventually there will be no weak callback invocations. 464 // hope that eventually there will be no weak callback invocations.
469 // Therefore stop recollecting after several attempts. 465 // Therefore stop recollecting after several attempts.
470 const int kMaxNumberOfAttempts = 7; 466 const int kMaxNumberOfAttempts = 7;
471 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { 467 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
472 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) { 468 if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
473 break; 469 break;
474 } 470 }
475 } 471 }
476 MarkCompactCollector::SetForceCompaction(false); 472 mark_compact_collector()->SetForceCompaction(false);
477 } 473 }
478 474
479 475
480 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) { 476 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
481 // The VM is in the GC state until exiting this function. 477 // The VM is in the GC state until exiting this function.
482 VMState state(GC); 478 VMState state(isolate_, GC);
483 479
484 #ifdef DEBUG 480 #ifdef DEBUG
485 // Reset the allocation timeout to the GC interval, but make sure to 481 // Reset the allocation timeout to the GC interval, but make sure to
486 // allow at least a few allocations after a collection. The reason 482 // allow at least a few allocations after a collection. The reason
487 // for this is that we have a lot of allocation sequences and we 483 // for this is that we have a lot of allocation sequences and we
488 // assume that a garbage collection will allow the subsequent 484 // assume that a garbage collection will allow the subsequent
489 // allocation attempts to go through. 485 // allocation attempts to go through.
490 allocation_timeout_ = Max(6, FLAG_gc_interval); 486 allocation_timeout_ = Max(6, FLAG_gc_interval);
491 #endif 487 #endif
492 488
493 bool next_gc_likely_to_collect_more = false; 489 bool next_gc_likely_to_collect_more = false;
494 490
495 { GCTracer tracer; 491 { GCTracer tracer(this);
496 GarbageCollectionPrologue(); 492 GarbageCollectionPrologue();
497 // The GC count was incremented in the prologue. Tell the tracer about 493 // The GC count was incremented in the prologue. Tell the tracer about
498 // it. 494 // it.
499 tracer.set_gc_count(gc_count_); 495 tracer.set_gc_count(gc_count_);
500 496
501 // Tell the tracer which collector we've selected. 497 // Tell the tracer which collector we've selected.
502 tracer.set_collector(collector); 498 tracer.set_collector(collector);
503 499
504 HistogramTimer* rate = (collector == SCAVENGER) 500 HistogramTimer* rate = (collector == SCAVENGER)
505 ? &Counters::gc_scavenger 501 ? isolate_->counters()->gc_scavenger()
506 : &Counters::gc_compactor; 502 : isolate_->counters()->gc_compactor();
507 rate->Start(); 503 rate->Start();
508 next_gc_likely_to_collect_more = 504 next_gc_likely_to_collect_more =
509 PerformGarbageCollection(collector, &tracer); 505 PerformGarbageCollection(collector, &tracer);
510 rate->Stop(); 506 rate->Stop();
511 507
512 GarbageCollectionEpilogue(); 508 GarbageCollectionEpilogue();
513 } 509 }
514 510
515 511
516 #ifdef ENABLE_LOGGING_AND_PROFILING 512 #ifdef ENABLE_LOGGING_AND_PROFILING
517 if (FLAG_log_gc) HeapProfiler::WriteSample(); 513 if (FLAG_log_gc) HeapProfiler::WriteSample();
518 #endif 514 #endif
519 515
520 return next_gc_likely_to_collect_more; 516 return next_gc_likely_to_collect_more;
521 } 517 }
522 518
523 519
524 void Heap::PerformScavenge() { 520 void Heap::PerformScavenge() {
525 GCTracer tracer; 521 GCTracer tracer(this);
526 PerformGarbageCollection(SCAVENGER, &tracer); 522 PerformGarbageCollection(SCAVENGER, &tracer);
527 } 523 }
528 524
529 525
530 #ifdef DEBUG 526 #ifdef DEBUG
531 // Helper class for verifying the symbol table. 527 // Helper class for verifying the symbol table.
532 class SymbolTableVerifier : public ObjectVisitor { 528 class SymbolTableVerifier : public ObjectVisitor {
533 public: 529 public:
534 SymbolTableVerifier() { }
535 void VisitPointers(Object** start, Object** end) { 530 void VisitPointers(Object** start, Object** end) {
536 // Visit all HeapObject pointers in [start, end). 531 // Visit all HeapObject pointers in [start, end).
537 for (Object** p = start; p < end; p++) { 532 for (Object** p = start; p < end; p++) {
538 if ((*p)->IsHeapObject()) { 533 if ((*p)->IsHeapObject()) {
539 // Check that the symbol is actually a symbol. 534 // Check that the symbol is actually a symbol.
540 ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol()); 535 ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
541 } 536 }
542 } 537 }
543 } 538 }
544 }; 539 };
545 #endif // DEBUG 540 #endif // DEBUG
546 541
547 542
548 static void VerifySymbolTable() { 543 static void VerifySymbolTable() {
549 #ifdef DEBUG 544 #ifdef DEBUG
550 SymbolTableVerifier verifier; 545 SymbolTableVerifier verifier;
551 Heap::symbol_table()->IterateElements(&verifier); 546 HEAP->symbol_table()->IterateElements(&verifier);
552 #endif // DEBUG 547 #endif // DEBUG
553 } 548 }
554 549
555 550
556 void Heap::ReserveSpace( 551 void Heap::ReserveSpace(
557 int new_space_size, 552 int new_space_size,
558 int pointer_space_size, 553 int pointer_space_size,
559 int data_space_size, 554 int data_space_size,
560 int code_space_size, 555 int code_space_size,
561 int map_space_size, 556 int map_space_size,
(...skipping 64 matching lines...)
626 Shrink(); 621 Shrink();
627 if (new_space_.CommitFromSpaceIfNeeded()) return; 622 if (new_space_.CommitFromSpaceIfNeeded()) return;
628 623
629 // Committing memory to from space failed again. 624 // Committing memory to from space failed again.
630 // Memory is exhausted and we will die. 625 // Memory is exhausted and we will die.
631 V8::FatalProcessOutOfMemory("Committing semi space failed."); 626 V8::FatalProcessOutOfMemory("Committing semi space failed.");
632 } 627 }
633 628
634 629
635 void Heap::ClearJSFunctionResultCaches() { 630 void Heap::ClearJSFunctionResultCaches() {
636 if (Bootstrapper::IsActive()) return; 631 if (isolate_->bootstrapper()->IsActive()) return;
637 632
638 Object* context = global_contexts_list_; 633 Object* context = global_contexts_list_;
639 while (!context->IsUndefined()) { 634 while (!context->IsUndefined()) {
640 // Get the caches for this context: 635 // Get the caches for this context:
641 FixedArray* caches = 636 FixedArray* caches =
642 Context::cast(context)->jsfunction_result_caches(); 637 Context::cast(context)->jsfunction_result_caches();
643 // Clear the caches: 638 // Clear the caches:
644 int length = caches->length(); 639 int length = caches->length();
645 for (int i = 0; i < length; i++) { 640 for (int i = 0; i < length; i++) {
646 JSFunctionResultCache::cast(caches->get(i))->Clear(); 641 JSFunctionResultCache::cast(caches->get(i))->Clear();
647 } 642 }
648 // Get the next context: 643 // Get the next context:
649 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 644 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
650 } 645 }
651 } 646 }
652 647
653 648
649
654 void Heap::ClearNormalizedMapCaches() { 650 void Heap::ClearNormalizedMapCaches() {
655 if (Bootstrapper::IsActive()) return; 651 if (isolate_->bootstrapper()->IsActive()) return;
656 652
657 Object* context = global_contexts_list_; 653 Object* context = global_contexts_list_;
658 while (!context->IsUndefined()) { 654 while (!context->IsUndefined()) {
659 Context::cast(context)->normalized_map_cache()->Clear(); 655 Context::cast(context)->normalized_map_cache()->Clear();
660 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 656 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
661 } 657 }
662 } 658 }
663 659
664 660
665 #ifdef DEBUG 661 #ifdef DEBUG
(...skipping 36 matching lines...)
702 } 698 }
703 699
704 survival_rate_ = survival_rate; 700 survival_rate_ = survival_rate;
705 } 701 }
706 702
707 bool Heap::PerformGarbageCollection(GarbageCollector collector, 703 bool Heap::PerformGarbageCollection(GarbageCollector collector,
708 GCTracer* tracer) { 704 GCTracer* tracer) {
709 bool next_gc_likely_to_collect_more = false; 705 bool next_gc_likely_to_collect_more = false;
710 706
711 if (collector != SCAVENGER) { 707 if (collector != SCAVENGER) {
712 PROFILE(CodeMovingGCEvent()); 708 PROFILE(isolate_, CodeMovingGCEvent());
713 } 709 }
714 710
715 VerifySymbolTable(); 711 VerifySymbolTable();
716 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { 712 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
717 ASSERT(!allocation_allowed_); 713 ASSERT(!allocation_allowed_);
718 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 714 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
719 global_gc_prologue_callback_(); 715 global_gc_prologue_callback_();
720 } 716 }
721 717
722 GCType gc_type = 718 GCType gc_type =
(...skipping 38 matching lines...)
761 757
762 old_gen_exhausted_ = false; 758 old_gen_exhausted_ = false;
763 } else { 759 } else {
764 tracer_ = tracer; 760 tracer_ = tracer;
765 Scavenge(); 761 Scavenge();
766 tracer_ = NULL; 762 tracer_ = NULL;
767 763
768 UpdateSurvivalRateTrend(start_new_space_size); 764 UpdateSurvivalRateTrend(start_new_space_size);
769 } 765 }
770 766
771 Counters::objs_since_last_young.Set(0); 767 isolate_->counters()->objs_since_last_young()->Set(0);
772 768
773 if (collector == MARK_COMPACTOR) { 769 if (collector == MARK_COMPACTOR) {
774 DisableAssertNoAllocation allow_allocation; 770 DisableAssertNoAllocation allow_allocation;
775 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); 771 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
776 next_gc_likely_to_collect_more = 772 next_gc_likely_to_collect_more =
777 GlobalHandles::PostGarbageCollectionProcessing(); 773 isolate_->global_handles()->PostGarbageCollectionProcessing();
778 } 774 }
779 775
780 // Update relocatables. 776 // Update relocatables.
781 Relocatable::PostGarbageCollectionProcessing(); 777 Relocatable::PostGarbageCollectionProcessing();
782 778
783 if (collector == MARK_COMPACTOR) { 779 if (collector == MARK_COMPACTOR) {
784 // Register the amount of external allocated memory. 780 // Register the amount of external allocated memory.
785 amount_of_external_allocated_memory_at_last_global_gc_ = 781 amount_of_external_allocated_memory_at_last_global_gc_ =
786 amount_of_external_allocated_memory_; 782 amount_of_external_allocated_memory_;
787 } 783 }
(...skipping 13 matching lines...)
801 global_gc_epilogue_callback_(); 797 global_gc_epilogue_callback_();
802 } 798 }
803 VerifySymbolTable(); 799 VerifySymbolTable();
804 800
805 return next_gc_likely_to_collect_more; 801 return next_gc_likely_to_collect_more;
806 } 802 }
807 803
808 804
809 void Heap::MarkCompact(GCTracer* tracer) { 805 void Heap::MarkCompact(GCTracer* tracer) {
810 gc_state_ = MARK_COMPACT; 806 gc_state_ = MARK_COMPACT;
811 LOG(ResourceEvent("markcompact", "begin")); 807 LOG(isolate_, ResourceEvent("markcompact", "begin"));
812 808
813 MarkCompactCollector::Prepare(tracer); 809 mark_compact_collector_.Prepare(tracer);
814 810
815 bool is_compacting = MarkCompactCollector::IsCompacting(); 811 bool is_compacting = mark_compact_collector_.IsCompacting();
816 812
817 if (is_compacting) { 813 if (is_compacting) {
818 mc_count_++; 814 mc_count_++;
819 } else { 815 } else {
820 ms_count_++; 816 ms_count_++;
821 } 817 }
822 tracer->set_full_gc_count(mc_count_ + ms_count_); 818 tracer->set_full_gc_count(mc_count_ + ms_count_);
823 819
824 MarkCompactPrologue(is_compacting); 820 MarkCompactPrologue(is_compacting);
825 821
826 MarkCompactCollector::CollectGarbage(); 822 is_safe_to_read_maps_ = false;
823 mark_compact_collector_.CollectGarbage();
824 is_safe_to_read_maps_ = true;
827 825
828 LOG(ResourceEvent("markcompact", "end")); 826 LOG(isolate_, ResourceEvent("markcompact", "end"));
829 827
830 gc_state_ = NOT_IN_GC; 828 gc_state_ = NOT_IN_GC;
831 829
832 Shrink(); 830 Shrink();
833 831
834 Counters::objs_since_last_full.Set(0); 832 isolate_->counters()->objs_since_last_full()->Set(0);
835 833
836 contexts_disposed_ = 0; 834 contexts_disposed_ = 0;
837 } 835 }
838 836
839 837
840 void Heap::MarkCompactPrologue(bool is_compacting) { 838 void Heap::MarkCompactPrologue(bool is_compacting) {
841 // At any old GC clear the keyed lookup cache to enable collection of unused 839 // At any old GC clear the keyed lookup cache to enable collection of unused
842 // maps. 840 // maps.
843 KeyedLookupCache::Clear(); 841 isolate_->keyed_lookup_cache()->Clear();
844 ContextSlotCache::Clear(); 842 isolate_->context_slot_cache()->Clear();
845 DescriptorLookupCache::Clear(); 843 isolate_->descriptor_lookup_cache()->Clear();
846 844
847 CompilationCache::MarkCompactPrologue(); 845 isolate_->compilation_cache()->MarkCompactPrologue();
848 846
849 CompletelyClearInstanceofCache(); 847 CompletelyClearInstanceofCache();
850 848
851 if (is_compacting) FlushNumberStringCache(); 849 if (is_compacting) FlushNumberStringCache();
852 850
853 ClearNormalizedMapCaches(); 851 ClearNormalizedMapCaches();
854 } 852 }
855 853
856 854
857 Object* Heap::FindCodeObject(Address a) { 855 Object* Heap::FindCodeObject(Address a) {
858 Object* obj = NULL; // Initialization to please compiler. 856 Object* obj = NULL; // Initialization to please compiler.
859 { MaybeObject* maybe_obj = code_space_->FindObject(a); 857 { MaybeObject* maybe_obj = code_space_->FindObject(a);
860 if (!maybe_obj->ToObject(&obj)) { 858 if (!maybe_obj->ToObject(&obj)) {
861 obj = lo_space_->FindObject(a)->ToObjectUnchecked(); 859 obj = lo_space_->FindObject(a)->ToObjectUnchecked();
862 } 860 }
863 } 861 }
864 return obj; 862 return obj;
865 } 863 }
866 864
867 865
868 // Helper class for copying HeapObjects 866 // Helper class for copying HeapObjects
869 class ScavengeVisitor: public ObjectVisitor { 867 class ScavengeVisitor: public ObjectVisitor {
870 public: 868 public:
869 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
871 870
872 void VisitPointer(Object** p) { ScavengePointer(p); } 871 void VisitPointer(Object** p) { ScavengePointer(p); }
873 872
874 void VisitPointers(Object** start, Object** end) { 873 void VisitPointers(Object** start, Object** end) {
875 // Copy all HeapObject pointers in [start, end) 874 // Copy all HeapObject pointers in [start, end)
876 for (Object** p = start; p < end; p++) ScavengePointer(p); 875 for (Object** p = start; p < end; p++) ScavengePointer(p);
877 } 876 }
878 877
879 private: 878 private:
880 void ScavengePointer(Object** p) { 879 void ScavengePointer(Object** p) {
881 Object* object = *p; 880 Object* object = *p;
882 if (!Heap::InNewSpace(object)) return; 881 if (!heap_->InNewSpace(object)) return;
883 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 882 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
884 reinterpret_cast<HeapObject*>(object)); 883 reinterpret_cast<HeapObject*>(object));
885 } 884 }
885
886 Heap* heap_;
886 }; 887 };
887 888
888 889
889 // A queue of objects promoted during scavenge. Each object is accompanied
890 // by its size to avoid dereferencing a map pointer for scanning.
891 class PromotionQueue {
892 public:
893 void Initialize(Address start_address) {
894 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
895 }
896
897 bool is_empty() { return front_ <= rear_; }
898
899 void insert(HeapObject* target, int size) {
900 *(--rear_) = reinterpret_cast<intptr_t>(target);
901 *(--rear_) = size;
902 // Assert no overflow into live objects.
903 ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
904 }
905
906 void remove(HeapObject** target, int* size) {
907 *target = reinterpret_cast<HeapObject*>(*(--front_));
908 *size = static_cast<int>(*(--front_));
909 // Assert no underflow.
910 ASSERT(front_ >= rear_);
911 }
912
913 private:
914 // The front of the queue is higher in memory than the rear.
915 intptr_t* front_;
916 intptr_t* rear_;
917 };
918
919
920 // Shared state read by the scavenge collector and set by ScavengeObject.
921 static PromotionQueue promotion_queue;
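The promotion queue grows downward from the top of to-space while scavenged objects fill to-space from the bottom; the ASSERT in insert() is what catches the two regions colliding. On the new side the queue becomes the promotion_queue_ member of Heap. A short sketch of the intended drain loop (hypothetical driver, not from the patch):

// Each entry carries the object's size so the sweep never has to
// decode a map pointer mid-collection.
while (!promotion_queue.is_empty()) {
  HeapObject* target;
  int size;
  promotion_queue.remove(&target, &size);
  // ... iterate 'target' over 'size' bytes to scavenge its slots ...
}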
922
923
924 #ifdef DEBUG 890 #ifdef DEBUG
925 // Visitor class to verify pointers in code or data space do not point into 891 // Visitor class to verify pointers in code or data space do not point into
926 // new space. 892 // new space.
927 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { 893 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
928 public: 894 public:
929 void VisitPointers(Object** start, Object** end) { 895 void VisitPointers(Object** start, Object** end) {
930 for (Object** current = start; current < end; current++) { 896 for (Object** current = start; current < end; current++) {
931 if ((*current)->IsHeapObject()) { 897 if ((*current)->IsHeapObject()) {
932 ASSERT(!Heap::InNewSpace(HeapObject::cast(*current))); 898 ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
933 } 899 }
934 } 900 }
935 } 901 }
936 }; 902 };
937 903
938 904
939 static void VerifyNonPointerSpacePointers() { 905 static void VerifyNonPointerSpacePointers() {
940 // Verify that there are no pointers to new space in spaces where we 906 // Verify that there are no pointers to new space in spaces where we
941 // do not expect them. 907 // do not expect them.
942 VerifyNonPointerSpacePointersVisitor v; 908 VerifyNonPointerSpacePointersVisitor v;
943 HeapObjectIterator code_it(Heap::code_space()); 909 HeapObjectIterator code_it(HEAP->code_space());
944 for (HeapObject* object = code_it.next(); 910 for (HeapObject* object = code_it.next();
945 object != NULL; object = code_it.next()) 911 object != NULL; object = code_it.next())
946 object->Iterate(&v); 912 object->Iterate(&v);
947 913
948 HeapObjectIterator data_it(Heap::old_data_space()); 914 HeapObjectIterator data_it(HEAP->old_data_space());
949 for (HeapObject* object = data_it.next(); 915 for (HeapObject* object = data_it.next();
950 object != NULL; object = data_it.next()) 916 object != NULL; object = data_it.next())
951 object->Iterate(&v); 917 object->Iterate(&v);
952 } 918 }
953 #endif 919 #endif
954 920
955 921
956 void Heap::CheckNewSpaceExpansionCriteria() { 922 void Heap::CheckNewSpaceExpansionCriteria() {
957 if (new_space_.Capacity() < new_space_.MaximumCapacity() && 923 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
958 survived_since_last_expansion_ > new_space_.Capacity()) { 924 survived_since_last_expansion_ > new_space_.Capacity()) {
959 // Grow the size of new space if there is room to grow and enough 925 // Grow the size of new space if there is room to grow and enough
960 // data has survived scavenge since the last expansion. 926 // data has survived scavenge since the last expansion.
961 new_space_.Grow(); 927 new_space_.Grow();
962 survived_since_last_expansion_ = 0; 928 survived_since_last_expansion_ = 0;
963 } 929 }
964 } 930 }
965 931
966 932
967 void Heap::Scavenge() { 933 void Heap::Scavenge() {
968 #ifdef DEBUG 934 #ifdef DEBUG
969 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); 935 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
970 #endif 936 #endif
971 937
972 gc_state_ = SCAVENGE; 938 gc_state_ = SCAVENGE;
973 939
974 Page::FlipMeaningOfInvalidatedWatermarkFlag(); 940 Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
975 #ifdef DEBUG 941 #ifdef DEBUG
976 VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID); 942 VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
977 VerifyPageWatermarkValidity(map_space_, ALL_VALID); 943 VerifyPageWatermarkValidity(map_space_, ALL_VALID);
978 #endif 944 #endif
979 945
980 // We do not update an allocation watermark of the top page during linear 946 // We do not update an allocation watermark of the top page during linear
981 // allocation to avoid overhead. So to maintain the watermark invariant 947 // allocation to avoid overhead. So to maintain the watermark invariant
982 // we have to manually cache the watermark and mark the top page as having an 948 // we have to manually cache the watermark and mark the top page as having an
983 // invalid watermark. This guarantees that dirty regions iteration will use a 949 // invalid watermark. This guarantees that dirty regions iteration will use a
984 // correct watermark even if a linear allocation happens. 950 // correct watermark even if a linear allocation happens.
985 old_pointer_space_->FlushTopPageWatermark(); 951 old_pointer_space_->FlushTopPageWatermark();
986 map_space_->FlushTopPageWatermark(); 952 map_space_->FlushTopPageWatermark();
987 953
988 // Implements Cheney's copying algorithm 954 // Implements Cheney's copying algorithm
989 LOG(ResourceEvent("scavenge", "begin")); 955 LOG(isolate_, ResourceEvent("scavenge", "begin"));
990 956
991 // Clear descriptor cache. 957 // Clear descriptor cache.
992 DescriptorLookupCache::Clear(); 958 isolate_->descriptor_lookup_cache()->Clear();
993 959
994 // Used for updating survived_since_last_expansion_ at function end. 960 // Used for updating survived_since_last_expansion_ at function end.
995 intptr_t survived_watermark = PromotedSpaceSize(); 961 intptr_t survived_watermark = PromotedSpaceSize();
996 962
997 CheckNewSpaceExpansionCriteria(); 963 CheckNewSpaceExpansionCriteria();
998 964
999 // Flip the semispaces. After flipping, to space is empty, from space has 965 // Flip the semispaces. After flipping, to space is empty, from space has
1000 // live objects. 966 // live objects.
1001 new_space_.Flip(); 967 new_space_.Flip();
1002 new_space_.ResetAllocationInfo(); 968 new_space_.ResetAllocationInfo();
1003 969
1004 // We need to sweep newly copied objects which can be either in the 970 // We need to sweep newly copied objects which can be either in the
1005 // to space or promoted to the old generation. For to-space 971 // to space or promoted to the old generation. For to-space
1006 // objects, we treat the bottom of the to space as a queue. Newly 972 // objects, we treat the bottom of the to space as a queue. Newly
1007 // copied and unswept objects lie between a 'front' mark and the 973 // copied and unswept objects lie between a 'front' mark and the
1008 // allocation pointer. 974 // allocation pointer.
1009 // 975 //
1010 // Promoted objects can go into various old-generation spaces, and 976 // Promoted objects can go into various old-generation spaces, and
1011 // can be allocated internally in the spaces (from the free list). 977 // can be allocated internally in the spaces (from the free list).
1012 // We treat the top of the to space as a queue of addresses of 978 // We treat the top of the to space as a queue of addresses of
1013 // promoted objects. The addresses of newly promoted and unswept 979 // promoted objects. The addresses of newly promoted and unswept
1014 // objects lie between a 'front' mark and a 'rear' mark that is 980 // objects lie between a 'front' mark and a 'rear' mark that is
1015 // updated as a side effect of promoting an object. 981 // updated as a side effect of promoting an object.
1016 // 982 //
1017 // There is guaranteed to be enough room at the top of the to space 983 // There is guaranteed to be enough room at the top of the to space
1018 // for the addresses of promoted objects: every object promoted 984 // for the addresses of promoted objects: every object promoted
1019 // frees up its size in bytes from the top of the new space, and 985 // frees up its size in bytes from the top of the new space, and
1020 // objects are at least one pointer in size. 986 // objects are at least one pointer in size.
1021 Address new_space_front = new_space_.ToSpaceLow(); 987 Address new_space_front = new_space_.ToSpaceLow();
1022 promotion_queue.Initialize(new_space_.ToSpaceHigh()); 988 promotion_queue_.Initialize(new_space_.ToSpaceHigh());
1023 989
1024 ScavengeVisitor scavenge_visitor; 990 is_safe_to_read_maps_ = false;
991 ScavengeVisitor scavenge_visitor(this);
1025 // Copy roots. 992 // Copy roots.
1026 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); 993 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1027 994
1028 // Copy objects reachable from the old generation. By definition, 995 // Copy objects reachable from the old generation. By definition,
1029 // there are no intergenerational pointers in code or data spaces. 996 // there are no intergenerational pointers in code or data spaces.
1030 IterateDirtyRegions(old_pointer_space_, 997 IterateDirtyRegions(old_pointer_space_,
1031 &IteratePointersInDirtyRegion, 998 &Heap::IteratePointersInDirtyRegion,
1032 &ScavengePointer, 999 &ScavengePointer,
1033 WATERMARK_CAN_BE_INVALID); 1000 WATERMARK_CAN_BE_INVALID);
1034 1001
1035 IterateDirtyRegions(map_space_, 1002 IterateDirtyRegions(map_space_,
1036 &IteratePointersInDirtyMapsRegion, 1003 &IteratePointersInDirtyMapsRegion,
1037 &ScavengePointer, 1004 &ScavengePointer,
1038 WATERMARK_CAN_BE_INVALID); 1005 WATERMARK_CAN_BE_INVALID);
1039 1006
1040 lo_space_->IterateDirtyRegions(&ScavengePointer); 1007 lo_space_->IterateDirtyRegions(&ScavengePointer);
1041 1008
(...skipping 11 matching lines...)
1053 1020
1054 // Scavenge object reachable from the global contexts list directly. 1021 // Scavenge object reachable from the global contexts list directly.
1055 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); 1022 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
1056 1023
1057 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1024 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1058 1025
1059 UpdateNewSpaceReferencesInExternalStringTable( 1026 UpdateNewSpaceReferencesInExternalStringTable(
1060 &UpdateNewSpaceReferenceInExternalStringTableEntry); 1027 &UpdateNewSpaceReferenceInExternalStringTableEntry);
1061 1028
1062 LiveObjectList::UpdateReferencesForScavengeGC(); 1029 LiveObjectList::UpdateReferencesForScavengeGC();
1063 RuntimeProfiler::UpdateSamplesAfterScavenge(); 1030 isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
1064 1031
1065 ASSERT(new_space_front == new_space_.top()); 1032 ASSERT(new_space_front == new_space_.top());
1066 1033
1034 is_safe_to_read_maps_ = true;
1035
1067 // Set age mark. 1036 // Set age mark.
1068 new_space_.set_age_mark(new_space_.top()); 1037 new_space_.set_age_mark(new_space_.top());
1069 1038
1070 // Update how much has survived scavenge. 1039 // Update how much has survived scavenge.
1071 IncrementYoungSurvivorsCounter(static_cast<int>( 1040 IncrementYoungSurvivorsCounter(static_cast<int>(
1072 (PromotedSpaceSize() - survived_watermark) + new_space_.Size())); 1041 (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
1073 1042
1074 LOG(ResourceEvent("scavenge", "end")); 1043 LOG(isolate_, ResourceEvent("scavenge", "end"));
1075 1044
1076 gc_state_ = NOT_IN_GC; 1045 gc_state_ = NOT_IN_GC;
1077 } 1046 }
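The long comment before the semispace flip describes Cheney's algorithm as a pair of queues laid over to-space; its core is a scan pointer chasing the allocation pointer. A compact sketch under simplified assumptions (no promotion, illustrative names):

// Cheney scan in miniature: the region [scan, top) is the work queue.
// Visiting an object may copy its children into to-space, moving top.
Address scan = to_space_bottom;
while (scan < new_space_top) {
  HeapObject* object = HeapObject::FromAddress(scan);
  object->Iterate(&scavenge_visitor);
  scan += object->Size();
}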
1078 1047
1079 1048
1080 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) { 1049 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1050 Object** p) {
1081 MapWord first_word = HeapObject::cast(*p)->map_word(); 1051 MapWord first_word = HeapObject::cast(*p)->map_word();
1082 1052
1083 if (!first_word.IsForwardingAddress()) { 1053 if (!first_word.IsForwardingAddress()) {
1084 // Unreachable external string can be finalized. 1054 // Unreachable external string can be finalized.
1085 FinalizeExternalString(String::cast(*p)); 1055 heap->FinalizeExternalString(String::cast(*p));
1086 return NULL; 1056 return NULL;
1087 } 1057 }
1088 1058
1089 // String is still reachable. 1059 // String is still reachable.
1090 return String::cast(first_word.ToForwardingAddress()); 1060 return String::cast(first_word.ToForwardingAddress());
1091 } 1061 }
1092 1062
1093 1063
1094 void Heap::UpdateNewSpaceReferencesInExternalStringTable( 1064 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1095 ExternalStringTableUpdaterCallback updater_func) { 1065 ExternalStringTableUpdaterCallback updater_func) {
1096 ExternalStringTable::Verify(); 1066 external_string_table_.Verify();
1097 1067
1098 if (ExternalStringTable::new_space_strings_.is_empty()) return; 1068 if (external_string_table_.new_space_strings_.is_empty()) return;
1099 1069
1100 Object** start = &ExternalStringTable::new_space_strings_[0]; 1070 Object** start = &external_string_table_.new_space_strings_[0];
1101 Object** end = start + ExternalStringTable::new_space_strings_.length(); 1071 Object** end = start + external_string_table_.new_space_strings_.length();
1102 Object** last = start; 1072 Object** last = start;
1103 1073
1104 for (Object** p = start; p < end; ++p) { 1074 for (Object** p = start; p < end; ++p) {
1105 ASSERT(Heap::InFromSpace(*p)); 1075 ASSERT(InFromSpace(*p));
1106 String* target = updater_func(p); 1076 String* target = updater_func(this, p);
1107 1077
1108 if (target == NULL) continue; 1078 if (target == NULL) continue;
1109 1079
1110 ASSERT(target->IsExternalString()); 1080 ASSERT(target->IsExternalString());
1111 1081
1112 if (Heap::InNewSpace(target)) { 1082 if (InNewSpace(target)) {
1113 // String is still in new space. Update the table entry. 1083 // String is still in new space. Update the table entry.
1114 *last = target; 1084 *last = target;
1115 ++last; 1085 ++last;
1116 } else { 1086 } else {
1117 // String got promoted. Move it to the old string list. 1087 // String got promoted. Move it to the old string list.
1118 ExternalStringTable::AddOldString(target); 1088 external_string_table_.AddOldString(target);
1119 } 1089 }
1120 } 1090 }
1121 1091
1122 ASSERT(last <= end); 1092 ASSERT(last <= end);
1123 ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start)); 1093 external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1124 } 1094 }
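The updater callback now takes an explicit Heap* since there is no ambient heap left to fall back on. A sketch of the corresponding typedef change, assumed to live in heap.h:

// Before (implicit global heap):
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
// After (heap passed explicitly, matching updater_func(this, p) above):
typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);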
1125 1095
1126 1096
1127 static Object* ProcessFunctionWeakReferences(Object* function, 1097 static Object* ProcessFunctionWeakReferences(Heap* heap,
1098 Object* function,
1128 WeakObjectRetainer* retainer) { 1099 WeakObjectRetainer* retainer) {
1129 Object* head = Heap::undefined_value(); 1100 Object* head = heap->undefined_value();
1130 JSFunction* tail = NULL; 1101 JSFunction* tail = NULL;
1131 Object* candidate = function; 1102 Object* candidate = function;
1132 while (!candidate->IsUndefined()) { 1103 while (candidate != heap->undefined_value()) {
1133 // Check whether to keep the candidate in the list. 1104 // Check whether to keep the candidate in the list.
1134 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate); 1105 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
1135 Object* retain = retainer->RetainAs(candidate); 1106 Object* retain = retainer->RetainAs(candidate);
1136 if (retain != NULL) { 1107 if (retain != NULL) {
1137 if (head->IsUndefined()) { 1108 if (head == heap->undefined_value()) {
1138 // First element in the list. 1109 // First element in the list.
1139 head = candidate_function; 1110 head = candidate_function;
1140 } else { 1111 } else {
1141 // Subsequent elements in the list. 1112 // Subsequent elements in the list.
1142 ASSERT(tail != NULL); 1113 ASSERT(tail != NULL);
1143 tail->set_next_function_link(candidate_function); 1114 tail->set_next_function_link(candidate_function);
1144 } 1115 }
1145 // Retained function is new tail. 1116 // Retained function is new tail.
1146 tail = candidate_function; 1117 tail = candidate_function;
1147 } 1118 }
1148 // Move to next element in the list. 1119 // Move to next element in the list.
1149 candidate = candidate_function->next_function_link(); 1120 candidate = candidate_function->next_function_link();
1150 } 1121 }
1151 1122
1152 // Terminate the list if it contains one or more elements. 1123 // Terminate the list if it contains one or more elements.
1153 if (tail != NULL) { 1124 if (tail != NULL) {
1154 tail->set_next_function_link(Heap::undefined_value()); 1125 tail->set_next_function_link(heap->undefined_value());
1155 } 1126 }
1156 1127
1157 return head; 1128 return head;
1158 } 1129 }
1159 1130
1160 1131
1161 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { 1132 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1162 Object* head = undefined_value(); 1133 Object* head = undefined_value();
1163 Context* tail = NULL; 1134 Context* tail = NULL;
1164 Object* candidate = global_contexts_list_; 1135 Object* candidate = global_contexts_list_;
1165 while (!candidate->IsUndefined()) { 1136 while (candidate != undefined_value()) {
1166 // Check whether to keep the candidate in the list. 1137 // Check whether to keep the candidate in the list.
1167 Context* candidate_context = reinterpret_cast<Context*>(candidate); 1138 Context* candidate_context = reinterpret_cast<Context*>(candidate);
1168 Object* retain = retainer->RetainAs(candidate); 1139 Object* retain = retainer->RetainAs(candidate);
1169 if (retain != NULL) { 1140 if (retain != NULL) {
1170 if (head->IsUndefined()) { 1141 if (head == undefined_value()) {
1171 // First element in the list. 1142 // First element in the list.
1172 head = candidate_context; 1143 head = candidate_context;
1173 } else { 1144 } else {
1174 // Subsequent elements in the list. 1145 // Subsequent elements in the list.
1175 ASSERT(tail != NULL); 1146 ASSERT(tail != NULL);
1176 tail->set_unchecked(Context::NEXT_CONTEXT_LINK, 1147 tail->set_unchecked(this,
1148 Context::NEXT_CONTEXT_LINK,
1177 candidate_context, 1149 candidate_context,
1178 UPDATE_WRITE_BARRIER); 1150 UPDATE_WRITE_BARRIER);
1179 } 1151 }
1180 // Retained context is new tail. 1152 // Retained context is new tail.
1181 tail = candidate_context; 1153 tail = candidate_context;
1182 1154
1183 // Process the weak list of optimized functions for the context. 1155 // Process the weak list of optimized functions for the context.
1184 Object* function_list_head = 1156 Object* function_list_head =
1185 ProcessFunctionWeakReferences( 1157 ProcessFunctionWeakReferences(
1158 this,
1186 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST), 1159 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1187 retainer); 1160 retainer);
1188 candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST, 1161 candidate_context->set_unchecked(this,
1162 Context::OPTIMIZED_FUNCTIONS_LIST,
1189 function_list_head, 1163 function_list_head,
1190 UPDATE_WRITE_BARRIER); 1164 UPDATE_WRITE_BARRIER);
1191 } 1165 }
1192 // Move to next element in the list. 1166 // Move to next element in the list.
1193 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK); 1167 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
1194 } 1168 }
1195 1169
1196 // Terminate the list if it contains one or more elements. 1170 // Terminate the list if it contains one or more elements.
1197 if (tail != NULL) { 1171 if (tail != NULL) {
1198 tail->set_unchecked(Context::NEXT_CONTEXT_LINK, 1172 tail->set_unchecked(this,
1173 Context::NEXT_CONTEXT_LINK,
1199 Heap::undefined_value(), 1174 Heap::undefined_value(),
1200 UPDATE_WRITE_BARRIER); 1175 UPDATE_WRITE_BARRIER);
1201 } 1176 }
1202 1177
1203 // Update the head of the list of contexts. 1178 // Update the head of the list of contexts.
1204 Heap::global_contexts_list_ = head; 1179 global_contexts_list_ = head;
1205 } 1180 }
1206 1181
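ProcessFunctionWeakReferences and ProcessWeakReferences above share one pattern: walk an intrusive weak list, ask the retainer whether each element survives, and re-thread the head and tail links around the elements that do not. A self-contained sketch of that pattern with a hypothetical Node type and retainer callback (not the V8 classes):

#include <cstdio>

struct Node {
  int id;
  Node* next_link;
};

// Hypothetical retainer: returns the node if it should be kept, else nullptr.
typedef Node* (*Retainer)(Node* candidate);

static Node* ProcessWeakList(Node* list, Retainer retain) {
  Node* head = nullptr;
  Node* tail = nullptr;
  for (Node* candidate = list; candidate != nullptr;
       candidate = candidate->next_link) {
    if (retain(candidate) != nullptr) {
      if (head == nullptr) {
        head = candidate;             // first retained element
      } else {
        tail->next_link = candidate;  // splice around dropped elements
      }
      tail = candidate;               // retained node is the new tail
    }
  }
  if (tail != nullptr) tail->next_link = nullptr;  // terminate the list
  return head;
}

int main() {
  Node c = {3, nullptr}, b = {2, &c}, a = {1, &b};
  Node* head = ProcessWeakList(&a, [](Node* n) {
    return (n->id != 2) ? n : nullptr;  // drop the node with id 2
  });
  for (Node* n = head; n != nullptr; n = n->next_link) {
    std::printf("%d ", n->id);  // prints: 1 3
  }
  std::printf("\n");
  return 0;
}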
1207 1182
1208 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> { 1183 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
1209 public: 1184 public:
1210 static inline void VisitPointer(Object** p) { 1185 static inline void VisitPointer(Heap* heap, Object** p) {
1211 Object* object = *p; 1186 Object* object = *p;
1212 if (!Heap::InNewSpace(object)) return; 1187 if (!heap->InNewSpace(object)) return;
1213 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), 1188 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1214 reinterpret_cast<HeapObject*>(object)); 1189 reinterpret_cast<HeapObject*>(object));
1215 } 1190 }
1216 }; 1191 };
1217 1192
1218 1193
1219 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, 1194 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
1220 Address new_space_front) { 1195 Address new_space_front) {
1221 do { 1196 do {
1222 ASSERT(new_space_front <= new_space_.top()); 1197 ASSERT(new_space_front <= new_space_.top());
1223 1198
1224 // The addresses new_space_front and new_space_.top() define a 1199 // The addresses new_space_front and new_space_.top() define a
1225 // queue of unprocessed copied objects. Process them until the 1200 // queue of unprocessed copied objects. Process them until the
1226 // queue is empty. 1201 // queue is empty.
1227 while (new_space_front < new_space_.top()) { 1202 while (new_space_front < new_space_.top()) {
1228 HeapObject* object = HeapObject::FromAddress(new_space_front); 1203 HeapObject* object = HeapObject::FromAddress(new_space_front);
1229 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object); 1204 new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
1230 } 1205 }
1231 1206
1232 // Promote and process all the to-be-promoted objects. 1207 // Promote and process all the to-be-promoted objects.
1233 while (!promotion_queue.is_empty()) { 1208 while (!promotion_queue_.is_empty()) {
1234 HeapObject* target; 1209 HeapObject* target;
1235 int size; 1210 int size;
1236 promotion_queue.remove(&target, &size); 1211 promotion_queue_.remove(&target, &size);
1237 1212
1238 // A promoted object might already be partially visited 1213 // A promoted object might already be partially visited
1239 // during dirty regions iteration. Thus we search specifically 1214 // during dirty regions iteration. Thus we search specifically
1240 // for pointers into the from semispace instead of looking for pointers 1215 // for pointers into the from semispace instead of looking for pointers
1241 // to new space. 1216 // to new space.
1242 ASSERT(!target->IsMap()); 1217 ASSERT(!target->IsMap());
1243 IterateAndMarkPointersToFromSpace(target->address(), 1218 IterateAndMarkPointersToFromSpace(target->address(),
1244 target->address() + size, 1219 target->address() + size,
1245 &ScavengePointer); 1220 &ScavengePointer);
1246 } 1221 }
(...skipping 49 matching lines...)
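The comment above describes the heart of the scavenger: the span between new_space_front and new_space_.top() is an implicit work queue (essentially Cheney's two-finger scan), since evacuating an object bumps the allocation top while visiting an object advances the front. A toy sketch of that loop over a flat buffer, with hypothetical int records standing in for heap objects:

#include <cstdio>

// Toy "to-space": each record is one int; visiting a record may append more.
static int to_space[64];
static int* top = to_space;                      // allocation pointer (tail)

static void Copy(int value) { *top++ = value; }  // "evacuate" = append

int main() {
  Copy(3);                      // roots seed the queue
  int* front = to_space;        // scan pointer (head)
  while (front < top) {         // queue of unprocessed copied objects
    int value = *front++;
    std::printf("visit %d\n", value);
    if (value > 1) {            // visiting may discover children to copy
      Copy(value - 1);
      Copy(value - 2);
    }
  }
  return 0;
}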
1296 static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) { 1271 static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
1297 table_.GetVisitor(map)(map, slot, obj); 1272 table_.GetVisitor(map)(map, slot, obj);
1298 } 1273 }
1299 1274
1300 1275
1301 private: 1276 private:
1302 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; 1277 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1303 enum SizeRestriction { SMALL, UNKNOWN_SIZE }; 1278 enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1304 1279
1305 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1280 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1306 static void RecordCopiedObject(HeapObject* obj) { 1281 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1307 bool should_record = false; 1282 bool should_record = false;
1308 #ifdef DEBUG 1283 #ifdef DEBUG
1309 should_record = FLAG_heap_stats; 1284 should_record = FLAG_heap_stats;
1310 #endif 1285 #endif
1311 #ifdef ENABLE_LOGGING_AND_PROFILING 1286 #ifdef ENABLE_LOGGING_AND_PROFILING
1312 should_record = should_record || FLAG_log_gc; 1287 should_record = should_record || FLAG_log_gc;
1313 #endif 1288 #endif
1314 if (should_record) { 1289 if (should_record) {
1315 if (Heap::new_space()->Contains(obj)) { 1290 if (heap->new_space()->Contains(obj)) {
1316 Heap::new_space()->RecordAllocation(obj); 1291 heap->new_space()->RecordAllocation(obj);
1317 } else { 1292 } else {
1318 Heap::new_space()->RecordPromotion(obj); 1293 heap->new_space()->RecordPromotion(obj);
1319 } 1294 }
1320 } 1295 }
1321 } 1296 }
1322 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1297 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1323 1298
1324 // Helper function used by CopyObject to copy a source object to an 1299 // Helper function used by CopyObject to copy a source object to an
1325 // allocated target object and update the forwarding pointer in the source 1300 // allocated target object and update the forwarding pointer in the source
1326 // object. Returns the target object. 1301 // object. Returns the target object.
1327 INLINE(static HeapObject* MigrateObject(HeapObject* source, 1302 INLINE(static HeapObject* MigrateObject(Heap* heap,
1303 HeapObject* source,
1328 HeapObject* target, 1304 HeapObject* target,
1329 int size)) { 1305 int size)) {
1330 // Copy the content of source to target. 1306 // Copy the content of source to target.
1331 Heap::CopyBlock(target->address(), source->address(), size); 1307 heap->CopyBlock(target->address(), source->address(), size);
1332 1308
1333 // Set the forwarding address. 1309 // Set the forwarding address.
1334 source->set_map_word(MapWord::FromForwardingAddress(target)); 1310 source->set_map_word(MapWord::FromForwardingAddress(target));
1335 1311
1336 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1312 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1337 // Update NewSpace stats if necessary. 1313 // Update NewSpace stats if necessary.
1338 RecordCopiedObject(target); 1314 RecordCopiedObject(heap, target);
1339 #endif 1315 #endif
1340 HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address())); 1316 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1341 #if defined(ENABLE_LOGGING_AND_PROFILING) 1317 #if defined(ENABLE_LOGGING_AND_PROFILING)
1342 if (Logger::is_logging() || CpuProfiler::is_profiling()) { 1318 Isolate* isolate = heap->isolate();
1319 if (isolate->logger()->is_logging() ||
1320 isolate->cpu_profiler()->is_profiling()) {
1343 if (target->IsSharedFunctionInfo()) { 1321 if (target->IsSharedFunctionInfo()) {
1344 PROFILE(SharedFunctionInfoMoveEvent( 1322 PROFILE(isolate, SharedFunctionInfoMoveEvent(
1345 source->address(), target->address())); 1323 source->address(), target->address()));
1346 } 1324 }
1347 } 1325 }
1348 #endif 1326 #endif
1349 return target; 1327 return target;
1350 } 1328 }
1351 1329
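MigrateObject copies the object's body and then overwrites the source's map word with a forwarding address, so any later visit of the same source finds the existing copy instead of copying it twice. A compact sketch of that trick, assuming a low-bit tag distinguishes a forwarding pointer from a map pointer (V8 encodes this inside MapWord; the types here are hypothetical):

#include <cstdint>
#include <cstdio>
#include <cstring>

struct Obj {
  uintptr_t map_word;  // either a map pointer or a tagged forwarding pointer
  int payload;
};

static const uintptr_t kForwardTag = 1;  // assumed low-bit tag

static bool IsForwarded(const Obj* o) {
  return (o->map_word & kForwardTag) != 0;
}
static Obj* ForwardingAddress(const Obj* o) {
  return reinterpret_cast<Obj*>(o->map_word & ~kForwardTag);
}

static Obj* Migrate(Obj* source, Obj* target) {
  std::memcpy(target, source, sizeof(Obj));  // copy the content
  source->map_word =                         // set the forwarding address
      reinterpret_cast<uintptr_t>(target) | kForwardTag;
  return target;
}

int main() {
  Obj from = {0 /* fake map */, 42}, to = {0, 0};
  Migrate(&from, &to);
  std::printf("%d %d\n", IsForwarded(&from) ? 1 : 0,
              ForwardingAddress(&from)->payload);  // prints: 1 42
  return 0;
}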
1352 1330
1353 template<ObjectContents object_contents, SizeRestriction size_restriction> 1331 template<ObjectContents object_contents, SizeRestriction size_restriction>
1354 static inline void EvacuateObject(Map* map, 1332 static inline void EvacuateObject(Map* map,
1355 HeapObject** slot, 1333 HeapObject** slot,
1356 HeapObject* object, 1334 HeapObject* object,
1357 int object_size) { 1335 int object_size) {
1358 ASSERT((size_restriction != SMALL) || 1336 ASSERT((size_restriction != SMALL) ||
1359 (object_size <= Page::kMaxHeapObjectSize)); 1337 (object_size <= Page::kMaxHeapObjectSize));
1360 ASSERT(object->Size() == object_size); 1338 ASSERT(object->Size() == object_size);
1361 1339
1362 if (Heap::ShouldBePromoted(object->address(), object_size)) { 1340 Heap* heap = map->heap();
1341 if (heap->ShouldBePromoted(object->address(), object_size)) {
1363 MaybeObject* maybe_result; 1342 MaybeObject* maybe_result;
1364 1343
1365 if ((size_restriction != SMALL) && 1344 if ((size_restriction != SMALL) &&
1366 (object_size > Page::kMaxHeapObjectSize)) { 1345 (object_size > Page::kMaxHeapObjectSize)) {
1367 maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size); 1346 maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
1368 } else { 1347 } else {
1369 if (object_contents == DATA_OBJECT) { 1348 if (object_contents == DATA_OBJECT) {
1370 maybe_result = Heap::old_data_space()->AllocateRaw(object_size); 1349 maybe_result = heap->old_data_space()->AllocateRaw(object_size);
1371 } else { 1350 } else {
1372 maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size); 1351 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
1373 } 1352 }
1374 } 1353 }
1375 1354
1376 Object* result = NULL; // Initialization to please compiler. 1355 Object* result = NULL; // Initialization to please compiler.
1377 if (maybe_result->ToObject(&result)) { 1356 if (maybe_result->ToObject(&result)) {
1378 HeapObject* target = HeapObject::cast(result); 1357 HeapObject* target = HeapObject::cast(result);
1379 *slot = MigrateObject(object, target, object_size); 1358 *slot = MigrateObject(heap, object, target, object_size);
1380 1359
1381 if (object_contents == POINTER_OBJECT) { 1360 if (object_contents == POINTER_OBJECT) {
1382 promotion_queue.insert(target, object_size); 1361 heap->promotion_queue()->insert(target, object_size);
1383 } 1362 }
1384 1363
1385 Heap::tracer()->increment_promoted_objects_size(object_size); 1364 heap->tracer()->increment_promoted_objects_size(object_size);
1386 return; 1365 return;
1387 } 1366 }
1388 } 1367 }
1389 Object* result = 1368 Object* result =
1390 Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked(); 1369 heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
1391 *slot = MigrateObject(object, HeapObject::cast(result), object_size); 1370 *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
1392 return; 1371 return;
1393 } 1372 }
1394 1373
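EvacuateObject is instantiated once per (object_contents, size_restriction) pair, so the branches on those enum template parameters fold away at compile time and each map kind can point at a pre-specialized copy through the visitor dispatch table. A sketch of that technique with hypothetical enums and a plain function-pointer table:

#include <cstdio>

enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
enum SizeRestriction { SMALL, UNKNOWN_SIZE };

template <ObjectContents contents, SizeRestriction size_restriction>
static void Evacuate(int object_size) {
  // Both tests are against template constants, so each instantiation
  // compiles down to a single path.
  if (size_restriction != SMALL && object_size > 4096) {
    std::printf("large-object path (%d bytes)\n", object_size);
  } else if (contents == DATA_OBJECT) {
    std::printf("data-space path (%d bytes)\n", object_size);
  } else {
    std::printf("pointer-space path (%d bytes)\n", object_size);
  }
}

// Dispatch table: one pre-specialized entry per "map kind".
typedef void (*Callback)(int object_size);
static Callback table[] = {
    &Evacuate<DATA_OBJECT, SMALL>,
    &Evacuate<POINTER_OBJECT, UNKNOWN_SIZE>,
};

int main() {
  table[0](64);      // data-space path
  table[1](100000);  // large-object path
  return 0;
}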
1395 1374
1396 static inline void EvacuateFixedArray(Map* map, 1375 static inline void EvacuateFixedArray(Map* map,
1397 HeapObject** slot, 1376 HeapObject** slot,
1398 HeapObject* object) { 1377 HeapObject* object) {
1399 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); 1378 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
1400 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, 1379 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
1401 slot, 1380 slot,
(...skipping 30 matching lines...)
1432 1411
1433 static inline bool IsShortcutCandidate(int type) { 1412 static inline bool IsShortcutCandidate(int type) {
1434 return ((type & kShortcutTypeMask) == kShortcutTypeTag); 1413 return ((type & kShortcutTypeMask) == kShortcutTypeTag);
1435 } 1414 }
1436 1415
1437 static inline void EvacuateShortcutCandidate(Map* map, 1416 static inline void EvacuateShortcutCandidate(Map* map,
1438 HeapObject** slot, 1417 HeapObject** slot,
1439 HeapObject* object) { 1418 HeapObject* object) {
1440 ASSERT(IsShortcutCandidate(map->instance_type())); 1419 ASSERT(IsShortcutCandidate(map->instance_type()));
1441 1420
1442 if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) { 1421 if (ConsString::cast(object)->unchecked_second() ==
1422 map->heap()->empty_string()) {
1443 HeapObject* first = 1423 HeapObject* first =
1444 HeapObject::cast(ConsString::cast(object)->unchecked_first()); 1424 HeapObject::cast(ConsString::cast(object)->unchecked_first());
1445 1425
1446 *slot = first; 1426 *slot = first;
1447 1427
1448 if (!Heap::InNewSpace(first)) { 1428 if (!map->heap()->InNewSpace(first)) {
1449 object->set_map_word(MapWord::FromForwardingAddress(first)); 1429 object->set_map_word(MapWord::FromForwardingAddress(first));
1450 return; 1430 return;
1451 } 1431 }
1452 1432
1453 MapWord first_word = first->map_word(); 1433 MapWord first_word = first->map_word();
1454 if (first_word.IsForwardingAddress()) { 1434 if (first_word.IsForwardingAddress()) {
1455 HeapObject* target = first_word.ToForwardingAddress(); 1435 HeapObject* target = first_word.ToForwardingAddress();
1456 1436
1457 *slot = target; 1437 *slot = target;
1458 object->set_map_word(MapWord::FromForwardingAddress(target)); 1438 object->set_map_word(MapWord::FromForwardingAddress(target));
(...skipping 30 matching lines...)
1489 typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object); 1469 typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
1490 1470
1491 static VisitorDispatchTable<Callback> table_; 1471 static VisitorDispatchTable<Callback> table_;
1492 }; 1472 };
1493 1473
1494 1474
1495 VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_; 1475 VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
1496 1476
1497 1477
1498 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { 1478 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1499 ASSERT(InFromSpace(object)); 1479 ASSERT(HEAP->InFromSpace(object));
1500 MapWord first_word = object->map_word(); 1480 MapWord first_word = object->map_word();
1501 ASSERT(!first_word.IsForwardingAddress()); 1481 ASSERT(!first_word.IsForwardingAddress());
1502 Map* map = first_word.ToMap(); 1482 Map* map = first_word.ToMap();
1503 ScavengingVisitor::Scavenge(map, p, object); 1483 ScavengingVisitor::Scavenge(map, p, object);
1504 } 1484 }
1505 1485
1506 1486
1507 void Heap::ScavengePointer(HeapObject** p) {
1508 ScavengeObject(p, *p);
1509 }
1510
1511
1512 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, 1487 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
1513 int instance_size) { 1488 int instance_size) {
1514 Object* result; 1489 Object* result;
1515 { MaybeObject* maybe_result = AllocateRawMap(); 1490 { MaybeObject* maybe_result = AllocateRawMap();
1516 if (!maybe_result->ToObject(&result)) return maybe_result; 1491 if (!maybe_result->ToObject(&result)) return maybe_result;
1517 } 1492 }
1518 1493
1519 // Map::cast cannot be used due to uninitialized map field. 1494 // Map::cast cannot be used due to uninitialized map field.
1520 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); 1495 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1521 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); 1496 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1522 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); 1497 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1523 reinterpret_cast<Map*>(result)-> 1498 reinterpret_cast<Map*>(result)->set_visitor_id(
1524 set_visitor_id( 1499 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1525 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1526 reinterpret_cast<Map*>(result)->set_inobject_properties(0); 1500 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1527 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); 1501 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
1528 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); 1502 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1529 reinterpret_cast<Map*>(result)->set_bit_field(0); 1503 reinterpret_cast<Map*>(result)->set_bit_field(0);
1530 reinterpret_cast<Map*>(result)->set_bit_field2(0); 1504 reinterpret_cast<Map*>(result)->set_bit_field2(0);
1531 return result; 1505 return result;
1532 } 1506 }
1533 1507
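The { MaybeObject* maybe = ...; if (!maybe->ToObject(&result)) return maybe; } blocks that recur through these allocators propagate allocation failure without exceptions: ToObject either yields the object or leaves the failure value to be returned up the stack. A minimal sketch of that contract with a hypothetical tagged result type (not V8's MaybeObject):

#include <cstdio>

// Hypothetical stand-in for MaybeObject: either a value or a failure marker.
struct Maybe {
  bool failed;
  int value;
  bool ToValue(int* out) const {  // mirrors MaybeObject::ToObject
    if (failed) return false;
    *out = value;
    return true;
  }
};

static Maybe AllocateRaw(int size) {
  if (size > 1024) return Maybe{true, 0};  // simulated allocation failure
  return Maybe{false, size};
}

static Maybe AllocateWrapper(int size) {
  int result;
  { Maybe maybe = AllocateRaw(size);
    if (!maybe.ToValue(&result)) return maybe;  // propagate the failure
  }
  return Maybe{false, result + 1};  // initialize and return the "object"
}

int main() {
  int v;
  std::printf("%d\n", AllocateWrapper(8).ToValue(&v) ? v : -1);     // 9
  std::printf("%d\n", AllocateWrapper(4096).ToValue(&v) ? v : -1);  // -1
  return 0;
}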
1534 1508
1535 MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) { 1509 MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
(...skipping 88 matching lines...)
1624 // Allocate the empty array. 1598 // Allocate the empty array.
1625 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 1599 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1626 if (!maybe_obj->ToObject(&obj)) return false; 1600 if (!maybe_obj->ToObject(&obj)) return false;
1627 } 1601 }
1628 set_empty_fixed_array(FixedArray::cast(obj)); 1602 set_empty_fixed_array(FixedArray::cast(obj));
1629 1603
1630 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE); 1604 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1631 if (!maybe_obj->ToObject(&obj)) return false; 1605 if (!maybe_obj->ToObject(&obj)) return false;
1632 } 1606 }
1633 set_null_value(obj); 1607 set_null_value(obj);
1608 Oddball::cast(obj)->set_kind(Oddball::kNull);
1634 1609
1635 // Allocate the empty descriptor array. 1610 // Allocate the empty descriptor array.
1636 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 1611 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1637 if (!maybe_obj->ToObject(&obj)) return false; 1612 if (!maybe_obj->ToObject(&obj)) return false;
1638 } 1613 }
1639 set_empty_descriptor_array(DescriptorArray::cast(obj)); 1614 set_empty_descriptor_array(DescriptorArray::cast(obj));
1640 1615
1641 // Fix the instance_descriptors for the existing maps. 1616 // Fix the instance_descriptors for the existing maps.
1642 meta_map()->set_instance_descriptors(empty_descriptor_array()); 1617 meta_map()->set_instance_descriptors(empty_descriptor_array());
1643 meta_map()->set_code_cache(empty_fixed_array()); 1618 meta_map()->set_code_cache(empty_fixed_array());
(...skipping 171 matching lines...)
1815 if (!maybe_obj->ToObject(&obj)) return false; 1790 if (!maybe_obj->ToObject(&obj)) return false;
1816 } 1791 }
1817 set_shared_function_info_map(Map::cast(obj)); 1792 set_shared_function_info_map(Map::cast(obj));
1818 1793
1819 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE, 1794 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE,
1820 JSMessageObject::kSize); 1795 JSMessageObject::kSize);
1821 if (!maybe_obj->ToObject(&obj)) return false; 1796 if (!maybe_obj->ToObject(&obj)) return false;
1822 } 1797 }
1823 set_message_object_map(Map::cast(obj)); 1798 set_message_object_map(Map::cast(obj));
1824 1799
1825 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); 1800 ASSERT(!InNewSpace(empty_fixed_array()));
1826 return true; 1801 return true;
1827 } 1802 }
1828 1803
1829 1804
1830 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 1805 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1831 // Statically ensure that it is safe to allocate heap numbers in paged 1806 // Statically ensure that it is safe to allocate heap numbers in paged
1832 // spaces. 1807 // spaces.
1833 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 1808 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1834 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 1809 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1835 1810
(...skipping 32 matching lines...)
1868 { MaybeObject* maybe_result = AllocateRawCell(); 1843 { MaybeObject* maybe_result = AllocateRawCell();
1869 if (!maybe_result->ToObject(&result)) return maybe_result; 1844 if (!maybe_result->ToObject(&result)) return maybe_result;
1870 } 1845 }
1871 HeapObject::cast(result)->set_map(global_property_cell_map()); 1846 HeapObject::cast(result)->set_map(global_property_cell_map());
1872 JSGlobalPropertyCell::cast(result)->set_value(value); 1847 JSGlobalPropertyCell::cast(result)->set_value(value);
1873 return result; 1848 return result;
1874 } 1849 }
1875 1850
1876 1851
1877 MaybeObject* Heap::CreateOddball(const char* to_string, 1852 MaybeObject* Heap::CreateOddball(const char* to_string,
1878 Object* to_number) { 1853 Object* to_number,
1854 byte kind) {
1879 Object* result; 1855 Object* result;
1880 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE); 1856 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
1881 if (!maybe_result->ToObject(&result)) return maybe_result; 1857 if (!maybe_result->ToObject(&result)) return maybe_result;
1882 } 1858 }
1883 return Oddball::cast(result)->Initialize(to_string, to_number); 1859 return Oddball::cast(result)->Initialize(to_string, to_number, kind);
1884 } 1860 }
1885 1861
1886 1862
1887 bool Heap::CreateApiObjects() { 1863 bool Heap::CreateApiObjects() {
1888 Object* obj; 1864 Object* obj;
1889 1865
1890 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); 1866 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1891 if (!maybe_obj->ToObject(&obj)) return false; 1867 if (!maybe_obj->ToObject(&obj)) return false;
1892 } 1868 }
1893 set_neander_map(Map::cast(obj)); 1869 set_neander_map(Map::cast(obj));
1894 1870
1895 { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map()); 1871 { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
1896 if (!maybe_obj->ToObject(&obj)) return false; 1872 if (!maybe_obj->ToObject(&obj)) return false;
1897 } 1873 }
1898 Object* elements; 1874 Object* elements;
1899 { MaybeObject* maybe_elements = AllocateFixedArray(2); 1875 { MaybeObject* maybe_elements = AllocateFixedArray(2);
1900 if (!maybe_elements->ToObject(&elements)) return false; 1876 if (!maybe_elements->ToObject(&elements)) return false;
1901 } 1877 }
1902 FixedArray::cast(elements)->set(0, Smi::FromInt(0)); 1878 FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1903 JSObject::cast(obj)->set_elements(FixedArray::cast(elements)); 1879 JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1904 set_message_listeners(JSObject::cast(obj)); 1880 set_message_listeners(JSObject::cast(obj));
1905 1881
(...skipping 44 matching lines...)
1950 1926
1951 { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED); 1927 { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED);
1952 if (!maybe_obj->ToObject(&obj)) return false; 1928 if (!maybe_obj->ToObject(&obj)) return false;
1953 } 1929 }
1954 set_nan_value(obj); 1930 set_nan_value(obj);
1955 1931
1956 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE); 1932 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1957 if (!maybe_obj->ToObject(&obj)) return false; 1933 if (!maybe_obj->ToObject(&obj)) return false;
1958 } 1934 }
1959 set_undefined_value(obj); 1935 set_undefined_value(obj);
1936 Oddball::cast(obj)->set_kind(Oddball::kUndefined);
1960 ASSERT(!InNewSpace(undefined_value())); 1937 ASSERT(!InNewSpace(undefined_value()));
1961 1938
1962 // Allocate initial symbol table. 1939 // Allocate initial symbol table.
1963 { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize); 1940 { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
1964 if (!maybe_obj->ToObject(&obj)) return false; 1941 if (!maybe_obj->ToObject(&obj)) return false;
1965 } 1942 }
1966 // Don't use set_symbol_table() due to asserts. 1943 // Don't use set_symbol_table() due to asserts.
1967 roots_[kSymbolTableRootIndex] = obj; 1944 roots_[kSymbolTableRootIndex] = obj;
1968 1945
1969 // Assign the print strings for oddballs after creating symbol table. 1946 // Assign the print strings for oddballs after creating symbol table.
1970 Object* symbol; 1947 Object* symbol;
1971 { MaybeObject* maybe_symbol = LookupAsciiSymbol("undefined"); 1948 { MaybeObject* maybe_symbol = LookupAsciiSymbol("undefined");
1972 if (!maybe_symbol->ToObject(&symbol)) return false; 1949 if (!maybe_symbol->ToObject(&symbol)) return false;
1973 } 1950 }
1974 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol)); 1951 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
1975 Oddball::cast(undefined_value())->set_to_number(nan_value()); 1952 Oddball::cast(undefined_value())->set_to_number(nan_value());
1976 1953
1977 // Allocate the null_value 1954 // Allocate the null_value
1978 { MaybeObject* maybe_obj = 1955 { MaybeObject* maybe_obj =
1979 Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0)); 1956 Oddball::cast(null_value())->Initialize("null",
1957 Smi::FromInt(0),
1958 Oddball::kNull);
1980 if (!maybe_obj->ToObject(&obj)) return false; 1959 if (!maybe_obj->ToObject(&obj)) return false;
1981 } 1960 }
1982 1961
1983 { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1)); 1962 { MaybeObject* maybe_obj = CreateOddball("true",
1963 Smi::FromInt(1),
1964 Oddball::kTrue);
1984 if (!maybe_obj->ToObject(&obj)) return false; 1965 if (!maybe_obj->ToObject(&obj)) return false;
1985 } 1966 }
1986 set_true_value(obj); 1967 set_true_value(obj);
1987 1968
1988 { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0)); 1969 { MaybeObject* maybe_obj = CreateOddball("false",
1970 Smi::FromInt(0),
1971 Oddball::kFalse);
1989 if (!maybe_obj->ToObject(&obj)) return false; 1972 if (!maybe_obj->ToObject(&obj)) return false;
1990 } 1973 }
1991 set_false_value(obj); 1974 set_false_value(obj);
1992 1975
1993 { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1)); 1976 { MaybeObject* maybe_obj = CreateOddball("hole",
1977 Smi::FromInt(-1),
1978 Oddball::kTheHole);
1994 if (!maybe_obj->ToObject(&obj)) return false; 1979 if (!maybe_obj->ToObject(&obj)) return false;
1995 } 1980 }
1996 set_the_hole_value(obj); 1981 set_the_hole_value(obj);
1997 1982
1998 { MaybeObject* maybe_obj = CreateOddball("arguments_marker", 1983 { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
1999 Smi::FromInt(-4)); 1984 Smi::FromInt(-4),
1985 Oddball::kArgumentMarker);
2000 if (!maybe_obj->ToObject(&obj)) return false; 1986 if (!maybe_obj->ToObject(&obj)) return false;
2001 } 1987 }
2002 set_arguments_marker(obj); 1988 set_arguments_marker(obj);
2003 1989
2004 { MaybeObject* maybe_obj = 1990 { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
2005 CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2)); 1991 Smi::FromInt(-2),
1992 Oddball::kOther);
2006 if (!maybe_obj->ToObject(&obj)) return false; 1993 if (!maybe_obj->ToObject(&obj)) return false;
2007 } 1994 }
2008 set_no_interceptor_result_sentinel(obj); 1995 set_no_interceptor_result_sentinel(obj);
2009 1996
2010 { MaybeObject* maybe_obj = 1997 { MaybeObject* maybe_obj = CreateOddball("termination_exception",
2011 CreateOddball("termination_exception", Smi::FromInt(-3)); 1998 Smi::FromInt(-3),
1999 Oddball::kOther);
2012 if (!maybe_obj->ToObject(&obj)) return false; 2000 if (!maybe_obj->ToObject(&obj)) return false;
2013 } 2001 }
2014 set_termination_exception(obj); 2002 set_termination_exception(obj);
2015 2003
2016 // Allocate the empty string. 2004 // Allocate the empty string.
2017 { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED); 2005 { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
2018 if (!maybe_obj->ToObject(&obj)) return false; 2006 if (!maybe_obj->ToObject(&obj)) return false;
2019 } 2007 }
2020 set_empty_string(String::cast(obj)); 2008 set_empty_string(String::cast(obj));
2021 2009
(...skipping 41 matching lines...)
2063 set_instanceof_cache_function(Smi::FromInt(0)); 2051 set_instanceof_cache_function(Smi::FromInt(0));
2064 set_instanceof_cache_map(Smi::FromInt(0)); 2052 set_instanceof_cache_map(Smi::FromInt(0));
2065 set_instanceof_cache_answer(Smi::FromInt(0)); 2053 set_instanceof_cache_answer(Smi::FromInt(0));
2066 2054
2067 CreateFixedStubs(); 2055 CreateFixedStubs();
2068 2056
2069 // Allocate the dictionary of intrinsic function names. 2057 // Allocate the dictionary of intrinsic function names.
2070 { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions); 2058 { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
2071 if (!maybe_obj->ToObject(&obj)) return false; 2059 if (!maybe_obj->ToObject(&obj)) return false;
2072 } 2060 }
2073 { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj); 2061 { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
2062 obj);
2074 if (!maybe_obj->ToObject(&obj)) return false; 2063 if (!maybe_obj->ToObject(&obj)) return false;
2075 } 2064 }
2076 set_intrinsic_function_names(StringDictionary::cast(obj)); 2065 set_intrinsic_function_names(StringDictionary::cast(obj));
2077 2066
2078 if (InitializeNumberStringCache()->IsFailure()) return false; 2067 if (InitializeNumberStringCache()->IsFailure()) return false;
2079 2068
2080 // Allocate cache for single character ASCII strings. 2069 // Allocate cache for single character ASCII strings.
2081 { MaybeObject* maybe_obj = 2070 { MaybeObject* maybe_obj =
2082 AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED); 2071 AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
2083 if (!maybe_obj->ToObject(&obj)) return false; 2072 if (!maybe_obj->ToObject(&obj)) return false;
2084 } 2073 }
2085 set_single_character_string_cache(FixedArray::cast(obj)); 2074 set_single_character_string_cache(FixedArray::cast(obj));
2086 2075
2087 // Allocate cache for external strings pointing to native source code. 2076 // Allocate cache for external strings pointing to native source code.
2088 { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount()); 2077 { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
2089 if (!maybe_obj->ToObject(&obj)) return false; 2078 if (!maybe_obj->ToObject(&obj)) return false;
2090 } 2079 }
2091 set_natives_source_cache(FixedArray::cast(obj)); 2080 set_natives_source_cache(FixedArray::cast(obj));
2092 2081
2093 // Handling of script id generation is in Factory::NewScript. 2082 // Handling of script id generation is in FACTORY->NewScript.
2094 set_last_script_id(undefined_value()); 2083 set_last_script_id(undefined_value());
2095 2084
2096 // Initialize keyed lookup cache. 2085 // Initialize keyed lookup cache.
2097 KeyedLookupCache::Clear(); 2086 isolate_->keyed_lookup_cache()->Clear();
2098 2087
2099 // Initialize context slot cache. 2088 // Initialize context slot cache.
2100 ContextSlotCache::Clear(); 2089 isolate_->context_slot_cache()->Clear();
2101 2090
2102 // Initialize descriptor cache. 2091 // Initialize descriptor cache.
2103 DescriptorLookupCache::Clear(); 2092 isolate_->descriptor_lookup_cache()->Clear();
2104 2093
2105 // Initialize compilation cache. 2094 // Initialize compilation cache.
2106 CompilationCache::Clear(); 2095 isolate_->compilation_cache()->Clear();
2107 2096
2108 return true; 2097 return true;
2109 } 2098 }
2110 2099
2111 2100
2112 MaybeObject* Heap::InitializeNumberStringCache() { 2101 MaybeObject* Heap::InitializeNumberStringCache() {
2113 // Compute the size of the number string cache based on the max heap size. 2102 // Compute the size of the number string cache based on the max heap size.
2114 // max_semispace_size_ == 512 KB => number_string_cache_size = 32. 2103 // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
2115 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB. 2104 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
2116 int number_string_cache_size = max_semispace_size_ / 512; 2105 int number_string_cache_size = max_semispace_size_ / 512;
2117 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size)); 2106 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
2118 Object* obj; 2107 Object* obj;
2119 MaybeObject* maybe_obj = 2108 MaybeObject* maybe_obj =
2120 AllocateFixedArray(number_string_cache_size * 2, TENURED); 2109 AllocateFixedArray(number_string_cache_size * 2, TENURED);
2121 if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj)); 2110 if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj));
2122 return maybe_obj; 2111 return maybe_obj;
2123 } 2112 }
2124 2113
2125 2114
2126 void Heap::FlushNumberStringCache() { 2115 void Heap::FlushNumberStringCache() {
2127 // Flush the number to string cache. 2116 // Flush the number to string cache.
2128 int len = number_string_cache()->length(); 2117 int len = number_string_cache()->length();
2129 for (int i = 0; i < len; i++) { 2118 for (int i = 0; i < len; i++) {
2130 number_string_cache()->set_undefined(i); 2119 number_string_cache()->set_undefined(this, i);
2131 } 2120 }
2132 } 2121 }
2133 2122
2134 2123
2135 static inline int double_get_hash(double d) { 2124 static inline int double_get_hash(double d) {
2136 DoubleRepresentation rep(d); 2125 DoubleRepresentation rep(d);
2137 return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32); 2126 return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
2138 } 2127 }
2139 2128
2140 2129
(...skipping 31 matching lines...)
2172 } else { 2161 } else {
2173 hash = double_get_hash(number->Number()) & mask; 2162 hash = double_get_hash(number->Number()) & mask;
2174 number_string_cache()->set(hash * 2, number); 2163 number_string_cache()->set(hash * 2, number);
2175 } 2164 }
2176 number_string_cache()->set(hash * 2 + 1, string); 2165 number_string_cache()->set(hash * 2 + 1, string);
2177 } 2166 }
2178 2167
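The cache in GetNumberStringCache/SetNumberStringCache is direct-mapped: a double hashes to an index by XOR-ing the two 32-bit halves of its bit pattern, the number is stored at slot 2*hash and its string at 2*hash+1 in one array, and collisions simply overwrite. A standalone sketch of the same idea (parallel arrays instead of one interleaved FixedArray, and std::string as a stand-in payload):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <string>
#include <vector>

static int DoubleGetHash(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // well-defined type pun
  return static_cast<int>(bits) ^ static_cast<int>(bits >> 32);
}

struct NumberStringCache {
  std::vector<double> keys;
  std::vector<std::string> values;
  explicit NumberStringCache(int size) : keys(size), values(size) {}

  void Set(double number, const std::string& s) {
    int i = DoubleGetHash(number) & (static_cast<int>(keys.size()) - 1);
    keys[i] = number;  // a collision just overwrites the previous entry
    values[i] = s;
  }
  const std::string* Get(double number) const {
    int i = DoubleGetHash(number) & (static_cast<int>(keys.size()) - 1);
    return keys[i] == number ? &values[i] : nullptr;
  }
};

int main() {
  NumberStringCache cache(32);  // size must be a power of two for the mask
  cache.Set(1.5, "1.5");
  const std::string* hit = cache.Get(1.5);
  std::printf("%s\n", hit ? hit->c_str() : "(miss)");  // prints: 1.5
  return 0;
}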
2179 2168
2180 MaybeObject* Heap::NumberToString(Object* number, 2169 MaybeObject* Heap::NumberToString(Object* number,
2181 bool check_number_string_cache) { 2170 bool check_number_string_cache) {
2182 Counters::number_to_string_runtime.Increment(); 2171 isolate_->counters()->number_to_string_runtime()->Increment();
2183 if (check_number_string_cache) { 2172 if (check_number_string_cache) {
2184 Object* cached = GetNumberStringCache(number); 2173 Object* cached = GetNumberStringCache(number);
2185 if (cached != undefined_value()) { 2174 if (cached != undefined_value()) {
2186 return cached; 2175 return cached;
2187 } 2176 }
2188 } 2177 }
2189 2178
2190 char arr[100]; 2179 char arr[100];
2191 Vector<char> buffer(arr, ARRAY_SIZE(arr)); 2180 Vector<char> buffer(arr, ARRAY_SIZE(arr));
2192 const char* str; 2181 const char* str;
(...skipping 82 matching lines...)
2275 2264
2276 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { 2265 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
2277 Object* result; 2266 Object* result;
2278 { MaybeObject* maybe_result = 2267 { MaybeObject* maybe_result =
2279 Allocate(shared_function_info_map(), OLD_POINTER_SPACE); 2268 Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
2280 if (!maybe_result->ToObject(&result)) return maybe_result; 2269 if (!maybe_result->ToObject(&result)) return maybe_result;
2281 } 2270 }
2282 2271
2283 SharedFunctionInfo* share = SharedFunctionInfo::cast(result); 2272 SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
2284 share->set_name(name); 2273 share->set_name(name);
2285 Code* illegal = Builtins::builtin(Builtins::Illegal); 2274 Code* illegal = isolate_->builtins()->builtin(Builtins::Illegal);
2286 share->set_code(illegal); 2275 share->set_code(illegal);
2287 share->set_scope_info(SerializedScopeInfo::Empty()); 2276 share->set_scope_info(SerializedScopeInfo::Empty());
2288 Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric); 2277 Code* construct_stub = isolate_->builtins()->builtin(
2278 Builtins::JSConstructStubGeneric);
2289 share->set_construct_stub(construct_stub); 2279 share->set_construct_stub(construct_stub);
2290 share->set_expected_nof_properties(0); 2280 share->set_expected_nof_properties(0);
2291 share->set_length(0); 2281 share->set_length(0);
2292 share->set_formal_parameter_count(0); 2282 share->set_formal_parameter_count(0);
2293 share->set_instance_class_name(Object_symbol()); 2283 share->set_instance_class_name(Object_symbol());
2294 share->set_function_data(undefined_value()); 2284 share->set_function_data(undefined_value());
2295 share->set_script(undefined_value()); 2285 share->set_script(undefined_value());
2296 share->set_start_position_and_type(0); 2286 share->set_start_position_and_type(0);
2297 share->set_debug_info(undefined_value()); 2287 share->set_debug_info(undefined_value());
2298 share->set_inferred_name(empty_string()); 2288 share->set_inferred_name(empty_string());
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
2336 2326
2337 2327
2338 // Returns true for a character in a range. Both limits are inclusive. 2328 // Returns true for a character in a range. Both limits are inclusive.
2339 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) { 2329 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
2340 // This makes use of the unsigned wraparound. 2330 // This makes use of the unsigned wraparound.
2341 return character - from <= to - from; 2331 return character - from <= to - from;
2342 } 2332 }
2343 2333
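Between folds two comparisons into one via unsigned wraparound: when character < from, the subtraction wraps to a huge unsigned value and the single <= test rejects it. A worked check of the trick:

#include <cstdint>
#include <cstdio>

// One compare tests from <= c <= to (both limits inclusive): if c < from,
// c - from wraps around to a huge unsigned value and fails the <= test.
static bool Between(uint32_t c, uint32_t from, uint32_t to) {
  return c - from <= to - from;
}

int main() {
  std::printf("%d %d %d\n",
              Between('5', '0', '9') ? 1 : 0,   // 1: inside the range
              Between('a', '0', '9') ? 1 : 0,   // 0: above the range
              Between('/', '0', '9') ? 1 : 0);  // 0: '/' - '0' wraps to 2^32-1
  return 0;
}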
2344 2334
2345 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString( 2335 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
2336 Heap* heap,
2346 uint32_t c1, 2337 uint32_t c1,
2347 uint32_t c2) { 2338 uint32_t c2) {
2348 String* symbol; 2339 String* symbol;
2349 // Numeric strings have a different hash algorithm not known by 2340 // Numeric strings have a different hash algorithm not known by
2350 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings. 2341 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
2351 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) && 2342 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
2352 Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) { 2343 heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
2353 return symbol; 2344 return symbol;
2354 // Now we know the length is 2, we might as well make use of that fact 2345 // Now we know the length is 2, we might as well make use of that fact
2355 // when building the new string. 2346 // when building the new string.
2356 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this 2347 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
2357 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this. 2348 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
2358 Object* result; 2349 Object* result;
2359 { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2); 2350 { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
2360 if (!maybe_result->ToObject(&result)) return maybe_result; 2351 if (!maybe_result->ToObject(&result)) return maybe_result;
2361 } 2352 }
2362 char* dest = SeqAsciiString::cast(result)->GetChars(); 2353 char* dest = SeqAsciiString::cast(result)->GetChars();
2363 dest[0] = c1; 2354 dest[0] = c1;
2364 dest[1] = c2; 2355 dest[1] = c2;
2365 return result; 2356 return result;
2366 } else { 2357 } else {
2367 Object* result; 2358 Object* result;
2368 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2); 2359 { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
2369 if (!maybe_result->ToObject(&result)) return maybe_result; 2360 if (!maybe_result->ToObject(&result)) return maybe_result;
2370 } 2361 }
2371 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2362 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2372 dest[0] = c1; 2363 dest[0] = c1;
2373 dest[1] = c2; 2364 dest[1] = c2;
2374 return result; 2365 return result;
2375 } 2366 }
2376 } 2367 }
2377 2368
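The (c1 | c2) <= String::kMaxAsciiCharCodeU test in the function above is valid only because the limit is one less than a power of two (hence the adjacent ASSERT): OR can only set bits, so the result stays at or under a 2^n - 1 limit exactly when both operands do. A small sketch of that check:

#include <cstdint>
#include <cstdio>

static const uint32_t kMaxAsciiCharCodeU = 0x7f;  // 2^7 - 1; the OR trick
                                                  // below relies on this shape

// Both characters fit in ASCII iff their OR does, because OR-ing can only
// set bits that one of the operands already has.
static bool BothAscii(uint32_t c1, uint32_t c2) {
  return (c1 | c2) <= kMaxAsciiCharCodeU;
}

int main() {
  std::printf("%d %d\n",
              BothAscii('a', 'b') ? 1 : 0,      // 1
              BothAscii('a', 0x20AC) ? 1 : 0);  // 0: euro sign is not ASCII
  return 0;
}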
2378 2369
2379 MaybeObject* Heap::AllocateConsString(String* first, String* second) { 2370 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
2380 int first_length = first->length(); 2371 int first_length = first->length();
2381 if (first_length == 0) { 2372 if (first_length == 0) {
2382 return second; 2373 return second;
2383 } 2374 }
2384 2375
2385 int second_length = second->length(); 2376 int second_length = second->length();
2386 if (second_length == 0) { 2377 if (second_length == 0) {
2387 return first; 2378 return first;
2388 } 2379 }
2389 2380
2390 int length = first_length + second_length; 2381 int length = first_length + second_length;
2391 2382
2392 // Optimization for two-character strings often used as keys in a decompression 2383 // Optimization for two-character strings often used as keys in a decompression
2393 // dictionary. Check whether we already have the string in the symbol 2384 // dictionary. Check whether we already have the string in the symbol
2394 // table to prevent creation of many unnecessary strings. 2385 // table to prevent creation of many unnecessary strings.
2395 if (length == 2) { 2386 if (length == 2) {
2396 unsigned c1 = first->Get(0); 2387 unsigned c1 = first->Get(0);
2397 unsigned c2 = second->Get(0); 2388 unsigned c2 = second->Get(0);
2398 return MakeOrFindTwoCharacterString(c1, c2); 2389 return MakeOrFindTwoCharacterString(this, c1, c2);
2399 } 2390 }
2400 2391
2401 bool first_is_ascii = first->IsAsciiRepresentation(); 2392 bool first_is_ascii = first->IsAsciiRepresentation();
2402 bool second_is_ascii = second->IsAsciiRepresentation(); 2393 bool second_is_ascii = second->IsAsciiRepresentation();
2403 bool is_ascii = first_is_ascii && second_is_ascii; 2394 bool is_ascii = first_is_ascii && second_is_ascii;
2404 2395
2405 // Make sure that an out of memory exception is thrown if the length 2396 // Make sure that an out of memory exception is thrown if the length
2406 // of the new cons string is too large. 2397 // of the new cons string is too large.
2407 if (length > String::kMaxLength || length < 0) { 2398 if (length > String::kMaxLength || length < 0) {
2408 Top::context()->mark_out_of_memory(); 2399 isolate()->context()->mark_out_of_memory();
2409 return Failure::OutOfMemoryException(); 2400 return Failure::OutOfMemoryException();
2410 } 2401 }
2411 2402
2412 bool is_ascii_data_in_two_byte_string = false; 2403 bool is_ascii_data_in_two_byte_string = false;
2413 if (!is_ascii) { 2404 if (!is_ascii) {
2414 // At least one of the strings uses two-byte representation, so we 2405 // At least one of the strings uses two-byte representation, so we
2415 // can't use the fast case code for short ascii strings below, but 2406 // can't use the fast case code for short ascii strings below, but
2416 // we can try to save memory if all chars actually fit in ascii. 2407 // we can try to save memory if all chars actually fit in ascii.
2417 is_ascii_data_in_two_byte_string = 2408 is_ascii_data_in_two_byte_string =
2418 first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars(); 2409 first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
2419 if (is_ascii_data_in_two_byte_string) { 2410 if (is_ascii_data_in_two_byte_string) {
2420 Counters::string_add_runtime_ext_to_ascii.Increment(); 2411 isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2421 } 2412 }
2422 } 2413 }
2423 2414
2424 // If the resulting string is small, make a flat string. 2415 // If the resulting string is small, make a flat string.
2425 if (length < String::kMinNonFlatLength) { 2416 if (length < String::kMinNonFlatLength) {
2426 ASSERT(first->IsFlat()); 2417 ASSERT(first->IsFlat());
2427 ASSERT(second->IsFlat()); 2418 ASSERT(second->IsFlat());
2428 if (is_ascii) { 2419 if (is_ascii) {
2429 Object* result; 2420 Object* result;
2430 { MaybeObject* maybe_result = AllocateRawAsciiString(length); 2421 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
(...skipping 20 matching lines...)
2451 } else { 2442 } else {
2452 if (is_ascii_data_in_two_byte_string) { 2443 if (is_ascii_data_in_two_byte_string) {
2453 Object* result; 2444 Object* result;
2454 { MaybeObject* maybe_result = AllocateRawAsciiString(length); 2445 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2455 if (!maybe_result->ToObject(&result)) return maybe_result; 2446 if (!maybe_result->ToObject(&result)) return maybe_result;
2456 } 2447 }
2457 // Copy the characters into the new object. 2448 // Copy the characters into the new object.
2458 char* dest = SeqAsciiString::cast(result)->GetChars(); 2449 char* dest = SeqAsciiString::cast(result)->GetChars();
2459 String::WriteToFlat(first, dest, 0, first_length); 2450 String::WriteToFlat(first, dest, 0, first_length);
2460 String::WriteToFlat(second, dest + first_length, 0, second_length); 2451 String::WriteToFlat(second, dest + first_length, 0, second_length);
2452 isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2461 return result; 2453 return result;
2462 } 2454 }
2463 2455
2464 Object* result; 2456 Object* result;
2465 { MaybeObject* maybe_result = AllocateRawTwoByteString(length); 2457 { MaybeObject* maybe_result = AllocateRawTwoByteString(length);
2466 if (!maybe_result->ToObject(&result)) return maybe_result; 2458 if (!maybe_result->ToObject(&result)) return maybe_result;
2467 } 2459 }
2468 // Copy the characters into the new object. 2460 // Copy the characters into the new object.
2469 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2461 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2470 String::WriteToFlat(first, dest, 0, first_length); 2462 String::WriteToFlat(first, dest, 0, first_length);
(...skipping 21 matching lines...)
2492 } 2484 }
2493 2485
2494 2486
2495 MaybeObject* Heap::AllocateSubString(String* buffer, 2487 MaybeObject* Heap::AllocateSubString(String* buffer,
2496 int start, 2488 int start,
2497 int end, 2489 int end,
2498 PretenureFlag pretenure) { 2490 PretenureFlag pretenure) {
2499 int length = end - start; 2491 int length = end - start;
2500 2492
2501 if (length == 1) { 2493 if (length == 1) {
2502 return Heap::LookupSingleCharacterStringFromCode( 2494 return LookupSingleCharacterStringFromCode(buffer->Get(start));
2503 buffer->Get(start));
2504 } else if (length == 2) { 2495 } else if (length == 2) {
2505 // Optimization for two-character strings often used as keys in a decompression 2496 // Optimization for two-character strings often used as keys in a decompression
2506 // dictionary. Check whether we already have the string in the symbol 2497 // dictionary. Check whether we already have the string in the symbol
2507 // table to prevent creation of many unnecessary strings. 2498 // table to prevent creation of many unnecessary strings.
2508 unsigned c1 = buffer->Get(start); 2499 unsigned c1 = buffer->Get(start);
2509 unsigned c2 = buffer->Get(start + 1); 2500 unsigned c2 = buffer->Get(start + 1);
2510 return MakeOrFindTwoCharacterString(c1, c2); 2501 return MakeOrFindTwoCharacterString(this, c1, c2);
2511 } 2502 }
2512 2503
2513 // Make an attempt to flatten the buffer to reduce access time. 2504 // Make an attempt to flatten the buffer to reduce access time.
2514 buffer = buffer->TryFlattenGetString(); 2505 buffer = buffer->TryFlattenGetString();
2515 2506
2516 Object* result; 2507 Object* result;
2517 { MaybeObject* maybe_result = buffer->IsAsciiRepresentation() 2508 { MaybeObject* maybe_result = buffer->IsAsciiRepresentation()
2518 ? AllocateRawAsciiString(length, pretenure) 2509 ? AllocateRawAsciiString(length, pretenure)
2519 : AllocateRawTwoByteString(length, pretenure); 2510 : AllocateRawTwoByteString(length, pretenure);
2520 if (!maybe_result->ToObject(&result)) return maybe_result; 2511 if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 11 matching lines...)
2532 } 2523 }
2533 2524
2534 return result; 2525 return result;
2535 } 2526 }
2536 2527
2537 2528
2538 MaybeObject* Heap::AllocateExternalStringFromAscii( 2529 MaybeObject* Heap::AllocateExternalStringFromAscii(
2539 ExternalAsciiString::Resource* resource) { 2530 ExternalAsciiString::Resource* resource) {
2540 size_t length = resource->length(); 2531 size_t length = resource->length();
2541 if (length > static_cast<size_t>(String::kMaxLength)) { 2532 if (length > static_cast<size_t>(String::kMaxLength)) {
2542 Top::context()->mark_out_of_memory(); 2533 isolate()->context()->mark_out_of_memory();
2543 return Failure::OutOfMemoryException(); 2534 return Failure::OutOfMemoryException();
2544 } 2535 }
2545 2536
2546 Map* map = external_ascii_string_map(); 2537 Map* map = external_ascii_string_map();
2547 Object* result; 2538 Object* result;
2548 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE); 2539 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2549 if (!maybe_result->ToObject(&result)) return maybe_result; 2540 if (!maybe_result->ToObject(&result)) return maybe_result;
2550 } 2541 }
2551 2542
2552 ExternalAsciiString* external_string = ExternalAsciiString::cast(result); 2543 ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
2553 external_string->set_length(static_cast<int>(length)); 2544 external_string->set_length(static_cast<int>(length));
2554 external_string->set_hash_field(String::kEmptyHashField); 2545 external_string->set_hash_field(String::kEmptyHashField);
2555 external_string->set_resource(resource); 2546 external_string->set_resource(resource);
2556 2547
2557 return result; 2548 return result;
2558 } 2549 }
2559 2550
2560 2551
2561 MaybeObject* Heap::AllocateExternalStringFromTwoByte( 2552 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
2562 ExternalTwoByteString::Resource* resource) { 2553 ExternalTwoByteString::Resource* resource) {
2563 size_t length = resource->length(); 2554 size_t length = resource->length();
2564 if (length > static_cast<size_t>(String::kMaxLength)) { 2555 if (length > static_cast<size_t>(String::kMaxLength)) {
2565 Top::context()->mark_out_of_memory(); 2556 isolate()->context()->mark_out_of_memory();
2566 return Failure::OutOfMemoryException(); 2557 return Failure::OutOfMemoryException();
2567 } 2558 }
2568 2559
2569 // For small strings we check whether the resource contains only 2560 // For small strings we check whether the resource contains only
2570 // ASCII characters. If so, we use a different string map. 2561 // ASCII characters. If so, we use a different string map.
2571 static const size_t kAsciiCheckLengthLimit = 32; 2562 static const size_t kAsciiCheckLengthLimit = 32;
2572 bool is_ascii = length <= kAsciiCheckLengthLimit && 2563 bool is_ascii = length <= kAsciiCheckLengthLimit &&
2573 String::IsAscii(resource->data(), static_cast<int>(length)); 2564 String::IsAscii(resource->data(), static_cast<int>(length));
2574 Map* map = is_ascii ? 2565 Map* map = is_ascii ?
2575 Heap::external_string_with_ascii_data_map() : Heap::external_string_map(); 2566 external_string_with_ascii_data_map() : external_string_map();
2576 Object* result; 2567 Object* result;
2577 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE); 2568 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2578 if (!maybe_result->ToObject(&result)) return maybe_result; 2569 if (!maybe_result->ToObject(&result)) return maybe_result;
2579 } 2570 }
2580 2571
2581 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result); 2572 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
2582 external_string->set_length(static_cast<int>(length)); 2573 external_string->set_length(static_cast<int>(length));
2583 external_string->set_hash_field(String::kEmptyHashField); 2574 external_string->set_hash_field(String::kEmptyHashField);
2584 external_string->set_resource(resource); 2575 external_string->set_resource(resource);
2585 2576
2586 return result; 2577 return result;
2587 } 2578 }
2588 2579
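AllocateExternalStringFromTwoByte only scans short resources (at most 32 units here) for ASCII-only data, trading a bounded O(length) check for the cheaper ASCII-data string map when the payoff is likely. A sketch of that heuristic with a hypothetical IsAscii helper:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Hypothetical helper: true if every 16-bit unit fits in 7 bits.
static bool IsAscii(const uint16_t* data, size_t length) {
  for (size_t i = 0; i < length; ++i) {
    if (data[i] > 0x7f) return false;
  }
  return true;
}

// Only short strings are worth scanning: for long ones the O(length) check
// costs more than the narrower representation tends to save.
static const size_t kAsciiCheckLengthLimit = 32;

static const char* PickMap(const uint16_t* data, size_t length) {
  bool is_ascii = length <= kAsciiCheckLengthLimit && IsAscii(data, length);
  return is_ascii ? "ascii-data string map" : "two-byte string map";
}

int main() {
  uint16_t s1[] = {'h', 'i'};
  uint16_t s2[] = {'h', 0x20AC};
  std::printf("%s\n%s\n", PickMap(s1, 2), PickMap(s2, 2));
  return 0;
}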
2589 2580
2590 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) { 2581 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
2591 if (code <= String::kMaxAsciiCharCode) { 2582 if (code <= String::kMaxAsciiCharCode) {
2592 Object* value = Heap::single_character_string_cache()->get(code); 2583 Object* value = single_character_string_cache()->get(code);
2593 if (value != Heap::undefined_value()) return value; 2584 if (value != undefined_value()) return value;
2594 2585
2595 char buffer[1]; 2586 char buffer[1];
2596 buffer[0] = static_cast<char>(code); 2587 buffer[0] = static_cast<char>(code);
2597 Object* result; 2588 Object* result;
2598 MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1)); 2589 MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
2599 2590
2600 if (!maybe_result->ToObject(&result)) return maybe_result; 2591 if (!maybe_result->ToObject(&result)) return maybe_result;
2601 Heap::single_character_string_cache()->set(code, result); 2592 single_character_string_cache()->set(code, result);
2602 return result; 2593 return result;
2603 } 2594 }
2604 2595
2605 Object* result; 2596 Object* result;
2606 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1); 2597 { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
2607 if (!maybe_result->ToObject(&result)) return maybe_result; 2598 if (!maybe_result->ToObject(&result)) return maybe_result;
2608 } 2599 }
2609 String* answer = String::cast(result); 2600 String* answer = String::cast(result);
2610 answer->Set(0, code); 2601 answer->Set(0, code);
2611 return answer; 2602 return answer;
2612 } 2603 }
2613 2604
2614 2605
2615 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 2606 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
2616 if (length < 0 || length > ByteArray::kMaxLength) { 2607 if (length < 0 || length > ByteArray::kMaxLength) {
(...skipping 93 matching lines...)
2710 } else { 2701 } else {
2711 maybe_result = code_space_->AllocateRaw(obj_size); 2702 maybe_result = code_space_->AllocateRaw(obj_size);
2712 } 2703 }
2713 2704
2714 Object* result; 2705 Object* result;
2715 if (!maybe_result->ToObject(&result)) return maybe_result; 2706 if (!maybe_result->ToObject(&result)) return maybe_result;
2716 2707
2717 // Initialize the object. 2708 // Initialize the object.
2718 HeapObject::cast(result)->set_map(code_map()); 2709 HeapObject::cast(result)->set_map(code_map());
2719 Code* code = Code::cast(result); 2710 Code* code = Code::cast(result);
2720 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2711 ASSERT(!isolate_->code_range()->exists() ||
2712 isolate_->code_range()->contains(code->address()));
2721 code->set_instruction_size(desc.instr_size); 2713 code->set_instruction_size(desc.instr_size);
2722 code->set_relocation_info(ByteArray::cast(reloc_info)); 2714 code->set_relocation_info(ByteArray::cast(reloc_info));
2723 code->set_flags(flags); 2715 code->set_flags(flags);
2724 if (code->is_call_stub() || code->is_keyed_call_stub()) { 2716 if (code->is_call_stub() || code->is_keyed_call_stub()) {
2725 code->set_check_type(RECEIVER_MAP_CHECK); 2717 code->set_check_type(RECEIVER_MAP_CHECK);
2726 } 2718 }
2727 code->set_deoptimization_data(empty_fixed_array()); 2719 code->set_deoptimization_data(empty_fixed_array());
2728 // Allow self-references to the created code object by patching the handle to 2720 // Allow self-references to the created code object by patching the handle to
2729 // point to the newly allocated Code object. 2721 // point to the newly allocated Code object.
2730 if (!self_reference.is_null()) { 2722 if (!self_reference.is_null()) {
(...skipping 25 matching lines...)
2756 2748
2757 Object* result; 2749 Object* result;
2758 if (!maybe_result->ToObject(&result)) return maybe_result; 2750 if (!maybe_result->ToObject(&result)) return maybe_result;
2759 2751
2760 // Copy code object. 2752 // Copy code object.
2761 Address old_addr = code->address(); 2753 Address old_addr = code->address();
2762 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 2754 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2763 CopyBlock(new_addr, old_addr, obj_size); 2755 CopyBlock(new_addr, old_addr, obj_size);
2764 // Relocate the copy. 2756 // Relocate the copy.
2765 Code* new_code = Code::cast(result); 2757 Code* new_code = Code::cast(result);
2766 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2758 ASSERT(!isolate_->code_range()->exists() ||
2759 isolate_->code_range()->contains(code->address()));
2767 new_code->Relocate(new_addr - old_addr); 2760 new_code->Relocate(new_addr - old_addr);
2768 return new_code; 2761 return new_code;
2769 } 2762 }
2770 2763
2771 2764
2772 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { 2765 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
2773 // Allocate ByteArray before the Code object, so that we do not risk 2766 // Allocate ByteArray before the Code object, so that we do not risk
2774 // leaving an uninitialized Code object (and breaking the heap). 2767 // leaving an uninitialized Code object (and breaking the heap).
2775 Object* reloc_info_array; 2768 Object* reloc_info_array;
2776 { MaybeObject* maybe_reloc_info_array = 2769 { MaybeObject* maybe_reloc_info_array =
(...skipping 28 matching lines...)
2805 // Copy header and instructions. 2798 // Copy header and instructions.
2806 memcpy(new_addr, old_addr, relocation_offset); 2799 memcpy(new_addr, old_addr, relocation_offset);
2807 2800
2808 Code* new_code = Code::cast(result); 2801 Code* new_code = Code::cast(result);
2809 new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); 2802 new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
2810 2803
2811 // Copy patched rinfo. 2804 // Copy patched rinfo.
2812 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); 2805 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
2813 2806
2814 // Relocate the copy. 2807 // Relocate the copy.
2815 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2808 ASSERT(!isolate_->code_range()->exists() ||
2809 isolate_->code_range()->contains(code->address()));
2816 new_code->Relocate(new_addr - old_addr); 2810 new_code->Relocate(new_addr - old_addr);
2817 2811
2818 #ifdef DEBUG 2812 #ifdef DEBUG
2819 code->Verify(); 2813 code->Verify();
2820 #endif 2814 #endif
2821 return new_code; 2815 return new_code;
2822 } 2816 }
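
The three ASSERT rewrites in these Code paths are the core isolates pattern of this patch: CodeRange stops being reached through process-wide statics and becomes a per-isolate member reached through isolate_. A hedged sketch of the shape of that migration, with simplified types rather than the real declarations:

#include <cstddef>

class CodeRange {
 public:
  bool exists() const { return base_ != nullptr; }
  bool contains(const char* addr) const {
    return exists() && addr >= base_ && addr < base_ + size_;
  }
 private:
  char* base_ = nullptr;  // set when a dedicated code range is reserved
  size_t size_ = 0;
};

class Isolate {
 public:
  CodeRange* code_range() { return &code_range_; }
 private:
  // Previously: static state on CodeRange, shared by the whole process.
  CodeRange code_range_;
};

// Old: ASSERT(!CodeRange::exists() || CodeRange::contains(addr));
// New: ASSERT(!isolate->code_range()->exists() ||
//             isolate->code_range()->contains(addr));
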
2823 2817
2824 2818
2825 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { 2819 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
2826 ASSERT(gc_state_ == NOT_IN_GC); 2820 ASSERT(gc_state_ == NOT_IN_GC);
2827 ASSERT(map->instance_type() != MAP_TYPE); 2821 ASSERT(map->instance_type() != MAP_TYPE);
2828 // If allocation failures are disallowed, we may allocate in a different 2822 // If allocation failures are disallowed, we may allocate in a different
2829 // space when new space is full and the object is not a large object. 2823 // space when new space is full and the object is not a large object.
2830 AllocationSpace retry_space = 2824 AllocationSpace retry_space =
2831 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); 2825 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
2832 Object* result; 2826 Object* result;
2833 { MaybeObject* maybe_result = 2827 { MaybeObject* maybe_result =
2834 AllocateRaw(map->instance_size(), space, retry_space); 2828 AllocateRaw(map->instance_size(), space, retry_space);
2835 if (!maybe_result->ToObject(&result)) return maybe_result; 2829 if (!maybe_result->ToObject(&result)) return maybe_result;
2836 } 2830 }
2837 HeapObject::cast(result)->set_map(map); 2831 HeapObject::cast(result)->set_map(map);
2838 #ifdef ENABLE_LOGGING_AND_PROFILING 2832 #ifdef ENABLE_LOGGING_AND_PROFILING
2839 ProducerHeapProfile::RecordJSObjectAllocation(result); 2833 isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
2840 #endif 2834 #endif
2841 return result; 2835 return result;
2842 } 2836 }
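
The retry_space computation deserves a note: a NEW_SPACE request is given an old-space fallback chosen by the object's instance type, while an explicit old-space request retries where it started. A compact restatement — the enum is trimmed and TargetSpaceFor is an invented stand-in for TargetSpaceId(instance_type):

enum AllocationSpace { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE };

// Stand-in for TargetSpaceId: pointer-bearing objects fall back to the
// pointer space, raw data to the data space.
static AllocationSpace TargetSpaceFor(bool holds_pointers) {
  return holds_pointers ? OLD_POINTER_SPACE : OLD_DATA_SPACE;
}

static AllocationSpace RetrySpaceFor(AllocationSpace requested,
                                     bool holds_pointers) {
  return (requested != NEW_SPACE) ? requested
                                  : TargetSpaceFor(holds_pointers);
}
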
2843 2837
2844 2838
2845 MaybeObject* Heap::InitializeFunction(JSFunction* function, 2839 MaybeObject* Heap::InitializeFunction(JSFunction* function,
2846 SharedFunctionInfo* shared, 2840 SharedFunctionInfo* shared,
2847 Object* prototype) { 2841 Object* prototype) {
2848 ASSERT(!prototype->IsMap()); 2842 ASSERT(!prototype->IsMap());
2849 function->initialize_properties(); 2843 function->initialize_properties();
(...skipping 47 matching lines...)
2897 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { 2891 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
2898 // To get fast allocation and map sharing for arguments objects we 2892 // To get fast allocation and map sharing for arguments objects we
2899 // allocate them based on an arguments boilerplate. 2893 // allocate them based on an arguments boilerplate.
2900 2894
2901 JSObject* boilerplate; 2895 JSObject* boilerplate;
2902 int arguments_object_size; 2896 int arguments_object_size;
2903 bool strict_mode_callee = callee->IsJSFunction() && 2897 bool strict_mode_callee = callee->IsJSFunction() &&
2904 JSFunction::cast(callee)->shared()->strict_mode(); 2898 JSFunction::cast(callee)->shared()->strict_mode();
2905 if (strict_mode_callee) { 2899 if (strict_mode_callee) {
2906 boilerplate = 2900 boilerplate =
2907 Top::context()->global_context()->strict_mode_arguments_boilerplate(); 2901 isolate()->context()->global_context()->
2902 strict_mode_arguments_boilerplate();
2908 arguments_object_size = kArgumentsObjectSizeStrict; 2903 arguments_object_size = kArgumentsObjectSizeStrict;
2909 } else { 2904 } else {
2910 boilerplate = Top::context()->global_context()->arguments_boilerplate(); 2905 boilerplate =
2906 isolate()->context()->global_context()->arguments_boilerplate();
2911 arguments_object_size = kArgumentsObjectSize; 2907 arguments_object_size = kArgumentsObjectSize;
2912 } 2908 }
2913 2909
2914 // This calls Copy directly rather than using Heap::AllocateRaw so we 2910 // This calls Copy directly rather than using Heap::AllocateRaw so we
2915 // duplicate the check here. 2911 // duplicate the check here.
2916 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 2912 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2917 2913
2918 // Check that the size of the boilerplate matches our 2914 // Check that the size of the boilerplate matches our
2919 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 2915 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
2920 // on the size being a known constant. 2916 // on the size being a known constant.
(...skipping 46 matching lines...)
2967 2963
2968 2964
2969 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) { 2965 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
2970 ASSERT(!fun->has_initial_map()); 2966 ASSERT(!fun->has_initial_map());
2971 2967
2972 // First create a new map with the size and number of in-object properties 2968 // First create a new map with the size and number of in-object properties
2973 // suggested by the function. 2969 // suggested by the function.
2974 int instance_size = fun->shared()->CalculateInstanceSize(); 2970 int instance_size = fun->shared()->CalculateInstanceSize();
2975 int in_object_properties = fun->shared()->CalculateInObjectProperties(); 2971 int in_object_properties = fun->shared()->CalculateInObjectProperties();
2976 Object* map_obj; 2972 Object* map_obj;
2977 { MaybeObject* maybe_map_obj = 2973 { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
2978 Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
2979 if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj; 2974 if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
2980 } 2975 }
2981 2976
2982 // Fetch or allocate prototype. 2977 // Fetch or allocate prototype.
2983 Object* prototype; 2978 Object* prototype;
2984 if (fun->has_instance_prototype()) { 2979 if (fun->has_instance_prototype()) {
2985 prototype = fun->instance_prototype(); 2980 prototype = fun->instance_prototype();
2986 } else { 2981 } else {
2987 { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun); 2982 { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
2988 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype; 2983 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
(...skipping 175 matching lines...)
3164 3159
3165 // The global object might be created from an object template with accessors. 3160 // The global object might be created from an object template with accessors.
3166 // Fill these accessors into the dictionary. 3161 // Fill these accessors into the dictionary.
3167 DescriptorArray* descs = map->instance_descriptors(); 3162 DescriptorArray* descs = map->instance_descriptors();
3168 for (int i = 0; i < descs->number_of_descriptors(); i++) { 3163 for (int i = 0; i < descs->number_of_descriptors(); i++) {
3169 PropertyDetails details = descs->GetDetails(i); 3164 PropertyDetails details = descs->GetDetails(i);
3170 ASSERT(details.type() == CALLBACKS); // Only accessors are expected. 3165 ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
3171 PropertyDetails d = 3166 PropertyDetails d =
3172 PropertyDetails(details.attributes(), CALLBACKS, details.index()); 3167 PropertyDetails(details.attributes(), CALLBACKS, details.index());
3173 Object* value = descs->GetCallbacksObject(i); 3168 Object* value = descs->GetCallbacksObject(i);
3174 { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value); 3169 { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
3175 if (!maybe_value->ToObject(&value)) return maybe_value; 3170 if (!maybe_value->ToObject(&value)) return maybe_value;
3176 } 3171 }
3177 3172
3178 Object* result; 3173 Object* result;
3179 { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d); 3174 { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
3180 if (!maybe_result->ToObject(&result)) return maybe_result; 3175 if (!maybe_result->ToObject(&result)) return maybe_result;
3181 } 3176 }
3182 dictionary = StringDictionary::cast(result); 3177 dictionary = StringDictionary::cast(result);
3183 } 3178 }
3184 3179
3185 // Allocate the global object and initialize it with the backing store. 3180 // Allocate the global object and initialize it with the backing store.
3186 { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE); 3181 { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
3187 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3182 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3188 } 3183 }
3189 JSObject* global = JSObject::cast(obj); 3184 JSObject* global = JSObject::cast(obj);
3190 InitializeJSObjectFromMap(global, dictionary, map); 3185 InitializeJSObjectFromMap(global, dictionary, map);
3191 3186
3192 // Create a new map for the global object. 3187 // Create a new map for the global object.
3193 { MaybeObject* maybe_obj = map->CopyDropDescriptors(); 3188 { MaybeObject* maybe_obj = map->CopyDropDescriptors();
3194 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3189 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3195 } 3190 }
3196 Map* new_map = Map::cast(obj); 3191 Map* new_map = Map::cast(obj);
3197 3192
3198 // Set up the global object as a normalized object. 3193 // Set up the global object as a normalized object.
3199 global->set_map(new_map); 3194 global->set_map(new_map);
3200 global->map()->set_instance_descriptors(Heap::empty_descriptor_array()); 3195 global->map()->set_instance_descriptors(empty_descriptor_array());
3201 global->set_properties(dictionary); 3196 global->set_properties(dictionary);
3202 3197
3203 // Make sure result is a global object with properties in dictionary. 3198 // Make sure result is a global object with properties in dictionary.
3204 ASSERT(global->IsGlobalObject()); 3199 ASSERT(global->IsGlobalObject());
3205 ASSERT(!global->HasFastProperties()); 3200 ASSERT(!global->HasFastProperties());
3206 return global; 3201 return global;
3207 } 3202 }
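
The closing asserts (IsGlobalObject(), !HasFastProperties()) encode the invariant that global objects live in dictionary mode. A rough standalone model of the two property representations — the containers are purely illustrative, not V8's layouts:

#include <map>
#include <string>
#include <vector>

// Fast mode: values sit at fixed offsets; the layout is described once
// by the map's descriptor array and shared between objects.
struct FastProperties {
  std::vector<void*> slots;
};

// Normalized (dictionary) mode: name -> value, no shared layout. Globals
// start here because script execution adds and deletes properties too
// freely for descriptor sharing to pay off.
struct DictionaryProperties {
  std::map<std::string, void*> entries;
};
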
3208 3203
3209 3204
3210 MaybeObject* Heap::CopyJSObject(JSObject* source) { 3205 MaybeObject* Heap::CopyJSObject(JSObject* source) {
(...skipping 18 matching lines...)
3229 source->address(), 3224 source->address(),
3230 object_size); 3225 object_size);
3231 // Update write barrier for all fields that lie beyond the header. 3226 // Update write barrier for all fields that lie beyond the header.
3232 RecordWrites(clone_address, 3227 RecordWrites(clone_address,
3233 JSObject::kHeaderSize, 3228 JSObject::kHeaderSize,
3234 (object_size - JSObject::kHeaderSize) / kPointerSize); 3229 (object_size - JSObject::kHeaderSize) / kPointerSize);
3235 } else { 3230 } else {
3236 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); 3231 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
3237 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 3232 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3238 } 3233 }
3239 ASSERT(Heap::InNewSpace(clone)); 3234 ASSERT(InNewSpace(clone));
3240 // Since we know the clone is allocated in new space, we can copy 3235 // Since we know the clone is allocated in new space, we can copy
3241 // the contents without worrying about updating the write barrier. 3236 // the contents without worrying about updating the write barrier.
3242 CopyBlock(HeapObject::cast(clone)->address(), 3237 CopyBlock(HeapObject::cast(clone)->address(),
3243 source->address(), 3238 source->address(),
3244 object_size); 3239 object_size);
3245 } 3240 }
3246 3241
3247 FixedArray* elements = FixedArray::cast(source->elements()); 3242 FixedArray* elements = FixedArray::cast(source->elements());
3248 FixedArray* properties = FixedArray::cast(source->properties()); 3243 FixedArray* properties = FixedArray::cast(source->properties());
3249 // Update elements if necessary. 3244 // Update elements if necessary.
3250 if (elements->length() > 0) { 3245 if (elements->length() > 0) {
3251 Object* elem; 3246 Object* elem;
3252 { MaybeObject* maybe_elem = 3247 { MaybeObject* maybe_elem =
3253 (elements->map() == fixed_cow_array_map()) ? 3248 (elements->map() == fixed_cow_array_map()) ?
3254 elements : CopyFixedArray(elements); 3249 elements : CopyFixedArray(elements);
3255 if (!maybe_elem->ToObject(&elem)) return maybe_elem; 3250 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
3256 } 3251 }
3257 JSObject::cast(clone)->set_elements(FixedArray::cast(elem)); 3252 JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
3258 } 3253 }
3259 // Update properties if necessary. 3254 // Update properties if necessary.
3260 if (properties->length() > 0) { 3255 if (properties->length() > 0) {
3261 Object* prop; 3256 Object* prop;
3262 { MaybeObject* maybe_prop = CopyFixedArray(properties); 3257 { MaybeObject* maybe_prop = CopyFixedArray(properties);
3263 if (!maybe_prop->ToObject(&prop)) return maybe_prop; 3258 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
3264 } 3259 }
3265 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); 3260 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
3266 } 3261 }
3267 // Return the new clone. 3262 // Return the new clone.
3268 #ifdef ENABLE_LOGGING_AND_PROFILING 3263 #ifdef ENABLE_LOGGING_AND_PROFILING
3269 ProducerHeapProfile::RecordJSObjectAllocation(clone); 3264 isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
3270 #endif 3265 #endif
3271 return clone; 3266 return clone;
3272 } 3267 }
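
CopyJSObject's two branches differ only in write-barrier bookkeeping, which is worth making explicit: a raw block copy into old space bypasses the barrier, so RecordWrites must re-register every field slot, while a copy into new space needs nothing because the scavenger scans all of new space anyway. A conceptual sketch with invented types:

#include <cstring>

struct RememberedSet {
  void Record(void** slot) { (void)slot; /* mark covering region dirty */ }
};

static void CopyObjectFields(void** dst, void** src, int count,
                             bool dst_in_old_space, RememberedSet* rs) {
  std::memcpy(dst, src, count * sizeof(void*));  // barrier-free copy
  if (dst_in_old_space) {
    // Equivalent of RecordWrites(clone_address, ...): without this, an
    // old->new pointer in the clone would be invisible to the scavenger.
    for (int i = 0; i < count; i++) rs->Record(&dst[i]);
  }
}
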
3273 3268
3274 3269
3275 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, 3270 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
3276 JSGlobalProxy* object) { 3271 JSGlobalProxy* object) {
3277 ASSERT(constructor->has_initial_map()); 3272 ASSERT(constructor->has_initial_map());
3278 Map* map = constructor->initial_map(); 3273 Map* map = constructor->initial_map();
3279 3274
(...skipping 35 matching lines...)
3315 } 3310 }
3316 3311
3317 3312
3318 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, 3313 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
3319 PretenureFlag pretenure) { 3314 PretenureFlag pretenure) {
3320 // V8 only supports characters in the Basic Multilingual Plane. 3315 // V8 only supports characters in the Basic Multilingual Plane.
3321 const uc32 kMaxSupportedChar = 0xFFFF; 3316 const uc32 kMaxSupportedChar = 0xFFFF;
3322 // Count the number of characters in the UTF-8 string and check if 3317 // Count the number of characters in the UTF-8 string and check if
3323 // it is an ASCII string. 3318 // it is an ASCII string.
3324 Access<ScannerConstants::Utf8Decoder> 3319 Access<ScannerConstants::Utf8Decoder>
3325 decoder(ScannerConstants::utf8_decoder()); 3320 decoder(isolate_->scanner_constants()->utf8_decoder());
3326 decoder->Reset(string.start(), string.length()); 3321 decoder->Reset(string.start(), string.length());
3327 int chars = 0; 3322 int chars = 0;
3328 while (decoder->has_more()) { 3323 while (decoder->has_more()) {
3329 decoder->GetNext(); 3324 decoder->GetNext();
3330 chars++; 3325 chars++;
3331 } 3326 }
3332 3327
3333 Object* result; 3328 Object* result;
3334 { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure); 3329 { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
3335 if (!maybe_result->ToObject(&result)) return maybe_result; 3330 if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 32 matching lines...)
3368 return result; 3363 return result;
3369 } 3364 }
3370 3365
3371 3366
3372 Map* Heap::SymbolMapForString(String* string) { 3367 Map* Heap::SymbolMapForString(String* string) {
3373 // If the string is in new space it cannot be used as a symbol. 3368 // If the string is in new space it cannot be used as a symbol.
3374 if (InNewSpace(string)) return NULL; 3369 if (InNewSpace(string)) return NULL;
3375 3370
3376 // Find the corresponding symbol map for strings. 3371 // Find the corresponding symbol map for strings.
3377 Map* map = string->map(); 3372 Map* map = string->map();
3378 if (map == ascii_string_map()) return ascii_symbol_map(); 3373 if (map == ascii_string_map()) {
3379 if (map == string_map()) return symbol_map(); 3374 return ascii_symbol_map();
3380 if (map == cons_string_map()) return cons_symbol_map(); 3375 }
3381 if (map == cons_ascii_string_map()) return cons_ascii_symbol_map(); 3376 if (map == string_map()) {
3382 if (map == external_string_map()) return external_symbol_map(); 3377 return symbol_map();
3383 if (map == external_ascii_string_map()) return external_ascii_symbol_map(); 3378 }
3379 if (map == cons_string_map()) {
3380 return cons_symbol_map();
3381 }
3382 if (map == cons_ascii_string_map()) {
3383 return cons_ascii_symbol_map();
3384 }
3385 if (map == external_string_map()) {
3386 return external_symbol_map();
3387 }
3388 if (map == external_ascii_string_map()) {
3389 return external_ascii_symbol_map();
3390 }
3384 if (map == external_string_with_ascii_data_map()) { 3391 if (map == external_string_with_ascii_data_map()) {
3385 return external_symbol_with_ascii_data_map(); 3392 return external_symbol_with_ascii_data_map();
3386 } 3393 }
3387 3394
3388 // No match found. 3395 // No match found.
3389 return NULL; 3396 return NULL;
3390 } 3397 }
3391 3398
3392 3399
3393 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer, 3400 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
(...skipping 153 matching lines...)
3547 : lo_space_->AllocateRawFixedArray(size); 3554 : lo_space_->AllocateRawFixedArray(size);
3548 } 3555 }
3549 3556
3550 3557
3551 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 3558 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
3552 int len = src->length(); 3559 int len = src->length();
3553 Object* obj; 3560 Object* obj;
3554 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); 3561 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
3555 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3562 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3556 } 3563 }
3557 if (Heap::InNewSpace(obj)) { 3564 if (InNewSpace(obj)) {
3558 HeapObject* dst = HeapObject::cast(obj); 3565 HeapObject* dst = HeapObject::cast(obj);
3559 dst->set_map(map); 3566 dst->set_map(map);
3560 CopyBlock(dst->address() + kPointerSize, 3567 CopyBlock(dst->address() + kPointerSize,
3561 src->address() + kPointerSize, 3568 src->address() + kPointerSize,
3562 FixedArray::SizeFor(len) - kPointerSize); 3569 FixedArray::SizeFor(len) - kPointerSize);
3563 return obj; 3570 return obj;
3564 } 3571 }
3565 HeapObject::cast(obj)->set_map(map); 3572 HeapObject::cast(obj)->set_map(map);
3566 FixedArray* result = FixedArray::cast(obj); 3573 FixedArray* result = FixedArray::cast(obj);
3567 result->set_length(len); 3574 result->set_length(len);
(...skipping 11 matching lines...)
3579 if (length == 0) return empty_fixed_array(); 3586 if (length == 0) return empty_fixed_array();
3580 Object* result; 3587 Object* result;
3581 { MaybeObject* maybe_result = AllocateRawFixedArray(length); 3588 { MaybeObject* maybe_result = AllocateRawFixedArray(length);
3582 if (!maybe_result->ToObject(&result)) return maybe_result; 3589 if (!maybe_result->ToObject(&result)) return maybe_result;
3583 } 3590 }
3584 // Initialize header. 3591 // Initialize header.
3585 FixedArray* array = reinterpret_cast<FixedArray*>(result); 3592 FixedArray* array = reinterpret_cast<FixedArray*>(result);
3586 array->set_map(fixed_array_map()); 3593 array->set_map(fixed_array_map());
3587 array->set_length(length); 3594 array->set_length(length);
3588 // Initialize body. 3595 // Initialize body.
3589 ASSERT(!Heap::InNewSpace(undefined_value())); 3596 ASSERT(!InNewSpace(undefined_value()));
3590 MemsetPointer(array->data_start(), undefined_value(), length); 3597 MemsetPointer(array->data_start(), undefined_value(), length);
3591 return result; 3598 return result;
3592 } 3599 }
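
MemsetPointer(array->data_start(), undefined_value(), length) fills the body with one tagged value; semantically it is just the loop below (the template is a stand-in, not V8's declaration). The preceding ASSERT(!InNewSpace(undefined_value())) is what makes this safe: because the filler is guaranteed old, these stores need no write barrier.

// Semantic equivalent of MemsetPointer: store the same pointer into
// `count` consecutive slots.
template <typename T>
static void MemsetPointerSketch(T** dest, T* value, int count) {
  for (int i = 0; i < count; i++) {
    dest[i] = value;
  }
}
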
3593 3600
3594 3601
3595 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { 3602 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
3596 if (length < 0 || length > FixedArray::kMaxLength) { 3603 if (length < 0 || length > FixedArray::kMaxLength) {
3597 return Failure::OutOfMemoryException(); 3604 return Failure::OutOfMemoryException();
3598 } 3605 }
3599 3606
(...skipping 10 matching lines...)
3610 } 3617 }
3611 3618
3612 AllocationSpace retry_space = 3619 AllocationSpace retry_space =
3613 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE; 3620 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
3614 3621
3615 return AllocateRaw(size, space, retry_space); 3622 return AllocateRaw(size, space, retry_space);
3616 } 3623 }
3617 3624
3618 3625
3619 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( 3626 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
3627 Heap* heap,
3620 int length, 3628 int length,
3621 PretenureFlag pretenure, 3629 PretenureFlag pretenure,
3622 Object* filler) { 3630 Object* filler) {
3623 ASSERT(length >= 0); 3631 ASSERT(length >= 0);
3624 ASSERT(Heap::empty_fixed_array()->IsFixedArray()); 3632 ASSERT(heap->empty_fixed_array()->IsFixedArray());
3625 if (length == 0) return Heap::empty_fixed_array(); 3633 if (length == 0) return heap->empty_fixed_array();
3626 3634
3627 ASSERT(!Heap::InNewSpace(filler)); 3635 ASSERT(!heap->InNewSpace(filler));
3628 Object* result; 3636 Object* result;
3629 { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure); 3637 { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
3630 if (!maybe_result->ToObject(&result)) return maybe_result; 3638 if (!maybe_result->ToObject(&result)) return maybe_result;
3631 } 3639 }
3632 3640
3633 HeapObject::cast(result)->set_map(Heap::fixed_array_map()); 3641 HeapObject::cast(result)->set_map(heap->fixed_array_map());
3634 FixedArray* array = FixedArray::cast(result); 3642 FixedArray* array = FixedArray::cast(result);
3635 array->set_length(length); 3643 array->set_length(length);
3636 MemsetPointer(array->data_start(), filler, length); 3644 MemsetPointer(array->data_start(), filler, length);
3637 return array; 3645 return array;
3638 } 3646 }
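
AllocateFixedArrayWithFiller shows the patch's second isolates recipe (beside the member-accessor rewrite above): a file-static helper that used to lean on static Heap:: members now takes the heap as an explicit first argument, so each call site passes this. Sketch with stand-in types:

// Stand-in heap with a per-heap singleton root.
class HeapLike {
 public:
  void* empty_fixed_array() { return &empty_; }
 private:
  int empty_ = 0;
};

// Before the merge a file-static helper could call a static accessor,
// because the process had exactly one heap. After, the caller's heap is
// threaded through explicitly.
static void* EmptyOrFiller(HeapLike* heap, int length) {
  if (length == 0) return heap->empty_fixed_array();
  return nullptr;  // the real code falls through to actual allocation
}
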
3639 3647
3640 3648
3641 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { 3649 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
3642 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); 3650 return AllocateFixedArrayWithFiller(this,
3651 length,
3652 pretenure,
3653 undefined_value());
3643 } 3654 }
3644 3655
3645 3656
3646 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length, 3657 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
3647 PretenureFlag pretenure) { 3658 PretenureFlag pretenure) {
3648 return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value()); 3659 return AllocateFixedArrayWithFiller(this,
3660 length,
3661 pretenure,
3662 the_hole_value());
3649 } 3663 }
3650 3664
3651 3665
3652 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) { 3666 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
3653 if (length == 0) return empty_fixed_array(); 3667 if (length == 0) return empty_fixed_array();
3654 3668
3655 Object* obj; 3669 Object* obj;
3656 { MaybeObject* maybe_obj = AllocateRawFixedArray(length); 3670 { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
3657 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3671 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3658 } 3672 }
3659 3673
3660 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map()); 3674 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
3661 FixedArray::cast(obj)->set_length(length); 3675 FixedArray::cast(obj)->set_length(length);
3662 return obj; 3676 return obj;
3663 } 3677 }
3664 3678
3665 3679
3666 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { 3680 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
3667 Object* result; 3681 Object* result;
3668 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure); 3682 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
3669 if (!maybe_result->ToObject(&result)) return maybe_result; 3683 if (!maybe_result->ToObject(&result)) return maybe_result;
3670 } 3684 }
3671 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map()); 3685 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
3672 ASSERT(result->IsHashTable()); 3686 ASSERT(result->IsHashTable());
3673 return result; 3687 return result;
3674 } 3688 }
3675 3689
3676 3690
3677 MaybeObject* Heap::AllocateGlobalContext() { 3691 MaybeObject* Heap::AllocateGlobalContext() {
3678 Object* result; 3692 Object* result;
3679 { MaybeObject* maybe_result = 3693 { MaybeObject* maybe_result =
3680 Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); 3694 AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
3681 if (!maybe_result->ToObject(&result)) return maybe_result; 3695 if (!maybe_result->ToObject(&result)) return maybe_result;
3682 } 3696 }
3683 Context* context = reinterpret_cast<Context*>(result); 3697 Context* context = reinterpret_cast<Context*>(result);
3684 context->set_map(global_context_map()); 3698 context->set_map(global_context_map());
3685 ASSERT(context->IsGlobalContext()); 3699 ASSERT(context->IsGlobalContext());
3686 ASSERT(result->IsContext()); 3700 ASSERT(result->IsContext());
3687 return result; 3701 return result;
3688 } 3702 }
3689 3703
3690 3704
3691 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) { 3705 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
3692 ASSERT(length >= Context::MIN_CONTEXT_SLOTS); 3706 ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
3693 Object* result; 3707 Object* result;
3694 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length); 3708 { MaybeObject* maybe_result = AllocateFixedArray(length);
3695 if (!maybe_result->ToObject(&result)) return maybe_result; 3709 if (!maybe_result->ToObject(&result)) return maybe_result;
3696 } 3710 }
3697 Context* context = reinterpret_cast<Context*>(result); 3711 Context* context = reinterpret_cast<Context*>(result);
3698 context->set_map(context_map()); 3712 context->set_map(context_map());
3699 context->set_closure(function); 3713 context->set_closure(function);
3700 context->set_fcontext(context); 3714 context->set_fcontext(context);
3701 context->set_previous(NULL); 3715 context->set_previous(NULL);
3702 context->set_extension(NULL); 3716 context->set_extension(NULL);
3703 context->set_global(function->context()->global()); 3717 context->set_global(function->context()->global());
3704 ASSERT(!context->IsGlobalContext()); 3718 ASSERT(!context->IsGlobalContext());
3705 ASSERT(context->is_function_context()); 3719 ASSERT(context->is_function_context());
3706 ASSERT(result->IsContext()); 3720 ASSERT(result->IsContext());
3707 return result; 3721 return result;
3708 } 3722 }
3709 3723
3710 3724
3711 MaybeObject* Heap::AllocateWithContext(Context* previous, 3725 MaybeObject* Heap::AllocateWithContext(Context* previous,
3712 JSObject* extension, 3726 JSObject* extension,
3713 bool is_catch_context) { 3727 bool is_catch_context) {
3714 Object* result; 3728 Object* result;
3715 { MaybeObject* maybe_result = 3729 { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3716 Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3717 if (!maybe_result->ToObject(&result)) return maybe_result; 3730 if (!maybe_result->ToObject(&result)) return maybe_result;
3718 } 3731 }
3719 Context* context = reinterpret_cast<Context*>(result); 3732 Context* context = reinterpret_cast<Context*>(result);
3720 context->set_map(is_catch_context ? catch_context_map() : context_map()); 3733 context->set_map(is_catch_context ? catch_context_map() :
3734 context_map());
3721 context->set_closure(previous->closure()); 3735 context->set_closure(previous->closure());
3722 context->set_fcontext(previous->fcontext()); 3736 context->set_fcontext(previous->fcontext());
3723 context->set_previous(previous); 3737 context->set_previous(previous);
3724 context->set_extension(extension); 3738 context->set_extension(extension);
3725 context->set_global(previous->global()); 3739 context->set_global(previous->global());
3726 ASSERT(!context->IsGlobalContext()); 3740 ASSERT(!context->IsGlobalContext());
3727 ASSERT(!context->is_function_context()); 3741 ASSERT(!context->is_function_context());
3728 ASSERT(result->IsContext()); 3742 ASSERT(result->IsContext());
3729 return result; 3743 return result;
3730 } 3744 }
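
The two context allocators above initialize the same five fields with different values; a compact picture of what each field holds (a plain struct for illustration only — the real Context is a FixedArray with named slots):

struct ContextSketch {
  ContextSketch* previous;  // enclosing context; NULL for a fresh function context
  ContextSketch* fcontext;  // nearest enclosing *function* context (self for functions)
  void* closure;            // owning JSFunction (taken from `previous` for with/catch)
  void* extension;          // with-object or catch-variable holder; NULL for functions
  void* global;             // the global object reachable from this chain
};
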
3731 3745
3732 3746
3733 MaybeObject* Heap::AllocateStruct(InstanceType type) { 3747 MaybeObject* Heap::AllocateStruct(InstanceType type) {
3734 Map* map; 3748 Map* map;
3735 switch (type) { 3749 switch (type) {
3736 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break; 3750 #define MAKE_CASE(NAME, Name, name) \
3751 case NAME##_TYPE: map = name##_map(); break;
3737 STRUCT_LIST(MAKE_CASE) 3752 STRUCT_LIST(MAKE_CASE)
3738 #undef MAKE_CASE 3753 #undef MAKE_CASE
3739 default: 3754 default:
3740 UNREACHABLE(); 3755 UNREACHABLE();
3741 return Failure::InternalError(); 3756 return Failure::InternalError();
3742 } 3757 }
3743 int size = map->instance_size(); 3758 int size = map->instance_size();
3744 AllocationSpace space = 3759 AllocationSpace space =
3745 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; 3760 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
3746 Object* result; 3761 Object* result;
3747 { MaybeObject* maybe_result = Heap::Allocate(map, space); 3762 { MaybeObject* maybe_result = Allocate(map, space);
3748 if (!maybe_result->ToObject(&result)) return maybe_result; 3763 if (!maybe_result->ToObject(&result)) return maybe_result;
3749 } 3764 }
3750 Struct::cast(result)->InitializeBody(size); 3765 Struct::cast(result)->InitializeBody(size);
3751 return result; 3766 return result;
3752 } 3767 }
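
The STRUCT_LIST(MAKE_CASE) expansion is the classic X-macro technique: one list macro stamps out a case per struct type, and the reformatting in this patch only splits the macro across lines. A self-contained miniature with invented list entries:

#include <cstdio>

#define SHAPE_LIST(V) \
  V(CIRCLE, Circle, circle) \
  V(SQUARE, Square, square)

enum ShapeType {
#define MAKE_ENUM(NAME, Name, name) NAME##_TYPE,
  SHAPE_LIST(MAKE_ENUM)
#undef MAKE_ENUM
};

static const char* ShapeMapName(ShapeType type) {
  switch (type) {
#define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: return #name "_map";
    SHAPE_LIST(MAKE_CASE)
#undef MAKE_CASE
  }
  return "unknown";
}

int main() {
  std::printf("%s\n", ShapeMapName(SQUARE_TYPE));  // prints "square_map"
  return 0;
}
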
3753 3768
3754 3769
3755 bool Heap::IdleNotification() { 3770 bool Heap::IdleNotification() {
3756 static const int kIdlesBeforeScavenge = 4; 3771 static const int kIdlesBeforeScavenge = 4;
3757 static const int kIdlesBeforeMarkSweep = 7; 3772 static const int kIdlesBeforeMarkSweep = 7;
3758 static const int kIdlesBeforeMarkCompact = 8; 3773 static const int kIdlesBeforeMarkCompact = 8;
3759 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1; 3774 static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
3760 static const unsigned int kGCsBetweenCleanup = 4; 3775 static const unsigned int kGCsBetweenCleanup = 4;
3761 static int number_idle_notifications = 0; 3776
3762 static unsigned int last_gc_count = gc_count_; 3777 if (!last_idle_notification_gc_count_init_) {
3778 last_idle_notification_gc_count_ = gc_count_;
3779 last_idle_notification_gc_count_init_ = true;
3780 }
3763 3781
3764 bool uncommit = true; 3782 bool uncommit = true;
3765 bool finished = false; 3783 bool finished = false;
3766 3784
3767 // Reset the number of idle notifications received when a number of 3785 // Reset the number of idle notifications received when a number of
3768 // GCs have taken place. This allows another round of cleanup based 3786 // GCs have taken place. This allows another round of cleanup based
3769 // on idle notifications if enough work has been carried out to 3787 // on idle notifications if enough work has been carried out to
3770 // provoke a number of garbage collections. 3788 // provoke a number of garbage collections.
3771 if (gc_count_ - last_gc_count < kGCsBetweenCleanup) { 3789 if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
3772 number_idle_notifications = 3790 number_idle_notifications_ =
3773 Min(number_idle_notifications + 1, kMaxIdleCount); 3791 Min(number_idle_notifications_ + 1, kMaxIdleCount);
3774 } else { 3792 } else {
3775 number_idle_notifications = 0; 3793 number_idle_notifications_ = 0;
3776 last_gc_count = gc_count_; 3794 last_idle_notification_gc_count_ = gc_count_;
3777 } 3795 }
3778 3796
3779 if (number_idle_notifications == kIdlesBeforeScavenge) { 3797 if (number_idle_notifications_ == kIdlesBeforeScavenge) {
3780 if (contexts_disposed_ > 0) { 3798 if (contexts_disposed_ > 0) {
3781 HistogramTimerScope scope(&Counters::gc_context); 3799 HistogramTimerScope scope(isolate_->counters()->gc_context());
3782 CollectAllGarbage(false); 3800 CollectAllGarbage(false);
3783 } else { 3801 } else {
3784 CollectGarbage(NEW_SPACE); 3802 CollectGarbage(NEW_SPACE);
3785 } 3803 }
3786 new_space_.Shrink(); 3804 new_space_.Shrink();
3787 last_gc_count = gc_count_; 3805 last_idle_notification_gc_count_ = gc_count_;
3788 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { 3806 } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
3789 // Before doing the mark-sweep collections we clear the 3807 // Before doing the mark-sweep collections we clear the
3790 // compilation cache to avoid hanging on to source code and 3808 // compilation cache to avoid hanging on to source code and
3791 // generated code for cached functions. 3809 // generated code for cached functions.
3792 CompilationCache::Clear(); 3810 isolate_->compilation_cache()->Clear();
3793 3811
3794 CollectAllGarbage(false); 3812 CollectAllGarbage(false);
3795 new_space_.Shrink(); 3813 new_space_.Shrink();
3796 last_gc_count = gc_count_; 3814 last_idle_notification_gc_count_ = gc_count_;
3797 3815
3798 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) { 3816 } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
3799 CollectAllGarbage(true); 3817 CollectAllGarbage(true);
3800 new_space_.Shrink(); 3818 new_space_.Shrink();
3801 last_gc_count = gc_count_; 3819 last_idle_notification_gc_count_ = gc_count_;
3820 number_idle_notifications_ = 0;
3802 finished = true; 3821 finished = true;
3803
3804 } else if (contexts_disposed_ > 0) { 3822 } else if (contexts_disposed_ > 0) {
3805 if (FLAG_expose_gc) { 3823 if (FLAG_expose_gc) {
3806 contexts_disposed_ = 0; 3824 contexts_disposed_ = 0;
3807 } else { 3825 } else {
3808 HistogramTimerScope scope(&Counters::gc_context); 3826 HistogramTimerScope scope(isolate_->counters()->gc_context());
3809 CollectAllGarbage(false); 3827 CollectAllGarbage(false);
3810 last_gc_count = gc_count_; 3828 last_idle_notification_gc_count_ = gc_count_;
3811 } 3829 }
3812 // If this is the first idle notification, we reset the 3830 // If this is the first idle notification, we reset the
3813 // notification count to avoid letting idle notifications for 3831 // notification count to avoid letting idle notifications for
3814 // context disposal garbage collections start a potentially too 3832 // context disposal garbage collections start a potentially too
3815 // aggressive idle GC cycle. 3833 // aggressive idle GC cycle.
3816 if (number_idle_notifications <= 1) { 3834 if (number_idle_notifications_ <= 1) {
3817 number_idle_notifications = 0; 3835 number_idle_notifications_ = 0;
3818 uncommit = false; 3836 uncommit = false;
3819 } 3837 }
3820 } else if (number_idle_notifications > kIdlesBeforeMarkCompact) { 3838 } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
3821 // If we have received more than kIdlesBeforeMarkCompact idle 3839 // If we have received more than kIdlesBeforeMarkCompact idle
3822 // notifications we do not perform any cleanup because we don't 3840 // notifications we do not perform any cleanup because we don't
3823 // expect to gain much by doing so. 3841 // expect to gain much by doing so.
3824 finished = true; 3842 finished = true;
3825 } 3843 }
3826 3844
3827 // Make sure that we have no pending context disposals and 3845 // Make sure that we have no pending context disposals and
3828 // conditionally uncommit from space. 3846 // conditionally uncommit from space.
3829 ASSERT(contexts_disposed_ == 0); 3847 ASSERT(contexts_disposed_ == 0);
3830 if (uncommit) Heap::UncommitFromSpace(); 3848 if (uncommit) UncommitFromSpace();
3831 return finished; 3849 return finished;
3832 } 3850 }
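
Behind the isolate churn, IdleNotification's policy is a simple ladder keyed to consecutive idle rounds, and the patch's real change is moving the two function-static counters into Heap fields (number_idle_notifications_, last_idle_notification_gc_count_) so concurrent isolates stop sharing them. The ladder restated standalone — thresholds copied from the code above, everything else simplified:

enum IdleAction { DO_NOTHING, SCAVENGE, MARK_SWEEP, MARK_COMPACT };

struct IdleStateSketch {
  int idles = 0;         // was a function-static; now per-heap state
  unsigned last_gc = 0;  // gc_count_ at the last reset
};

static IdleAction NextIdleAction(IdleStateSketch* s, unsigned gc_count) {
  const int kIdlesBeforeScavenge = 4;
  const int kIdlesBeforeMarkSweep = 7;
  const int kIdlesBeforeMarkCompact = 8;
  const unsigned kGCsBetweenCleanup = 4;

  if (gc_count - s->last_gc < kGCsBetweenCleanup) {
    if (s->idles <= kIdlesBeforeMarkCompact) s->idles++;  // clamp at max
  } else {
    s->idles = 0;          // enough real GCs happened; start a new round
    s->last_gc = gc_count;
  }
  if (s->idles == kIdlesBeforeScavenge) return SCAVENGE;
  if (s->idles == kIdlesBeforeMarkSweep) return MARK_SWEEP;
  if (s->idles == kIdlesBeforeMarkCompact) return MARK_COMPACT;
  return DO_NOTHING;
}
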
3833 3851
3834 3852
3835 #ifdef DEBUG 3853 #ifdef DEBUG
3836 3854
3837 void Heap::Print() { 3855 void Heap::Print() {
3838 if (!HasBeenSetup()) return; 3856 if (!HasBeenSetup()) return;
3839 Top::PrintStack(); 3857 isolate()->PrintStack();
3840 AllSpaces spaces; 3858 AllSpaces spaces;
3841 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) 3859 for (Space* space = spaces.next(); space != NULL; space = spaces.next())
3842 space->Print(); 3860 space->Print();
3843 } 3861 }
3844 3862
3845 3863
3846 void Heap::ReportCodeStatistics(const char* title) { 3864 void Heap::ReportCodeStatistics(const char* title) {
3847 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title); 3865 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
3848 PagedSpace::ResetCodeStatistics(); 3866 PagedSpace::ResetCodeStatistics();
3849 // We do not look for code in new space, map space, or old space. If code 3867 // We do not look for code in new space, map space, or old space. If code
(...skipping 12 matching lines...)
3862 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", 3880 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
3863 title, gc_count_); 3881 title, gc_count_);
3864 PrintF("mark-compact GC : %d\n", mc_count_); 3882 PrintF("mark-compact GC : %d\n", mc_count_);
3865 PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n", 3883 PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
3866 old_gen_promotion_limit_); 3884 old_gen_promotion_limit_);
3867 PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n", 3885 PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
3868 old_gen_allocation_limit_); 3886 old_gen_allocation_limit_);
3869 3887
3870 PrintF("\n"); 3888 PrintF("\n");
3871 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles()); 3889 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
3872 GlobalHandles::PrintStats(); 3890 isolate_->global_handles()->PrintStats();
3873 PrintF("\n"); 3891 PrintF("\n");
3874 3892
3875 PrintF("Heap statistics : "); 3893 PrintF("Heap statistics : ");
3876 MemoryAllocator::ReportStatistics(); 3894 isolate_->memory_allocator()->ReportStatistics();
3877 PrintF("To space : "); 3895 PrintF("To space : ");
3878 new_space_.ReportStatistics(); 3896 new_space_.ReportStatistics();
3879 PrintF("Old pointer space : "); 3897 PrintF("Old pointer space : ");
3880 old_pointer_space_->ReportStatistics(); 3898 old_pointer_space_->ReportStatistics();
3881 PrintF("Old data space : "); 3899 PrintF("Old data space : ");
3882 old_data_space_->ReportStatistics(); 3900 old_data_space_->ReportStatistics();
3883 PrintF("Code space : "); 3901 PrintF("Code space : ");
3884 code_space_->ReportStatistics(); 3902 code_space_->ReportStatistics();
3885 PrintF("Map space : "); 3903 PrintF("Map space : ");
3886 map_space_->ReportStatistics(); 3904 map_space_->ReportStatistics();
(...skipping 62 matching lines...)
3949 static void VerifyPointersUnderWatermark( 3967 static void VerifyPointersUnderWatermark(
3950 PagedSpace* space, 3968 PagedSpace* space,
3951 DirtyRegionCallback visit_dirty_region) { 3969 DirtyRegionCallback visit_dirty_region) {
3952 PageIterator it(space, PageIterator::PAGES_IN_USE); 3970 PageIterator it(space, PageIterator::PAGES_IN_USE);
3953 3971
3954 while (it.has_next()) { 3972 while (it.has_next()) {
3955 Page* page = it.next(); 3973 Page* page = it.next();
3956 Address start = page->ObjectAreaStart(); 3974 Address start = page->ObjectAreaStart();
3957 Address end = page->AllocationWatermark(); 3975 Address end = page->AllocationWatermark();
3958 3976
3959 Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks, 3977 HEAP->IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
3960 start, 3978 start,
3961 end, 3979 end,
3962 visit_dirty_region, 3980 visit_dirty_region,
3963 &DummyScavengePointer); 3981 &DummyScavengePointer);
3964 } 3982 }
3965 } 3983 }
3966 3984
3967 3985
3968 static void VerifyPointersUnderWatermark(LargeObjectSpace* space) { 3986 static void VerifyPointersUnderWatermark(LargeObjectSpace* space) {
3969 LargeObjectIterator it(space); 3987 LargeObjectIterator it(space);
3970 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { 3988 for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
3971 if (object->IsFixedArray()) { 3989 if (object->IsFixedArray()) {
3972 Address slot_address = object->address(); 3990 Address slot_address = object->address();
3973 Address end = object->address() + object->Size(); 3991 Address end = object->address() + object->Size();
3974 3992
3975 while (slot_address < end) { 3993 while (slot_address < end) {
3976 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); 3994 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
3977 // When we are not in GC the Heap::InNewSpace() predicate 3995 // When we are not in GC the Heap::InNewSpace() predicate
3978 // checks that pointers which satisfy the predicate point into 3996 // checks that pointers which satisfy the predicate point into
3979 // the active semispace. 3997 // the active semispace.
3980 Heap::InNewSpace(*slot); 3998 HEAP->InNewSpace(*slot);
3981 slot_address += kPointerSize; 3999 slot_address += kPointerSize;
3982 } 4000 }
3983 } 4001 }
3984 } 4002 }
3985 } 4003 }
3986 4004
3987 4005
3988 void Heap::Verify() { 4006 void Heap::Verify() {
3989 ASSERT(HasBeenSetup()); 4007 ASSERT(HasBeenSetup());
3990 4008
(...skipping 100 matching lines...)
4091 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure()); 4109 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure());
4092 for (Address a = new_space_.FromSpaceLow(); 4110 for (Address a = new_space_.FromSpaceLow();
4093 a < new_space_.FromSpaceHigh(); 4111 a < new_space_.FromSpaceHigh();
4094 a += kPointerSize) { 4112 a += kPointerSize) {
4095 Memory::Address_at(a) = kFromSpaceZapValue; 4113 Memory::Address_at(a) = kFromSpaceZapValue;
4096 } 4114 }
4097 } 4115 }
4098 #endif // DEBUG 4116 #endif // DEBUG
4099 4117
4100 4118
4101 bool Heap::IteratePointersInDirtyRegion(Address start, 4119 bool Heap::IteratePointersInDirtyRegion(Heap* heap,
4120 Address start,
4102 Address end, 4121 Address end,
4103 ObjectSlotCallback copy_object_func) { 4122 ObjectSlotCallback copy_object_func) {
4104 Address slot_address = start; 4123 Address slot_address = start;
4105 bool pointers_to_new_space_found = false; 4124 bool pointers_to_new_space_found = false;
4106 4125
4107 while (slot_address < end) { 4126 while (slot_address < end) {
4108 Object** slot = reinterpret_cast<Object**>(slot_address); 4127 Object** slot = reinterpret_cast<Object**>(slot_address);
4109 if (Heap::InNewSpace(*slot)) { 4128 if (heap->InNewSpace(*slot)) {
4110 ASSERT((*slot)->IsHeapObject()); 4129 ASSERT((*slot)->IsHeapObject());
4111 copy_object_func(reinterpret_cast<HeapObject**>(slot)); 4130 copy_object_func(reinterpret_cast<HeapObject**>(slot));
4112 if (Heap::InNewSpace(*slot)) { 4131 if (heap->InNewSpace(*slot)) {
4113 ASSERT((*slot)->IsHeapObject()); 4132 ASSERT((*slot)->IsHeapObject());
4114 pointers_to_new_space_found = true; 4133 pointers_to_new_space_found = true;
4115 } 4134 }
4116 } 4135 }
4117 slot_address += kPointerSize; 4136 slot_address += kPointerSize;
4118 } 4137 }
4119 return pointers_to_new_space_found; 4138 return pointers_to_new_space_found;
4120 } 4139 }
4121 4140
4122 4141
(...skipping 13 matching lines...)
4136 4155
4137 static bool IteratePointersInDirtyMaps(Address start, 4156 static bool IteratePointersInDirtyMaps(Address start,
4138 Address end, 4157 Address end,
4139 ObjectSlotCallback copy_object_func) { 4158 ObjectSlotCallback copy_object_func) {
4140 ASSERT(MapStartAlign(start) == start); 4159 ASSERT(MapStartAlign(start) == start);
4141 ASSERT(MapEndAlign(end) == end); 4160 ASSERT(MapEndAlign(end) == end);
4142 4161
4143 Address map_address = start; 4162 Address map_address = start;
4144 bool pointers_to_new_space_found = false; 4163 bool pointers_to_new_space_found = false;
4145 4164
4165 Heap* heap = HEAP;
4146 while (map_address < end) { 4166 while (map_address < end) {
4147 ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address))); 4167 ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
4148 ASSERT(Memory::Object_at(map_address)->IsMap()); 4168 ASSERT(Memory::Object_at(map_address)->IsMap());
4149 4169
4150 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset; 4170 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
4151 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset; 4171 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
4152 4172
4153 if (Heap::IteratePointersInDirtyRegion(pointer_fields_start, 4173 if (Heap::IteratePointersInDirtyRegion(heap,
4174 pointer_fields_start,
4154 pointer_fields_end, 4175 pointer_fields_end,
4155 copy_object_func)) { 4176 copy_object_func)) {
4156 pointers_to_new_space_found = true; 4177 pointers_to_new_space_found = true;
4157 } 4178 }
4158 4179
4159 map_address += Map::kSize; 4180 map_address += Map::kSize;
4160 } 4181 }
4161 4182
4162 return pointers_to_new_space_found; 4183 return pointers_to_new_space_found;
4163 } 4184 }
4164 4185
4165 4186
4166 bool Heap::IteratePointersInDirtyMapsRegion( 4187 bool Heap::IteratePointersInDirtyMapsRegion(
4188 Heap* heap,
4167 Address start, 4189 Address start,
4168 Address end, 4190 Address end,
4169 ObjectSlotCallback copy_object_func) { 4191 ObjectSlotCallback copy_object_func) {
4170 Address map_aligned_start = MapStartAlign(start); 4192 Address map_aligned_start = MapStartAlign(start);
4171 Address map_aligned_end = MapEndAlign(end); 4193 Address map_aligned_end = MapEndAlign(end);
4172 4194
4173 bool contains_pointers_to_new_space = false; 4195 bool contains_pointers_to_new_space = false;
4174 4196
4175 if (map_aligned_start != start) { 4197 if (map_aligned_start != start) {
4176 Address prev_map = map_aligned_start - Map::kSize; 4198 Address prev_map = map_aligned_start - Map::kSize;
4177 ASSERT(Memory::Object_at(prev_map)->IsMap()); 4199 ASSERT(Memory::Object_at(prev_map)->IsMap());
4178 4200
4179 Address pointer_fields_start = 4201 Address pointer_fields_start =
4180 Max(start, prev_map + Map::kPointerFieldsBeginOffset); 4202 Max(start, prev_map + Map::kPointerFieldsBeginOffset);
4181 4203
4182 Address pointer_fields_end = 4204 Address pointer_fields_end =
4183 Min(prev_map + Map::kPointerFieldsEndOffset, end); 4205 Min(prev_map + Map::kPointerFieldsEndOffset, end);
4184 4206
4185 contains_pointers_to_new_space = 4207 contains_pointers_to_new_space =
4186 IteratePointersInDirtyRegion(pointer_fields_start, 4208 IteratePointersInDirtyRegion(heap,
4209 pointer_fields_start,
4187 pointer_fields_end, 4210 pointer_fields_end,
4188 copy_object_func) 4211 copy_object_func)
4189 || contains_pointers_to_new_space; 4212 || contains_pointers_to_new_space;
4190 } 4213 }
4191 4214
4192 contains_pointers_to_new_space = 4215 contains_pointers_to_new_space =
4193 IteratePointersInDirtyMaps(map_aligned_start, 4216 IteratePointersInDirtyMaps(map_aligned_start,
4194 map_aligned_end, 4217 map_aligned_end,
4195 copy_object_func) 4218 copy_object_func)
4196 || contains_pointers_to_new_space; 4219 || contains_pointers_to_new_space;
4197 4220
4198 if (map_aligned_end != end) { 4221 if (map_aligned_end != end) {
4199 ASSERT(Memory::Object_at(map_aligned_end)->IsMap()); 4222 ASSERT(Memory::Object_at(map_aligned_end)->IsMap());
4200 4223
4201 Address pointer_fields_start = 4224 Address pointer_fields_start =
4202 map_aligned_end + Map::kPointerFieldsBeginOffset; 4225 map_aligned_end + Map::kPointerFieldsBeginOffset;
4203 4226
4204 Address pointer_fields_end = 4227 Address pointer_fields_end =
4205 Min(end, map_aligned_end + Map::kPointerFieldsEndOffset); 4228 Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
4206 4229
4207 contains_pointers_to_new_space = 4230 contains_pointers_to_new_space =
4208 IteratePointersInDirtyRegion(pointer_fields_start, 4231 IteratePointersInDirtyRegion(heap,
4232 pointer_fields_start,
4209 pointer_fields_end, 4233 pointer_fields_end,
4210 copy_object_func) 4234 copy_object_func)
4211 || contains_pointers_to_new_space; 4235 || contains_pointers_to_new_space;
4212 } 4236 }
4213 4237
4214 return contains_pointers_to_new_space; 4238 return contains_pointers_to_new_space;
4215 } 4239 }
4216 4240
4217 4241
4218 void Heap::IterateAndMarkPointersToFromSpace(Address start, 4242 void Heap::IterateAndMarkPointersToFromSpace(Address start,
4219 Address end, 4243 Address end,
4220 ObjectSlotCallback callback) { 4244 ObjectSlotCallback callback) {
4221 Address slot_address = start; 4245 Address slot_address = start;
4222 Page* page = Page::FromAddress(start); 4246 Page* page = Page::FromAddress(start);
4223 4247
4224 uint32_t marks = page->GetRegionMarks(); 4248 uint32_t marks = page->GetRegionMarks();
4225 4249
4226 while (slot_address < end) { 4250 while (slot_address < end) {
4227 Object** slot = reinterpret_cast<Object**>(slot_address); 4251 Object** slot = reinterpret_cast<Object**>(slot_address);
4228 if (Heap::InFromSpace(*slot)) { 4252 if (InFromSpace(*slot)) {
4229 ASSERT((*slot)->IsHeapObject()); 4253 ASSERT((*slot)->IsHeapObject());
4230 callback(reinterpret_cast<HeapObject**>(slot)); 4254 callback(reinterpret_cast<HeapObject**>(slot));
4231 if (Heap::InNewSpace(*slot)) { 4255 if (InNewSpace(*slot)) {
4232 ASSERT((*slot)->IsHeapObject()); 4256 ASSERT((*slot)->IsHeapObject());
4233 marks |= page->GetRegionMaskForAddress(slot_address); 4257 marks |= page->GetRegionMaskForAddress(slot_address);
4234 } 4258 }
4235 } 4259 }
4236 slot_address += kPointerSize; 4260 slot_address += kPointerSize;
4237 } 4261 }
4238 4262
4239 page->SetRegionMarks(marks); 4263 page->SetRegionMarks(marks);
4240 } 4264 }
4241 4265
(...skipping 18 matching lines...)
4260 // area_start by Page::kRegionSize. 4284 // area_start by Page::kRegionSize.
4261 Address second_region = 4285 Address second_region =
4262 reinterpret_cast<Address>( 4286 reinterpret_cast<Address>(
4263 reinterpret_cast<intptr_t>(area_start + Page::kRegionSize) & 4287 reinterpret_cast<intptr_t>(area_start + Page::kRegionSize) &
4264 ~Page::kRegionAlignmentMask); 4288 ~Page::kRegionAlignmentMask);
4265 4289
4266 // Next region might be beyond area_end. 4290 // Next region might be beyond area_end.
4267 Address region_end = Min(second_region, area_end); 4291 Address region_end = Min(second_region, area_end);
4268 4292
4269 if (marks & mask) { 4293 if (marks & mask) {
4270 if (visit_dirty_region(region_start, region_end, copy_object_func)) { 4294 if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
4271 newmarks |= mask; 4295 newmarks |= mask;
4272 } 4296 }
4273 } 4297 }
4274 mask <<= 1; 4298 mask <<= 1;
4275 4299
4276 // Iterate subsequent regions which fully lie inside [area_start, area_end[. 4300 // Iterate subsequent regions which fully lie inside [area_start, area_end[.
4277 region_start = region_end; 4301 region_start = region_end;
4278 region_end = region_start + Page::kRegionSize; 4302 region_end = region_start + Page::kRegionSize;
4279 4303
4280 while (region_end <= area_end) { 4304 while (region_end <= area_end) {
4281 if (marks & mask) { 4305 if (marks & mask) {
4282 if (visit_dirty_region(region_start, region_end, copy_object_func)) { 4306 if (visit_dirty_region(this,
4307 region_start,
4308 region_end,
4309 copy_object_func)) {
4283 newmarks |= mask; 4310 newmarks |= mask;
4284 } 4311 }
4285 } 4312 }
4286 4313
4287 region_start = region_end; 4314 region_start = region_end;
4288 region_end = region_start + Page::kRegionSize; 4315 region_end = region_start + Page::kRegionSize;
4289 4316
4290 mask <<= 1; 4317 mask <<= 1;
4291 } 4318 }
4292 4319
4293 if (region_start != area_end) { 4320 if (region_start != area_end) {
4294 // A small piece of the area is left unvisited because area_end does not 4321 // A small piece of the area is left unvisited because area_end does not
4295 // coincide with a region boundary. Check whether the region covering the 4322 // coincide with a region boundary. Check whether the region covering the
4296 // last part of the area is dirty. 4323 // last part of the area is dirty.
4297 if (marks & mask) { 4324 if (marks & mask) {
4298 if (visit_dirty_region(region_start, area_end, copy_object_func)) { 4325 if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
4299 newmarks |= mask; 4326 newmarks |= mask;
4300 } 4327 }
4301 } 4328 }
4302 } 4329 }
4303 4330
4304 return newmarks; 4331 return newmarks;
4305 } 4332 }
4306 4333
4307 4334
4308 4335
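The region walk above handles three cases: a first region that may start unaligned, whole regions in the middle, and a trailing partial region. The boundary of the first region is found by rounding area_start + kRegionSize down to region alignment and clamping to area_end. A small sketch of that arithmetic, assuming kRegionSize is a power of two (the mask name mirrors the code; the size value is illustrative):

    #include <algorithm>
    #include <cstdint>

    const uintptr_t kRegionSize = 256;                       // illustrative
    const uintptr_t kRegionAlignmentMask = kRegionSize - 1;  // low address bits

    // End of the first (possibly partial) region of [area_start, area_end[.
    inline uintptr_t FirstRegionEnd(uintptr_t area_start, uintptr_t area_end) {
      uintptr_t second_region =
          (area_start + kRegionSize) & ~kRegionAlignmentMask;
      return std::min(second_region, area_end);  // may lie past the area's end
    }

For example, with 256-byte regions, area_start = 0x1010 gives second_region = 0x1100, so the first region covers only 0xF0 bytes.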
(...skipping 45 matching lines...)
4354 IterateStrongRoots(v, mode); 4381 IterateStrongRoots(v, mode);
4355 IterateWeakRoots(v, mode); 4382 IterateWeakRoots(v, mode);
4356 } 4383 }
4357 4384
4358 4385
4359 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { 4386 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
4360 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); 4387 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
4361 v->Synchronize("symbol_table"); 4388 v->Synchronize("symbol_table");
4362 if (mode != VISIT_ALL_IN_SCAVENGE) { 4389 if (mode != VISIT_ALL_IN_SCAVENGE) {
4363 // Scavenge collections have special processing for this. 4390 // Scavenge collections have special processing for this.
4364 ExternalStringTable::Iterate(v); 4391 external_string_table_.Iterate(v);
4365 } 4392 }
4366 v->Synchronize("external_string_table"); 4393 v->Synchronize("external_string_table");
4367 } 4394 }
4368 4395
4369 4396
4370 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { 4397 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
4371 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); 4398 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
4372 v->Synchronize("strong_root_list"); 4399 v->Synchronize("strong_root_list");
4373 4400
4374 v->VisitPointer(BitCast<Object**>(&hidden_symbol_)); 4401 v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
4375 v->Synchronize("symbol"); 4402 v->Synchronize("symbol");
4376 4403
4377 Bootstrapper::Iterate(v); 4404 isolate_->bootstrapper()->Iterate(v);
4378 v->Synchronize("bootstrapper"); 4405 v->Synchronize("bootstrapper");
4379 Top::Iterate(v); 4406 isolate_->Iterate(v);
4380 v->Synchronize("top"); 4407 v->Synchronize("top");
4381 Relocatable::Iterate(v); 4408 Relocatable::Iterate(v);
4382 v->Synchronize("relocatable"); 4409 v->Synchronize("relocatable");
4383 4410
4384 #ifdef ENABLE_DEBUGGER_SUPPORT 4411 #ifdef ENABLE_DEBUGGER_SUPPORT
4385 Debug::Iterate(v); 4412 isolate_->debug()->Iterate(v);
4386 #endif 4413 #endif
4387 v->Synchronize("debug"); 4414 v->Synchronize("debug");
4388 CompilationCache::Iterate(v); 4415 isolate_->compilation_cache()->Iterate(v);
4389 v->Synchronize("compilationcache"); 4416 v->Synchronize("compilationcache");
4390 4417
4391 // Iterate over local handles in handle scopes. 4418 // Iterate over local handles in handle scopes.
4392 HandleScopeImplementer::Iterate(v); 4419 isolate_->handle_scope_implementer()->Iterate(v);
4393 v->Synchronize("handlescope"); 4420 v->Synchronize("handlescope");
4394 4421
4395 // Iterate over the builtin code objects and code stubs in the 4422 // Iterate over the builtin code objects and code stubs in the
4396 // heap. Note that it is not necessary to iterate over code objects 4423 // heap. Note that it is not necessary to iterate over code objects
4397 // on scavenge collections. 4424 // on scavenge collections.
4398 if (mode != VISIT_ALL_IN_SCAVENGE) { 4425 if (mode != VISIT_ALL_IN_SCAVENGE) {
4399 Builtins::IterateBuiltins(v); 4426 isolate_->builtins()->IterateBuiltins(v);
4400 } 4427 }
4401 v->Synchronize("builtins"); 4428 v->Synchronize("builtins");
4402 4429
4403 // Iterate over global handles. 4430 // Iterate over global handles.
4404 if (mode == VISIT_ONLY_STRONG) { 4431 if (mode == VISIT_ONLY_STRONG) {
4405 GlobalHandles::IterateStrongRoots(v); 4432 isolate_->global_handles()->IterateStrongRoots(v);
4406 } else { 4433 } else {
4407 GlobalHandles::IterateAllRoots(v); 4434 isolate_->global_handles()->IterateAllRoots(v);
4408 } 4435 }
4409 v->Synchronize("globalhandles"); 4436 v->Synchronize("globalhandles");
4410 4437
4411 // Iterate over pointers being held by inactive threads. 4438 // Iterate over pointers being held by inactive threads.
4412 ThreadManager::Iterate(v); 4439 isolate_->thread_manager()->Iterate(v);
4413 v->Synchronize("threadmanager"); 4440 v->Synchronize("threadmanager");
4414 4441
4415 // Iterate over the pointers the Serialization/Deserialization code is 4442 // Iterate over the pointers the Serialization/Deserialization code is
4416 // holding. 4443 // holding.
4417 // During garbage collection this keeps the partial snapshot cache alive. 4444 // During garbage collection this keeps the partial snapshot cache alive.
4418 // During deserialization of the startup snapshot this creates the partial 4445 // During deserialization of the startup snapshot this creates the partial
4419 // snapshot cache and deserializes the objects it refers to. During 4446 // snapshot cache and deserializes the objects it refers to. During
4420 // serialization this does nothing, since the partial snapshot cache is 4447 // serialization this does nothing, since the partial snapshot cache is
4421 // empty. However the next thing we do is create the partial snapshot, 4448 // empty. However the next thing we do is create the partial snapshot,
4422 // filling up the partial snapshot cache with objects it needs as we go. 4449 // filling up the partial snapshot cache with objects it needs as we go.
4423 SerializerDeserializer::Iterate(v); 4450 SerializerDeserializer::Iterate(v);
4424 // We don't do a v->Synchronize call here, because in debug mode that will 4451 // We don't do a v->Synchronize call here, because in debug mode that will
4425 // output a flag to the snapshot. However at this point the serializer and 4452 // output a flag to the snapshot. However at this point the serializer and
4426 // deserializer are deliberately a little unsynchronized (see above) so the 4453 // deserializer are deliberately a little unsynchronized (see above) so the
4427 // checking of the sync flag in the snapshot would fail. 4454 // checking of the sync flag in the snapshot would fail.
4428 } 4455 }
4429 4456
4430 4457
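Almost every hunk in this function follows the same pattern: a call on a process-wide static class (Bootstrapper, Top, GlobalHandles, and so on) becomes a call on an instance reached through the heap's isolate_ pointer. A minimal sketch of the shape of that refactor, with names simplified (the real classes carry far more state):

    class ObjectVisitor;  // stand-in for the visitor interface used above

    class GlobalHandles {
     public:
      // Previously a static function on process-wide state; now a member
      // that walks only this isolate's handles.
      void IterateStrongRoots(ObjectVisitor* /*v*/) {}
    };

    class Isolate {
     public:
      GlobalHandles* global_handles() { return &global_handles_; }
     private:
      GlobalHandles global_handles_;  // one copy per isolate, not per process
    };

The payoff, which is the point of the isolates merge, is that two isolates in one process no longer share, and race on, the same root lists.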
4431 // Flag is set when the heap has been configured. The heap can be repeatedly
4432 // configured through the API until it is set up.
4433 static bool heap_configured = false;
4434
4435 // TODO(1236194): Since the heap size is configurable on the command line 4458 // TODO(1236194): Since the heap size is configurable on the command line
4436 // and through the API, we should gracefully handle the case that the heap 4459 // and through the API, we should gracefully handle the case that the heap
4437 // size is not big enough to fit all the initial objects. 4460 // size is not big enough to fit all the initial objects.
4438 bool Heap::ConfigureHeap(int max_semispace_size, 4461 bool Heap::ConfigureHeap(int max_semispace_size,
4439 int max_old_gen_size, 4462 int max_old_gen_size,
4440 int max_executable_size) { 4463 int max_executable_size) {
4441 if (HasBeenSetup()) return false; 4464 if (HasBeenSetup()) return false;
4442 4465
4443 if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size; 4466 if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size;
4444 4467
(...skipping 26 matching lines...)
4471 // The new space size must be a power of two to support single-bit testing 4494 // The new space size must be a power of two to support single-bit testing
4472 // for containment. 4495 // for containment.
4473 max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_); 4496 max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
4474 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_); 4497 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
4475 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_); 4498 initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
4476 external_allocation_limit_ = 10 * max_semispace_size_; 4499 external_allocation_limit_ = 10 * max_semispace_size_;
4477 4500
4478 // The old generation is paged. 4501 // The old generation is paged.
4479 max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize); 4502 max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
4480 4503
4481 heap_configured = true; 4504 configured_ = true;
4482 return true; 4505 return true;
4483 } 4506 }
4484 4507
4485 4508
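ConfigureHeap rounds the semispace sizes up to powers of two because, as the comment above notes, new-space containment is tested with a single bit mask rather than a pair of range compares. A standalone sketch of both halves of that idea (simplified; the real test lives in Heap::InNewSpace):

    #include <cstdint>

    inline uint32_t RoundUpToPowerOf2(uint32_t x) {
      x--;                        // so an exact power of two maps to itself
      x |= x >> 1;  x |= x >> 2;  x |= x >> 4;
      x |= x >> 8;  x |= x >> 16; // smear the top bit into all lower bits
      return x + 1;               // assumes 1 <= x <= 2^31
    }

    // With a power-of-two |size| and |start| aligned to it, containment
    // is one AND and one compare.
    inline bool Contains(uintptr_t start, uintptr_t size, uintptr_t addr) {
      return (addr & ~(size - 1)) == start;
    }

For instance, Contains(0x20000, 0x10000, p) is true exactly for p in [0x20000, 0x30000).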
4486 bool Heap::ConfigureHeapDefault() { 4509 bool Heap::ConfigureHeapDefault() {
4487 return ConfigureHeap(FLAG_max_new_space_size / 2 * KB, 4510 return ConfigureHeap(FLAG_max_new_space_size / 2 * KB,
4488 FLAG_max_old_space_size * MB, 4511 FLAG_max_old_space_size * MB,
4489 FLAG_max_executable_size * MB); 4512 FLAG_max_executable_size * MB);
4490 } 4513 }
4491 4514
4492 4515
4493 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { 4516 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
4494 *stats->start_marker = HeapStats::kStartMarker; 4517 *stats->start_marker = HeapStats::kStartMarker;
4495 *stats->end_marker = HeapStats::kEndMarker; 4518 *stats->end_marker = HeapStats::kEndMarker;
4496 *stats->new_space_size = new_space_.SizeAsInt(); 4519 *stats->new_space_size = new_space_.SizeAsInt();
4497 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); 4520 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
4498 *stats->old_pointer_space_size = old_pointer_space_->Size(); 4521 *stats->old_pointer_space_size = old_pointer_space_->Size();
4499 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity(); 4522 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
4500 *stats->old_data_space_size = old_data_space_->Size(); 4523 *stats->old_data_space_size = old_data_space_->Size();
4501 *stats->old_data_space_capacity = old_data_space_->Capacity(); 4524 *stats->old_data_space_capacity = old_data_space_->Capacity();
4502 *stats->code_space_size = code_space_->Size(); 4525 *stats->code_space_size = code_space_->Size();
4503 *stats->code_space_capacity = code_space_->Capacity(); 4526 *stats->code_space_capacity = code_space_->Capacity();
4504 *stats->map_space_size = map_space_->Size(); 4527 *stats->map_space_size = map_space_->Size();
4505 *stats->map_space_capacity = map_space_->Capacity(); 4528 *stats->map_space_capacity = map_space_->Capacity();
4506 *stats->cell_space_size = cell_space_->Size(); 4529 *stats->cell_space_size = cell_space_->Size();
4507 *stats->cell_space_capacity = cell_space_->Capacity(); 4530 *stats->cell_space_capacity = cell_space_->Capacity();
4508 *stats->lo_space_size = lo_space_->Size(); 4531 *stats->lo_space_size = lo_space_->Size();
4509 GlobalHandles::RecordStats(stats); 4532 isolate_->global_handles()->RecordStats(stats);
4510 *stats->memory_allocator_size = MemoryAllocator::Size(); 4533 *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
4511 *stats->memory_allocator_capacity = 4534 *stats->memory_allocator_capacity =
4512 MemoryAllocator::Size() + MemoryAllocator::Available(); 4535 isolate()->memory_allocator()->Size() +
4536 isolate()->memory_allocator()->Available();
4513 *stats->os_error = OS::GetLastError(); 4537 *stats->os_error = OS::GetLastError();
4514 if (take_snapshot) { 4539 if (take_snapshot) {
4515 HeapIterator iterator(HeapIterator::kFilterFreeListNodes); 4540 HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
4516 for (HeapObject* obj = iterator.next(); 4541 for (HeapObject* obj = iterator.next();
4517 obj != NULL; 4542 obj != NULL;
4518 obj = iterator.next()) { 4543 obj = iterator.next()) {
4519 InstanceType type = obj->map()->instance_type(); 4544 InstanceType type = obj->map()->instance_type();
4520 ASSERT(0 <= type && type <= LAST_TYPE); 4545 ASSERT(0 <= type && type <= LAST_TYPE);
4521 stats->objects_per_type[type]++; 4546 stats->objects_per_type[type]++;
4522 stats->size_per_type[type] += obj->Size(); 4547 stats->size_per_type[type] += obj->Size();
4523 } 4548 }
(...skipping 11 matching lines...)
4535 } 4560 }
4536 4561
4537 4562
4538 int Heap::PromotedExternalMemorySize() { 4563 int Heap::PromotedExternalMemorySize() {
4539 if (amount_of_external_allocated_memory_ 4564 if (amount_of_external_allocated_memory_
4540 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; 4565 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
4541 return amount_of_external_allocated_memory_ 4566 return amount_of_external_allocated_memory_
4542 - amount_of_external_allocated_memory_at_last_global_gc_; 4567 - amount_of_external_allocated_memory_at_last_global_gc_;
4543 } 4568 }
4544 4569
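As a concrete reading of this function: if 12 MB of external memory is currently registered and 10 MB was registered at the time of the last global GC, it reports 2 MB of newly promoted external memory; if the current total has fallen to or below the last-GC mark, it reports 0.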
4570 #ifdef DEBUG
4571
4572 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
4573 static const int kMarkTag = 2;
4574
4575
4576 class HeapDebugUtils {
4577 public:
4578 explicit HeapDebugUtils(Heap* heap)
4579 : search_for_any_global_(false),
4580 search_target_(NULL),
4581 found_target_(false),
4582 object_stack_(20),
4583 heap_(heap) {
4584 }
4585
4586 class MarkObjectVisitor : public ObjectVisitor {
4587 public:
4588 explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4589
4590 void VisitPointers(Object** start, Object** end) {
4591 // Copy all HeapObject pointers in [start, end)
4592 for (Object** p = start; p < end; p++) {
4593 if ((*p)->IsHeapObject())
4594 utils_->MarkObjectRecursively(p);
4595 }
4596 }
4597
4598 HeapDebugUtils* utils_;
4599 };
4600
4601 void MarkObjectRecursively(Object** p) {
4602 if (!(*p)->IsHeapObject()) return;
4603
4604 HeapObject* obj = HeapObject::cast(*p);
4605
4606 Object* map = obj->map();
4607
4608 if (!map->IsHeapObject()) return; // visited before
4609
4610 if (found_target_) return; // stop if target found
4611 object_stack_.Add(obj);
4612 if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
4613 (!search_for_any_global_ && (obj == search_target_))) {
4614 found_target_ = true;
4615 return;
4616 }
4617
4618 // not visited yet
4619 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
4620
4621 Address map_addr = map_p->address();
4622
4623 obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
4624
4625 MarkObjectRecursively(&map);
4626
4627 MarkObjectVisitor mark_visitor(this);
4628
4629 obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
4630 &mark_visitor);
4631
4632 if (!found_target_) // don't pop if found the target
4633 object_stack_.RemoveLast();
4634 }
4635
4636
4637 class UnmarkObjectVisitor : public ObjectVisitor {
4638 public:
4639 explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4640
4641 void VisitPointers(Object** start, Object** end) {
4642 // Copy all HeapObject pointers in [start, end)
4643 for (Object** p = start; p < end; p++) {
4644 if ((*p)->IsHeapObject())
4645 utils_->UnmarkObjectRecursively(p);
4646 }
4647 }
4648
4649 HeapDebugUtils* utils_;
4650 };
4651
4652
4653 void UnmarkObjectRecursively(Object** p) {
4654 if (!(*p)->IsHeapObject()) return;
4655
4656 HeapObject* obj = HeapObject::cast(*p);
4657
4658 Object* map = obj->map();
4659
4660 if (map->IsHeapObject()) return; // unmarked already
4661
4662 Address map_addr = reinterpret_cast<Address>(map);
4663
4664 map_addr -= kMarkTag;
4665
4666 ASSERT_TAG_ALIGNED(map_addr);
4667
4668 HeapObject* map_p = HeapObject::FromAddress(map_addr);
4669
4670 obj->set_map(reinterpret_cast<Map*>(map_p));
4671
4672 UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
4673
4674 UnmarkObjectVisitor unmark_visitor(this);
4675
4676 obj->IterateBody(Map::cast(map_p)->instance_type(),
4677 obj->SizeFromMap(Map::cast(map_p)),
4678 &unmark_visitor);
4679 }
4680
4681
4682 void MarkRootObjectRecursively(Object** root) {
4683 if (search_for_any_global_) {
4684 ASSERT(search_target_ == NULL);
4685 } else {
4686 ASSERT(search_target_->IsHeapObject());
4687 }
4688 found_target_ = false;
4689 object_stack_.Clear();
4690
4691 MarkObjectRecursively(root);
4692 UnmarkObjectRecursively(root);
4693
4694 if (found_target_) {
4695 PrintF("=====================================\n");
4696 PrintF("==== Path to object ====\n");
4697 PrintF("=====================================\n\n");
4698
4699 ASSERT(!object_stack_.is_empty());
4700 for (int i = 0; i < object_stack_.length(); i++) {
4701 if (i > 0) PrintF("\n |\n |\n V\n\n");
4702 Object* obj = object_stack_[i];
4703 obj->Print();
4704 }
4705 PrintF("=====================================\n");
4706 }
4707 }
4708
4709 // Helper class for visiting HeapObjects recursively.
4710 class MarkRootVisitor: public ObjectVisitor {
4711 public:
4712 explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
4713
4714 void VisitPointers(Object** start, Object** end) {
4715 // Visit all HeapObject pointers in [start, end)
4716 for (Object** p = start; p < end; p++) {
4717 if ((*p)->IsHeapObject())
4718 utils_->MarkRootObjectRecursively(p);
4719 }
4720 }
4721
4722 HeapDebugUtils* utils_;
4723 };
4724
4725 bool search_for_any_global_;
4726 Object* search_target_;
4727 bool found_target_;
4728 List<Object*> object_stack_;
4729 Heap* heap_;
4730
4731 friend class Heap;
4732 };
4733
4734 #endif
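The marking trick in this debug helper is worth spelling out: an object is marked as visited by replacing its tagged map pointer with the map's raw address plus kMarkTag, so the map word stops looking like a heap object (IsHeapObject() fails) and the original pointer can be recovered exactly. A standalone sketch of the word-level arithmetic, with tag values as in the comment above (the object layout itself is not modeled):

    #include <cstdint>

    const uintptr_t kHeapObjectTag = 1;  // normal map words end in ...01
    const uintptr_t kMarkTag = 2;        // marked map words end in ...10

    inline bool IsMarkedWord(uintptr_t map_word) {
      return (map_word & 3) == kMarkTag;  // no longer reads as a heap pointer
    }

    inline uintptr_t MarkWord(uintptr_t map_word) {
      uintptr_t raw = map_word - kHeapObjectTag;  // map_p->address()
      return raw + kMarkTag;                      // set_map(raw + kMarkTag)
    }

    inline uintptr_t UnmarkWord(uintptr_t map_word) {
      return (map_word - kMarkTag) + kHeapObjectTag;  // restore tagged pointer
    }

Because maps are at least 4-byte aligned, the two low bits of the raw address are free to carry the mark.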
4545 4735
4546 bool Heap::Setup(bool create_heap_objects) { 4736 bool Heap::Setup(bool create_heap_objects) {
4737 #ifdef DEBUG
4738 debug_utils_ = new HeapDebugUtils(this);
4739 #endif
4740
4547 // Initialize heap spaces and initial maps and objects. Whenever something 4741 // Initialize heap spaces and initial maps and objects. Whenever something
4548 // goes wrong, just return false. The caller should check the results and 4742 // goes wrong, just return false. The caller should check the results and
4549 // call Heap::TearDown() to release allocated memory. 4743 // call Heap::TearDown() to release allocated memory.
4550 // 4744 //
4551 // If the heap is not yet configured (e.g., through the API), configure it. 4745 // If the heap is not yet configured (e.g., through the API), configure it.
4552 // Configuration is based on the flags new-space-size (really the semispace 4746 // Configuration is based on the flags new-space-size (really the semispace
4553 // size) and old-space-size if set or the initial values of semispace_size_ 4747 // size) and old-space-size if set or the initial values of semispace_size_
4554 // and old_generation_size_ otherwise. 4748 // and old_generation_size_ otherwise.
4555 if (!heap_configured) { 4749 if (!configured_) {
4556 if (!ConfigureHeapDefault()) return false; 4750 if (!ConfigureHeapDefault()) return false;
4557 } 4751 }
4558 4752
4559 ScavengingVisitor::Initialize(); 4753 gc_initializer_mutex->Lock();
4560 NewSpaceScavenger::Initialize(); 4754 static bool initialized_gc = false;
4561 MarkCompactCollector::Initialize(); 4755 if (!initialized_gc) {
4756 initialized_gc = true;
4757 ScavengingVisitor::Initialize();
4758 NewSpaceScavenger::Initialize();
4759 MarkCompactCollector::Initialize();
4760 }
4761 gc_initializer_mutex->Unlock();
4562 4762
4563 MarkMapPointersAsEncoded(false); 4763 MarkMapPointersAsEncoded(false);
4564 4764
4565 // Setup memory allocator and reserve a chunk of memory for new 4765 // Setup memory allocator and reserve a chunk of memory for new
4566 // space. The chunk is double the size of the requested reserved 4766 // space. The chunk is double the size of the requested reserved
4567 // new space size to ensure that we can find a pair of semispaces that 4767 // new space size to ensure that we can find a pair of semispaces that
4568 // are contiguous and aligned to their size. 4768 // are contiguous and aligned to their size.
4569 if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false; 4769 if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
4770 return false;
4570 void* chunk = 4771 void* chunk =
4571 MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_); 4772 isolate_->memory_allocator()->ReserveInitialChunk(
4773 4 * reserved_semispace_size_);
4572 if (chunk == NULL) return false; 4774 if (chunk == NULL) return false;
4573 4775
4574 // Align the pair of semispaces to their size, which must be a power 4776 // Align the pair of semispaces to their size, which must be a power
4575 // of 2. 4777 // of 2.
4576 Address new_space_start = 4778 Address new_space_start =
4577 RoundUp(reinterpret_cast<byte*>(chunk), 2 * reserved_semispace_size_); 4779 RoundUp(reinterpret_cast<byte*>(chunk), 2 * reserved_semispace_size_);
4578 if (!new_space_.Setup(new_space_start, 2 * reserved_semispace_size_)) { 4780 if (!new_space_.Setup(new_space_start, 2 * reserved_semispace_size_)) {
4579 return false; 4781 return false;
4580 } 4782 }
4581 4783
4582 // Initialize old pointer space. 4784 // Initialize old pointer space.
4583 old_pointer_space_ = 4785 old_pointer_space_ =
4584 new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE); 4786 new OldSpace(this,
4787 max_old_generation_size_,
4788 OLD_POINTER_SPACE,
4789 NOT_EXECUTABLE);
4585 if (old_pointer_space_ == NULL) return false; 4790 if (old_pointer_space_ == NULL) return false;
4586 if (!old_pointer_space_->Setup(NULL, 0)) return false; 4791 if (!old_pointer_space_->Setup(NULL, 0)) return false;
4587 4792
4588 // Initialize old data space. 4793 // Initialize old data space.
4589 old_data_space_ = 4794 old_data_space_ =
4590 new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE); 4795 new OldSpace(this,
4796 max_old_generation_size_,
4797 OLD_DATA_SPACE,
4798 NOT_EXECUTABLE);
4591 if (old_data_space_ == NULL) return false; 4799 if (old_data_space_ == NULL) return false;
4592 if (!old_data_space_->Setup(NULL, 0)) return false; 4800 if (!old_data_space_->Setup(NULL, 0)) return false;
4593 4801
4594 // Initialize the code space, set its maximum capacity to the old 4802 // Initialize the code space, set its maximum capacity to the old
4595 // generation size. It needs executable memory. 4803 // generation size. It needs executable memory.
4596 // On 64-bit platform(s), we put all code objects in a 2 GB range of 4804 // On 64-bit platform(s), we put all code objects in a 2 GB range of
4597 // virtual address space, so that they can call each other with near calls. 4805 // virtual address space, so that they can call each other with near calls.
4598 if (code_range_size_ > 0) { 4806 if (code_range_size_ > 0) {
4599 if (!CodeRange::Setup(code_range_size_)) { 4807 if (!isolate_->code_range()->Setup(code_range_size_)) {
4600 return false; 4808 return false;
4601 } 4809 }
4602 } 4810 }
4603 4811
4604 code_space_ = 4812 code_space_ =
4605 new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE); 4813 new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
4606 if (code_space_ == NULL) return false; 4814 if (code_space_ == NULL) return false;
4607 if (!code_space_->Setup(NULL, 0)) return false; 4815 if (!code_space_->Setup(NULL, 0)) return false;
4608 4816
4609 // Initialize map space. 4817 // Initialize map space.
4610 map_space_ = new MapSpace(FLAG_use_big_map_space 4818 map_space_ = new MapSpace(this, FLAG_use_big_map_space
4611 ? max_old_generation_size_ 4819 ? max_old_generation_size_
4612 : MapSpace::kMaxMapPageIndex * Page::kPageSize, 4820 : MapSpace::kMaxMapPageIndex * Page::kPageSize,
4613 FLAG_max_map_space_pages, 4821 FLAG_max_map_space_pages,
4614 MAP_SPACE); 4822 MAP_SPACE);
4615 if (map_space_ == NULL) return false; 4823 if (map_space_ == NULL) return false;
4616 if (!map_space_->Setup(NULL, 0)) return false; 4824 if (!map_space_->Setup(NULL, 0)) return false;
4617 4825
4618 // Initialize global property cell space. 4826 // Initialize global property cell space.
4619 cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE); 4827 cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
4620 if (cell_space_ == NULL) return false; 4828 if (cell_space_ == NULL) return false;
4621 if (!cell_space_->Setup(NULL, 0)) return false; 4829 if (!cell_space_->Setup(NULL, 0)) return false;
4622 4830
4623 // The large object code space may contain code or data. We set the memory 4831 // The large object code space may contain code or data. We set the memory
4624 // to be non-executable here for safety, but this means we need to enable it 4832 // to be non-executable here for safety, but this means we need to enable it
4625 // explicitly when allocating large code objects. 4833 // explicitly when allocating large code objects.
4626 lo_space_ = new LargeObjectSpace(LO_SPACE); 4834 lo_space_ = new LargeObjectSpace(this, LO_SPACE);
4627 if (lo_space_ == NULL) return false; 4835 if (lo_space_ == NULL) return false;
4628 if (!lo_space_->Setup()) return false; 4836 if (!lo_space_->Setup()) return false;
4629 4837
4630 if (create_heap_objects) { 4838 if (create_heap_objects) {
4631 // Create initial maps. 4839 // Create initial maps.
4632 if (!CreateInitialMaps()) return false; 4840 if (!CreateInitialMaps()) return false;
4633 if (!CreateApiObjects()) return false; 4841 if (!CreateApiObjects()) return false;
4634 4842
4635 // Create initial objects 4843 // Create initial objects
4636 if (!CreateInitialObjects()) return false; 4844 if (!CreateInitialObjects()) return false;
4637 4845
4638 global_contexts_list_ = undefined_value(); 4846 global_contexts_list_ = undefined_value();
4639 } 4847 }
4640 4848
4641 LOG(IntPtrTEvent("heap-capacity", Capacity())); 4849 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
4642 LOG(IntPtrTEvent("heap-available", Available())); 4850 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
4643 4851
4644 #ifdef ENABLE_LOGGING_AND_PROFILING 4852 #ifdef ENABLE_LOGGING_AND_PROFILING
4645 // This should be called only after initial objects have been created. 4853 // This should be called only after initial objects have been created.
4646 ProducerHeapProfile::Setup(); 4854 isolate_->producer_heap_profile()->Setup();
4647 #endif 4855 #endif
4648 4856
4649 return true; 4857 return true;
4650 } 4858 }
4651 4859
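One subtlety in Setup above: the scavenging and mark-compact visitor tables are process-wide, but with isolates Setup can now run once per isolate, possibly from several threads, so the patch funnels that one-time work through gc_initializer_mutex plus a function-local flag. A sketch of the same pattern using the standard library (std::mutex stands in for the platform Mutex created in this file):

    #include <mutex>

    static std::mutex gc_initializer_mutex;  // analogous to OS::CreateMutex()

    void EnsureGCInitialized() {
      std::lock_guard<std::mutex> guard(gc_initializer_mutex);
      static bool initialized = false;  // shared by all isolates in the process
      if (!initialized) {
        initialized = true;
        // The process-wide tables would be built exactly once here, e.g.
        // ScavengingVisitor::Initialize(); MarkCompactCollector::Initialize();
      }
    }

Every isolate calls this from its Setup; only the first call does any work.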
4652 4860
4653 void Heap::SetStackLimits() { 4861 void Heap::SetStackLimits() {
4862 ASSERT(isolate_ != NULL);
4863 ASSERT(isolate_ == isolate());
4654 // On 64 bit machines, pointers are generally out of range of Smis. We write 4864 // On 64 bit machines, pointers are generally out of range of Smis. We write
4655 // something that looks like an out of range Smi to the GC. 4865 // something that looks like an out of range Smi to the GC.
4656 4866
4657 // Set up the special root array entries containing the stack limits. 4867 // Set up the special root array entries containing the stack limits.
4658 // These are actually addresses, but the tag makes the GC ignore it. 4868 // These are actually addresses, but the tag makes the GC ignore it.
4659 roots_[kStackLimitRootIndex] = 4869 roots_[kStackLimitRootIndex] =
4660 reinterpret_cast<Object*>( 4870 reinterpret_cast<Object*>(
4661 (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag); 4871 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
4662 roots_[kRealStackLimitRootIndex] = 4872 roots_[kRealStackLimitRootIndex] =
4663 reinterpret_cast<Object*>( 4873 reinterpret_cast<Object*>(
4664 (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag); 4874 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
4665 } 4875 }
4666 4876
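SetStackLimits relies on V8's pointer tagging: a value whose low bit is clear parses as a small integer (Smi), and the GC never tries to follow it. Masking the limit address and OR-ing in kSmiTag therefore lets a raw address sit in the root array untouched. A two-line sketch of the disguise, with tag constants as in V8's scheme:

    #include <cstdint>

    const uintptr_t kSmiTagMask = 1;  // low bit: 0 = Smi, 1 = heap object
    const uintptr_t kSmiTag = 0;

    // Store an address so the GC reads it as an out-of-range Smi and skips it.
    inline uintptr_t AddressAsSmi(uintptr_t limit) {
      return (limit & ~kSmiTagMask) | kSmiTag;
    }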
4667 4877
4668 void Heap::TearDown() { 4878 void Heap::TearDown() {
4669 if (FLAG_print_cumulative_gc_stat) { 4879 if (FLAG_print_cumulative_gc_stat) {
4670 PrintF("\n\n"); 4880 PrintF("\n\n");
4671 PrintF("gc_count=%d ", gc_count_); 4881 PrintF("gc_count=%d ", gc_count_);
4672 PrintF("mark_sweep_count=%d ", ms_count_); 4882 PrintF("mark_sweep_count=%d ", ms_count_);
4673 PrintF("mark_compact_count=%d ", mc_count_); 4883 PrintF("mark_compact_count=%d ", mc_count_);
4674 PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause()); 4884 PrintF("max_gc_pause=%d ", get_max_gc_pause());
4675 PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator()); 4885 PrintF("min_in_mutator=%d ", get_min_in_mutator());
4676 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", 4886 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
4677 GCTracer::get_max_alive_after_gc()); 4887 get_max_alive_after_gc());
4678 PrintF("\n\n"); 4888 PrintF("\n\n");
4679 } 4889 }
4680 4890
4681 GlobalHandles::TearDown(); 4891 isolate_->global_handles()->TearDown();
4682 4892
4683 ExternalStringTable::TearDown(); 4893 external_string_table_.TearDown();
4684 4894
4685 new_space_.TearDown(); 4895 new_space_.TearDown();
4686 4896
4687 if (old_pointer_space_ != NULL) { 4897 if (old_pointer_space_ != NULL) {
4688 old_pointer_space_->TearDown(); 4898 old_pointer_space_->TearDown();
4689 delete old_pointer_space_; 4899 delete old_pointer_space_;
4690 old_pointer_space_ = NULL; 4900 old_pointer_space_ = NULL;
4691 } 4901 }
4692 4902
4693 if (old_data_space_ != NULL) { 4903 if (old_data_space_ != NULL) {
(...skipping 19 matching lines...)
4713 delete cell_space_; 4923 delete cell_space_;
4714 cell_space_ = NULL; 4924 cell_space_ = NULL;
4715 } 4925 }
4716 4926
4717 if (lo_space_ != NULL) { 4927 if (lo_space_ != NULL) {
4718 lo_space_->TearDown(); 4928 lo_space_->TearDown();
4719 delete lo_space_; 4929 delete lo_space_;
4720 lo_space_ = NULL; 4930 lo_space_ = NULL;
4721 } 4931 }
4722 4932
4723 MemoryAllocator::TearDown(); 4933 isolate_->memory_allocator()->TearDown();
4934
4935 #ifdef DEBUG
4936 delete debug_utils_;
4937 debug_utils_ = NULL;
4938 #endif
4724 } 4939 }
4725 4940
4726 4941
4727 void Heap::Shrink() { 4942 void Heap::Shrink() {
4728 // Try to shrink all paged spaces. 4943 // Try to shrink all paged spaces.
4729 PagedSpaces spaces; 4944 PagedSpaces spaces;
4730 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) 4945 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
4731 space->Shrink(); 4946 space->Shrink();
4732 } 4947 }
4733 4948
(...skipping 68 matching lines...)
4802 for (Object** p = start; p < end; p++) 5017 for (Object** p = start; p < end; p++)
4803 PrintF(" handle %p to %p\n", 5018 PrintF(" handle %p to %p\n",
4804 reinterpret_cast<void*>(p), 5019 reinterpret_cast<void*>(p),
4805 reinterpret_cast<void*>(*p)); 5020 reinterpret_cast<void*>(*p));
4806 } 5021 }
4807 }; 5022 };
4808 5023
4809 void Heap::PrintHandles() { 5024 void Heap::PrintHandles() {
4810 PrintF("Handles:\n"); 5025 PrintF("Handles:\n");
4811 PrintHandleVisitor v; 5026 PrintHandleVisitor v;
4812 HandleScopeImplementer::Iterate(&v); 5027 isolate_->handle_scope_implementer()->Iterate(&v);
4813 } 5028 }
4814 5029
4815 #endif 5030 #endif
4816 5031
4817 5032
4818 Space* AllSpaces::next() { 5033 Space* AllSpaces::next() {
4819 switch (counter_++) { 5034 switch (counter_++) {
4820 case NEW_SPACE: 5035 case NEW_SPACE:
4821 return Heap::new_space(); 5036 return HEAP->new_space();
4822 case OLD_POINTER_SPACE: 5037 case OLD_POINTER_SPACE:
4823 return Heap::old_pointer_space(); 5038 return HEAP->old_pointer_space();
4824 case OLD_DATA_SPACE: 5039 case OLD_DATA_SPACE:
4825 return Heap::old_data_space(); 5040 return HEAP->old_data_space();
4826 case CODE_SPACE: 5041 case CODE_SPACE:
4827 return Heap::code_space(); 5042 return HEAP->code_space();
4828 case MAP_SPACE: 5043 case MAP_SPACE:
4829 return Heap::map_space(); 5044 return HEAP->map_space();
4830 case CELL_SPACE: 5045 case CELL_SPACE:
4831 return Heap::cell_space(); 5046 return HEAP->cell_space();
4832 case LO_SPACE: 5047 case LO_SPACE:
4833 return Heap::lo_space(); 5048 return HEAP->lo_space();
4834 default: 5049 default:
4835 return NULL; 5050 return NULL;
4836 } 5051 }
4837 } 5052 }
4838 5053
4839 5054
4840 PagedSpace* PagedSpaces::next() { 5055 PagedSpace* PagedSpaces::next() {
4841 switch (counter_++) { 5056 switch (counter_++) {
4842 case OLD_POINTER_SPACE: 5057 case OLD_POINTER_SPACE:
4843 return Heap::old_pointer_space(); 5058 return HEAP->old_pointer_space();
4844 case OLD_DATA_SPACE: 5059 case OLD_DATA_SPACE:
4845 return Heap::old_data_space(); 5060 return HEAP->old_data_space();
4846 case CODE_SPACE: 5061 case CODE_SPACE:
4847 return Heap::code_space(); 5062 return HEAP->code_space();
4848 case MAP_SPACE: 5063 case MAP_SPACE:
4849 return Heap::map_space(); 5064 return HEAP->map_space();
4850 case CELL_SPACE: 5065 case CELL_SPACE:
4851 return Heap::cell_space(); 5066 return HEAP->cell_space();
4852 default: 5067 default:
4853 return NULL; 5068 return NULL;
4854 } 5069 }
4855 } 5070 }
4856 5071
4857 5072
4858 5073
4859 OldSpace* OldSpaces::next() { 5074 OldSpace* OldSpaces::next() {
4860 switch (counter_++) { 5075 switch (counter_++) {
4861 case OLD_POINTER_SPACE: 5076 case OLD_POINTER_SPACE:
4862 return Heap::old_pointer_space(); 5077 return HEAP->old_pointer_space();
4863 case OLD_DATA_SPACE: 5078 case OLD_DATA_SPACE:
4864 return Heap::old_data_space(); 5079 return HEAP->old_data_space();
4865 case CODE_SPACE: 5080 case CODE_SPACE:
4866 return Heap::code_space(); 5081 return HEAP->code_space();
4867 default: 5082 default:
4868 return NULL; 5083 return NULL;
4869 } 5084 }
4870 } 5085 }
4871 5086
4872 5087
4873 SpaceIterator::SpaceIterator() 5088 SpaceIterator::SpaceIterator()
4874 : current_space_(FIRST_SPACE), 5089 : current_space_(FIRST_SPACE),
4875 iterator_(NULL), 5090 iterator_(NULL),
4876 size_func_(NULL) { 5091 size_func_(NULL) {
(...skipping 34 matching lines...)
4911 return CreateIterator(); 5126 return CreateIterator();
4912 } 5127 }
4913 5128
4914 5129
4915 // Create an iterator for the space to iterate. 5130 // Create an iterator for the space to iterate.
4916 ObjectIterator* SpaceIterator::CreateIterator() { 5131 ObjectIterator* SpaceIterator::CreateIterator() {
4917 ASSERT(iterator_ == NULL); 5132 ASSERT(iterator_ == NULL);
4918 5133
4919 switch (current_space_) { 5134 switch (current_space_) {
4920 case NEW_SPACE: 5135 case NEW_SPACE:
4921 iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_); 5136 iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
4922 break; 5137 break;
4923 case OLD_POINTER_SPACE: 5138 case OLD_POINTER_SPACE:
4924 iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_); 5139 iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
4925 break; 5140 break;
4926 case OLD_DATA_SPACE: 5141 case OLD_DATA_SPACE:
4927 iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_); 5142 iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
4928 break; 5143 break;
4929 case CODE_SPACE: 5144 case CODE_SPACE:
4930 iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_); 5145 iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
4931 break; 5146 break;
4932 case MAP_SPACE: 5147 case MAP_SPACE:
4933 iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_); 5148 iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
4934 break; 5149 break;
4935 case CELL_SPACE: 5150 case CELL_SPACE:
4936 iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_); 5151 iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
4937 break; 5152 break;
4938 case LO_SPACE: 5153 case LO_SPACE:
4939 iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_); 5154 iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
4940 break; 5155 break;
4941 } 5156 }
4942 5157
4943 // Return the newly allocated iterator. 5158 // Return the newly allocated iterator.
4944 ASSERT(iterator_ != NULL); 5159 ASSERT(iterator_ != NULL);
4945 return iterator_; 5160 return iterator_;
4946 } 5161 }
4947 5162
4948 5163
4949 class HeapObjectsFilter { 5164 class HeapObjectsFilter {
(...skipping 13 matching lines...)
4963 if (object->IsMarked()) { 5178 if (object->IsMarked()) {
4964 object->ClearMark(); 5179 object->ClearMark();
4965 return true; 5180 return true;
4966 } else { 5181 } else {
4967 return false; 5182 return false;
4968 } 5183 }
4969 } 5184 }
4970 5185
4971 private: 5186 private:
4972 void MarkFreeListNodes() { 5187 void MarkFreeListNodes() {
4973 Heap::old_pointer_space()->MarkFreeListNodes(); 5188 Heap* heap = HEAP;
4974 Heap::old_data_space()->MarkFreeListNodes(); 5189 heap->old_pointer_space()->MarkFreeListNodes();
4975 MarkCodeSpaceFreeListNodes(); 5190 heap->old_data_space()->MarkFreeListNodes();
4976 Heap::map_space()->MarkFreeListNodes(); 5191 MarkCodeSpaceFreeListNodes(heap);
4977 Heap::cell_space()->MarkFreeListNodes(); 5192 heap->map_space()->MarkFreeListNodes();
5193 heap->cell_space()->MarkFreeListNodes();
4978 } 5194 }
4979 5195
4980 void MarkCodeSpaceFreeListNodes() { 5196 void MarkCodeSpaceFreeListNodes(Heap* heap) {
4981 // For code space, using FreeListNode::IsFreeListNode is OK. 5197 // For code space, using FreeListNode::IsFreeListNode is OK.
4982 HeapObjectIterator iter(Heap::code_space()); 5198 HeapObjectIterator iter(heap->code_space());
4983 for (HeapObject* obj = iter.next_object(); 5199 for (HeapObject* obj = iter.next_object();
4984 obj != NULL; 5200 obj != NULL;
4985 obj = iter.next_object()) { 5201 obj = iter.next_object()) {
4986 if (FreeListNode::IsFreeListNode(obj)) obj->SetMark(); 5202 if (FreeListNode::IsFreeListNode(obj)) obj->SetMark();
4987 } 5203 }
4988 } 5204 }
4989 5205
4990 AssertNoAllocation no_alloc; 5206 AssertNoAllocation no_alloc;
4991 }; 5207 };
4992 5208
(...skipping 41 matching lines...)
5034 }; 5250 };
5035 5251
5036 void MarkUnreachableObjects() { 5252 void MarkUnreachableObjects() {
5037 HeapIterator iterator; 5253 HeapIterator iterator;
5038 for (HeapObject* obj = iterator.next(); 5254 for (HeapObject* obj = iterator.next();
5039 obj != NULL; 5255 obj != NULL;
5040 obj = iterator.next()) { 5256 obj = iterator.next()) {
5041 obj->SetMark(); 5257 obj->SetMark();
5042 } 5258 }
5043 UnmarkingVisitor visitor; 5259 UnmarkingVisitor visitor;
5044 Heap::IterateRoots(&visitor, VISIT_ALL); 5260 HEAP->IterateRoots(&visitor, VISIT_ALL);
5045 while (visitor.can_process()) 5261 while (visitor.can_process())
5046 visitor.ProcessNext(); 5262 visitor.ProcessNext();
5047 } 5263 }
5048 5264
5049 AssertNoAllocation no_alloc; 5265 AssertNoAllocation no_alloc;
5050 }; 5266 };
5051 5267
5052 5268
5053 HeapIterator::HeapIterator() 5269 HeapIterator::HeapIterator()
5054 : filtering_(HeapIterator::kNoFiltering), 5270 : filtering_(HeapIterator::kNoFiltering),
(...skipping 282 matching lines...)
5337 OldSpaces spaces; 5553 OldSpaces spaces;
5338 for (OldSpace* space = spaces.next(); 5554 for (OldSpace* space = spaces.next();
5339 space != NULL; 5555 space != NULL;
5340 space = spaces.next()) { 5556 space = spaces.next()) {
5341 holes_size += space->Waste() + space->AvailableFree(); 5557 holes_size += space->Waste() + space->AvailableFree();
5342 } 5558 }
5343 return holes_size; 5559 return holes_size;
5344 } 5560 }
5345 5561
5346 5562
5347 GCTracer::GCTracer() 5563 GCTracer::GCTracer(Heap* heap)
5348 : start_time_(0.0), 5564 : start_time_(0.0),
5349 start_size_(0), 5565 start_size_(0),
5350 gc_count_(0), 5566 gc_count_(0),
5351 full_gc_count_(0), 5567 full_gc_count_(0),
5352 is_compacting_(false), 5568 is_compacting_(false),
5353 marked_count_(0), 5569 marked_count_(0),
5354 allocated_since_last_gc_(0), 5570 allocated_since_last_gc_(0),
5355 spent_in_mutator_(0), 5571 spent_in_mutator_(0),
5356 promoted_objects_size_(0) { 5572 promoted_objects_size_(0),
5573 heap_(heap) {
5357 // These two fields reflect the state of the previous full collection. 5574 // These two fields reflect the state of the previous full collection.
5358 // Set them before they are changed by the collector. 5575 // Set them before they are changed by the collector.
5359 previous_has_compacted_ = MarkCompactCollector::HasCompacted(); 5576 previous_has_compacted_ = heap_->mark_compact_collector_.HasCompacted();
5360 previous_marked_count_ = MarkCompactCollector::previous_marked_count(); 5577 previous_marked_count_ =
5578 heap_->mark_compact_collector_.previous_marked_count();
5361 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return; 5579 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
5362 start_time_ = OS::TimeCurrentMillis(); 5580 start_time_ = OS::TimeCurrentMillis();
5363 start_size_ = Heap::SizeOfObjects(); 5581 start_size_ = heap_->SizeOfObjects();
5364 5582
5365 for (int i = 0; i < Scope::kNumberOfScopes; i++) { 5583 for (int i = 0; i < Scope::kNumberOfScopes; i++) {
5366 scopes_[i] = 0; 5584 scopes_[i] = 0;
5367 } 5585 }
5368 5586
5369 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize(); 5587 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
5370 5588
5371 allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_; 5589 allocated_since_last_gc_ =
5590 heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
5372 5591
5373 if (last_gc_end_timestamp_ > 0) { 5592 if (heap_->last_gc_end_timestamp_ > 0) {
5374 spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0); 5593 spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
5375 } 5594 }
5376 } 5595 }
5377 5596
5378 5597
5379 GCTracer::~GCTracer() { 5598 GCTracer::~GCTracer() {
5380 // Printf ONE line iff flag is set. 5599 // Printf ONE line iff flag is set.
5381 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return; 5600 if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
5382 5601
5383 bool first_gc = (last_gc_end_timestamp_ == 0); 5602 bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
5384 5603
5385 alive_after_last_gc_ = Heap::SizeOfObjects(); 5604 heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
5386 last_gc_end_timestamp_ = OS::TimeCurrentMillis(); 5605 heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
5387 5606
5388 int time = static_cast<int>(last_gc_end_timestamp_ - start_time_); 5607 int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
5389 5608
5390 // Update cumulative GC statistics if required. 5609 // Update cumulative GC statistics if required.
5391 if (FLAG_print_cumulative_gc_stat) { 5610 if (FLAG_print_cumulative_gc_stat) {
5392 max_gc_pause_ = Max(max_gc_pause_, time); 5611 heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
5393 max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_); 5612 heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
5613 heap_->alive_after_last_gc_);
5394 if (!first_gc) { 5614 if (!first_gc) {
5395 min_in_mutator_ = Min(min_in_mutator_, 5615 heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
5396 static_cast<int>(spent_in_mutator_)); 5616 static_cast<int>(spent_in_mutator_));
5397 } 5617 }
5398 } 5618 }
5399 5619
5400 if (!FLAG_trace_gc_nvp) { 5620 if (!FLAG_trace_gc_nvp) {
5401 int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]); 5621 int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
5402 5622
5403 PrintF("%s %.1f -> %.1f MB, ", 5623 PrintF("%s %.1f -> %.1f MB, ",
5404 CollectorString(), 5624 CollectorString(),
5405 static_cast<double>(start_size_) / MB, 5625 static_cast<double>(start_size_) / MB,
5406 SizeOfHeapObjects()); 5626 SizeOfHeapObjects());
5407 5627
5408 if (external_time > 0) PrintF("%d / ", external_time); 5628 if (external_time > 0) PrintF("%d / ", external_time);
5409 PrintF("%d ms.\n", time); 5629 PrintF("%d ms.\n", time);
5410 } else { 5630 } else {
5411 PrintF("pause=%d ", time); 5631 PrintF("pause=%d ", time);
5412 PrintF("mutator=%d ", 5632 PrintF("mutator=%d ",
5413 static_cast<int>(spent_in_mutator_)); 5633 static_cast<int>(spent_in_mutator_));
5414 5634
5415 PrintF("gc="); 5635 PrintF("gc=");
5416 switch (collector_) { 5636 switch (collector_) {
5417 case SCAVENGER: 5637 case SCAVENGER:
5418 PrintF("s"); 5638 PrintF("s");
5419 break; 5639 break;
5420 case MARK_COMPACTOR: 5640 case MARK_COMPACTOR:
5421 PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms"); 5641 PrintF("%s",
5642 heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
5422 break; 5643 break;
5423 default: 5644 default:
5424 UNREACHABLE(); 5645 UNREACHABLE();
5425 } 5646 }
5426 PrintF(" "); 5647 PrintF(" ");
5427 5648
5428 PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL])); 5649 PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
5429 PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK])); 5650 PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
5430 PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP])); 5651 PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
5431 PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE])); 5652 PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
5432 PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT])); 5653 PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
5433 5654
5434 PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_); 5655 PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
5435 PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects()); 5656 PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
5436 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ", 5657 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
5437 in_free_list_or_wasted_before_gc_); 5658 in_free_list_or_wasted_before_gc_);
5438 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize()); 5659 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
5439 5660
5440 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_); 5661 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
5441 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_); 5662 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
5442 5663
5443 PrintF("\n"); 5664 PrintF("\n");
5444 } 5665 }
5445 5666
5446 #if defined(ENABLE_LOGGING_AND_PROFILING) 5667 #if defined(ENABLE_LOGGING_AND_PROFILING)
5447 Heap::PrintShortHeapStatistics(); 5668 heap_->PrintShortHeapStatistics();
5448 #endif 5669 #endif
5449 } 5670 }
5450 5671
5451 5672
5452 const char* GCTracer::CollectorString() { 5673 const char* GCTracer::CollectorString() {
5453 switch (collector_) { 5674 switch (collector_) {
5454 case SCAVENGER: 5675 case SCAVENGER:
5455 return "Scavenge"; 5676 return "Scavenge";
5456 case MARK_COMPACTOR: 5677 case MARK_COMPACTOR:
5457 return MarkCompactCollector::HasCompacted() ? "Mark-compact" 5678 return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
5458 : "Mark-sweep"; 5679 : "Mark-sweep";
5459 } 5680 }
5460 return "Unknown GC"; 5681 return "Unknown GC";
5461 } 5682 }
5462 5683
5463 5684
5464 int KeyedLookupCache::Hash(Map* map, String* name) { 5685 int KeyedLookupCache::Hash(Map* map, String* name) {
5465 // Uses only lower 32 bits if pointers are larger. 5686 // Uses only lower 32 bits if pointers are larger.
5466 uintptr_t addr_hash = 5687 uintptr_t addr_hash =
5467 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift; 5688 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
5468 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask); 5689 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
5469 } 5690 }
5470 5691
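The hash above mixes the map's address with the property name's hash: pointer values are shifted right first because heap alignment makes their low bits constant, and the result is masked down to the table's power-of-two capacity. A standalone version with illustrative sizes (the real shift and length are defined in heap.h):

    #include <cstdint>

    const int kMapHashShift = 2;           // illustrative: drop alignment zeros
    const int kLength = 64;                // illustrative power-of-two capacity
    const int kCapacityMask = kLength - 1;

    inline int CacheHash(const void* map, uint32_t name_hash) {
      uint32_t addr_hash =
          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
      return static_cast<int>((addr_hash ^ name_hash) & kCapacityMask);
    }

Collisions simply overwrite the slot, which is fine for a cache: Lookup below returns kNotFound on a miss and the caller recomputes.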
5471 5692
5472 int KeyedLookupCache::Lookup(Map* map, String* name) { 5693 int KeyedLookupCache::Lookup(Map* map, String* name) {
5473 int index = Hash(map, name); 5694 int index = Hash(map, name);
5474 Key& key = keys_[index]; 5695 Key& key = keys_[index];
5475 if ((key.map == map) && key.name->Equals(name)) { 5696 if ((key.map == map) && key.name->Equals(name)) {
5476 return field_offsets_[index]; 5697 return field_offsets_[index];
5477 } 5698 }
5478 return -1; 5699 return kNotFound;
5479 } 5700 }
5480 5701
5481 5702
5482 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) { 5703 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
5483 String* symbol; 5704 String* symbol;
5484 if (Heap::LookupSymbolIfExists(name, &symbol)) { 5705 if (HEAP->LookupSymbolIfExists(name, &symbol)) {
5485 int index = Hash(map, symbol); 5706 int index = Hash(map, symbol);
5486 Key& key = keys_[index]; 5707 Key& key = keys_[index];
5487 key.map = map; 5708 key.map = map;
5488 key.name = symbol; 5709 key.name = symbol;
5489 field_offsets_[index] = field_offset; 5710 field_offsets_[index] = field_offset;
5490 } 5711 }
5491 } 5712 }
5492 5713
5493 5714
5494 void KeyedLookupCache::Clear() { 5715 void KeyedLookupCache::Clear() {
5495 for (int index = 0; index < kLength; index++) keys_[index].map = NULL; 5716 for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
5496 } 5717 }
5497 5718
5498 5719
5499 KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
5500
5501
5502 int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
5503
5504
5505 void DescriptorLookupCache::Clear() { 5720 void DescriptorLookupCache::Clear() {
5506 for (int index = 0; index < kLength; index++) keys_[index].array = NULL; 5721 for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
5507 } 5722 }
5508 5723
5509 5724
5510 DescriptorLookupCache::Key
5511 DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
5512
5513 int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
5514
5515
5516 #ifdef DEBUG 5725 #ifdef DEBUG
5517 void Heap::GarbageCollectionGreedyCheck() { 5726 void Heap::GarbageCollectionGreedyCheck() {
5518 ASSERT(FLAG_gc_greedy); 5727 ASSERT(FLAG_gc_greedy);
5519 if (Bootstrapper::IsActive()) return; 5728 if (isolate_->bootstrapper()->IsActive()) return;
5520 if (disallow_allocation_failure()) return; 5729 if (disallow_allocation_failure()) return;
5521 CollectGarbage(NEW_SPACE); 5730 CollectGarbage(NEW_SPACE);
5522 } 5731 }
5523 #endif 5732 #endif
5524 5733
5525 5734
5526 TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t) 5735 TranscendentalCache::SubCache::SubCache(Type t)
5527 : type_(t) { 5736 : type_(t),
5737 isolate_(Isolate::Current()) {
5528 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't 5738 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
5529 uint32_t in1 = 0xffffffffu; // generated by the FPU. 5739 uint32_t in1 = 0xffffffffu; // generated by the FPU.
5530 for (int i = 0; i < kCacheSize; i++) { 5740 for (int i = 0; i < kCacheSize; i++) {
5531 elements_[i].in[0] = in0; 5741 elements_[i].in[0] = in0;
5532 elements_[i].in[1] = in1; 5742 elements_[i].in[1] = in1;
5533 elements_[i].output = NULL; 5743 elements_[i].output = NULL;
5534 } 5744 }
5535 } 5745 }
5536 5746
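The constructor above seeds every cache slot with 0xffffffff in both input words. That is deliberate: a double is cached as its two 32-bit halves, and an all-ones pattern is a NaN encoding the FPU never produces, so an empty slot can never spuriously match a real input. A sketch of the match test under that convention (the element layout is simplified; the real cache stores a heap object as output):

    #include <cstdint>
    #include <cstring>

    struct Element { uint32_t in[2]; double output; };

    inline void ClearElement(Element* e) {
      e->in[0] = 0xffffffffu;  // NaN halves: never produced for a real input
      e->in[1] = 0xffffffffu;
      e->output = 0.0;
    }

    inline bool Matches(const Element& e, double input) {
      uint32_t half[2];
      std::memcpy(half, &input, sizeof(half));  // the double's raw bit halves
      return e.in[0] == half[0] && e.in[1] == half[1];
    }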
5537 5747
5538 TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
5539
5540
5541 void TranscendentalCache::Clear() { 5748 void TranscendentalCache::Clear() {
5542 for (int i = 0; i < kNumberOfCaches; i++) { 5749 for (int i = 0; i < kNumberOfCaches; i++) {
5543 if (caches_[i] != NULL) { 5750 if (caches_[i] != NULL) {
5544 delete caches_[i]; 5751 delete caches_[i];
5545 caches_[i] = NULL; 5752 caches_[i] = NULL;
5546 } 5753 }
5547 } 5754 }
5548 } 5755 }
5549 5756
5550 5757
5551 void ExternalStringTable::CleanUp() { 5758 void ExternalStringTable::CleanUp() {
5552 int last = 0; 5759 int last = 0;
5553 for (int i = 0; i < new_space_strings_.length(); ++i) { 5760 for (int i = 0; i < new_space_strings_.length(); ++i) {
5554 if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue; 5761 if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
5555 if (Heap::InNewSpace(new_space_strings_[i])) { 5762 if (heap_->InNewSpace(new_space_strings_[i])) {
5556 new_space_strings_[last++] = new_space_strings_[i]; 5763 new_space_strings_[last++] = new_space_strings_[i];
5557 } else { 5764 } else {
5558 old_space_strings_.Add(new_space_strings_[i]); 5765 old_space_strings_.Add(new_space_strings_[i]);
5559 } 5766 }
5560 } 5767 }
5561 new_space_strings_.Rewind(last); 5768 new_space_strings_.Rewind(last);
5562 last = 0; 5769 last = 0;
5563 for (int i = 0; i < old_space_strings_.length(); ++i) { 5770 for (int i = 0; i < old_space_strings_.length(); ++i) {
5564 if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue; 5771 if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
5565 ASSERT(!Heap::InNewSpace(old_space_strings_[i])); 5772 ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
5566 old_space_strings_[last++] = old_space_strings_[i]; 5773 old_space_strings_[last++] = old_space_strings_[i];
5567 } 5774 }
5568 old_space_strings_.Rewind(last); 5775 old_space_strings_.Rewind(last);
5569 Verify(); 5776 Verify();
5570 } 5777 }
5571 5778
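CleanUp above is the classic in-place filter: a write cursor (last) trails the read cursor, survivors are copied down, and Rewind truncates the tail without reallocating. The same pattern in standard C++, with the liveness test abstracted out (keep stands in for the new-space and null checks above):

    #include <vector>

    template <typename T, typename Pred>
    void CompactInPlace(std::vector<T>* list, Pred keep) {
      size_t last = 0;                        // write cursor
      for (size_t i = 0; i < list->size(); ++i) {
        if (keep((*list)[i])) (*list)[last++] = (*list)[i];
      }
      list->resize(last);                     // the analogue of Rewind(last)
    }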
5572 5779
5573 void ExternalStringTable::TearDown() { 5780 void ExternalStringTable::TearDown() {
5574 new_space_strings_.Free(); 5781 new_space_strings_.Free();
5575 old_space_strings_.Free(); 5782 old_space_strings_.Free();
5576 } 5783 }
5577 5784
5578 5785
5579 List<Object*> ExternalStringTable::new_space_strings_;
5580 List<Object*> ExternalStringTable::old_space_strings_;
5581
5582 } } // namespace v8::internal 5786 } } // namespace v8::internal