| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/once.h" | 9 #include "src/base/once.h" |
| 10 #include "src/base/utils/random-number-generator.h" | 10 #include "src/base/utils/random-number-generator.h" |
| 11 #include "src/bootstrapper.h" | 11 #include "src/bootstrapper.h" |
| 12 #include "src/codegen.h" | 12 #include "src/codegen.h" |
| 13 #include "src/compilation-cache.h" | 13 #include "src/compilation-cache.h" |
| 14 #include "src/conversions.h" | 14 #include "src/conversions.h" |
| 15 #include "src/cpu-profiler.h" | 15 #include "src/cpu-profiler.h" |
| 16 #include "src/debug.h" | 16 #include "src/debug.h" |
| 17 #include "src/deoptimizer.h" | 17 #include "src/deoptimizer.h" |
| 18 #include "src/global-handles.h" | 18 #include "src/global-handles.h" |
| 19 #include "src/heap/incremental-marking.h" |
| 20 #include "src/heap/mark-compact.h" |
| 19 #include "src/heap-profiler.h" | 21 #include "src/heap-profiler.h" |
| 20 #include "src/incremental-marking.h" | |
| 21 #include "src/isolate-inl.h" | 22 #include "src/isolate-inl.h" |
| 22 #include "src/mark-compact.h" | |
| 23 #include "src/natives.h" | 23 #include "src/natives.h" |
| 24 #include "src/objects-visiting-inl.h" | 24 #include "src/objects-visiting-inl.h" |
| 25 #include "src/objects-visiting.h" | 25 #include "src/objects-visiting.h" |
| 26 #include "src/runtime-profiler.h" | 26 #include "src/runtime-profiler.h" |
| 27 #include "src/scopeinfo.h" | 27 #include "src/scopeinfo.h" |
| 28 #include "src/snapshot.h" | 28 #include "src/snapshot.h" |
| 29 #include "src/store-buffer.h" | 29 #include "src/store-buffer.h" |
| 30 #include "src/utils.h" | 30 #include "src/utils.h" |
| 31 #include "src/v8threads.h" | 31 #include "src/v8threads.h" |
| 32 #include "src/vm-state-inl.h" | 32 #include "src/vm-state-inl.h" |
| 33 | 33 |
| 34 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP | 34 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP |
| 35 #include "src/regexp-macro-assembler.h" // NOLINT | 35 #include "src/regexp-macro-assembler.h" // NOLINT |
| 36 #include "src/arm/regexp-macro-assembler-arm.h" // NOLINT | 36 #include "src/arm/regexp-macro-assembler-arm.h" // NOLINT |
| 37 #endif | 37 #endif |
| 38 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP | 38 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP |
| 39 #include "src/regexp-macro-assembler.h" // NOLINT | 39 #include "src/regexp-macro-assembler.h" // NOLINT |
| 40 #include "src/mips/regexp-macro-assembler-mips.h" // NOLINT | 40 #include "src/mips/regexp-macro-assembler-mips.h" // NOLINT |
| 41 #endif | 41 #endif |
| 42 #if V8_TARGET_ARCH_MIPS64 && !V8_INTERPRETED_REGEXP | 42 #if V8_TARGET_ARCH_MIPS64 && !V8_INTERPRETED_REGEXP |
| 43 #include "src/regexp-macro-assembler.h" | 43 #include "src/regexp-macro-assembler.h" |
| 44 #include "src/mips64/regexp-macro-assembler-mips64.h" | 44 #include "src/mips64/regexp-macro-assembler-mips64.h" |
| 45 #endif | 45 #endif |
| 46 | 46 |
| 47 namespace v8 { | 47 namespace v8 { |
| 48 namespace internal { | 48 namespace internal { |
| 49 | 49 |
| (...skipping 80 matching lines...) |
| 130 gcs_since_last_deopt_(0), | 130 gcs_since_last_deopt_(0), |
| 131 #ifdef VERIFY_HEAP | 131 #ifdef VERIFY_HEAP |
| 132 no_weak_object_verification_scope_depth_(0), | 132 no_weak_object_verification_scope_depth_(0), |
| 133 #endif | 133 #endif |
| 134 allocation_sites_scratchpad_length_(0), | 134 allocation_sites_scratchpad_length_(0), |
| 135 promotion_queue_(this), | 135 promotion_queue_(this), |
| 136 configured_(false), | 136 configured_(false), |
| 137 external_string_table_(this), | 137 external_string_table_(this), |
| 138 chunks_queued_for_free_(NULL), | 138 chunks_queued_for_free_(NULL), |
| 139 gc_callbacks_depth_(0) { | 139 gc_callbacks_depth_(0) { |
| 140 // Allow build-time customization of the max semispace size. Building | 140 // Allow build-time customization of the max semispace size. Building |
| 141 // V8 with snapshots and a non-default max semispace size is much | 141 // V8 with snapshots and a non-default max semispace size is much |
| 142 // easier if you can define it as part of the build environment. | 142 // easier if you can define it as part of the build environment. |
| 143 #if defined(V8_MAX_SEMISPACE_SIZE) | 143 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 144 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 144 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 145 #endif | 145 #endif |
| 146 | 146 |
| 147 // Ensure old_generation_size_ is a multiple of kPageSize. | 147 // Ensure old_generation_size_ is a multiple of kPageSize. |
| 148 DCHECK(MB >= Page::kPageSize); | 148 DCHECK(MB >= Page::kPageSize); |
| 149 | 149 |
| 150 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); | 150 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); |
| 151 set_native_contexts_list(NULL); | 151 set_native_contexts_list(NULL); |
| 152 set_array_buffers_list(Smi::FromInt(0)); | 152 set_array_buffers_list(Smi::FromInt(0)); |
| 153 set_allocation_sites_list(Smi::FromInt(0)); | 153 set_allocation_sites_list(Smi::FromInt(0)); |
| 154 set_encountered_weak_collections(Smi::FromInt(0)); | 154 set_encountered_weak_collections(Smi::FromInt(0)); |
| 155 // Put a dummy entry in the remembered pages so we can find the list in the | 155 // Put a dummy entry in the remembered pages so we can find the list in the |
| 156 // minidump even if there are no real unmapped pages. | 156 // minidump even if there are no real unmapped pages. |
| 157 RememberUnmappedPage(NULL, false); | 157 RememberUnmappedPage(NULL, false); |
| 158 | 158 |
| 159 ClearObjectStats(true); | 159 ClearObjectStats(true); |
| 160 } | 160 } |
| 161 | 161 |
| 162 | 162 |
| 163 intptr_t Heap::Capacity() { | 163 intptr_t Heap::Capacity() { |
| 164 if (!HasBeenSetUp()) return 0; | 164 if (!HasBeenSetUp()) return 0; |
| 165 | 165 |
| 166 return new_space_.Capacity() + | 166 return new_space_.Capacity() + old_pointer_space_->Capacity() + |
| 167 old_pointer_space_->Capacity() + | 167 old_data_space_->Capacity() + code_space_->Capacity() + |
| 168 old_data_space_->Capacity() + | 168 map_space_->Capacity() + cell_space_->Capacity() + |
| 169 code_space_->Capacity() + | 169 property_cell_space_->Capacity(); |
| 170 map_space_->Capacity() + | |
| 171 cell_space_->Capacity() + | |
| 172 property_cell_space_->Capacity(); | |
| 173 } | 170 } |
| 174 | 171 |
| 175 | 172 |
| 176 intptr_t Heap::CommittedMemory() { | 173 intptr_t Heap::CommittedMemory() { |
| 177 if (!HasBeenSetUp()) return 0; | 174 if (!HasBeenSetUp()) return 0; |
| 178 | 175 |
| 179 return new_space_.CommittedMemory() + | 176 return new_space_.CommittedMemory() + old_pointer_space_->CommittedMemory() + |
| 180 old_pointer_space_->CommittedMemory() + | 177 old_data_space_->CommittedMemory() + code_space_->CommittedMemory() + |
| 181 old_data_space_->CommittedMemory() + | 178 map_space_->CommittedMemory() + cell_space_->CommittedMemory() + |
| 182 code_space_->CommittedMemory() + | 179 property_cell_space_->CommittedMemory() + lo_space_->Size(); |
| 183 map_space_->CommittedMemory() + | |
| 184 cell_space_->CommittedMemory() + | |
| 185 property_cell_space_->CommittedMemory() + | |
| 186 lo_space_->Size(); | |
| 187 } | 180 } |
| 188 | 181 |
| 189 | 182 |
| 190 size_t Heap::CommittedPhysicalMemory() { | 183 size_t Heap::CommittedPhysicalMemory() { |
| 191 if (!HasBeenSetUp()) return 0; | 184 if (!HasBeenSetUp()) return 0; |
| 192 | 185 |
| 193 return new_space_.CommittedPhysicalMemory() + | 186 return new_space_.CommittedPhysicalMemory() + |
| 194 old_pointer_space_->CommittedPhysicalMemory() + | 187 old_pointer_space_->CommittedPhysicalMemory() + |
| 195 old_data_space_->CommittedPhysicalMemory() + | 188 old_data_space_->CommittedPhysicalMemory() + |
| 196 code_space_->CommittedPhysicalMemory() + | 189 code_space_->CommittedPhysicalMemory() + |
| 197 map_space_->CommittedPhysicalMemory() + | 190 map_space_->CommittedPhysicalMemory() + |
| 198 cell_space_->CommittedPhysicalMemory() + | 191 cell_space_->CommittedPhysicalMemory() + |
| 199 property_cell_space_->CommittedPhysicalMemory() + | 192 property_cell_space_->CommittedPhysicalMemory() + |
| 200 lo_space_->CommittedPhysicalMemory(); | 193 lo_space_->CommittedPhysicalMemory(); |
| 201 } | 194 } |
| 202 | 195 |
| 203 | 196 |
| 204 intptr_t Heap::CommittedMemoryExecutable() { | 197 intptr_t Heap::CommittedMemoryExecutable() { |
| 205 if (!HasBeenSetUp()) return 0; | 198 if (!HasBeenSetUp()) return 0; |
| 206 | 199 |
| 207 return isolate()->memory_allocator()->SizeExecutable(); | 200 return isolate()->memory_allocator()->SizeExecutable(); |
| 208 } | 201 } |
| 209 | 202 |
| 210 | 203 |
| 211 void Heap::UpdateMaximumCommitted() { | 204 void Heap::UpdateMaximumCommitted() { |
| 212 if (!HasBeenSetUp()) return; | 205 if (!HasBeenSetUp()) return; |
| 213 | 206 |
| 214 intptr_t current_committed_memory = CommittedMemory(); | 207 intptr_t current_committed_memory = CommittedMemory(); |
| 215 if (current_committed_memory > maximum_committed_) { | 208 if (current_committed_memory > maximum_committed_) { |
| 216 maximum_committed_ = current_committed_memory; | 209 maximum_committed_ = current_committed_memory; |
| 217 } | 210 } |
| 218 } | 211 } |
| 219 | 212 |
| 220 | 213 |
| 221 intptr_t Heap::Available() { | 214 intptr_t Heap::Available() { |
| 222 if (!HasBeenSetUp()) return 0; | 215 if (!HasBeenSetUp()) return 0; |
| 223 | 216 |
| 224 return new_space_.Available() + | 217 return new_space_.Available() + old_pointer_space_->Available() + |
| 225 old_pointer_space_->Available() + | 218 old_data_space_->Available() + code_space_->Available() + |
| 226 old_data_space_->Available() + | 219 map_space_->Available() + cell_space_->Available() + |
| 227 code_space_->Available() + | 220 property_cell_space_->Available(); |
| 228 map_space_->Available() + | |
| 229 cell_space_->Available() + | |
| 230 property_cell_space_->Available(); | |
| 231 } | 221 } |
| 232 | 222 |
| 233 | 223 |
| 234 bool Heap::HasBeenSetUp() { | 224 bool Heap::HasBeenSetUp() { |
| 235 return old_pointer_space_ != NULL && | 225 return old_pointer_space_ != NULL && old_data_space_ != NULL && |
| 236 old_data_space_ != NULL && | 226 code_space_ != NULL && map_space_ != NULL && cell_space_ != NULL && |
| 237 code_space_ != NULL && | 227 property_cell_space_ != NULL && lo_space_ != NULL; |
| 238 map_space_ != NULL && | |
| 239 cell_space_ != NULL && | |
| 240 property_cell_space_ != NULL && | |
| 241 lo_space_ != NULL; | |
| 242 } | 228 } |
| 243 | 229 |
| 244 | 230 |
| 245 int Heap::GcSafeSizeOfOldObject(HeapObject* object) { | 231 int Heap::GcSafeSizeOfOldObject(HeapObject* object) { |
| 246 if (IntrusiveMarking::IsMarked(object)) { | 232 if (IntrusiveMarking::IsMarked(object)) { |
| 247 return IntrusiveMarking::SizeOfMarkedObject(object); | 233 return IntrusiveMarking::SizeOfMarkedObject(object); |
| 248 } | 234 } |
| 249 return object->SizeFromMap(object->map()); | 235 return object->SizeFromMap(object->map()); |
| 250 } | 236 } |
| 251 | 237 |
| (...skipping 14 matching lines...) |
| 266 | 252 |
| 267 // Is enough data promoted to justify a global GC? | 253 // Is enough data promoted to justify a global GC? |
| 268 if (OldGenerationAllocationLimitReached()) { | 254 if (OldGenerationAllocationLimitReached()) { |
| 269 isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment(); | 255 isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment(); |
| 270 *reason = "promotion limit reached"; | 256 *reason = "promotion limit reached"; |
| 271 return MARK_COMPACTOR; | 257 return MARK_COMPACTOR; |
| 272 } | 258 } |
| 273 | 259 |
| 274 // Have allocation in OLD and LO failed? | 260 // Have allocation in OLD and LO failed? |
| 275 if (old_gen_exhausted_) { | 261 if (old_gen_exhausted_) { |
| 276 isolate_->counters()-> | 262 isolate_->counters() |
| 277 gc_compactor_caused_by_oldspace_exhaustion()->Increment(); | 263 ->gc_compactor_caused_by_oldspace_exhaustion() |
| | 264 ->Increment(); |
| 278 *reason = "old generations exhausted"; | 265 *reason = "old generations exhausted"; |
| 279 return MARK_COMPACTOR; | 266 return MARK_COMPACTOR; |
| 280 } | 267 } |
| 281 | 268 |
| 282 // Is there enough space left in OLD to guarantee that a scavenge can | 269 // Is there enough space left in OLD to guarantee that a scavenge can |
| 283 // succeed? | 270 // succeed? |
| 284 // | 271 // |
| 285 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available | 272 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available |
| 286 // for object promotion. It counts only the bytes that the memory | 273 // for object promotion. It counts only the bytes that the memory |
| 287 // allocator has not yet allocated from the OS and assigned to any space, | 274 // allocator has not yet allocated from the OS and assigned to any space, |
| 288 // and does not count available bytes already in the old space or code | 275 // and does not count available bytes already in the old space or code |
| 289 // space. Undercounting is safe---we may get an unrequested full GC when | 276 // space. Undercounting is safe---we may get an unrequested full GC when |
| 290 // a scavenge would have succeeded. | 277 // a scavenge would have succeeded. |
| 291 if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) { | 278 if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) { |
| 292 isolate_->counters()-> | 279 isolate_->counters() |
| 293 gc_compactor_caused_by_oldspace_exhaustion()->Increment(); | 280 ->gc_compactor_caused_by_oldspace_exhaustion() |
| | 281 ->Increment(); |
| 294 *reason = "scavenge might not succeed"; | 282 *reason = "scavenge might not succeed"; |
| 295 return MARK_COMPACTOR; | 283 return MARK_COMPACTOR; |
| 296 } | 284 } |
| 297 | 285 |
| 298 // Default | 286 // Default |
| 299 *reason = NULL; | 287 *reason = NULL; |
| 300 return SCAVENGER; | 288 return SCAVENGER; |
| 301 } | 289 } |
| 302 | 290 |
| 303 | 291 |
| 304 // TODO(1238405): Combine the infrastructure for --heap-stats and | 292 // TODO(1238405): Combine the infrastructure for --heap-stats and |
| 305 // --log-gc to avoid the complicated preprocessor and flag testing. | 293 // --log-gc to avoid the complicated preprocessor and flag testing. |
| 306 void Heap::ReportStatisticsBeforeGC() { | 294 void Heap::ReportStatisticsBeforeGC() { |
| 307 // Heap::ReportHeapStatistics will also log NewSpace statistics when | 295 // Heap::ReportHeapStatistics will also log NewSpace statistics when |
| 308 // compiled --log-gc is set. The following logic is used to avoid | 296 // compiled --log-gc is set. The following logic is used to avoid |
| 309 // double logging. | 297 // double logging. |
| 310 #ifdef DEBUG | 298 #ifdef DEBUG |
| 311 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics(); | 299 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics(); |
| 312 if (FLAG_heap_stats) { | 300 if (FLAG_heap_stats) { |
| 313 ReportHeapStatistics("Before GC"); | 301 ReportHeapStatistics("Before GC"); |
| 314 } else if (FLAG_log_gc) { | 302 } else if (FLAG_log_gc) { |
| 315 new_space_.ReportStatistics(); | 303 new_space_.ReportStatistics(); |
| 316 } | 304 } |
| 317 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms(); | 305 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms(); |
| 318 #else | 306 #else |
| 319 if (FLAG_log_gc) { | 307 if (FLAG_log_gc) { |
| 320 new_space_.CollectStatistics(); | 308 new_space_.CollectStatistics(); |
| 321 new_space_.ReportStatistics(); | 309 new_space_.ReportStatistics(); |
| 322 new_space_.ClearHistograms(); | 310 new_space_.ClearHistograms(); |
| 323 } | 311 } |
| 324 #endif // DEBUG | 312 #endif // DEBUG |
| 325 } | 313 } |
| 326 | 314 |
| 327 | 315 |
| 328 void Heap::PrintShortHeapStatistics() { | 316 void Heap::PrintShortHeapStatistics() { |
| 329 if (!FLAG_trace_gc_verbose) return; | 317 if (!FLAG_trace_gc_verbose) return; |
| 330 PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX "d KB" | 318 PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX |
| 331 ", available: %6" V8_PTR_PREFIX "d KB\n", | 319 "d KB" |
| 320 ", available: %6" V8_PTR_PREFIX "d KB\n", |
| 332 isolate_->memory_allocator()->Size() / KB, | 321 isolate_->memory_allocator()->Size() / KB, |
| 333 isolate_->memory_allocator()->Available() / KB); | 322 isolate_->memory_allocator()->Available() / KB); |
| 334 PrintPID("New space, used: %6" V8_PTR_PREFIX "d KB" | 323 PrintPID("New space, used: %6" V8_PTR_PREFIX |
| 335 ", available: %6" V8_PTR_PREFIX "d KB" | 324 "d KB" |
| 336 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 325 ", available: %6" V8_PTR_PREFIX |
| 337 new_space_.Size() / KB, | 326 "d KB" |
| 338 new_space_.Available() / KB, | 327 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 328 new_space_.Size() / KB, new_space_.Available() / KB, |
| 339 new_space_.CommittedMemory() / KB); | 329 new_space_.CommittedMemory() / KB); |
| 340 PrintPID("Old pointers, used: %6" V8_PTR_PREFIX "d KB" | 330 PrintPID("Old pointers, used: %6" V8_PTR_PREFIX |
| 341 ", available: %6" V8_PTR_PREFIX "d KB" | 331 "d KB" |
| 342 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 332 ", available: %6" V8_PTR_PREFIX |
| 333 "d KB" |
| 334 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| 343 old_pointer_space_->SizeOfObjects() / KB, | 335 old_pointer_space_->SizeOfObjects() / KB, |
| 344 old_pointer_space_->Available() / KB, | 336 old_pointer_space_->Available() / KB, |
| 345 old_pointer_space_->CommittedMemory() / KB); | 337 old_pointer_space_->CommittedMemory() / KB); |
| 346 PrintPID("Old data space, used: %6" V8_PTR_PREFIX "d KB" | 338 PrintPID("Old data space, used: %6" V8_PTR_PREFIX |
| 347 ", available: %6" V8_PTR_PREFIX "d KB" | 339 "d KB" |
| 348 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 340 ", available: %6" V8_PTR_PREFIX |
| 341 "d KB" |
| 342 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| 349 old_data_space_->SizeOfObjects() / KB, | 343 old_data_space_->SizeOfObjects() / KB, |
| 350 old_data_space_->Available() / KB, | 344 old_data_space_->Available() / KB, |
| 351 old_data_space_->CommittedMemory() / KB); | 345 old_data_space_->CommittedMemory() / KB); |
| 352 PrintPID("Code space, used: %6" V8_PTR_PREFIX "d KB" | 346 PrintPID("Code space, used: %6" V8_PTR_PREFIX |
| 353 ", available: %6" V8_PTR_PREFIX "d KB" | 347 "d KB" |
| 354 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 348 ", available: %6" V8_PTR_PREFIX |
| 355 code_space_->SizeOfObjects() / KB, | 349 "d KB" |
| 356 code_space_->Available() / KB, | 350 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 351 code_space_->SizeOfObjects() / KB, code_space_->Available() / KB, |
| 357 code_space_->CommittedMemory() / KB); | 352 code_space_->CommittedMemory() / KB); |
| 358 PrintPID("Map space, used: %6" V8_PTR_PREFIX "d KB" | 353 PrintPID("Map space, used: %6" V8_PTR_PREFIX |
| 359 ", available: %6" V8_PTR_PREFIX "d KB" | 354 "d KB" |
| 360 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 355 ", available: %6" V8_PTR_PREFIX |
| 361 map_space_->SizeOfObjects() / KB, | 356 "d KB" |
| 362 map_space_->Available() / KB, | 357 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 358 map_space_->SizeOfObjects() / KB, map_space_->Available() / KB, |
| 363 map_space_->CommittedMemory() / KB); | 359 map_space_->CommittedMemory() / KB); |
| 364 PrintPID("Cell space, used: %6" V8_PTR_PREFIX "d KB" | 360 PrintPID("Cell space, used: %6" V8_PTR_PREFIX |
| 365 ", available: %6" V8_PTR_PREFIX "d KB" | 361 "d KB" |
| 366 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 362 ", available: %6" V8_PTR_PREFIX |
| 367 cell_space_->SizeOfObjects() / KB, | 363 "d KB" |
| 368 cell_space_->Available() / KB, | 364 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 365 cell_space_->SizeOfObjects() / KB, cell_space_->Available() / KB, |
| 369 cell_space_->CommittedMemory() / KB); | 366 cell_space_->CommittedMemory() / KB); |
| 370 PrintPID("PropertyCell space, used: %6" V8_PTR_PREFIX "d KB" | 367 PrintPID("PropertyCell space, used: %6" V8_PTR_PREFIX |
| 371 ", available: %6" V8_PTR_PREFIX "d KB" | 368 "d KB" |
| 372 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 369 ", available: %6" V8_PTR_PREFIX |
| 370 "d KB" |
| 371 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| 373 property_cell_space_->SizeOfObjects() / KB, | 372 property_cell_space_->SizeOfObjects() / KB, |
| 374 property_cell_space_->Available() / KB, | 373 property_cell_space_->Available() / KB, |
| 375 property_cell_space_->CommittedMemory() / KB); | 374 property_cell_space_->CommittedMemory() / KB); |
| 376 PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB" | 375 PrintPID("Large object space, used: %6" V8_PTR_PREFIX |
| 377 ", available: %6" V8_PTR_PREFIX "d KB" | 376 "d KB" |
| 378 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 377 ", available: %6" V8_PTR_PREFIX |
| 379 lo_space_->SizeOfObjects() / KB, | 378 "d KB" |
| 380 lo_space_->Available() / KB, | 379 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 380 lo_space_->SizeOfObjects() / KB, lo_space_->Available() / KB, |
| 381 lo_space_->CommittedMemory() / KB); | 381 lo_space_->CommittedMemory() / KB); |
| 382 PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB" | 382 PrintPID("All spaces, used: %6" V8_PTR_PREFIX |
| 383 ", available: %6" V8_PTR_PREFIX "d KB" | 383 "d KB" |
| 384 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 384 ", available: %6" V8_PTR_PREFIX |
| 385 this->SizeOfObjects() / KB, | 385 "d KB" |
| 386 this->Available() / KB, | 386 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| | 387 this->SizeOfObjects() / KB, this->Available() / KB, |
| 387 this->CommittedMemory() / KB); | 388 this->CommittedMemory() / KB); |
| 388 PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n", | 389 PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n", |
| 389 static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB)); | 390 static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB)); |
| 390 PrintPID("Total time spent in GC : %.1f ms\n", total_gc_time_ms_); | 391 PrintPID("Total time spent in GC : %.1f ms\n", total_gc_time_ms_); |
| 391 } | 392 } |
| 392 | 393 |
| 393 | 394 |
| 394 // TODO(1238405): Combine the infrastructure for --heap-stats and | 395 // TODO(1238405): Combine the infrastructure for --heap-stats and |
| 395 // --log-gc to avoid the complicated preprocessor and flag testing. | 396 // --log-gc to avoid the complicated preprocessor and flag testing. |
| 396 void Heap::ReportStatisticsAfterGC() { | 397 void Heap::ReportStatisticsAfterGC() { |
| 397 // Similar to the before GC, we use some complicated logic to ensure that | 398 // Similar to the before GC, we use some complicated logic to ensure that |
| 398 // NewSpace statistics are logged exactly once when --log-gc is turned on. | 399 // NewSpace statistics are logged exactly once when --log-gc is turned on. |
| 399 #if defined(DEBUG) | 400 #if defined(DEBUG) |
| 400 if (FLAG_heap_stats) { | 401 if (FLAG_heap_stats) { |
| 401 new_space_.CollectStatistics(); | 402 new_space_.CollectStatistics(); |
| 402 ReportHeapStatistics("After GC"); | 403 ReportHeapStatistics("After GC"); |
| 403 } else if (FLAG_log_gc) { | 404 } else if (FLAG_log_gc) { |
| 404 new_space_.ReportStatistics(); | 405 new_space_.ReportStatistics(); |
| 405 } | 406 } |
| 406 #else | 407 #else |
| 407 if (FLAG_log_gc) new_space_.ReportStatistics(); | 408 if (FLAG_log_gc) new_space_.ReportStatistics(); |
| 408 #endif // DEBUG | 409 #endif // DEBUG |
| 409 } | 410 } |
| 410 | 411 |
| 411 | 412 |
| 412 void Heap::GarbageCollectionPrologue() { | 413 void Heap::GarbageCollectionPrologue() { |
| 413 { AllowHeapAllocation for_the_first_part_of_prologue; | 414 { |
| | 415 AllowHeapAllocation for_the_first_part_of_prologue; |
| 414 ClearJSFunctionResultCaches(); | 416 ClearJSFunctionResultCaches(); |
| 415 gc_count_++; | 417 gc_count_++; |
| 416 unflattened_strings_length_ = 0; | 418 unflattened_strings_length_ = 0; |
| 417 | 419 |
| 418 if (FLAG_flush_code && FLAG_flush_code_incrementally) { | 420 if (FLAG_flush_code && FLAG_flush_code_incrementally) { |
| 419 mark_compact_collector()->EnableCodeFlushing(true); | 421 mark_compact_collector()->EnableCodeFlushing(true); |
| 420 } | 422 } |
| 421 | 423 |
| 422 #ifdef VERIFY_HEAP | 424 #ifdef VERIFY_HEAP |
| 423 if (FLAG_verify_heap) { | 425 if (FLAG_verify_heap) { |
| (...skipping 53 matching lines...) |
| 477 if (current_kind == Code::FUNCTION || | 479 if (current_kind == Code::FUNCTION || |
| 478 current_kind == Code::OPTIMIZED_FUNCTION) { | 480 current_kind == Code::OPTIMIZED_FUNCTION) { |
| 479 code->ClearInlineCaches(kind); | 481 code->ClearInlineCaches(kind); |
| 480 } | 482 } |
| 481 } | 483 } |
| 482 } | 484 } |
| 483 | 485 |
| 484 | 486 |
| 485 void Heap::RepairFreeListsAfterBoot() { | 487 void Heap::RepairFreeListsAfterBoot() { |
| 486 PagedSpaces spaces(this); | 488 PagedSpaces spaces(this); |
| 487 for (PagedSpace* space = spaces.next(); | 489 for (PagedSpace* space = spaces.next(); space != NULL; |
| 488 space != NULL; | |
| 489 space = spaces.next()) { | 490 space = spaces.next()) { |
| 490 space->RepairFreeListsAfterBoot(); | 491 space->RepairFreeListsAfterBoot(); |
| 491 } | 492 } |
| 492 } | 493 } |
| 493 | 494 |
| 494 | 495 |
| 495 void Heap::ProcessPretenuringFeedback() { | 496 void Heap::ProcessPretenuringFeedback() { |
| 496 if (FLAG_allocation_site_pretenuring) { | 497 if (FLAG_allocation_site_pretenuring) { |
| 497 int tenure_decisions = 0; | 498 int tenure_decisions = 0; |
| 498 int dont_tenure_decisions = 0; | 499 int dont_tenure_decisions = 0; |
| 499 int allocation_mementos_found = 0; | 500 int allocation_mementos_found = 0; |
| 500 int allocation_sites = 0; | 501 int allocation_sites = 0; |
| 501 int active_allocation_sites = 0; | 502 int active_allocation_sites = 0; |
| 502 | 503 |
| 503 // If the scratchpad overflowed, we have to iterate over the allocation | 504 // If the scratchpad overflowed, we have to iterate over the allocation |
| 504 // sites list. | 505 // sites list. |
| 505 // TODO(hpayer): We iterate over the whole list of allocation sites when | 506 // TODO(hpayer): We iterate over the whole list of allocation sites when |
| 506 // we grew to the maximum semi-space size to deopt maybe tenured | 507 // we grew to the maximum semi-space size to deopt maybe tenured |
| 507 // allocation sites. We could hold the maybe tenured allocation sites | 508 // allocation sites. We could hold the maybe tenured allocation sites |
| 508 // in a separate data structure if this is a performance problem. | 509 // in a separate data structure if this is a performance problem. |
| 509 bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites(); | 510 bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites(); |
| 510 bool use_scratchpad = | 511 bool use_scratchpad = |
| 511 allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize && | 512 allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize && |
| 512 !deopt_maybe_tenured; | 513 !deopt_maybe_tenured; |
| 513 | 514 |
| 514 int i = 0; | 515 int i = 0; |
| 515 Object* list_element = allocation_sites_list(); | 516 Object* list_element = allocation_sites_list(); |
| 516 bool trigger_deoptimization = false; | 517 bool trigger_deoptimization = false; |
| 517 bool maximum_size_scavenge = MaximumSizeScavenge(); | 518 bool maximum_size_scavenge = MaximumSizeScavenge(); |
| 518 while (use_scratchpad ? | 519 while (use_scratchpad ? i < allocation_sites_scratchpad_length_ |
| 519 i < allocation_sites_scratchpad_length_ : | 520 : list_element->IsAllocationSite()) { |
| 520 list_element->IsAllocationSite()) { | 521 AllocationSite* site = |
| 521 AllocationSite* site = use_scratchpad ? | 522 use_scratchpad |
| 522 AllocationSite::cast(allocation_sites_scratchpad()->get(i)) : | 523 ? AllocationSite::cast(allocation_sites_scratchpad()->get(i)) |
| 523 AllocationSite::cast(list_element); | 524 : AllocationSite::cast(list_element); |
| 524 allocation_mementos_found += site->memento_found_count(); | 525 allocation_mementos_found += site->memento_found_count(); |
| 525 if (site->memento_found_count() > 0) { | 526 if (site->memento_found_count() > 0) { |
| 526 active_allocation_sites++; | 527 active_allocation_sites++; |
| 527 if (site->DigestPretenuringFeedback(maximum_size_scavenge)) { | 528 if (site->DigestPretenuringFeedback(maximum_size_scavenge)) { |
| 528 trigger_deoptimization = true; | 529 trigger_deoptimization = true; |
| 529 } | 530 } |
| 530 if (site->GetPretenureMode() == TENURED) { | 531 if (site->GetPretenureMode() == TENURED) { |
| 531 tenure_decisions++; | 532 tenure_decisions++; |
| 532 } else { | 533 } else { |
| 533 dont_tenure_decisions++; | 534 dont_tenure_decisions++; |
| (...skipping 13 matching lines...) |
| 547 } | 548 } |
| 548 } | 549 } |
| 549 | 550 |
| 550 if (trigger_deoptimization) { | 551 if (trigger_deoptimization) { |
| 551 isolate_->stack_guard()->RequestDeoptMarkedAllocationSites(); | 552 isolate_->stack_guard()->RequestDeoptMarkedAllocationSites(); |
| 552 } | 553 } |
| 553 | 554 |
| 554 FlushAllocationSitesScratchpad(); | 555 FlushAllocationSitesScratchpad(); |
| 555 | 556 |
| 556 if (FLAG_trace_pretenuring_statistics && | 557 if (FLAG_trace_pretenuring_statistics && |
| 557 (allocation_mementos_found > 0 || | 558 (allocation_mementos_found > 0 || tenure_decisions > 0 || |
| 558 tenure_decisions > 0 || | |
| 559 dont_tenure_decisions > 0)) { | 559 dont_tenure_decisions > 0)) { |
| 560 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " | 560 PrintF( |
| 561 "#mementos, #tenure decisions, #donttenure decisions) " | 561 "GC: (mode, #visited allocation sites, #active allocation sites, " |
| 562 "(%s, %d, %d, %d, %d, %d)\n", | 562 "#mementos, #tenure decisions, #donttenure decisions) " |
| 563 use_scratchpad ? "use scratchpad" : "use list", | 563 "(%s, %d, %d, %d, %d, %d)\n", |
| 564 allocation_sites, | 564 use_scratchpad ? "use scratchpad" : "use list", allocation_sites, |
| 565 active_allocation_sites, | 565 active_allocation_sites, allocation_mementos_found, tenure_decisions, |
| 566 allocation_mementos_found, | 566 dont_tenure_decisions); |
| 567 tenure_decisions, | |
| 568 dont_tenure_decisions); | |
| 569 } | 567 } |
| 570 } | 568 } |
| 571 } | 569 } |
| 572 | 570 |
| 573 | 571 |
| 574 void Heap::DeoptMarkedAllocationSites() { | 572 void Heap::DeoptMarkedAllocationSites() { |
| 575 // TODO(hpayer): If iterating over the allocation sites list becomes a | 573 // TODO(hpayer): If iterating over the allocation sites list becomes a |
| 576 // performance issue, use a cache heap data structure instead (similar to the | 574 // performance issue, use a cache heap data structure instead (similar to the |
| 577 // allocation sites scratchpad). | 575 // allocation sites scratchpad). |
| 578 Object* list_element = allocation_sites_list(); | 576 Object* list_element = allocation_sites_list(); |
| 579 while (list_element->IsAllocationSite()) { | 577 while (list_element->IsAllocationSite()) { |
| 580 AllocationSite* site = AllocationSite::cast(list_element); | 578 AllocationSite* site = AllocationSite::cast(list_element); |
| 581 if (site->deopt_dependent_code()) { | 579 if (site->deopt_dependent_code()) { |
| 582 site->dependent_code()->MarkCodeForDeoptimization( | 580 site->dependent_code()->MarkCodeForDeoptimization( |
| 583 isolate_, | 581 isolate_, DependentCode::kAllocationSiteTenuringChangedGroup); |
| 584 DependentCode::kAllocationSiteTenuringChangedGroup); | |
| 585 site->set_deopt_dependent_code(false); | 582 site->set_deopt_dependent_code(false); |
| 586 } | 583 } |
| 587 list_element = site->weak_next(); | 584 list_element = site->weak_next(); |
| 588 } | 585 } |
| 589 Deoptimizer::DeoptimizeMarkedCode(isolate_); | 586 Deoptimizer::DeoptimizeMarkedCode(isolate_); |
| 590 } | 587 } |
| 591 | 588 |
| 592 | 589 |
| 593 void Heap::GarbageCollectionEpilogue() { | 590 void Heap::GarbageCollectionEpilogue() { |
| 594 store_buffer()->GCEpilogue(); | 591 store_buffer()->GCEpilogue(); |
| (...skipping 36 matching lines...) |
| 631 static_cast<int>(SizeOfObjects())); | 628 static_cast<int>(SizeOfObjects())); |
| 632 | 629 |
| 633 isolate_->counters()->string_table_capacity()->Set( | 630 isolate_->counters()->string_table_capacity()->Set( |
| 634 string_table()->Capacity()); | 631 string_table()->Capacity()); |
| 635 isolate_->counters()->number_of_symbols()->Set( | 632 isolate_->counters()->number_of_symbols()->Set( |
| 636 string_table()->NumberOfElements()); | 633 string_table()->NumberOfElements()); |
| 637 | 634 |
| 638 if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) { | 635 if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) { |
| 639 isolate_->counters()->codegen_fraction_crankshaft()->AddSample( | 636 isolate_->counters()->codegen_fraction_crankshaft()->AddSample( |
| 640 static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) / | 637 static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) / |
| 641 (crankshaft_codegen_bytes_generated_ | 638 (crankshaft_codegen_bytes_generated_ + |
| 642 + full_codegen_bytes_generated_))); | 639 full_codegen_bytes_generated_))); |
| 643 } | 640 } |
| 644 | 641 |
| 645 if (CommittedMemory() > 0) { | 642 if (CommittedMemory() > 0) { |
| 646 isolate_->counters()->external_fragmentation_total()->AddSample( | 643 isolate_->counters()->external_fragmentation_total()->AddSample( |
| 647 static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory())); | 644 static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory())); |
| 648 | 645 |
| 649 isolate_->counters()->heap_fraction_new_space()-> | 646 isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>( |
| 650 AddSample(static_cast<int>( | 647 (new_space()->CommittedMemory() * 100.0) / CommittedMemory())); |
| 651 (new_space()->CommittedMemory() * 100.0) / CommittedMemory())); | |
| 652 isolate_->counters()->heap_fraction_old_pointer_space()->AddSample( | 648 isolate_->counters()->heap_fraction_old_pointer_space()->AddSample( |
| 653 static_cast<int>( | 649 static_cast<int>((old_pointer_space()->CommittedMemory() * 100.0) / |
| 654 (old_pointer_space()->CommittedMemory() * 100.0) / | 650 CommittedMemory())); |
| 655 CommittedMemory())); | |
| 656 isolate_->counters()->heap_fraction_old_data_space()->AddSample( | 651 isolate_->counters()->heap_fraction_old_data_space()->AddSample( |
| 657 static_cast<int>( | 652 static_cast<int>((old_data_space()->CommittedMemory() * 100.0) / |
| 658 (old_data_space()->CommittedMemory() * 100.0) / | 653 CommittedMemory())); |
| 659 CommittedMemory())); | 654 isolate_->counters()->heap_fraction_code_space()->AddSample( |
| 660 isolate_->counters()->heap_fraction_code_space()-> | 655 static_cast<int>((code_space()->CommittedMemory() * 100.0) / |
| 661 AddSample(static_cast<int>( | 656 CommittedMemory())); |
| 662 (code_space()->CommittedMemory() * 100.0) / CommittedMemory())); | 657 isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>( |
| 663 isolate_->counters()->heap_fraction_map_space()->AddSample( | 658 (map_space()->CommittedMemory() * 100.0) / CommittedMemory())); |
| 664 static_cast<int>( | |
| 665 (map_space()->CommittedMemory() * 100.0) / CommittedMemory())); | |
| 666 isolate_->counters()->heap_fraction_cell_space()->AddSample( | 659 isolate_->counters()->heap_fraction_cell_space()->AddSample( |
| 667 static_cast<int>( | 660 static_cast<int>((cell_space()->CommittedMemory() * 100.0) / |
| 668 (cell_space()->CommittedMemory() * 100.0) / CommittedMemory())); | 661 CommittedMemory())); |
| 669 isolate_->counters()->heap_fraction_property_cell_space()-> | 662 isolate_->counters()->heap_fraction_property_cell_space()->AddSample( |
| 670 AddSample(static_cast<int>( | 663 static_cast<int>((property_cell_space()->CommittedMemory() * 100.0) / |
| 671 (property_cell_space()->CommittedMemory() * 100.0) / | 664 CommittedMemory())); |
| 672 CommittedMemory())); | 665 isolate_->counters()->heap_fraction_lo_space()->AddSample(static_cast<int>( |
| 673 isolate_->counters()->heap_fraction_lo_space()-> | 666 (lo_space()->CommittedMemory() * 100.0) / CommittedMemory())); |
| 674 AddSample(static_cast<int>( | |
| 675 (lo_space()->CommittedMemory() * 100.0) / CommittedMemory())); | |
| 676 | 667 |
| 677 isolate_->counters()->heap_sample_total_committed()->AddSample( | 668 isolate_->counters()->heap_sample_total_committed()->AddSample( |
| 678 static_cast<int>(CommittedMemory() / KB)); | 669 static_cast<int>(CommittedMemory() / KB)); |
| 679 isolate_->counters()->heap_sample_total_used()->AddSample( | 670 isolate_->counters()->heap_sample_total_used()->AddSample( |
| 680 static_cast<int>(SizeOfObjects() / KB)); | 671 static_cast<int>(SizeOfObjects() / KB)); |
| 681 isolate_->counters()->heap_sample_map_space_committed()->AddSample( | 672 isolate_->counters()->heap_sample_map_space_committed()->AddSample( |
| 682 static_cast<int>(map_space()->CommittedMemory() / KB)); | 673 static_cast<int>(map_space()->CommittedMemory() / KB)); |
| 683 isolate_->counters()->heap_sample_cell_space_committed()->AddSample( | 674 isolate_->counters()->heap_sample_cell_space_committed()->AddSample( |
| 684 static_cast<int>(cell_space()->CommittedMemory() / KB)); | 675 static_cast<int>(cell_space()->CommittedMemory() / KB)); |
| 685 isolate_->counters()-> | 676 isolate_->counters() |
| 686 heap_sample_property_cell_space_committed()-> | 677 ->heap_sample_property_cell_space_committed() |
| 687 AddSample(static_cast<int>( | 678 ->AddSample( |
| 688 property_cell_space()->CommittedMemory() / KB)); | 679 static_cast<int>(property_cell_space()->CommittedMemory() / KB)); |
| 689 isolate_->counters()->heap_sample_code_space_committed()->AddSample( | 680 isolate_->counters()->heap_sample_code_space_committed()->AddSample( |
| 690 static_cast<int>(code_space()->CommittedMemory() / KB)); | 681 static_cast<int>(code_space()->CommittedMemory() / KB)); |
| 691 | 682 |
| 692 isolate_->counters()->heap_sample_maximum_committed()->AddSample( | 683 isolate_->counters()->heap_sample_maximum_committed()->AddSample( |
| 693 static_cast<int>(MaximumCommittedMemory() / KB)); | 684 static_cast<int>(MaximumCommittedMemory() / KB)); |
| 694 } | 685 } |
| 695 | 686 |
| 696 #define UPDATE_COUNTERS_FOR_SPACE(space) \ | 687 #define UPDATE_COUNTERS_FOR_SPACE(space) \ |
| 697 isolate_->counters()->space##_bytes_available()->Set( \ | 688 isolate_->counters()->space##_bytes_available()->Set( \ |
| 698 static_cast<int>(space()->Available())); \ | 689 static_cast<int>(space()->Available())); \ |
| 699 isolate_->counters()->space##_bytes_committed()->Set( \ | 690 isolate_->counters()->space##_bytes_committed()->Set( \ |
| 700 static_cast<int>(space()->CommittedMemory())); \ | 691 static_cast<int>(space()->CommittedMemory())); \ |
| 701 isolate_->counters()->space##_bytes_used()->Set( \ | 692 isolate_->counters()->space##_bytes_used()->Set( \ |
| 702 static_cast<int>(space()->SizeOfObjects())); | 693 static_cast<int>(space()->SizeOfObjects())); |
| 703 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \ | 694 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \ |
| 704 if (space()->CommittedMemory() > 0) { \ | 695 if (space()->CommittedMemory() > 0) { \ |
| 705 isolate_->counters()->external_fragmentation_##space()->AddSample( \ | 696 isolate_->counters()->external_fragmentation_##space()->AddSample( \ |
| 706 static_cast<int>(100 - \ | 697 static_cast<int>(100 - \ |
| 707 (space()->SizeOfObjects() * 100.0) / space()->CommittedMemory())); \ | 698 (space()->SizeOfObjects() * 100.0) / \ |
| | 699 space()->CommittedMemory())); \ |
| 708 } | 700 } |
| 709 #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \ | 701 #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \ |
| 710 UPDATE_COUNTERS_FOR_SPACE(space) \ | 702 UPDATE_COUNTERS_FOR_SPACE(space) \ |
| 711 UPDATE_FRAGMENTATION_FOR_SPACE(space) | 703 UPDATE_FRAGMENTATION_FOR_SPACE(space) |
| 712 | 704 |
| 713 UPDATE_COUNTERS_FOR_SPACE(new_space) | 705 UPDATE_COUNTERS_FOR_SPACE(new_space) |
| 714 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space) | 706 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space) |
| 715 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space) | 707 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space) |
| 716 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space) | 708 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space) |
| 717 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space) | 709 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space) |
| 718 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space) | 710 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space) |
| 719 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(property_cell_space) | 711 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(property_cell_space) |
| 720 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space) | 712 UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space) |
| 721 #undef UPDATE_COUNTERS_FOR_SPACE | 713 #undef UPDATE_COUNTERS_FOR_SPACE |
| 722 #undef UPDATE_FRAGMENTATION_FOR_SPACE | 714 #undef UPDATE_FRAGMENTATION_FOR_SPACE |
| 723 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE | 715 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE |
| 724 | 716 |
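Aside: the UPDATE_* macros above only stamp out per-space counter updates via token pasting. A hand-expanded, illustrative form of UPDATE_COUNTERS_FOR_SPACE(new_space) (written out here purely for clarity; it is not part of the change) would read:

    // Illustrative expansion: space##_bytes_available() pastes to
    // new_space_bytes_available(), and space() becomes the new_space() accessor.
    isolate_->counters()->new_space_bytes_available()->Set(
        static_cast<int>(new_space()->Available()));
    isolate_->counters()->new_space_bytes_committed()->Set(
        static_cast<int>(new_space()->CommittedMemory()));
    isolate_->counters()->new_space_bytes_used()->Set(
        static_cast<int>(new_space()->SizeOfObjects()));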
| 725 #ifdef DEBUG | 717 #ifdef DEBUG |
| 726 ReportStatisticsAfterGC(); | 718 ReportStatisticsAfterGC(); |
| 727 #endif // DEBUG | 719 #endif // DEBUG |
| 728 | 720 |
| 729 // Remember the last top pointer so that we can later find out | 721 // Remember the last top pointer so that we can later find out |
| 730 // whether we allocated in new space since the last GC. | 722 // whether we allocated in new space since the last GC. |
| 731 new_space_top_after_last_gc_ = new_space()->top(); | 723 new_space_top_after_last_gc_ = new_space()->top(); |
| 732 } | 724 } |
| 733 | 725 |
| 734 | 726 |
| 735 void Heap::CollectAllGarbage(int flags, | 727 void Heap::CollectAllGarbage(int flags, const char* gc_reason, |
| 736 const char* gc_reason, | |
| 737 const v8::GCCallbackFlags gc_callback_flags) { | 728 const v8::GCCallbackFlags gc_callback_flags) { |
| 738 // Since we are ignoring the return value, the exact choice of space does | 729 // Since we are ignoring the return value, the exact choice of space does |
| 739 // not matter, so long as we do not specify NEW_SPACE, which would not | 730 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 740 // cause a full GC. | 731 // cause a full GC. |
| 741 mark_compact_collector_.SetFlags(flags); | 732 mark_compact_collector_.SetFlags(flags); |
| 742 CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags); | 733 CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags); |
| 743 mark_compact_collector_.SetFlags(kNoGCFlags); | 734 mark_compact_collector_.SetFlags(kNoGCFlags); |
| 744 } | 735 } |
| 745 | 736 |
| 746 | 737 |
| (...skipping 40 matching lines...) |
| 787 // identify the unused space. | 778 // identify the unused space. |
| 788 Address from_top = new_space_.top(); | 779 Address from_top = new_space_.top(); |
| 789 Address from_limit = new_space_.limit(); | 780 Address from_limit = new_space_.limit(); |
| 790 if (from_top < from_limit) { | 781 if (from_top < from_limit) { |
| 791 int remaining_in_page = static_cast<int>(from_limit - from_top); | 782 int remaining_in_page = static_cast<int>(from_limit - from_top); |
| 792 CreateFillerObjectAt(from_top, remaining_in_page); | 783 CreateFillerObjectAt(from_top, remaining_in_page); |
| 793 } | 784 } |
| 794 } | 785 } |
| 795 | 786 |
| 796 | 787 |
| 797 bool Heap::CollectGarbage(GarbageCollector collector, | 788 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, |
| 798 const char* gc_reason, | |
| 799 const char* collector_reason, | 789 const char* collector_reason, |
| 800 const v8::GCCallbackFlags gc_callback_flags) { | 790 const v8::GCCallbackFlags gc_callback_flags) { |
| 801 // The VM is in the GC state until exiting this function. | 791 // The VM is in the GC state until exiting this function. |
| 802 VMState<GC> state(isolate_); | 792 VMState<GC> state(isolate_); |
| 803 | 793 |
| 804 #ifdef DEBUG | 794 #ifdef DEBUG |
| 805 // Reset the allocation timeout to the GC interval, but make sure to | 795 // Reset the allocation timeout to the GC interval, but make sure to |
| 806 // allow at least a few allocations after a collection. The reason | 796 // allow at least a few allocations after a collection. The reason |
| 807 // for this is that we have a lot of allocation sequences and we | 797 // for this is that we have a lot of allocation sequences and we |
| 808 // assume that a garbage collection will allow the subsequent | 798 // assume that a garbage collection will allow the subsequent |
| (...skipping 44 matching lines...) |
| 853 } | 843 } |
| 854 | 844 |
| 855 GarbageCollectionEpilogue(); | 845 GarbageCollectionEpilogue(); |
| 856 tracer()->Stop(); | 846 tracer()->Stop(); |
| 857 } | 847 } |
| 858 | 848 |
| 859 // Start incremental marking for the next cycle. The heap snapshot | 849 // Start incremental marking for the next cycle. The heap snapshot |
| 860 // generator needs incremental marking to stay off after it aborted. | 850 // generator needs incremental marking to stay off after it aborted. |
| 861 if (!mark_compact_collector()->abort_incremental_marking() && | 851 if (!mark_compact_collector()->abort_incremental_marking() && |
| 862 incremental_marking()->IsStopped() && | 852 incremental_marking()->IsStopped() && |
| 863 incremental_marking()->WorthActivating() && | 853 incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) { |
| 864 NextGCIsLikelyToBeFull()) { | |
| 865 incremental_marking()->Start(); | 854 incremental_marking()->Start(); |
| 866 } | 855 } |
| 867 | 856 |
| 868 return next_gc_likely_to_collect_more; | 857 return next_gc_likely_to_collect_more; |
| 869 } | 858 } |
| 870 | 859 |
| 871 | 860 |
| 872 int Heap::NotifyContextDisposed() { | 861 int Heap::NotifyContextDisposed() { |
| 873 if (isolate()->concurrent_recompilation_enabled()) { | 862 if (isolate()->concurrent_recompilation_enabled()) { |
| 874 // Flush the queued recompilation tasks. | 863 // Flush the queued recompilation tasks. |
| 875 isolate()->optimizing_compiler_thread()->Flush(); | 864 isolate()->optimizing_compiler_thread()->Flush(); |
| 876 } | 865 } |
| 877 flush_monomorphic_ics_ = true; | 866 flush_monomorphic_ics_ = true; |
| 878 AgeInlineCaches(); | 867 AgeInlineCaches(); |
| 879 return ++contexts_disposed_; | 868 return ++contexts_disposed_; |
| 880 } | 869 } |
| 881 | 870 |
| 882 | 871 |
| 883 void Heap::MoveElements(FixedArray* array, | 872 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index, |
| 884 int dst_index, | |
| 885 int src_index, | |
| 886 int len) { | 873 int len) { |
| 887 if (len == 0) return; | 874 if (len == 0) return; |
| 888 | 875 |
| 889 DCHECK(array->map() != fixed_cow_array_map()); | 876 DCHECK(array->map() != fixed_cow_array_map()); |
| 890 Object** dst_objects = array->data_start() + dst_index; | 877 Object** dst_objects = array->data_start() + dst_index; |
| 891 MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); | 878 MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); |
| 892 if (!InNewSpace(array)) { | 879 if (!InNewSpace(array)) { |
| 893 for (int i = 0; i < len; i++) { | 880 for (int i = 0; i < len; i++) { |
| 894 // TODO(hpayer): check store buffer for entries | 881 // TODO(hpayer): check store buffer for entries |
| 895 if (InNewSpace(dst_objects[i])) { | 882 if (InNewSpace(dst_objects[i])) { |
| (...skipping 23 matching lines...) |
| 919 | 906 |
| 920 | 907 |
| 921 static void VerifyStringTable(Heap* heap) { | 908 static void VerifyStringTable(Heap* heap) { |
| 922 StringTableVerifier verifier; | 909 StringTableVerifier verifier; |
| 923 heap->string_table()->IterateElements(&verifier); | 910 heap->string_table()->IterateElements(&verifier); |
| 924 } | 911 } |
| 925 #endif // VERIFY_HEAP | 912 #endif // VERIFY_HEAP |
| 926 | 913 |
| 927 | 914 |
| 928 static bool AbortIncrementalMarkingAndCollectGarbage( | 915 static bool AbortIncrementalMarkingAndCollectGarbage( |
| 929 Heap* heap, | 916 Heap* heap, AllocationSpace space, const char* gc_reason = NULL) { |
| 930 AllocationSpace space, | |
| 931 const char* gc_reason = NULL) { | |
| 932 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); | 917 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask); |
| 933 bool result = heap->CollectGarbage(space, gc_reason); | 918 bool result = heap->CollectGarbage(space, gc_reason); |
| 934 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); | 919 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags); |
| 935 return result; | 920 return result; |
| 936 } | 921 } |
| 937 | 922 |
| 938 | 923 |
| 939 void Heap::ReserveSpace(int *sizes, Address *locations_out) { | 924 void Heap::ReserveSpace(int* sizes, Address* locations_out) { |
| 940 bool gc_performed = true; | 925 bool gc_performed = true; |
| 941 int counter = 0; | 926 int counter = 0; |
| 942 static const int kThreshold = 20; | 927 static const int kThreshold = 20; |
| 943 while (gc_performed && counter++ < kThreshold) { | 928 while (gc_performed && counter++ < kThreshold) { |
| 944 gc_performed = false; | 929 gc_performed = false; |
| 945 DCHECK(NEW_SPACE == FIRST_PAGED_SPACE - 1); | 930 DCHECK(NEW_SPACE == FIRST_PAGED_SPACE - 1); |
| 946 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { | 931 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { |
| 947 if (sizes[space] != 0) { | 932 if (sizes[space] != 0) { |
| 948 AllocationResult allocation; | 933 AllocationResult allocation; |
| 949 if (space == NEW_SPACE) { | 934 if (space == NEW_SPACE) { |
| 950 allocation = new_space()->AllocateRaw(sizes[space]); | 935 allocation = new_space()->AllocateRaw(sizes[space]); |
| 951 } else { | 936 } else { |
| 952 allocation = paged_space(space)->AllocateRaw(sizes[space]); | 937 allocation = paged_space(space)->AllocateRaw(sizes[space]); |
| 953 } | 938 } |
| 954 FreeListNode* node; | 939 FreeListNode* node; |
| 955 if (!allocation.To(&node)) { | 940 if (!allocation.To(&node)) { |
| 956 if (space == NEW_SPACE) { | 941 if (space == NEW_SPACE) { |
| 957 Heap::CollectGarbage(NEW_SPACE, | 942 Heap::CollectGarbage(NEW_SPACE, |
| 958 "failed to reserve space in the new space"); | 943 "failed to reserve space in the new space"); |
| 959 } else { | 944 } else { |
| 960 AbortIncrementalMarkingAndCollectGarbage( | 945 AbortIncrementalMarkingAndCollectGarbage( |
| 961 this, | 946 this, static_cast<AllocationSpace>(space), |
| 962 static_cast<AllocationSpace>(space), | |
| 963 "failed to reserve space in paged space"); | 947 "failed to reserve space in paged space"); |
| 964 } | 948 } |
| 965 gc_performed = true; | 949 gc_performed = true; |
| 966 break; | 950 break; |
| 967 } else { | 951 } else { |
| 968 // Mark with a free list node, in case we have a GC before | 952 // Mark with a free list node, in case we have a GC before |
| 969 // deserializing. | 953 // deserializing. |
| 970 node->set_size(this, sizes[space]); | 954 node->set_size(this, sizes[space]); |
| 971 locations_out[space] = node->address(); | 955 locations_out[space] = node->address(); |
| 972 } | 956 } |
| (...skipping 56 matching lines...) |
| 1029 NormalizedMapCache::cast(cache)->Clear(); | 1013 NormalizedMapCache::cast(cache)->Clear(); |
| 1030 } | 1014 } |
| 1031 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 1015 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 1032 } | 1016 } |
| 1033 } | 1017 } |
| 1034 | 1018 |
| 1035 | 1019 |
| 1036 void Heap::UpdateSurvivalStatistics(int start_new_space_size) { | 1020 void Heap::UpdateSurvivalStatistics(int start_new_space_size) { |
| 1037 if (start_new_space_size == 0) return; | 1021 if (start_new_space_size == 0) return; |
| 1038 | 1022 |
| 1039 promotion_rate_ = | 1023 promotion_rate_ = (static_cast<double>(promoted_objects_size_) / |
| 1040 (static_cast<double>(promoted_objects_size_) / | 1024 static_cast<double>(start_new_space_size) * 100); |
| 1041 static_cast<double>(start_new_space_size) * 100); | |
| 1042 | 1025 |
| 1043 semi_space_copied_rate_ = | 1026 semi_space_copied_rate_ = |
| 1044 (static_cast<double>(semi_space_copied_object_size_) / | 1027 (static_cast<double>(semi_space_copied_object_size_) / |
| 1045 static_cast<double>(start_new_space_size) * 100); | 1028 static_cast<double>(start_new_space_size) * 100); |
| 1046 | 1029 |
| 1047 double survival_rate = promotion_rate_ + semi_space_copied_rate_; | 1030 double survival_rate = promotion_rate_ + semi_space_copied_rate_; |
| 1048 | 1031 |
| 1049 if (survival_rate > kYoungSurvivalRateHighThreshold) { | 1032 if (survival_rate > kYoungSurvivalRateHighThreshold) { |
| 1050 high_survival_rate_period_length_++; | 1033 high_survival_rate_period_length_++; |
| 1051 } else { | 1034 } else { |
| 1052 high_survival_rate_period_length_ = 0; | 1035 high_survival_rate_period_length_ = 0; |
| 1053 } | 1036 } |
| 1054 } | 1037 } |
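Aside: a worked example of the survival statistics above, with made-up numbers purely for illustration: if the new space held 4 MB of objects when the scavenge began, 1 MB of that was promoted to the old generation and 0.5 MB was copied within the semi-spaces, then promotion_rate_ = 1 / 4 * 100 = 25, semi_space_copied_rate_ = 0.5 / 4 * 100 = 12.5, and their sum, a survival rate of 37.5, is what gets compared against kYoungSurvivalRateHighThreshold.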
| 1055 | 1038 |
| 1056 bool Heap::PerformGarbageCollection( | 1039 bool Heap::PerformGarbageCollection( |
| 1057 GarbageCollector collector, | 1040 GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) { |
| 1058 const v8::GCCallbackFlags gc_callback_flags) { | |
| 1059 int freed_global_handles = 0; | 1041 int freed_global_handles = 0; |
| 1060 | 1042 |
| 1061 if (collector != SCAVENGER) { | 1043 if (collector != SCAVENGER) { |
| 1062 PROFILE(isolate_, CodeMovingGCEvent()); | 1044 PROFILE(isolate_, CodeMovingGCEvent()); |
| 1063 } | 1045 } |
| 1064 | 1046 |
| 1065 #ifdef VERIFY_HEAP | 1047 #ifdef VERIFY_HEAP |
| 1066 if (FLAG_verify_heap) { | 1048 if (FLAG_verify_heap) { |
| 1067 VerifyStringTable(this); | 1049 VerifyStringTable(this); |
| 1068 } | 1050 } |
| 1069 #endif | 1051 #endif |
| 1070 | 1052 |
| 1071 GCType gc_type = | 1053 GCType gc_type = |
| 1072 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; | 1054 collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge; |
| 1073 | 1055 |
| 1074 { GCCallbacksScope scope(this); | 1056 { |
| | 1057 GCCallbacksScope scope(this); |
| 1075 if (scope.CheckReenter()) { | 1058 if (scope.CheckReenter()) { |
| 1076 AllowHeapAllocation allow_allocation; | 1059 AllowHeapAllocation allow_allocation; |
| 1077 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1060 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
| 1078 VMState<EXTERNAL> state(isolate_); | 1061 VMState<EXTERNAL> state(isolate_); |
| 1079 HandleScope handle_scope(isolate_); | 1062 HandleScope handle_scope(isolate_); |
| 1080 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); | 1063 CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags); |
| 1081 } | 1064 } |
| 1082 } | 1065 } |
| 1083 | 1066 |
| 1084 EnsureFromSpaceIsCommitted(); | 1067 EnsureFromSpaceIsCommitted(); |
| (...skipping 23 matching lines...) |
| 1108 | 1091 |
| 1109 UpdateSurvivalStatistics(start_new_space_size); | 1092 UpdateSurvivalStatistics(start_new_space_size); |
| 1110 | 1093 |
| 1111 isolate_->counters()->objs_since_last_young()->Set(0); | 1094 isolate_->counters()->objs_since_last_young()->Set(0); |
| 1112 | 1095 |
| 1113 // Callbacks that fire after this point might trigger nested GCs and | 1096 // Callbacks that fire after this point might trigger nested GCs and |
| 1114 // restart incremental marking, the assertion can't be moved down. | 1097 // restart incremental marking, the assertion can't be moved down. |
| 1115 DCHECK(collector == SCAVENGER || incremental_marking()->IsStopped()); | 1098 DCHECK(collector == SCAVENGER || incremental_marking()->IsStopped()); |
| 1116 | 1099 |
| 1117 gc_post_processing_depth_++; | 1100 gc_post_processing_depth_++; |
| 1118 { AllowHeapAllocation allow_allocation; | 1101 { |
| 1102 AllowHeapAllocation allow_allocation; |
| 1119 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1103 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
| 1120 freed_global_handles = | 1104 freed_global_handles = |
| 1121 isolate_->global_handles()->PostGarbageCollectionProcessing(collector); | 1105 isolate_->global_handles()->PostGarbageCollectionProcessing(collector); |
| 1122 } | 1106 } |
| 1123 gc_post_processing_depth_--; | 1107 gc_post_processing_depth_--; |
| 1124 | 1108 |
| 1125 isolate_->eternal_handles()->PostGarbageCollectionProcessing(this); | 1109 isolate_->eternal_handles()->PostGarbageCollectionProcessing(this); |
| 1126 | 1110 |
| 1127 // Update relocatables. | 1111 // Update relocatables. |
| 1128 Relocatable::PostGarbageCollectionProcessing(isolate_); | 1112 Relocatable::PostGarbageCollectionProcessing(isolate_); |
| 1129 | 1113 |
| 1130 if (collector == MARK_COMPACTOR) { | 1114 if (collector == MARK_COMPACTOR) { |
| 1131 // Register the amount of external allocated memory. | 1115 // Register the amount of external allocated memory. |
| 1132 amount_of_external_allocated_memory_at_last_global_gc_ = | 1116 amount_of_external_allocated_memory_at_last_global_gc_ = |
| 1133 amount_of_external_allocated_memory_; | 1117 amount_of_external_allocated_memory_; |
| 1134 old_generation_allocation_limit_ = | 1118 old_generation_allocation_limit_ = OldGenerationAllocationLimit( |
| 1135 OldGenerationAllocationLimit(PromotedSpaceSizeOfObjects(), | 1119 PromotedSpaceSizeOfObjects(), freed_global_handles); |
| 1136 freed_global_handles); | |
| 1137 } | 1120 } |
| 1138 | 1121 |
| 1139 { GCCallbacksScope scope(this); | 1122 { |
| 1123 GCCallbacksScope scope(this); |
| 1140 if (scope.CheckReenter()) { | 1124 if (scope.CheckReenter()) { |
| 1141 AllowHeapAllocation allow_allocation; | 1125 AllowHeapAllocation allow_allocation; |
| 1142 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); | 1126 GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL); |
| 1143 VMState<EXTERNAL> state(isolate_); | 1127 VMState<EXTERNAL> state(isolate_); |
| 1144 HandleScope handle_scope(isolate_); | 1128 HandleScope handle_scope(isolate_); |
| 1145 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); | 1129 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
| 1146 } | 1130 } |
| 1147 } | 1131 } |
| 1148 | 1132 |
| 1149 #ifdef VERIFY_HEAP | 1133 #ifdef VERIFY_HEAP |
| (...skipping 27 matching lines...) |
| 1177 GCCallbackFlags gc_callback_flags) { | 1161 GCCallbackFlags gc_callback_flags) { |
| 1178 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { | 1162 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { |
| 1179 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { | 1163 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { |
| 1180 if (!gc_epilogue_callbacks_[i].pass_isolate_) { | 1164 if (!gc_epilogue_callbacks_[i].pass_isolate_) { |
| 1181 v8::GCPrologueCallback callback = | 1165 v8::GCPrologueCallback callback = |
| 1182 reinterpret_cast<v8::GCPrologueCallback>( | 1166 reinterpret_cast<v8::GCPrologueCallback>( |
| 1183 gc_epilogue_callbacks_[i].callback); | 1167 gc_epilogue_callbacks_[i].callback); |
| 1184 callback(gc_type, gc_callback_flags); | 1168 callback(gc_type, gc_callback_flags); |
| 1185 } else { | 1169 } else { |
| 1186 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); | 1170 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); |
| 1187 gc_epilogue_callbacks_[i].callback( | 1171 gc_epilogue_callbacks_[i].callback(isolate, gc_type, gc_callback_flags); |
| 1188 isolate, gc_type, gc_callback_flags); | |
| 1189 } | 1172 } |
| 1190 } | 1173 } |
| 1191 } | 1174 } |
| 1192 } | 1175 } |
| 1193 | 1176 |
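CallGCPrologueCallbacks and CallGCEpilogueCallbacks both walk a list of registered callbacks, filter on a GCType bitmask, and pick one of two call signatures depending on whether the callback was registered with an isolate (the pass_isolate_ bit). A hedged, self-contained sketch of that dual-signature dispatch; every type and name below is an invented stand-in, not the real v8 declarations:

#include <cstdio>
#include <vector>

typedef unsigned GCTypeMask;  // bitmask of GC kinds the callback subscribed to
struct FakeIsolate {};

typedef void (*GenericCallback)();  // type-erased storage
typedef void (*LegacyCallback)(GCTypeMask type, int flags);
typedef void (*IsolateCallback)(FakeIsolate* isolate, GCTypeMask type, int flags);

struct CallbackEntry {
  GenericCallback callback;
  GCTypeMask gc_type;
  bool pass_isolate;  // selects which signature to cast back to before calling
};

static void OldStyle(GCTypeMask type, int flags) {
  std::printf("legacy callback: type=%u flags=%d\n", type, flags);
}

static void NewStyle(FakeIsolate*, GCTypeMask type, int flags) {
  std::printf("isolate-aware callback: type=%u flags=%d\n", type, flags);
}

static void CallEpilogueCallbacks(const std::vector<CallbackEntry>& callbacks,
                                  FakeIsolate* isolate, GCTypeMask gc_type,
                                  int flags) {
  for (size_t i = 0; i < callbacks.size(); ++i) {
    if ((gc_type & callbacks[i].gc_type) == 0) continue;  // GCType filter
    if (callbacks[i].pass_isolate) {
      reinterpret_cast<IsolateCallback>(callbacks[i].callback)(isolate, gc_type, flags);
    } else {
      reinterpret_cast<LegacyCallback>(callbacks[i].callback)(gc_type, flags);
    }
  }
}

int main() {
  FakeIsolate isolate;
  std::vector<CallbackEntry> callbacks;
  callbacks.push_back({reinterpret_cast<GenericCallback>(&OldStyle), 0x1u, false});
  callbacks.push_back({reinterpret_cast<GenericCallback>(&NewStyle), 0x3u, true});
  CallEpilogueCallbacks(callbacks, &isolate, 0x1u, 0);  // runs both callbacks
  return 0;
}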
| 1194 | 1177 |
| 1195 void Heap::MarkCompact() { | 1178 void Heap::MarkCompact() { |
| 1196 gc_state_ = MARK_COMPACT; | 1179 gc_state_ = MARK_COMPACT; |
| 1197 LOG(isolate_, ResourceEvent("markcompact", "begin")); | 1180 LOG(isolate_, ResourceEvent("markcompact", "begin")); |
| 1198 | 1181 |
| (...skipping 37 matching lines...) |
| 1236 FlushNumberStringCache(); | 1219 FlushNumberStringCache(); |
| 1237 if (FLAG_cleanup_code_caches_at_gc) { | 1220 if (FLAG_cleanup_code_caches_at_gc) { |
| 1238 polymorphic_code_cache()->set_cache(undefined_value()); | 1221 polymorphic_code_cache()->set_cache(undefined_value()); |
| 1239 } | 1222 } |
| 1240 | 1223 |
| 1241 ClearNormalizedMapCaches(); | 1224 ClearNormalizedMapCaches(); |
| 1242 } | 1225 } |
| 1243 | 1226 |
| 1244 | 1227 |
| 1245 // Helper class for copying HeapObjects | 1228 // Helper class for copying HeapObjects |
| 1246 class ScavengeVisitor: public ObjectVisitor { | 1229 class ScavengeVisitor : public ObjectVisitor { |
| 1247 public: | 1230 public: |
| 1248 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {} | 1231 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {} |
| 1249 | 1232 |
| 1250 void VisitPointer(Object** p) { ScavengePointer(p); } | 1233 void VisitPointer(Object** p) { ScavengePointer(p); } |
| 1251 | 1234 |
| 1252 void VisitPointers(Object** start, Object** end) { | 1235 void VisitPointers(Object** start, Object** end) { |
| 1253 // Copy all HeapObject pointers in [start, end) | 1236 // Copy all HeapObject pointers in [start, end) |
| 1254 for (Object** p = start; p < end; p++) ScavengePointer(p); | 1237 for (Object** p = start; p < end; p++) ScavengePointer(p); |
| 1255 } | 1238 } |
| 1256 | 1239 |
| 1257 private: | 1240 private: |
| 1258 void ScavengePointer(Object** p) { | 1241 void ScavengePointer(Object** p) { |
| 1259 Object* object = *p; | 1242 Object* object = *p; |
| 1260 if (!heap_->InNewSpace(object)) return; | 1243 if (!heap_->InNewSpace(object)) return; |
| 1261 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 1244 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
| 1262 reinterpret_cast<HeapObject*>(object)); | 1245 reinterpret_cast<HeapObject*>(object)); |
| 1263 } | 1246 } |
| 1264 | 1247 |
| 1265 Heap* heap_; | 1248 Heap* heap_; |
| 1266 }; | 1249 }; |
| 1267 | 1250 |
| 1268 | 1251 |
| 1269 #ifdef VERIFY_HEAP | 1252 #ifdef VERIFY_HEAP |
| 1270 // Visitor class to verify pointers in code or data space do not point into | 1253 // Visitor class to verify pointers in code or data space do not point into |
| 1271 // new space. | 1254 // new space. |
| 1272 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor { | 1255 class VerifyNonPointerSpacePointersVisitor : public ObjectVisitor { |
| 1273 public: | 1256 public: |
| 1274 explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {} | 1257 explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {} |
| 1275 void VisitPointers(Object** start, Object**end) { | 1258 void VisitPointers(Object** start, Object** end) { |
| 1276 for (Object** current = start; current < end; current++) { | 1259 for (Object** current = start; current < end; current++) { |
| 1277 if ((*current)->IsHeapObject()) { | 1260 if ((*current)->IsHeapObject()) { |
| 1278 CHECK(!heap_->InNewSpace(HeapObject::cast(*current))); | 1261 CHECK(!heap_->InNewSpace(HeapObject::cast(*current))); |
| 1279 } | 1262 } |
| 1280 } | 1263 } |
| 1281 } | 1264 } |
| 1282 | 1265 |
| 1283 private: | 1266 private: |
| 1284 Heap* heap_; | 1267 Heap* heap_; |
| 1285 }; | 1268 }; |
| 1286 | 1269 |
| 1287 | 1270 |
| 1288 static void VerifyNonPointerSpacePointers(Heap* heap) { | 1271 static void VerifyNonPointerSpacePointers(Heap* heap) { |
| 1289 // Verify that there are no pointers to new space in spaces where we | 1272 // Verify that there are no pointers to new space in spaces where we |
| 1290 // do not expect them. | 1273 // do not expect them. |
| 1291 VerifyNonPointerSpacePointersVisitor v(heap); | 1274 VerifyNonPointerSpacePointersVisitor v(heap); |
| 1292 HeapObjectIterator code_it(heap->code_space()); | 1275 HeapObjectIterator code_it(heap->code_space()); |
| 1293 for (HeapObject* object = code_it.Next(); | 1276 for (HeapObject* object = code_it.Next(); object != NULL; |
| 1294 object != NULL; object = code_it.Next()) | 1277 object = code_it.Next()) |
| 1295 object->Iterate(&v); | 1278 object->Iterate(&v); |
| 1296 | 1279 |
| 1297 // The old data space was normally swept conservatively so that the iterator | 1280 // The old data space was normally swept conservatively so that the iterator |
| 1298 // doesn't work, so we normally skip the next bit. | 1281 // doesn't work, so we normally skip the next bit. |
| 1299 if (heap->old_data_space()->swept_precisely()) { | 1282 if (heap->old_data_space()->swept_precisely()) { |
| 1300 HeapObjectIterator data_it(heap->old_data_space()); | 1283 HeapObjectIterator data_it(heap->old_data_space()); |
| 1301 for (HeapObject* object = data_it.Next(); | 1284 for (HeapObject* object = data_it.Next(); object != NULL; |
| 1302 object != NULL; object = data_it.Next()) | 1285 object = data_it.Next()) |
| 1303 object->Iterate(&v); | 1286 object->Iterate(&v); |
| 1304 } | 1287 } |
| 1305 } | 1288 } |
| 1306 #endif // VERIFY_HEAP | 1289 #endif // VERIFY_HEAP |
| 1307 | 1290 |
| 1308 | 1291 |
| 1309 void Heap::CheckNewSpaceExpansionCriteria() { | 1292 void Heap::CheckNewSpaceExpansionCriteria() { |
| 1310 if (new_space_.Capacity() < new_space_.MaximumCapacity() && | 1293 if (new_space_.Capacity() < new_space_.MaximumCapacity() && |
| 1311 survived_since_last_expansion_ > new_space_.Capacity()) { | 1294 survived_since_last_expansion_ > new_space_.Capacity()) { |
| 1312 // Grow the size of new space if there is room to grow, enough data | 1295 // Grow the size of new space if there is room to grow, enough data |
| 1313 // has survived scavenge since the last expansion and we are not in | 1296 // has survived scavenge since the last expansion and we are not in |
| 1314 // high promotion mode. | 1297 // high promotion mode. |
| 1315 new_space_.Grow(); | 1298 new_space_.Grow(); |
| 1316 survived_since_last_expansion_ = 0; | 1299 survived_since_last_expansion_ = 0; |
| 1317 } | 1300 } |
| 1318 } | 1301 } |
| 1319 | 1302 |
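CheckNewSpaceExpansionCriteria grows new space only when there is still headroom below the maximum capacity and the bytes that survived scavenges since the last expansion exceed the current capacity. A small illustrative sketch of that policy with made-up sizes:

#include <cstdio>

struct NewSpacePolicy {
  long capacity;                       // current committed capacity
  long maximum_capacity;               // hard upper bound
  long survived_since_last_expansion;  // bytes that survived recent scavenges

  // Mirrors the criterion above: headroom left AND enough recent survivors.
  bool ShouldGrow() const {
    return capacity < maximum_capacity &&
           survived_since_last_expansion > capacity;
  }
};

int main() {
  NewSpacePolicy p = {1 * 1024 * 1024, 8 * 1024 * 1024, 1536 * 1024};
  std::printf("grow new space? %s\n", p.ShouldGrow() ? "yes" : "no");  // yes
  return 0;
}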
| 1320 | 1303 |
| 1321 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1304 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
| 1322 return heap->InNewSpace(*p) && | 1305 return heap->InNewSpace(*p) && |
| 1323 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1306 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
| 1324 } | 1307 } |
| 1325 | 1308 |
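IsUnscavengedHeapObject relies on the scavenger's forwarding convention: once an object has been copied, its map word is overwritten with a forwarding address (see MigrateObject further down), so a new-space object whose map word is not a forwarding address has not been scavenged yet. A sketch of that convention using an invented map-word encoding; V8's real MapWord tagging is different:

#include <cstdint>
#include <cstdio>

// Invented encoding: the low bit of the word marks "this is a forwarding address".
struct FakeMapWord {
  uintptr_t value;

  bool IsForwardingAddress() const { return (value & 1) != 0; }

  static FakeMapWord FromForwardingAddress(uintptr_t target) {
    FakeMapWord word = {target | 1};
    return word;
  }

  uintptr_t ToForwardingAddress() const {
    return value & ~static_cast<uintptr_t>(1);
  }
};

int main() {
  FakeMapWord before = {0x1000};  // looks like a plain map pointer: not scavenged yet
  FakeMapWord after = FakeMapWord::FromForwardingAddress(0x2000);

  std::printf("before forwarded? %d\n", before.IsForwardingAddress() ? 1 : 0);  // 0
  std::printf("after forwarded?  %d -> %#lx\n", after.IsForwardingAddress() ? 1 : 0,
              static_cast<unsigned long>(after.ToForwardingAddress()));  // 1 -> 0x2000
  return 0;
}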
| 1326 | 1309 |
| 1327 void Heap::ScavengeStoreBufferCallback( | 1310 void Heap::ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page, |
| 1328 Heap* heap, | 1311 StoreBufferEvent event) { |
| 1329 MemoryChunk* page, | |
| 1330 StoreBufferEvent event) { | |
| 1331 heap->store_buffer_rebuilder_.Callback(page, event); | 1312 heap->store_buffer_rebuilder_.Callback(page, event); |
| 1332 } | 1313 } |
| 1333 | 1314 |
| 1334 | 1315 |
| 1335 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) { | 1316 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) { |
| 1336 if (event == kStoreBufferStartScanningPagesEvent) { | 1317 if (event == kStoreBufferStartScanningPagesEvent) { |
| 1337 start_of_current_page_ = NULL; | 1318 start_of_current_page_ = NULL; |
| 1338 current_page_ = NULL; | 1319 current_page_ = NULL; |
| 1339 } else if (event == kStoreBufferScanningPageEvent) { | 1320 } else if (event == kStoreBufferScanningPageEvent) { |
| 1340 if (current_page_ != NULL) { | 1321 if (current_page_ != NULL) { |
| (...skipping 38 matching lines...) |
| 1379 } else { | 1360 } else { |
| 1380 UNREACHABLE(); | 1361 UNREACHABLE(); |
| 1381 } | 1362 } |
| 1382 } | 1363 } |
| 1383 | 1364 |
| 1384 | 1365 |
| 1385 void PromotionQueue::Initialize() { | 1366 void PromotionQueue::Initialize() { |
| 1386 // Assumes that a NewSpacePage exactly fits a number of promotion queue | 1367 // Assumes that a NewSpacePage exactly fits a number of promotion queue |
| 1387 // entries (where each is a pair of intptr_t). This allows us to simplify | 1368 // entries (where each is a pair of intptr_t). This allows us to simplify |
| 1388 // the test for when to switch pages. | 1369 // the test for when to switch pages. |
| 1389 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) | 1370 DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) == |
| 1390 == 0); | 1371 0); |
| 1391 limit_ = reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceStart()); | 1372 limit_ = reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceStart()); |
| 1392 front_ = rear_ = | 1373 front_ = rear_ = |
| 1393 reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd()); | 1374 reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd()); |
| 1394 emergency_stack_ = NULL; | 1375 emergency_stack_ = NULL; |
| 1395 guard_ = false; | 1376 guard_ = false; |
| 1396 } | 1377 } |
| 1397 | 1378 |
| 1398 | 1379 |
| 1399 void PromotionQueue::RelocateQueueHead() { | 1380 void PromotionQueue::RelocateQueueHead() { |
| 1400 DCHECK(emergency_stack_ == NULL); | 1381 DCHECK(emergency_stack_ == NULL); |
| 1401 | 1382 |
| 1402 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); | 1383 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); |
| 1403 intptr_t* head_start = rear_; | 1384 intptr_t* head_start = rear_; |
| 1404 intptr_t* head_end = | 1385 intptr_t* head_end = Min(front_, reinterpret_cast<intptr_t*>(p->area_end())); |
| 1405 Min(front_, reinterpret_cast<intptr_t*>(p->area_end())); | |
| 1406 | 1386 |
| 1407 int entries_count = | 1387 int entries_count = |
| 1408 static_cast<int>(head_end - head_start) / kEntrySizeInWords; | 1388 static_cast<int>(head_end - head_start) / kEntrySizeInWords; |
| 1409 | 1389 |
| 1410 emergency_stack_ = new List<Entry>(2 * entries_count); | 1390 emergency_stack_ = new List<Entry>(2 * entries_count); |
| 1411 | 1391 |
| 1412 while (head_start != head_end) { | 1392 while (head_start != head_end) { |
| 1413 int size = static_cast<int>(*(head_start++)); | 1393 int size = static_cast<int>(*(head_start++)); |
| 1414 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++)); | 1394 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++)); |
| 1415 emergency_stack_->Add(Entry(obj, size)); | 1395 emergency_stack_->Add(Entry(obj, size)); |
| 1416 } | 1396 } |
| 1417 rear_ = head_end; | 1397 rear_ = head_end; |
| 1418 } | 1398 } |
| 1419 | 1399 |
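The promotion queue grows downward from the end of to-space as pairs of intptr_t, which is why RelocateQueueHead can compute the number of live entries as (head_end - head_start) / kEntrySizeInWords before spilling them onto the emergency stack. A tiny hedged sketch of that pair layout, with a plain array standing in for the tail of to-space:

#include <cstdint>
#include <cstdio>

const int kEntrySizeInWords = 2;  // each entry is a (size, object address) pair

int main() {
  intptr_t buffer[8];            // stands in for the tail of to-space
  intptr_t* front = buffer + 8;  // both ends start at the end of the space
  intptr_t* rear = buffer + 8;   // entries are pushed by decrementing rear

  // Push the object address, then its size; because rear moves downward, a
  // forward walk from rear (as in the relocation loop above) reads the size
  // back first, then the address.
  *(--rear) = 0x1000; *(--rear) = 32;
  *(--rear) = 0x2000; *(--rear) = 64;

  int entries_count = static_cast<int>(front - rear) / kEntrySizeInWords;
  std::printf("queued entries: %d\n", entries_count);  // 2
  return 0;
}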
| 1420 | 1400 |
| 1421 class ScavengeWeakObjectRetainer : public WeakObjectRetainer { | 1401 class ScavengeWeakObjectRetainer : public WeakObjectRetainer { |
| 1422 public: | 1402 public: |
| 1423 explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { } | 1403 explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) {} |
| 1424 | 1404 |
| 1425 virtual Object* RetainAs(Object* object) { | 1405 virtual Object* RetainAs(Object* object) { |
| 1426 if (!heap_->InFromSpace(object)) { | 1406 if (!heap_->InFromSpace(object)) { |
| 1427 return object; | 1407 return object; |
| 1428 } | 1408 } |
| 1429 | 1409 |
| 1430 MapWord map_word = HeapObject::cast(object)->map_word(); | 1410 MapWord map_word = HeapObject::cast(object)->map_word(); |
| 1431 if (map_word.IsForwardingAddress()) { | 1411 if (map_word.IsForwardingAddress()) { |
| 1432 return map_word.ToForwardingAddress(); | 1412 return map_word.ToForwardingAddress(); |
| 1433 } | 1413 } |
| (...skipping 55 matching lines...) |
| 1489 #ifdef DEBUG | 1469 #ifdef DEBUG |
| 1490 store_buffer()->Clean(); | 1470 store_buffer()->Clean(); |
| 1491 #endif | 1471 #endif |
| 1492 | 1472 |
| 1493 ScavengeVisitor scavenge_visitor(this); | 1473 ScavengeVisitor scavenge_visitor(this); |
| 1494 // Copy roots. | 1474 // Copy roots. |
| 1495 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); | 1475 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); |
| 1496 | 1476 |
| 1497 // Copy objects reachable from the old generation. | 1477 // Copy objects reachable from the old generation. |
| 1498 { | 1478 { |
| 1499 StoreBufferRebuildScope scope(this, | 1479 StoreBufferRebuildScope scope(this, store_buffer(), |
| 1500 store_buffer(), | |
| 1501 &ScavengeStoreBufferCallback); | 1480 &ScavengeStoreBufferCallback); |
| 1502 store_buffer()->IteratePointersToNewSpace(&ScavengeObject); | 1481 store_buffer()->IteratePointersToNewSpace(&ScavengeObject); |
| 1503 } | 1482 } |
| 1504 | 1483 |
| 1505 // Copy objects reachable from simple cells by scavenging cell values | 1484 // Copy objects reachable from simple cells by scavenging cell values |
| 1506 // directly. | 1485 // directly. |
| 1507 HeapObjectIterator cell_iterator(cell_space_); | 1486 HeapObjectIterator cell_iterator(cell_space_); |
| 1508 for (HeapObject* heap_object = cell_iterator.Next(); | 1487 for (HeapObject* heap_object = cell_iterator.Next(); heap_object != NULL; |
| 1509 heap_object != NULL; | |
| 1510 heap_object = cell_iterator.Next()) { | 1488 heap_object = cell_iterator.Next()) { |
| 1511 if (heap_object->IsCell()) { | 1489 if (heap_object->IsCell()) { |
| 1512 Cell* cell = Cell::cast(heap_object); | 1490 Cell* cell = Cell::cast(heap_object); |
| 1513 Address value_address = cell->ValueAddress(); | 1491 Address value_address = cell->ValueAddress(); |
| 1514 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 1492 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 1515 } | 1493 } |
| 1516 } | 1494 } |
| 1517 | 1495 |
| 1518 // Copy objects reachable from global property cells by scavenging global | 1496 // Copy objects reachable from global property cells by scavenging global |
| 1519 // property cell values directly. | 1497 // property cell values directly. |
| (...skipping 111 matching lines...) |
| 1631 } | 1609 } |
| 1632 } | 1610 } |
| 1633 | 1611 |
| 1634 DCHECK(last <= end); | 1612 DCHECK(last <= end); |
| 1635 external_string_table_.ShrinkNewStrings(static_cast<int>(last - start)); | 1613 external_string_table_.ShrinkNewStrings(static_cast<int>(last - start)); |
| 1636 } | 1614 } |
| 1637 | 1615 |
| 1638 | 1616 |
| 1639 void Heap::UpdateReferencesInExternalStringTable( | 1617 void Heap::UpdateReferencesInExternalStringTable( |
| 1640 ExternalStringTableUpdaterCallback updater_func) { | 1618 ExternalStringTableUpdaterCallback updater_func) { |
| 1641 | |
| 1642 // Update old space string references. | 1619 // Update old space string references. |
| 1643 if (external_string_table_.old_space_strings_.length() > 0) { | 1620 if (external_string_table_.old_space_strings_.length() > 0) { |
| 1644 Object** start = &external_string_table_.old_space_strings_[0]; | 1621 Object** start = &external_string_table_.old_space_strings_[0]; |
| 1645 Object** end = start + external_string_table_.old_space_strings_.length(); | 1622 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1646 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1623 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1647 } | 1624 } |
| 1648 | 1625 |
| 1649 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1626 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1650 } | 1627 } |
| 1651 | 1628 |
| (...skipping 54 matching lines...) |
| 1706 } | 1683 } |
| 1707 if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites(); | 1684 if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites(); |
| 1708 } | 1685 } |
| 1709 | 1686 |
| 1710 | 1687 |
| 1711 void Heap::EvaluateOldSpaceLocalPretenuring( | 1688 void Heap::EvaluateOldSpaceLocalPretenuring( |
| 1712 uint64_t size_of_objects_before_gc) { | 1689 uint64_t size_of_objects_before_gc) { |
| 1713 uint64_t size_of_objects_after_gc = SizeOfObjects(); | 1690 uint64_t size_of_objects_after_gc = SizeOfObjects(); |
| 1714 double old_generation_survival_rate = | 1691 double old_generation_survival_rate = |
| 1715 (static_cast<double>(size_of_objects_after_gc) * 100) / | 1692 (static_cast<double>(size_of_objects_after_gc) * 100) / |
| 1716 static_cast<double>(size_of_objects_before_gc); | 1693 static_cast<double>(size_of_objects_before_gc); |
| 1717 | 1694 |
| 1718 if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) { | 1695 if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) { |
| 1719 // Too many objects died in the old generation, pretenuring of wrong | 1696 // Too many objects died in the old generation, pretenuring of wrong |
| 1720 // allocation sites may be the cause for that. We have to deopt all | 1697 // allocation sites may be the cause for that. We have to deopt all |
| 1721 // dependent code registered in the allocation sites to re-evaluate | 1698 // dependent code registered in the allocation sites to re-evaluate |
| 1722 // our pretenuring decisions. | 1699 // our pretenuring decisions. |
| 1723 ResetAllAllocationSitesDependentCode(TENURED); | 1700 ResetAllAllocationSitesDependentCode(TENURED); |
| 1724 if (FLAG_trace_pretenuring) { | 1701 if (FLAG_trace_pretenuring) { |
| 1725 PrintF("Deopt all allocation sites dependent code due to low survival " | 1702 PrintF( |
| 1726 "rate in the old generation %f\n", old_generation_survival_rate); | 1703 "Deopt all allocation sites dependent code due to low survival " |
| 1704 "rate in the old generation %f\n", |
| 1705 old_generation_survival_rate); |
| 1727 } | 1706 } |
| 1728 } | 1707 } |
| 1729 } | 1708 } |
| 1730 | 1709 |
| 1731 | 1710 |
| 1732 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { | 1711 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { |
| 1733 DisallowHeapAllocation no_allocation; | 1712 DisallowHeapAllocation no_allocation; |
| 1734 // All external strings are listed in the external string table. | 1713 // All external strings are listed in the external string table. |
| 1735 | 1714 |
| 1736 class ExternalStringTableVisitorAdapter : public ObjectVisitor { | 1715 class ExternalStringTableVisitorAdapter : public ObjectVisitor { |
| 1737 public: | 1716 public: |
| 1738 explicit ExternalStringTableVisitorAdapter( | 1717 explicit ExternalStringTableVisitorAdapter( |
| 1739 v8::ExternalResourceVisitor* visitor) : visitor_(visitor) {} | 1718 v8::ExternalResourceVisitor* visitor) |
| 1719 : visitor_(visitor) {} |
| 1740 virtual void VisitPointers(Object** start, Object** end) { | 1720 virtual void VisitPointers(Object** start, Object** end) { |
| 1741 for (Object** p = start; p < end; p++) { | 1721 for (Object** p = start; p < end; p++) { |
| 1742 DCHECK((*p)->IsExternalString()); | 1722 DCHECK((*p)->IsExternalString()); |
| 1743 visitor_->VisitExternalString(Utils::ToLocal( | 1723 visitor_->VisitExternalString( |
| 1744 Handle<String>(String::cast(*p)))); | 1724 Utils::ToLocal(Handle<String>(String::cast(*p)))); |
| 1745 } | 1725 } |
| 1746 } | 1726 } |
| 1727 |
| 1747 private: | 1728 private: |
| 1748 v8::ExternalResourceVisitor* visitor_; | 1729 v8::ExternalResourceVisitor* visitor_; |
| 1749 } external_string_table_visitor(visitor); | 1730 } external_string_table_visitor(visitor); |
| 1750 | 1731 |
| 1751 external_string_table_.Iterate(&external_string_table_visitor); | 1732 external_string_table_.Iterate(&external_string_table_visitor); |
| 1752 } | 1733 } |
| 1753 | 1734 |
| 1754 | 1735 |
| 1755 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> { | 1736 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> { |
| 1756 public: | 1737 public: |
| (...skipping 10 matching lines...) |
| 1767 Address new_space_front) { | 1748 Address new_space_front) { |
| 1768 do { | 1749 do { |
| 1769 SemiSpace::AssertValidRange(new_space_front, new_space_.top()); | 1750 SemiSpace::AssertValidRange(new_space_front, new_space_.top()); |
| 1770 // The addresses new_space_front and new_space_.top() define a | 1751 // The addresses new_space_front and new_space_.top() define a |
| 1771 // queue of unprocessed copied objects. Process them until the | 1752 // queue of unprocessed copied objects. Process them until the |
| 1772 // queue is empty. | 1753 // queue is empty. |
| 1773 while (new_space_front != new_space_.top()) { | 1754 while (new_space_front != new_space_.top()) { |
| 1774 if (!NewSpacePage::IsAtEnd(new_space_front)) { | 1755 if (!NewSpacePage::IsAtEnd(new_space_front)) { |
| 1775 HeapObject* object = HeapObject::FromAddress(new_space_front); | 1756 HeapObject* object = HeapObject::FromAddress(new_space_front); |
| 1776 new_space_front += | 1757 new_space_front += |
| 1777 NewSpaceScavenger::IterateBody(object->map(), object); | 1758 NewSpaceScavenger::IterateBody(object->map(), object); |
| 1778 } else { | 1759 } else { |
| 1779 new_space_front = | 1760 new_space_front = |
| 1780 NewSpacePage::FromLimit(new_space_front)->next_page()->area_start(); | 1761 NewSpacePage::FromLimit(new_space_front)->next_page()->area_start(); |
| 1781 } | 1762 } |
| 1782 } | 1763 } |
| 1783 | 1764 |
| 1784 // Promote and process all the to-be-promoted objects. | 1765 // Promote and process all the to-be-promoted objects. |
| 1785 { | 1766 { |
| 1786 StoreBufferRebuildScope scope(this, | 1767 StoreBufferRebuildScope scope(this, store_buffer(), |
| 1787 store_buffer(), | |
| 1788 &ScavengeStoreBufferCallback); | 1768 &ScavengeStoreBufferCallback); |
| 1789 while (!promotion_queue()->is_empty()) { | 1769 while (!promotion_queue()->is_empty()) { |
| 1790 HeapObject* target; | 1770 HeapObject* target; |
| 1791 int size; | 1771 int size; |
| 1792 promotion_queue()->remove(&target, &size); | 1772 promotion_queue()->remove(&target, &size); |
| 1793 | 1773 |
| 1794 // Promoted object might be already partially visited | 1774 // Promoted object might be already partially visited |
| 1795 // during old space pointer iteration. Thus we search specifically | 1775 // during old space pointer iteration. Thus we search specifically |
| 1796 // for pointers to from semispace instead of looking for pointers | 1776 // for pointers to from semispace instead of looking for pointers |
| 1797 // to new space. | 1777 // to new space. |
| 1798 DCHECK(!target->IsMap()); | 1778 DCHECK(!target->IsMap()); |
| 1799 IterateAndMarkPointersToFromSpace(target->address(), | 1779 IterateAndMarkPointersToFromSpace( |
| 1800 target->address() + size, | 1780 target->address(), target->address() + size, &ScavengeObject); |
| 1801 &ScavengeObject); | |
| 1802 } | 1781 } |
| 1803 } | 1782 } |
| 1804 | 1783 |
| 1805 // Take another spin if there are now unswept objects in new space | 1784 // Take another spin if there are now unswept objects in new space |
| 1806 // (there are currently no more unswept promoted objects). | 1785 // (there are currently no more unswept promoted objects). |
| 1807 } while (new_space_front != new_space_.top()); | 1786 } while (new_space_front != new_space_.top()); |
| 1808 | 1787 |
| 1809 return new_space_front; | 1788 return new_space_front; |
| 1810 } | 1789 } |
| 1811 | 1790 |
| 1812 | 1791 |
| 1813 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & | 1792 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == |
| 1814 kDoubleAlignmentMask) == 0); // NOLINT | 1793 0); // NOLINT |
| 1815 STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & | 1794 STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) == |
| 1816 kDoubleAlignmentMask) == 0); // NOLINT | 1795 0); // NOLINT |
| 1817 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & | 1796 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & |
| 1818 kDoubleAlignmentMask) == 0); // NOLINT | 1797 kDoubleAlignmentMask) == 0); // NOLINT |
| 1819 | 1798 |
| 1820 | 1799 |
| 1821 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, | 1800 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, HeapObject* object, |
| 1822 HeapObject* object, | |
| 1823 int size)); | 1801 int size)); |
| 1824 | 1802 |
| 1825 static HeapObject* EnsureDoubleAligned(Heap* heap, | 1803 static HeapObject* EnsureDoubleAligned(Heap* heap, HeapObject* object, |
| 1826 HeapObject* object, | |
| 1827 int size) { | 1804 int size) { |
| 1828 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1805 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { |
| 1829 heap->CreateFillerObjectAt(object->address(), kPointerSize); | 1806 heap->CreateFillerObjectAt(object->address(), kPointerSize); |
| 1830 return HeapObject::FromAddress(object->address() + kPointerSize); | 1807 return HeapObject::FromAddress(object->address() + kPointerSize); |
| 1831 } else { | 1808 } else { |
| 1832 heap->CreateFillerObjectAt(object->address() + size - kPointerSize, | 1809 heap->CreateFillerObjectAt(object->address() + size - kPointerSize, |
| 1833 kPointerSize); | 1810 kPointerSize); |
| 1834 return object; | 1811 return object; |
| 1835 } | 1812 } |
| 1836 } | 1813 } |
| 1837 | 1814 |
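EnsureDoubleAligned assumes the caller over-allocated by one word and then places a one-word filler either before or after the payload, depending on where the allocation landed. A hedged sketch of the address arithmetic using 32-bit-style constants (kPointerSize = 4, double alignment = 8); names and values are illustrative only:

#include <cstdint>
#include <cstdio>

// 32-bit-style constants: a heap word is 4 bytes but doubles want 8-byte
// alignment, which is what makes the fixup above necessary.
const uintptr_t kPointerSize = 4;
const uintptr_t kDoubleAlignmentMask = 8 - 1;

// Given an allocation of size + kPointerSize starting at |address|, returns
// where the payload should live; the skipped word becomes a filler object.
uintptr_t AlignForDoubles(uintptr_t address) {
  if ((address & kDoubleAlignmentMask) != 0) {
    return address + kPointerSize;  // filler occupies the leading word
  }
  return address;                   // filler occupies the trailing word instead
}

int main() {
  std::printf("%#lx -> %#lx\n", static_cast<unsigned long>(0x1004),
              static_cast<unsigned long>(AlignForDoubles(0x1004)));  // 0x1004 -> 0x1008
  std::printf("%#lx -> %#lx\n", static_cast<unsigned long>(0x1008),
              static_cast<unsigned long>(AlignForDoubles(0x1008)));  // 0x1008 -> 0x1008
  return 0;
}

Starting at 0x1004 the payload is shifted up by one word to 0x1008; starting at 0x1008 it stays put and the filler goes at the tail.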
| 1838 | 1815 |
| 1839 enum LoggingAndProfiling { | 1816 enum LoggingAndProfiling { |
| 1840 LOGGING_AND_PROFILING_ENABLED, | 1817 LOGGING_AND_PROFILING_ENABLED, |
| 1841 LOGGING_AND_PROFILING_DISABLED | 1818 LOGGING_AND_PROFILING_DISABLED |
| 1842 }; | 1819 }; |
| 1843 | 1820 |
| 1844 | 1821 |
| 1845 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 1822 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
| 1846 | 1823 |
| 1847 | 1824 |
| 1848 template<MarksHandling marks_handling, | 1825 template <MarksHandling marks_handling, |
| 1849 LoggingAndProfiling logging_and_profiling_mode> | 1826 LoggingAndProfiling logging_and_profiling_mode> |
| 1850 class ScavengingVisitor : public StaticVisitorBase { | 1827 class ScavengingVisitor : public StaticVisitorBase { |
| 1851 public: | 1828 public: |
| 1852 static void Initialize() { | 1829 static void Initialize() { |
| 1853 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); | 1830 table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString); |
| 1854 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 1831 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
| 1855 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 1832 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
| 1856 table_.Register(kVisitByteArray, &EvacuateByteArray); | 1833 table_.Register(kVisitByteArray, &EvacuateByteArray); |
| 1857 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 1834 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
| 1858 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 1835 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
| 1859 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); | 1836 table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray); |
| 1860 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); | 1837 table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array); |
| 1861 | 1838 |
| 1862 table_.Register(kVisitNativeContext, | 1839 table_.Register( |
| 1863 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1840 kVisitNativeContext, |
| 1864 template VisitSpecialized<Context::kSize>); | 1841 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1842 Context::kSize>); |
| 1865 | 1843 |
| 1866 table_.Register(kVisitConsString, | 1844 table_.Register( |
| 1867 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1845 kVisitConsString, |
| 1868 template VisitSpecialized<ConsString::kSize>); | 1846 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1847 ConsString::kSize>); |
| 1869 | 1848 |
| 1870 table_.Register(kVisitSlicedString, | 1849 table_.Register( |
| 1871 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1850 kVisitSlicedString, |
| 1872 template VisitSpecialized<SlicedString::kSize>); | 1851 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1852 SlicedString::kSize>); |
| 1873 | 1853 |
| 1874 table_.Register(kVisitSymbol, | 1854 table_.Register( |
| 1875 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1855 kVisitSymbol, |
| 1876 template VisitSpecialized<Symbol::kSize>); | 1856 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1857 Symbol::kSize>); |
| 1877 | 1858 |
| 1878 table_.Register(kVisitSharedFunctionInfo, | 1859 table_.Register( |
| 1879 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1860 kVisitSharedFunctionInfo, |
| 1880 template VisitSpecialized<SharedFunctionInfo::kSize>); | 1861 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1862 SharedFunctionInfo::kSize>); |
| 1881 | 1863 |
| 1882 table_.Register(kVisitJSWeakCollection, | 1864 table_.Register(kVisitJSWeakCollection, |
| 1883 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1865 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 1884 Visit); | |
| 1885 | 1866 |
| 1886 table_.Register(kVisitJSArrayBuffer, | 1867 table_.Register(kVisitJSArrayBuffer, |
| 1887 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1868 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 1888 Visit); | |
| 1889 | 1869 |
| 1890 table_.Register(kVisitJSTypedArray, | 1870 table_.Register(kVisitJSTypedArray, |
| 1891 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1871 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 1892 Visit); | |
| 1893 | 1872 |
| 1894 table_.Register(kVisitJSDataView, | 1873 table_.Register(kVisitJSDataView, |
| 1895 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1874 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 1896 Visit); | |
| 1897 | 1875 |
| 1898 table_.Register(kVisitJSRegExp, | 1876 table_.Register(kVisitJSRegExp, |
| 1899 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1877 &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit); |
| 1900 Visit); | |
| 1901 | 1878 |
| 1902 if (marks_handling == IGNORE_MARKS) { | 1879 if (marks_handling == IGNORE_MARKS) { |
| 1903 table_.Register(kVisitJSFunction, | 1880 table_.Register( |
| 1904 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1881 kVisitJSFunction, |
| 1905 template VisitSpecialized<JSFunction::kSize>); | 1882 &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 1883 JSFunction::kSize>); |
| 1906 } else { | 1884 } else { |
| 1907 table_.Register(kVisitJSFunction, &EvacuateJSFunction); | 1885 table_.Register(kVisitJSFunction, &EvacuateJSFunction); |
| 1908 } | 1886 } |
| 1909 | 1887 |
| 1910 table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>, | 1888 table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>, |
| 1911 kVisitDataObject, | 1889 kVisitDataObject, kVisitDataObjectGeneric>(); |
| 1912 kVisitDataObjectGeneric>(); | |
| 1913 | 1890 |
| 1914 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, | 1891 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, |
| 1915 kVisitJSObject, | 1892 kVisitJSObject, kVisitJSObjectGeneric>(); |
| 1916 kVisitJSObjectGeneric>(); | |
| 1917 | 1893 |
| 1918 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, | 1894 table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>, |
| 1919 kVisitStruct, | 1895 kVisitStruct, kVisitStructGeneric>(); |
| 1920 kVisitStructGeneric>(); | |
| 1921 } | 1896 } |
| 1922 | 1897 |
| 1923 static VisitorDispatchTable<ScavengingCallback>* GetTable() { | 1898 static VisitorDispatchTable<ScavengingCallback>* GetTable() { |
| 1924 return &table_; | 1899 return &table_; |
| 1925 } | 1900 } |
| 1926 | 1901 |
| 1927 private: | 1902 private: |
| 1928 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; | 1903 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; |
| 1929 | 1904 |
| 1930 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { | 1905 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { |
| 1931 bool should_record = false; | 1906 bool should_record = false; |
| 1932 #ifdef DEBUG | 1907 #ifdef DEBUG |
| 1933 should_record = FLAG_heap_stats; | 1908 should_record = FLAG_heap_stats; |
| 1934 #endif | 1909 #endif |
| 1935 should_record = should_record || FLAG_log_gc; | 1910 should_record = should_record || FLAG_log_gc; |
| 1936 if (should_record) { | 1911 if (should_record) { |
| 1937 if (heap->new_space()->Contains(obj)) { | 1912 if (heap->new_space()->Contains(obj)) { |
| 1938 heap->new_space()->RecordAllocation(obj); | 1913 heap->new_space()->RecordAllocation(obj); |
| 1939 } else { | 1914 } else { |
| 1940 heap->new_space()->RecordPromotion(obj); | 1915 heap->new_space()->RecordPromotion(obj); |
| 1941 } | 1916 } |
| 1942 } | 1917 } |
| 1943 } | 1918 } |
| 1944 | 1919 |
| 1945 // Helper function used by CopyObject to copy a source object to an | 1920 // Helper function used by CopyObject to copy a source object to an |
| 1946 // allocated target object and update the forwarding pointer in the source | 1921 // allocated target object and update the forwarding pointer in the source |
| 1947 // object. Returns the target object. | 1922 // object. Returns the target object. |
| 1948 INLINE(static void MigrateObject(Heap* heap, | 1923 INLINE(static void MigrateObject(Heap* heap, HeapObject* source, |
| 1949 HeapObject* source, | 1924 HeapObject* target, int size)) { |
| 1950 HeapObject* target, | |
| 1951 int size)) { | |
| 1952 // If we migrate into to-space, then the to-space top pointer should be | 1925 // If we migrate into to-space, then the to-space top pointer should be |
| 1953 // right after the target object. Incorporate double alignment | 1926 // right after the target object. Incorporate double alignment |
| 1954 // over-allocation. | 1927 // over-allocation. |
| 1955 DCHECK(!heap->InToSpace(target) || | 1928 DCHECK(!heap->InToSpace(target) || |
| 1956 target->address() + size == heap->new_space()->top() || | 1929 target->address() + size == heap->new_space()->top() || |
| 1957 target->address() + size + kPointerSize == heap->new_space()->top()); | 1930 target->address() + size + kPointerSize == heap->new_space()->top()); |
| 1958 | 1931 |
| 1959 // Make sure that we do not overwrite the promotion queue which is at | 1932 // Make sure that we do not overwrite the promotion queue which is at |
| 1960 // the end of to-space. | 1933 // the end of to-space. |
| 1961 DCHECK(!heap->InToSpace(target) || | 1934 DCHECK(!heap->InToSpace(target) || |
| 1962 heap->promotion_queue()->IsBelowPromotionQueue( | 1935 heap->promotion_queue()->IsBelowPromotionQueue( |
| 1963 heap->new_space()->top())); | 1936 heap->new_space()->top())); |
| 1964 | 1937 |
| 1965 // Copy the content of source to target. | 1938 // Copy the content of source to target. |
| 1966 heap->CopyBlock(target->address(), source->address(), size); | 1939 heap->CopyBlock(target->address(), source->address(), size); |
| 1967 | 1940 |
| 1968 // Set the forwarding address. | 1941 // Set the forwarding address. |
| 1969 source->set_map_word(MapWord::FromForwardingAddress(target)); | 1942 source->set_map_word(MapWord::FromForwardingAddress(target)); |
| 1970 | 1943 |
| 1971 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { | 1944 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { |
| 1972 // Update NewSpace stats if necessary. | 1945 // Update NewSpace stats if necessary. |
| 1973 RecordCopiedObject(heap, target); | 1946 RecordCopiedObject(heap, target); |
| 1974 heap->OnMoveEvent(target, source, size); | 1947 heap->OnMoveEvent(target, source, size); |
| 1975 } | 1948 } |
| 1976 | 1949 |
| 1977 if (marks_handling == TRANSFER_MARKS) { | 1950 if (marks_handling == TRANSFER_MARKS) { |
| 1978 if (Marking::TransferColor(source, target)) { | 1951 if (Marking::TransferColor(source, target)) { |
| 1979 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); | 1952 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); |
| 1980 } | 1953 } |
| 1981 } | 1954 } |
| 1982 } | 1955 } |
| 1983 | 1956 |
| 1984 template<int alignment> | 1957 template <int alignment> |
| 1985 static inline bool SemiSpaceCopyObject(Map* map, | 1958 static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, |
| 1986 HeapObject** slot, | 1959 HeapObject* object, int object_size) { |
| 1987 HeapObject* object, | |
| 1988 int object_size) { | |
| 1989 Heap* heap = map->GetHeap(); | 1960 Heap* heap = map->GetHeap(); |
| 1990 | 1961 |
| 1991 int allocation_size = object_size; | 1962 int allocation_size = object_size; |
| 1992 if (alignment != kObjectAlignment) { | 1963 if (alignment != kObjectAlignment) { |
| 1993 DCHECK(alignment == kDoubleAlignment); | 1964 DCHECK(alignment == kDoubleAlignment); |
| 1994 allocation_size += kPointerSize; | 1965 allocation_size += kPointerSize; |
| 1995 } | 1966 } |
| 1996 | 1967 |
| 1997 DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); | 1968 DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); |
| 1998 AllocationResult allocation = | 1969 AllocationResult allocation = |
| (...skipping 16 matching lines...) |
| 2015 *slot = target; | 1986 *slot = target; |
| 2016 MigrateObject(heap, object, target, object_size); | 1987 MigrateObject(heap, object, target, object_size); |
| 2017 | 1988 |
| 2018 heap->IncrementSemiSpaceCopiedObjectSize(object_size); | 1989 heap->IncrementSemiSpaceCopiedObjectSize(object_size); |
| 2019 return true; | 1990 return true; |
| 2020 } | 1991 } |
| 2021 return false; | 1992 return false; |
| 2022 } | 1993 } |
| 2023 | 1994 |
| 2024 | 1995 |
| 2025 template<ObjectContents object_contents, int alignment> | 1996 template <ObjectContents object_contents, int alignment> |
| 2026 static inline bool PromoteObject(Map* map, | 1997 static inline bool PromoteObject(Map* map, HeapObject** slot, |
| 2027 HeapObject** slot, | 1998 HeapObject* object, int object_size) { |
| 2028 HeapObject* object, | |
| 2029 int object_size) { | |
| 2030 Heap* heap = map->GetHeap(); | 1999 Heap* heap = map->GetHeap(); |
| 2031 | 2000 |
| 2032 int allocation_size = object_size; | 2001 int allocation_size = object_size; |
| 2033 if (alignment != kObjectAlignment) { | 2002 if (alignment != kObjectAlignment) { |
| 2034 DCHECK(alignment == kDoubleAlignment); | 2003 DCHECK(alignment == kDoubleAlignment); |
| 2035 allocation_size += kPointerSize; | 2004 allocation_size += kPointerSize; |
| 2036 } | 2005 } |
| 2037 | 2006 |
| 2038 AllocationResult allocation; | 2007 AllocationResult allocation; |
| 2039 if (object_contents == DATA_OBJECT) { | 2008 if (object_contents == DATA_OBJECT) { |
| (...skipping 11 matching lines...) |
| 2051 } | 2020 } |
| 2052 | 2021 |
| 2053 // Order is important: slot might be inside of the target if target | 2022 // Order is important: slot might be inside of the target if target |
| 2054 // was allocated over a dead object and slot comes from the store | 2023 // was allocated over a dead object and slot comes from the store |
| 2055 // buffer. | 2024 // buffer. |
| 2056 *slot = target; | 2025 *slot = target; |
| 2057 MigrateObject(heap, object, target, object_size); | 2026 MigrateObject(heap, object, target, object_size); |
| 2058 | 2027 |
| 2059 if (object_contents == POINTER_OBJECT) { | 2028 if (object_contents == POINTER_OBJECT) { |
| 2060 if (map->instance_type() == JS_FUNCTION_TYPE) { | 2029 if (map->instance_type() == JS_FUNCTION_TYPE) { |
| 2061 heap->promotion_queue()->insert( | 2030 heap->promotion_queue()->insert(target, |
| 2062 target, JSFunction::kNonWeakFieldsEndOffset); | 2031 JSFunction::kNonWeakFieldsEndOffset); |
| 2063 } else { | 2032 } else { |
| 2064 heap->promotion_queue()->insert(target, object_size); | 2033 heap->promotion_queue()->insert(target, object_size); |
| 2065 } | 2034 } |
| 2066 } | 2035 } |
| 2067 heap->IncrementPromotedObjectsSize(object_size); | 2036 heap->IncrementPromotedObjectsSize(object_size); |
| 2068 return true; | 2037 return true; |
| 2069 } | 2038 } |
| 2070 return false; | 2039 return false; |
| 2071 } | 2040 } |
| 2072 | 2041 |
| 2073 | 2042 |
| 2074 template<ObjectContents object_contents, int alignment> | 2043 template <ObjectContents object_contents, int alignment> |
| 2075 static inline void EvacuateObject(Map* map, | 2044 static inline void EvacuateObject(Map* map, HeapObject** slot, |
| 2076 HeapObject** slot, | 2045 HeapObject* object, int object_size) { |
| 2077 HeapObject* object, | |
| 2078 int object_size) { | |
| 2079 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2046 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 2080 SLOW_DCHECK(object->Size() == object_size); | 2047 SLOW_DCHECK(object->Size() == object_size); |
| 2081 Heap* heap = map->GetHeap(); | 2048 Heap* heap = map->GetHeap(); |
| 2082 | 2049 |
| 2083 if (!heap->ShouldBePromoted(object->address(), object_size)) { | 2050 if (!heap->ShouldBePromoted(object->address(), object_size)) { |
| 2084 // A semi-space copy may fail due to fragmentation. In that case, we | 2051 // A semi-space copy may fail due to fragmentation. In that case, we |
| 2085 // try to promote the object. | 2052 // try to promote the object. |
| 2086 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 2053 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { |
| 2087 return; | 2054 return; |
| 2088 } | 2055 } |
| 2089 } | 2056 } |
| 2090 | 2057 |
| 2091 if (PromoteObject<object_contents, alignment>( | 2058 if (PromoteObject<object_contents, alignment>(map, slot, object, |
| 2092 map, slot, object, object_size)) { | 2059 object_size)) { |
| 2093 return; | 2060 return; |
| 2094 } | 2061 } |
| 2095 | 2062 |
| 2096 // If promotion failed, we try to copy the object to the other semi-space | 2063 // If promotion failed, we try to copy the object to the other semi-space |
| 2097 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; | 2064 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; |
| 2098 | 2065 |
| 2099 UNREACHABLE(); | 2066 UNREACHABLE(); |
| 2100 } | 2067 } |
| 2101 | 2068 |
| 2102 | 2069 |
| 2103 static inline void EvacuateJSFunction(Map* map, | 2070 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
| 2104 HeapObject** slot, | |
| 2105 HeapObject* object) { | 2071 HeapObject* object) { |
| 2106 ObjectEvacuationStrategy<POINTER_OBJECT>:: | 2072 ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
| 2107 template VisitSpecialized<JSFunction::kSize>(map, slot, object); | 2073 JSFunction::kSize>(map, slot, object); |
| 2108 | 2074 |
| 2109 HeapObject* target = *slot; | 2075 HeapObject* target = *slot; |
| 2110 MarkBit mark_bit = Marking::MarkBitFrom(target); | 2076 MarkBit mark_bit = Marking::MarkBitFrom(target); |
| 2111 if (Marking::IsBlack(mark_bit)) { | 2077 if (Marking::IsBlack(mark_bit)) { |
| 2112 // This object is black and it might not be rescanned by marker. | 2078 // This object is black and it might not be rescanned by marker. |
| 2113 // We should explicitly record code entry slot for compaction because | 2079 // We should explicitly record code entry slot for compaction because |
| 2114 // promotion queue processing (IterateAndMarkPointersToFromSpace) will | 2080 // promotion queue processing (IterateAndMarkPointersToFromSpace) will |
| 2115 // miss it as it is not HeapObject-tagged. | 2081 // miss it as it is not HeapObject-tagged. |
| 2116 Address code_entry_slot = | 2082 Address code_entry_slot = |
| 2117 target->address() + JSFunction::kCodeEntryOffset; | 2083 target->address() + JSFunction::kCodeEntryOffset; |
| 2118 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 2084 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
| 2119 map->GetHeap()->mark_compact_collector()-> | 2085 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( |
| 2120 RecordCodeEntrySlot(code_entry_slot, code); | 2086 code_entry_slot, code); |
| 2121 } | 2087 } |
| 2122 } | 2088 } |
| 2123 | 2089 |
| 2124 | 2090 |
| 2125 static inline void EvacuateFixedArray(Map* map, | 2091 static inline void EvacuateFixedArray(Map* map, HeapObject** slot, |
| 2126 HeapObject** slot, | |
| 2127 HeapObject* object) { | 2092 HeapObject* object) { |
| 2128 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 2093 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); |
| 2129 EvacuateObject<POINTER_OBJECT, kObjectAlignment>( | 2094 EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object, |
| 2130 map, slot, object, object_size); | 2095 object_size); |
| 2131 } | 2096 } |
| 2132 | 2097 |
| 2133 | 2098 |
| 2134 static inline void EvacuateFixedDoubleArray(Map* map, | 2099 static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot, |
| 2135 HeapObject** slot, | |
| 2136 HeapObject* object) { | 2100 HeapObject* object) { |
| 2137 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); | 2101 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); |
| 2138 int object_size = FixedDoubleArray::SizeFor(length); | 2102 int object_size = FixedDoubleArray::SizeFor(length); |
| 2139 EvacuateObject<DATA_OBJECT, kDoubleAlignment>( | 2103 EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object, |
| 2140 map, slot, object, object_size); | 2104 object_size); |
| 2141 } | 2105 } |
| 2142 | 2106 |
| 2143 | 2107 |
| 2144 static inline void EvacuateFixedTypedArray(Map* map, | 2108 static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot, |
| 2145 HeapObject** slot, | |
| 2146 HeapObject* object) { | 2109 HeapObject* object) { |
| 2147 int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); | 2110 int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); |
| 2148 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2111 EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, |
| 2149 map, slot, object, object_size); | 2112 object_size); |
| 2150 } | 2113 } |
| 2151 | 2114 |
| 2152 | 2115 |
| 2153 static inline void EvacuateFixedFloat64Array(Map* map, | 2116 static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot, |
| 2154 HeapObject** slot, | |
| 2155 HeapObject* object) { | 2117 HeapObject* object) { |
| 2156 int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); | 2118 int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); |
| 2157 EvacuateObject<DATA_OBJECT, kDoubleAlignment>( | 2119 EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object, |
| 2158 map, slot, object, object_size); | 2120 object_size); |
| 2159 } | 2121 } |
| 2160 | 2122 |
| 2161 | 2123 |
| 2162 static inline void EvacuateByteArray(Map* map, | 2124 static inline void EvacuateByteArray(Map* map, HeapObject** slot, |
| 2163 HeapObject** slot, | |
| 2164 HeapObject* object) { | 2125 HeapObject* object) { |
| 2165 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); | 2126 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); |
| 2166 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2127 EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, |
| 2167 map, slot, object, object_size); | 2128 object_size); |
| 2168 } | 2129 } |
| 2169 | 2130 |
| 2170 | 2131 |
| 2171 static inline void EvacuateSeqOneByteString(Map* map, | 2132 static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot, |
| 2172 HeapObject** slot, | 2133 HeapObject* object) { |
| 2173 HeapObject* object) { | 2134 int object_size = SeqOneByteString::cast(object) |
| 2174 int object_size = SeqOneByteString::cast(object)-> | 2135 ->SeqOneByteStringSize(map->instance_type()); |
| 2175 SeqOneByteStringSize(map->instance_type()); | 2136 EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, |
| 2176 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2137 object_size); |
| 2177 map, slot, object, object_size); | |
| 2178 } | 2138 } |
| 2179 | 2139 |
| 2180 | 2140 |
| 2181 static inline void EvacuateSeqTwoByteString(Map* map, | 2141 static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot, |
| 2182 HeapObject** slot, | |
| 2183 HeapObject* object) { | 2142 HeapObject* object) { |
| 2184 int object_size = SeqTwoByteString::cast(object)-> | 2143 int object_size = SeqTwoByteString::cast(object) |
| 2185 SeqTwoByteStringSize(map->instance_type()); | 2144 ->SeqTwoByteStringSize(map->instance_type()); |
| 2186 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2145 EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, |
| 2187 map, slot, object, object_size); | 2146 object_size); |
| 2188 } | 2147 } |
| 2189 | 2148 |
| 2190 | 2149 |
| 2191 static inline void EvacuateShortcutCandidate(Map* map, | 2150 static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot, |
| 2192 HeapObject** slot, | |
| 2193 HeapObject* object) { | 2151 HeapObject* object) { |
| 2194 DCHECK(IsShortcutCandidate(map->instance_type())); | 2152 DCHECK(IsShortcutCandidate(map->instance_type())); |
| 2195 | 2153 |
| 2196 Heap* heap = map->GetHeap(); | 2154 Heap* heap = map->GetHeap(); |
| 2197 | 2155 |
| 2198 if (marks_handling == IGNORE_MARKS && | 2156 if (marks_handling == IGNORE_MARKS && |
| 2199 ConsString::cast(object)->unchecked_second() == | 2157 ConsString::cast(object)->unchecked_second() == heap->empty_string()) { |
| 2200 heap->empty_string()) { | |
| 2201 HeapObject* first = | 2158 HeapObject* first = |
| 2202 HeapObject::cast(ConsString::cast(object)->unchecked_first()); | 2159 HeapObject::cast(ConsString::cast(object)->unchecked_first()); |
| 2203 | 2160 |
| 2204 *slot = first; | 2161 *slot = first; |
| 2205 | 2162 |
| 2206 if (!heap->InNewSpace(first)) { | 2163 if (!heap->InNewSpace(first)) { |
| 2207 object->set_map_word(MapWord::FromForwardingAddress(first)); | 2164 object->set_map_word(MapWord::FromForwardingAddress(first)); |
| 2208 return; | 2165 return; |
| 2209 } | 2166 } |
| 2210 | 2167 |
| 2211 MapWord first_word = first->map_word(); | 2168 MapWord first_word = first->map_word(); |
| 2212 if (first_word.IsForwardingAddress()) { | 2169 if (first_word.IsForwardingAddress()) { |
| 2213 HeapObject* target = first_word.ToForwardingAddress(); | 2170 HeapObject* target = first_word.ToForwardingAddress(); |
| 2214 | 2171 |
| 2215 *slot = target; | 2172 *slot = target; |
| 2216 object->set_map_word(MapWord::FromForwardingAddress(target)); | 2173 object->set_map_word(MapWord::FromForwardingAddress(target)); |
| 2217 return; | 2174 return; |
| 2218 } | 2175 } |
| 2219 | 2176 |
| 2220 heap->DoScavengeObject(first->map(), slot, first); | 2177 heap->DoScavengeObject(first->map(), slot, first); |
| 2221 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 2178 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
| 2222 return; | 2179 return; |
| 2223 } | 2180 } |
| 2224 | 2181 |
| 2225 int object_size = ConsString::kSize; | 2182 int object_size = ConsString::kSize; |
| 2226 EvacuateObject<POINTER_OBJECT, kObjectAlignment>( | 2183 EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object, |
| 2227 map, slot, object, object_size); | 2184 object_size); |
| 2228 } | 2185 } |
| 2229 | 2186 |
| 2230 template<ObjectContents object_contents> | 2187 template <ObjectContents object_contents> |
| 2231 class ObjectEvacuationStrategy { | 2188 class ObjectEvacuationStrategy { |
| 2232 public: | 2189 public: |
| 2233 template<int object_size> | 2190 template <int object_size> |
| 2234 static inline void VisitSpecialized(Map* map, | 2191 static inline void VisitSpecialized(Map* map, HeapObject** slot, |
| 2235 HeapObject** slot, | |
| 2236 HeapObject* object) { | 2192 HeapObject* object) { |
| 2237 EvacuateObject<object_contents, kObjectAlignment>( | 2193 EvacuateObject<object_contents, kObjectAlignment>(map, slot, object, |
| 2238 map, slot, object, object_size); | 2194 object_size); |
| 2239 } | 2195 } |
| 2240 | 2196 |
| 2241 static inline void Visit(Map* map, | 2197 static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) { |
| 2242 HeapObject** slot, | |
| 2243 HeapObject* object) { | |
| 2244 int object_size = map->instance_size(); | 2198 int object_size = map->instance_size(); |
| 2245 EvacuateObject<object_contents, kObjectAlignment>( | 2199 EvacuateObject<object_contents, kObjectAlignment>(map, slot, object, |
| 2246 map, slot, object, object_size); | 2200 object_size); |
| 2247 } | 2201 } |
| 2248 }; | 2202 }; |
| 2249 | 2203 |
| 2250 static VisitorDispatchTable<ScavengingCallback> table_; | 2204 static VisitorDispatchTable<ScavengingCallback> table_; |
| 2251 }; | 2205 }; |
| 2252 | 2206 |
| 2253 | 2207 |
| 2254 template<MarksHandling marks_handling, | 2208 template <MarksHandling marks_handling, |
| 2255 LoggingAndProfiling logging_and_profiling_mode> | 2209 LoggingAndProfiling logging_and_profiling_mode> |
| 2256 VisitorDispatchTable<ScavengingCallback> | 2210 VisitorDispatchTable<ScavengingCallback> |
| 2257 ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_; | 2211 ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_; |
| 2258 | 2212 |
| 2259 | 2213 |
| 2260 static void InitializeScavengingVisitorsTables() { | 2214 static void InitializeScavengingVisitorsTables() { |
| 2261 ScavengingVisitor<TRANSFER_MARKS, | 2215 ScavengingVisitor<TRANSFER_MARKS, |
| 2262 LOGGING_AND_PROFILING_DISABLED>::Initialize(); | 2216 LOGGING_AND_PROFILING_DISABLED>::Initialize(); |
| 2263 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize(); | 2217 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize(); |
| 2264 ScavengingVisitor<TRANSFER_MARKS, | 2218 ScavengingVisitor<TRANSFER_MARKS, |
| 2265 LOGGING_AND_PROFILING_ENABLED>::Initialize(); | 2219 LOGGING_AND_PROFILING_ENABLED>::Initialize(); |
| 2266 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize(); | 2220 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize(); |
| 2267 } | 2221 } |
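
Aside: the four Initialize() calls above pre-build one static dispatch table per (marks handling, logging) combination, and SelectScavengingVisitorsTable below copies whichever instantiation matches the runtime flags. A compact sketch of that pattern, with toy names rather than V8's types:

// Toy sketch only: each flag combination instantiates its own static visitor,
// and the runtime configuration picks which instantiation to dispatch through.
#include <cstdio>

using Callback = void (*)();

template <bool kLogging>
struct ToyVisitor {
  static void Visit() { std::printf("visited, logging=%d\n", int(kLogging)); }
  static Callback GetTable() { return &Visit; }  // stands in for a full table
};

int main() {
  bool logging = true;  // decided at runtime, e.g. from profiler state
  Callback active =
      logging ? ToyVisitor<true>::GetTable() : ToyVisitor<false>::GetTable();
  active();  // dispatch through the selected "table"
  return 0;
}
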
| 2268 | 2222 |
| 2269 | 2223 |
| 2270 void Heap::SelectScavengingVisitorsTable() { | 2224 void Heap::SelectScavengingVisitorsTable() { |
| 2271 bool logging_and_profiling = | 2225 bool logging_and_profiling = |
| 2272 FLAG_verify_predictable || | 2226 FLAG_verify_predictable || isolate()->logger()->is_logging() || |
| 2273 isolate()->logger()->is_logging() || | |
| 2274 isolate()->cpu_profiler()->is_profiling() || | 2227 isolate()->cpu_profiler()->is_profiling() || |
| 2275 (isolate()->heap_profiler() != NULL && | 2228 (isolate()->heap_profiler() != NULL && |
| 2276 isolate()->heap_profiler()->is_tracking_object_moves()); | 2229 isolate()->heap_profiler()->is_tracking_object_moves()); |
| 2277 | 2230 |
| 2278 if (!incremental_marking()->IsMarking()) { | 2231 if (!incremental_marking()->IsMarking()) { |
| 2279 if (!logging_and_profiling) { | 2232 if (!logging_and_profiling) { |
| 2280 scavenging_visitors_table_.CopyFrom( | 2233 scavenging_visitors_table_.CopyFrom(ScavengingVisitor< |
| 2281 ScavengingVisitor<IGNORE_MARKS, | 2234 IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable()); |
| 2282 LOGGING_AND_PROFILING_DISABLED>::GetTable()); | |
| 2283 } else { | 2235 } else { |
| 2284 scavenging_visitors_table_.CopyFrom( | 2236 scavenging_visitors_table_.CopyFrom(ScavengingVisitor< |
| 2285 ScavengingVisitor<IGNORE_MARKS, | 2237 IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable()); |
| 2286 LOGGING_AND_PROFILING_ENABLED>::GetTable()); | |
| 2287 } | 2238 } |
| 2288 } else { | 2239 } else { |
| 2289 if (!logging_and_profiling) { | 2240 if (!logging_and_profiling) { |
| 2290 scavenging_visitors_table_.CopyFrom( | 2241 scavenging_visitors_table_.CopyFrom(ScavengingVisitor< |
| 2291 ScavengingVisitor<TRANSFER_MARKS, | 2242 TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable()); |
| 2292 LOGGING_AND_PROFILING_DISABLED>::GetTable()); | |
| 2293 } else { | 2243 } else { |
| 2294 scavenging_visitors_table_.CopyFrom( | 2244 scavenging_visitors_table_.CopyFrom(ScavengingVisitor< |
| 2295 ScavengingVisitor<TRANSFER_MARKS, | 2245 TRANSFER_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable()); |
| 2296 LOGGING_AND_PROFILING_ENABLED>::GetTable()); | |
| 2297 } | 2246 } |
| 2298 | 2247 |
| 2299 if (incremental_marking()->IsCompacting()) { | 2248 if (incremental_marking()->IsCompacting()) { |
| 2300 // When compacting, forbid short-circuiting of cons-strings. | 2249 // When compacting, forbid short-circuiting of cons-strings. |
| 2301 // Scavenging code relies on the fact that a new space object | 2250 // Scavenging code relies on the fact that a new space object |
| 2302 // can't be evacuated into an evacuation candidate, but | 2251 // can't be evacuated into an evacuation candidate, but |
| 2303 // short-circuiting violates this assumption. | 2252 // short-circuiting violates this assumption. |
| 2304 scavenging_visitors_table_.Register( | 2253 scavenging_visitors_table_.Register( |
| 2305 StaticVisitorBase::kVisitShortcutCandidate, | 2254 StaticVisitorBase::kVisitShortcutCandidate, |
| 2306 scavenging_visitors_table_.GetVisitorById( | 2255 scavenging_visitors_table_.GetVisitorById( |
| (...skipping 16 matching lines...) |

| 2323 int instance_size) { | 2272 int instance_size) { |
| 2324 Object* result; | 2273 Object* result; |
| 2325 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); | 2274 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); |
| 2326 if (!allocation.To(&result)) return allocation; | 2275 if (!allocation.To(&result)) return allocation; |
| 2327 | 2276 |
| 2328 // Map::cast cannot be used due to uninitialized map field. | 2277 // Map::cast cannot be used due to uninitialized map field. |
| 2329 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); | 2278 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); |
| 2330 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); | 2279 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); |
| 2331 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); | 2280 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); |
| 2332 reinterpret_cast<Map*>(result)->set_visitor_id( | 2281 reinterpret_cast<Map*>(result)->set_visitor_id( |
| 2333 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); | 2282 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); |
| 2334 reinterpret_cast<Map*>(result)->set_inobject_properties(0); | 2283 reinterpret_cast<Map*>(result)->set_inobject_properties(0); |
| 2335 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); | 2284 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); |
| 2336 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); | 2285 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); |
| 2337 reinterpret_cast<Map*>(result)->set_bit_field(0); | 2286 reinterpret_cast<Map*>(result)->set_bit_field(0); |
| 2338 reinterpret_cast<Map*>(result)->set_bit_field2(0); | 2287 reinterpret_cast<Map*>(result)->set_bit_field2(0); |
| 2339 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | | 2288 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | |
| 2340 Map::OwnsDescriptors::encode(true); | 2289 Map::OwnsDescriptors::encode(true); |
| 2341 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); | 2290 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); |
| 2342 return result; | 2291 return result; |
| 2343 } | 2292 } |
| (...skipping 26 matching lines...) |
| 2370 map->set_bit_field2(1 << Map::kIsExtensible); | 2319 map->set_bit_field2(1 << Map::kIsExtensible); |
| 2371 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | | 2320 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | |
| 2372 Map::OwnsDescriptors::encode(true); | 2321 Map::OwnsDescriptors::encode(true); |
| 2373 map->set_bit_field3(bit_field3); | 2322 map->set_bit_field3(bit_field3); |
| 2374 map->set_elements_kind(elements_kind); | 2323 map->set_elements_kind(elements_kind); |
| 2375 | 2324 |
| 2376 return map; | 2325 return map; |
| 2377 } | 2326 } |
| 2378 | 2327 |
| 2379 | 2328 |
| 2380 AllocationResult Heap::AllocateFillerObject(int size, | 2329 AllocationResult Heap::AllocateFillerObject(int size, bool double_align, |
| 2381 bool double_align, | |
| 2382 AllocationSpace space) { | 2330 AllocationSpace space) { |
| 2383 HeapObject* obj; | 2331 HeapObject* obj; |
| 2384 { AllocationResult allocation = AllocateRaw(size, space, space); | 2332 { |
| 2333 AllocationResult allocation = AllocateRaw(size, space, space); |
| 2385 if (!allocation.To(&obj)) return allocation; | 2334 if (!allocation.To(&obj)) return allocation; |
| 2386 } | 2335 } |
| 2387 #ifdef DEBUG | 2336 #ifdef DEBUG |
| 2388 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 2337 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 2389 DCHECK(chunk->owner()->identity() == space); | 2338 DCHECK(chunk->owner()->identity() == space); |
| 2390 #endif | 2339 #endif |
| 2391 CreateFillerObjectAt(obj->address(), size); | 2340 CreateFillerObjectAt(obj->address(), size); |
| 2392 return obj; | 2341 return obj; |
| 2393 } | 2342 } |
| 2394 | 2343 |
| 2395 | 2344 |
| 2396 const Heap::StringTypeTable Heap::string_type_table[] = { | 2345 const Heap::StringTypeTable Heap::string_type_table[] = { |
| 2397 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ | 2346 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ |
| 2398 {type, size, k##camel_name##MapRootIndex}, | 2347 { type, size, k##camel_name##MapRootIndex } \ |
| 2399 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) | 2348 , |
| 2349 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) |
| 2400 #undef STRING_TYPE_ELEMENT | 2350 #undef STRING_TYPE_ELEMENT |
| 2401 }; | 2351 }; |
| 2402 | 2352 |
| 2403 | 2353 |
| 2404 const Heap::ConstantStringTable Heap::constant_string_table[] = { | 2354 const Heap::ConstantStringTable Heap::constant_string_table[] = { |
| 2405 #define CONSTANT_STRING_ELEMENT(name, contents) \ | 2355 #define CONSTANT_STRING_ELEMENT(name, contents) \ |
| 2406 {contents, k##name##RootIndex}, | 2356 { contents, k##name##RootIndex } \ |
| 2407 INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT) | 2357 , |
| 2358 INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT) |
| 2408 #undef CONSTANT_STRING_ELEMENT | 2359 #undef CONSTANT_STRING_ELEMENT |
| 2409 }; | 2360 }; |
| 2410 | 2361 |
| 2411 | 2362 |
| 2412 const Heap::StructTable Heap::struct_table[] = { | 2363 const Heap::StructTable Heap::struct_table[] = { |
| 2413 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \ | 2364 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \ |
| 2414 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex }, | 2365 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex } \ |
| 2415 STRUCT_LIST(STRUCT_TABLE_ELEMENT) | 2366 , |
| 2367 STRUCT_LIST(STRUCT_TABLE_ELEMENT) |
| 2416 #undef STRUCT_TABLE_ELEMENT | 2368 #undef STRUCT_TABLE_ELEMENT |
| 2417 }; | 2369 }; |
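
Aside: string_type_table, constant_string_table and struct_table are all built with the same X-macro pattern, where a single list macro is expanded once per use site. A small self-contained sketch with hypothetical TOY_* names:

// Toy sketch only: one list macro expands into both an enum and the table that
// describes each entry, mirroring STRUCT_LIST / STRUCT_TABLE_ELEMENT above.
#include <cstdio>

#define TOY_STRUCT_LIST(V) \
  V(FOO_TYPE, 16, Foo)     \
  V(BAR_TYPE, 24, Bar)

enum ToyInstanceType {
#define TOY_ENUM_ELEMENT(NAME, size, Name) NAME,
  TOY_STRUCT_LIST(TOY_ENUM_ELEMENT)
#undef TOY_ENUM_ELEMENT
};

struct ToyTableEntry {
  ToyInstanceType type;
  int size;
  const char* name;
};

const ToyTableEntry toy_struct_table[] = {
#define TOY_TABLE_ELEMENT(NAME, size, Name) {NAME, size, #Name},
    TOY_STRUCT_LIST(TOY_TABLE_ELEMENT)
#undef TOY_TABLE_ELEMENT
};

int main() {
  for (const ToyTableEntry& e : toy_struct_table)
    std::printf("%s: type=%d size=%d\n", e.name, int(e.type), e.size);
  return 0;
}
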
| 2418 | 2370 |
| 2419 | 2371 |
| 2420 bool Heap::CreateInitialMaps() { | 2372 bool Heap::CreateInitialMaps() { |
| 2421 HeapObject* obj; | 2373 HeapObject* obj; |
| 2422 { AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); | 2374 { |
| 2375 AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); |
| 2423 if (!allocation.To(&obj)) return false; | 2376 if (!allocation.To(&obj)) return false; |
| 2424 } | 2377 } |
| 2425 // Map::cast cannot be used due to uninitialized map field. | 2378 // Map::cast cannot be used due to uninitialized map field. |
| 2426 Map* new_meta_map = reinterpret_cast<Map*>(obj); | 2379 Map* new_meta_map = reinterpret_cast<Map*>(obj); |
| 2427 set_meta_map(new_meta_map); | 2380 set_meta_map(new_meta_map); |
| 2428 new_meta_map->set_map(new_meta_map); | 2381 new_meta_map->set_map(new_meta_map); |
| 2429 | 2382 |
| 2430 { // Partial map allocation | 2383 { // Partial map allocation |
| 2431 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ | 2384 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ |
| 2432 { Map* map; \ | 2385 { \ |
| 2433 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ | 2386 Map* map; \ |
| 2434 set_##field_name##_map(map); \ | 2387 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ |
| 2435 } | 2388 set_##field_name##_map(map); \ |
| 2389 } |
| 2436 | 2390 |
| 2437 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); | 2391 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); |
| 2438 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); | 2392 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); |
| 2439 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); | 2393 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); |
| 2440 ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel, | 2394 ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel, |
| 2441 constant_pool_array); | 2395 constant_pool_array); |
| 2442 | 2396 |
| 2443 #undef ALLOCATE_PARTIAL_MAP | 2397 #undef ALLOCATE_PARTIAL_MAP |
| 2444 } | 2398 } |
| 2445 | 2399 |
| 2446 // Allocate the empty array. | 2400 // Allocate the empty array. |
| 2447 { AllocationResult allocation = AllocateEmptyFixedArray(); | 2401 { |
| 2402 AllocationResult allocation = AllocateEmptyFixedArray(); |
| 2448 if (!allocation.To(&obj)) return false; | 2403 if (!allocation.To(&obj)) return false; |
| 2449 } | 2404 } |
| 2450 set_empty_fixed_array(FixedArray::cast(obj)); | 2405 set_empty_fixed_array(FixedArray::cast(obj)); |
| 2451 | 2406 |
| 2452 { AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE); | 2407 { |
| 2408 AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE); |
| 2453 if (!allocation.To(&obj)) return false; | 2409 if (!allocation.To(&obj)) return false; |
| 2454 } | 2410 } |
| 2455 set_null_value(Oddball::cast(obj)); | 2411 set_null_value(Oddball::cast(obj)); |
| 2456 Oddball::cast(obj)->set_kind(Oddball::kNull); | 2412 Oddball::cast(obj)->set_kind(Oddball::kNull); |
| 2457 | 2413 |
| 2458 { AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE); | 2414 { |
| 2415 AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE); |
| 2459 if (!allocation.To(&obj)) return false; | 2416 if (!allocation.To(&obj)) return false; |
| 2460 } | 2417 } |
| 2461 set_undefined_value(Oddball::cast(obj)); | 2418 set_undefined_value(Oddball::cast(obj)); |
| 2462 Oddball::cast(obj)->set_kind(Oddball::kUndefined); | 2419 Oddball::cast(obj)->set_kind(Oddball::kUndefined); |
| 2463 DCHECK(!InNewSpace(undefined_value())); | 2420 DCHECK(!InNewSpace(undefined_value())); |
| 2464 | 2421 |
| 2465 // Set preliminary exception sentinel value before actually initializing it. | 2422 // Set preliminary exception sentinel value before actually initializing it. |
| 2466 set_exception(null_value()); | 2423 set_exception(null_value()); |
| 2467 | 2424 |
| 2468 // Allocate the empty descriptor array. | 2425 // Allocate the empty descriptor array. |
| 2469 { AllocationResult allocation = AllocateEmptyFixedArray(); | 2426 { |
| 2427 AllocationResult allocation = AllocateEmptyFixedArray(); |
| 2470 if (!allocation.To(&obj)) return false; | 2428 if (!allocation.To(&obj)) return false; |
| 2471 } | 2429 } |
| 2472 set_empty_descriptor_array(DescriptorArray::cast(obj)); | 2430 set_empty_descriptor_array(DescriptorArray::cast(obj)); |
| 2473 | 2431 |
| 2474 // Allocate the empty constant pool array. | 2432 // Allocate the empty constant pool array. |
| 2475 { AllocationResult allocation = AllocateEmptyConstantPoolArray(); | 2433 { |
| 2434 AllocationResult allocation = AllocateEmptyConstantPoolArray(); |
| 2476 if (!allocation.To(&obj)) return false; | 2435 if (!allocation.To(&obj)) return false; |
| 2477 } | 2436 } |
| 2478 set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); | 2437 set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); |
| 2479 | 2438 |
| 2480 // Fix the instance_descriptors for the existing maps. | 2439 // Fix the instance_descriptors for the existing maps. |
| 2481 meta_map()->set_code_cache(empty_fixed_array()); | 2440 meta_map()->set_code_cache(empty_fixed_array()); |
| 2482 meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); | 2441 meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); |
| 2483 meta_map()->init_back_pointer(undefined_value()); | 2442 meta_map()->init_back_pointer(undefined_value()); |
| 2484 meta_map()->set_instance_descriptors(empty_descriptor_array()); | 2443 meta_map()->set_instance_descriptors(empty_descriptor_array()); |
| 2485 | 2444 |
| (...skipping 29 matching lines...) |
| 2515 undefined_map()->set_prototype(null_value()); | 2474 undefined_map()->set_prototype(null_value()); |
| 2516 undefined_map()->set_constructor(null_value()); | 2475 undefined_map()->set_constructor(null_value()); |
| 2517 | 2476 |
| 2518 null_map()->set_prototype(null_value()); | 2477 null_map()->set_prototype(null_value()); |
| 2519 null_map()->set_constructor(null_value()); | 2478 null_map()->set_constructor(null_value()); |
| 2520 | 2479 |
| 2521 constant_pool_array_map()->set_prototype(null_value()); | 2480 constant_pool_array_map()->set_prototype(null_value()); |
| 2522 constant_pool_array_map()->set_constructor(null_value()); | 2481 constant_pool_array_map()->set_constructor(null_value()); |
| 2523 | 2482 |
| 2524 { // Map allocation | 2483 { // Map allocation |
| 2525 #define ALLOCATE_MAP(instance_type, size, field_name) \ | 2484 #define ALLOCATE_MAP(instance_type, size, field_name) \ |
| 2526 { Map* map; \ | 2485 { \ |
| 2527 if (!AllocateMap((instance_type), size).To(&map)) return false; \ | 2486 Map* map; \ |
| 2528 set_##field_name##_map(map); \ | 2487 if (!AllocateMap((instance_type), size).To(&map)) return false; \ |
| 2529 } | 2488 set_##field_name##_map(map); \ |
| 2489 } |
| 2530 | 2490 |
| 2531 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \ | 2491 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \ |
| 2532 ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name) | 2492 ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name) |
| 2533 | 2493 |
| 2534 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array) | 2494 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array) |
| 2535 DCHECK(fixed_array_map() != fixed_cow_array_map()); | 2495 DCHECK(fixed_array_map() != fixed_cow_array_map()); |
| 2536 | 2496 |
| 2537 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info) | 2497 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info) |
| 2538 ALLOCATE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number) | 2498 ALLOCATE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number) |
| 2539 ALLOCATE_MAP( | 2499 ALLOCATE_MAP(MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize, |
| 2540 MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize, mutable_heap_number) | 2500 mutable_heap_number) |
| 2541 ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol) | 2501 ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol) |
| 2542 ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign) | 2502 ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign) |
| 2543 | 2503 |
| 2544 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole); | 2504 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole); |
| 2545 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean); | 2505 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean); |
| 2546 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized); | 2506 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized); |
| 2547 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker); | 2507 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker); |
| 2548 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel); | 2508 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel); |
| 2549 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception); | 2509 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception); |
| 2550 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception); | 2510 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception); |
| 2551 | 2511 |
| 2552 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { | 2512 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { |
| 2553 const StringTypeTable& entry = string_type_table[i]; | 2513 const StringTypeTable& entry = string_type_table[i]; |
| 2554 { AllocationResult allocation = AllocateMap(entry.type, entry.size); | 2514 { |
| 2515 AllocationResult allocation = AllocateMap(entry.type, entry.size); |
| 2555 if (!allocation.To(&obj)) return false; | 2516 if (!allocation.To(&obj)) return false; |
| 2556 } | 2517 } |
| 2557 // Mark cons string maps as unstable, because their objects can change | 2518 // Mark cons string maps as unstable, because their objects can change |
| 2558 // maps during GC. | 2519 // maps during GC. |
| 2559 Map* map = Map::cast(obj); | 2520 Map* map = Map::cast(obj); |
| 2560 if (StringShape(entry.type).IsCons()) map->mark_unstable(); | 2521 if (StringShape(entry.type).IsCons()) map->mark_unstable(); |
| 2561 roots_[entry.index] = map; | 2522 roots_[entry.index] = map; |
| 2562 } | 2523 } |
| 2563 | 2524 |
| 2564 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string) | 2525 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string) |
| 2565 undetectable_string_map()->set_is_undetectable(); | 2526 undetectable_string_map()->set_is_undetectable(); |
| 2566 | 2527 |
| 2567 ALLOCATE_VARSIZE_MAP(ASCII_STRING_TYPE, undetectable_ascii_string); | 2528 ALLOCATE_VARSIZE_MAP(ASCII_STRING_TYPE, undetectable_ascii_string); |
| 2568 undetectable_ascii_string_map()->set_is_undetectable(); | 2529 undetectable_ascii_string_map()->set_is_undetectable(); |
| 2569 | 2530 |
| 2570 ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array) | 2531 ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array) |
| 2571 ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array) | 2532 ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array) |
| 2572 ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space) | 2533 ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space) |
| 2573 | 2534 |
| 2574 #define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size) \ | 2535 #define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size) \ |
| 2575 ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \ | 2536 ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \ |
| 2576 external_##type##_array) | 2537 external_##type##_array) |
| 2577 | 2538 |
| 2578 TYPED_ARRAYS(ALLOCATE_EXTERNAL_ARRAY_MAP) | 2539 TYPED_ARRAYS(ALLOCATE_EXTERNAL_ARRAY_MAP) |
| 2579 #undef ALLOCATE_EXTERNAL_ARRAY_MAP | 2540 #undef ALLOCATE_EXTERNAL_ARRAY_MAP |
| 2580 | 2541 |
| 2581 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \ | 2542 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \ |
| 2582 ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, \ | 2543 ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, fixed_##type##_array) |
| 2583 fixed_##type##_array) | |
| 2584 | 2544 |
| 2585 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) | 2545 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) |
| 2586 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP | 2546 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP |
| 2587 | 2547 |
| 2588 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements) | 2548 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements) |
| 2589 | 2549 |
| 2590 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) | 2550 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) |
| 2591 | 2551 |
| 2592 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) | 2552 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) |
| 2593 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) | 2553 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) |
| 2594 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) | 2554 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) |
| 2595 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) | 2555 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) |
| 2596 | 2556 |
| 2597 | 2557 |
| 2598 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { | 2558 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { |
| 2599 const StructTable& entry = struct_table[i]; | 2559 const StructTable& entry = struct_table[i]; |
| 2600 Map* map; | 2560 Map* map; |
| 2601 if (!AllocateMap(entry.type, entry.size).To(&map)) | 2561 if (!AllocateMap(entry.type, entry.size).To(&map)) return false; |
| 2602 return false; | |
| 2603 roots_[entry.index] = map; | 2562 roots_[entry.index] = map; |
| 2604 } | 2563 } |
| 2605 | 2564 |
| 2606 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table) | 2565 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table) |
| 2607 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table) | 2566 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table) |
| 2608 | 2567 |
| 2609 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context) | 2568 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context) |
| 2610 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context) | 2569 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context) |
| 2611 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context) | 2570 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context) |
| 2612 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context) | 2571 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context) |
| 2613 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context) | 2572 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context) |
| 2614 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, global_context) | 2573 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, global_context) |
| 2615 | 2574 |
| 2616 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context) | 2575 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context) |
| 2617 native_context_map()->set_dictionary_map(true); | 2576 native_context_map()->set_dictionary_map(true); |
| 2618 native_context_map()->set_visitor_id( | 2577 native_context_map()->set_visitor_id( |
| 2619 StaticVisitorBase::kVisitNativeContext); | 2578 StaticVisitorBase::kVisitNativeContext); |
| 2620 | 2579 |
| 2621 ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize, | 2580 ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize, |
| 2622 shared_function_info) | 2581 shared_function_info) |
| 2623 | 2582 |
| 2624 ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kSize, | 2583 ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kSize, message_object) |
| 2625 message_object) | 2584 ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize, external) |
| 2626 ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize, | |
| 2627 external) | |
| 2628 external_map()->set_is_extensible(false); | 2585 external_map()->set_is_extensible(false); |
| 2629 #undef ALLOCATE_VARSIZE_MAP | 2586 #undef ALLOCATE_VARSIZE_MAP |
| 2630 #undef ALLOCATE_MAP | 2587 #undef ALLOCATE_MAP |
| 2631 } | 2588 } |
| 2632 | 2589 |
| 2633 { // Empty arrays | 2590 { // Empty arrays |
| 2634 { ByteArray* byte_array; | 2591 { |
| 2592 ByteArray* byte_array; |
| 2635 if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false; | 2593 if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false; |
| 2636 set_empty_byte_array(byte_array); | 2594 set_empty_byte_array(byte_array); |
| 2637 } | 2595 } |
| 2638 | 2596 |
| 2639 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \ | 2597 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \ |
| 2640 { ExternalArray* obj; \ | 2598 { \ |
| 2641 if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \ | 2599 ExternalArray* obj; \ |
| 2642 return false; \ | 2600 if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \ |
| 2643 set_empty_external_##type##_array(obj); \ | 2601 return false; \ |
| 2644 } | 2602 set_empty_external_##type##_array(obj); \ |
| 2603 } |
| 2645 | 2604 |
| 2646 TYPED_ARRAYS(ALLOCATE_EMPTY_EXTERNAL_ARRAY) | 2605 TYPED_ARRAYS(ALLOCATE_EMPTY_EXTERNAL_ARRAY) |
| 2647 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY | 2606 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY |
| 2648 | 2607 |
| 2649 #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ | 2608 #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ |
| 2650 { FixedTypedArrayBase* obj; \ | 2609 { \ |
| 2651 if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \ | 2610 FixedTypedArrayBase* obj; \ |
| 2652 return false; \ | 2611 if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \ |
| 2653 set_empty_fixed_##type##_array(obj); \ | 2612 return false; \ |
| 2654 } | 2613 set_empty_fixed_##type##_array(obj); \ |
| 2614 } |
| 2655 | 2615 |
| 2656 TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY) | 2616 TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY) |
| 2657 #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY | 2617 #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY |
| 2658 } | 2618 } |
| 2659 DCHECK(!InNewSpace(empty_fixed_array())); | 2619 DCHECK(!InNewSpace(empty_fixed_array())); |
| 2660 return true; | 2620 return true; |
| 2661 } | 2621 } |
| 2662 | 2622 |
| 2663 | 2623 |
| 2664 AllocationResult Heap::AllocateHeapNumber(double value, | 2624 AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode, |
| 2665 MutableMode mode, | |
| 2666 PretenureFlag pretenure) { | 2625 PretenureFlag pretenure) { |
| 2667 // Statically ensure that it is safe to allocate heap numbers in paged | 2626 // Statically ensure that it is safe to allocate heap numbers in paged |
| 2668 // spaces. | 2627 // spaces. |
| 2669 int size = HeapNumber::kSize; | 2628 int size = HeapNumber::kSize; |
| 2670 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); | 2629 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); |
| 2671 | 2630 |
| 2672 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 2631 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 2673 | 2632 |
| 2674 HeapObject* result; | 2633 HeapObject* result; |
| 2675 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 2634 { |
| 2635 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 2676 if (!allocation.To(&result)) return allocation; | 2636 if (!allocation.To(&result)) return allocation; |
| 2677 } | 2637 } |
| 2678 | 2638 |
| 2679 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); | 2639 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); |
| 2680 HeapObject::cast(result)->set_map_no_write_barrier(map); | 2640 HeapObject::cast(result)->set_map_no_write_barrier(map); |
| 2681 HeapNumber::cast(result)->set_value(value); | 2641 HeapNumber::cast(result)->set_value(value); |
| 2682 return result; | 2642 return result; |
| 2683 } | 2643 } |
| 2684 | 2644 |
| 2685 | 2645 |
| 2686 AllocationResult Heap::AllocateCell(Object* value) { | 2646 AllocationResult Heap::AllocateCell(Object* value) { |
| 2687 int size = Cell::kSize; | 2647 int size = Cell::kSize; |
| 2688 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); | 2648 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); |
| 2689 | 2649 |
| 2690 HeapObject* result; | 2650 HeapObject* result; |
| 2691 { AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE); | 2651 { |
| 2652 AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE); |
| 2692 if (!allocation.To(&result)) return allocation; | 2653 if (!allocation.To(&result)) return allocation; |
| 2693 } | 2654 } |
| 2694 result->set_map_no_write_barrier(cell_map()); | 2655 result->set_map_no_write_barrier(cell_map()); |
| 2695 Cell::cast(result)->set_value(value); | 2656 Cell::cast(result)->set_value(value); |
| 2696 return result; | 2657 return result; |
| 2697 } | 2658 } |
| 2698 | 2659 |
| 2699 | 2660 |
| 2700 AllocationResult Heap::AllocatePropertyCell() { | 2661 AllocationResult Heap::AllocatePropertyCell() { |
| 2701 int size = PropertyCell::kSize; | 2662 int size = PropertyCell::kSize; |
| (...skipping 90 matching lines...) |
| 2792 set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED)); | 2753 set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED)); |
| 2793 | 2754 |
| 2794 // The hole has not been created yet, but we want to put something | 2755 // The hole has not been created yet, but we want to put something |
| 2795 // predictable in the gaps in the string table, so let's make that Smi zero. | 2756 // predictable in the gaps in the string table, so let's make that Smi zero. |
| 2796 set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0))); | 2757 set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0))); |
| 2797 | 2758 |
| 2798 // Allocate initial string table. | 2759 // Allocate initial string table. |
| 2799 set_string_table(*StringTable::New(isolate(), kInitialStringTableSize)); | 2760 set_string_table(*StringTable::New(isolate(), kInitialStringTableSize)); |
| 2800 | 2761 |
| 2801 // Finish initializing oddballs after creating the string table. | 2762 // Finish initializing oddballs after creating the string table. |
| 2802 Oddball::Initialize(isolate(), | 2763 Oddball::Initialize(isolate(), factory->undefined_value(), "undefined", |
| 2803 factory->undefined_value(), | 2764 factory->nan_value(), Oddball::kUndefined); |
| 2804 "undefined", | |
| 2805 factory->nan_value(), | |
| 2806 Oddball::kUndefined); | |
| 2807 | 2765 |
| 2808 // Initialize the null_value. | 2766 // Initialize the null_value. |
| 2809 Oddball::Initialize(isolate(), | 2767 Oddball::Initialize(isolate(), factory->null_value(), "null", |
| 2810 factory->null_value(), | 2768 handle(Smi::FromInt(0), isolate()), Oddball::kNull); |
| 2811 "null", | |
| 2812 handle(Smi::FromInt(0), isolate()), | |
| 2813 Oddball::kNull); | |
| 2814 | 2769 |
| 2815 set_true_value(*factory->NewOddball(factory->boolean_map(), | 2770 set_true_value(*factory->NewOddball(factory->boolean_map(), "true", |
| 2816 "true", | |
| 2817 handle(Smi::FromInt(1), isolate()), | 2771 handle(Smi::FromInt(1), isolate()), |
| 2818 Oddball::kTrue)); | 2772 Oddball::kTrue)); |
| 2819 | 2773 |
| 2820 set_false_value(*factory->NewOddball(factory->boolean_map(), | 2774 set_false_value(*factory->NewOddball(factory->boolean_map(), "false", |
| 2821 "false", | |
| 2822 handle(Smi::FromInt(0), isolate()), | 2775 handle(Smi::FromInt(0), isolate()), |
| 2823 Oddball::kFalse)); | 2776 Oddball::kFalse)); |
| 2824 | 2777 |
| 2825 set_the_hole_value(*factory->NewOddball(factory->the_hole_map(), | 2778 set_the_hole_value(*factory->NewOddball(factory->the_hole_map(), "hole", |
| 2826 "hole", | |
| 2827 handle(Smi::FromInt(-1), isolate()), | 2779 handle(Smi::FromInt(-1), isolate()), |
| 2828 Oddball::kTheHole)); | 2780 Oddball::kTheHole)); |
| 2829 | 2781 |
| 2830 set_uninitialized_value( | 2782 set_uninitialized_value(*factory->NewOddball( |
| 2831 *factory->NewOddball(factory->uninitialized_map(), | 2783 factory->uninitialized_map(), "uninitialized", |
| 2832 "uninitialized", | 2784 handle(Smi::FromInt(-1), isolate()), Oddball::kUninitialized)); |
| 2833 handle(Smi::FromInt(-1), isolate()), | |
| 2834 Oddball::kUninitialized)); | |
| 2835 | 2785 |
| 2836 set_arguments_marker(*factory->NewOddball(factory->arguments_marker_map(), | 2786 set_arguments_marker(*factory->NewOddball( |
| 2837 "arguments_marker", | 2787 factory->arguments_marker_map(), "arguments_marker", |
| 2838 handle(Smi::FromInt(-4), isolate()), | 2788 handle(Smi::FromInt(-4), isolate()), Oddball::kArgumentMarker)); |
| 2839 Oddball::kArgumentMarker)); | |
| 2840 | 2789 |
| 2841 set_no_interceptor_result_sentinel( | 2790 set_no_interceptor_result_sentinel(*factory->NewOddball( |
| 2842 *factory->NewOddball(factory->no_interceptor_result_sentinel_map(), | 2791 factory->no_interceptor_result_sentinel_map(), |
| 2843 "no_interceptor_result_sentinel", | 2792 "no_interceptor_result_sentinel", handle(Smi::FromInt(-2), isolate()), |
| 2844 handle(Smi::FromInt(-2), isolate()), | 2793 Oddball::kOther)); |
| 2845 Oddball::kOther)); | |
| 2846 | 2794 |
| 2847 set_termination_exception( | 2795 set_termination_exception(*factory->NewOddball( |
| 2848 *factory->NewOddball(factory->termination_exception_map(), | 2796 factory->termination_exception_map(), "termination_exception", |
| 2849 "termination_exception", | 2797 handle(Smi::FromInt(-3), isolate()), Oddball::kOther)); |
| 2850 handle(Smi::FromInt(-3), isolate()), | |
| 2851 Oddball::kOther)); | |
| 2852 | 2798 |
| 2853 set_exception( | 2799 set_exception(*factory->NewOddball(factory->exception_map(), "exception", |
| 2854 *factory->NewOddball(factory->exception_map(), | 2800 handle(Smi::FromInt(-5), isolate()), |
| 2855 "exception", | 2801 Oddball::kException)); |
| 2856 handle(Smi::FromInt(-5), isolate()), | |
| 2857 Oddball::kException)); | |
| 2858 | 2802 |
| 2859 for (unsigned i = 0; i < ARRAY_SIZE(constant_string_table); i++) { | 2803 for (unsigned i = 0; i < ARRAY_SIZE(constant_string_table); i++) { |
| 2860 Handle<String> str = | 2804 Handle<String> str = |
| 2861 factory->InternalizeUtf8String(constant_string_table[i].contents); | 2805 factory->InternalizeUtf8String(constant_string_table[i].contents); |
| 2862 roots_[constant_string_table[i].index] = *str; | 2806 roots_[constant_string_table[i].index] = *str; |
| 2863 } | 2807 } |
| 2864 | 2808 |
| 2865 // Allocate the hidden string which is used to identify the hidden properties | 2809 // Allocate the hidden string which is used to identify the hidden properties |
| 2866 // in JSObjects. The hash code has a special value so that it will not match | 2810 // in JSObjects. The hash code has a special value so that it will not match |
| 2867 // the empty string when searching for the property. It cannot be part of the | 2811 // the empty string when searching for the property. It cannot be part of the |
| (...skipping 19 matching lines...) Expand all Loading... |
| 2887 set_instanceof_cache_answer(Smi::FromInt(0)); | 2831 set_instanceof_cache_answer(Smi::FromInt(0)); |
| 2888 | 2832 |
| 2889 CreateFixedStubs(); | 2833 CreateFixedStubs(); |
| 2890 | 2834 |
| 2891 // Allocate the dictionary of intrinsic function names. | 2835 // Allocate the dictionary of intrinsic function names. |
| 2892 Handle<NameDictionary> intrinsic_names = | 2836 Handle<NameDictionary> intrinsic_names = |
| 2893 NameDictionary::New(isolate(), Runtime::kNumFunctions); | 2837 NameDictionary::New(isolate(), Runtime::kNumFunctions); |
| 2894 Runtime::InitializeIntrinsicFunctionNames(isolate(), intrinsic_names); | 2838 Runtime::InitializeIntrinsicFunctionNames(isolate(), intrinsic_names); |
| 2895 set_intrinsic_function_names(*intrinsic_names); | 2839 set_intrinsic_function_names(*intrinsic_names); |
| 2896 | 2840 |
| 2897 set_number_string_cache(*factory->NewFixedArray( | 2841 set_number_string_cache( |
| 2898 kInitialNumberStringCacheSize * 2, TENURED)); | 2842 *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED)); |
| 2899 | 2843 |
| 2900 // Allocate cache for single-character one-byte strings. | 2844 // Allocate cache for single-character one-byte strings. |
| 2901 set_single_character_string_cache(*factory->NewFixedArray( | 2845 set_single_character_string_cache( |
| 2902 String::kMaxOneByteCharCode + 1, TENURED)); | 2846 *factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED)); |
| 2903 | 2847 |
| 2904 // Allocate cache for string split and regexp-multiple. | 2848 // Allocate cache for string split and regexp-multiple. |
| 2905 set_string_split_cache(*factory->NewFixedArray( | 2849 set_string_split_cache(*factory->NewFixedArray( |
| 2906 RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); | 2850 RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); |
| 2907 set_regexp_multiple_cache(*factory->NewFixedArray( | 2851 set_regexp_multiple_cache(*factory->NewFixedArray( |
| 2908 RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); | 2852 RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); |
| 2909 | 2853 |
| 2910 // Allocate cache for external strings pointing to native source code. | 2854 // Allocate cache for external strings pointing to native source code. |
| 2911 set_natives_source_cache(*factory->NewFixedArray( | 2855 set_natives_source_cache( |
| 2912 Natives::GetBuiltinsCount())); | 2856 *factory->NewFixedArray(Natives::GetBuiltinsCount())); |
| 2913 | 2857 |
| 2914 set_undefined_cell(*factory->NewCell(factory->undefined_value())); | 2858 set_undefined_cell(*factory->NewCell(factory->undefined_value())); |
| 2915 | 2859 |
| 2916 // The symbol registry is initialized lazily. | 2860 // The symbol registry is initialized lazily. |
| 2917 set_symbol_registry(undefined_value()); | 2861 set_symbol_registry(undefined_value()); |
| 2918 | 2862 |
| 2919 // Allocate object to hold object observation state. | 2863 // Allocate object to hold object observation state. |
| 2920 set_observation_state(*factory->NewJSObjectFromMap( | 2864 set_observation_state(*factory->NewJSObjectFromMap( |
| 2921 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize))); | 2865 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize))); |
| 2922 | 2866 |
| (...skipping 14 matching lines...) |
| 2937 Handle<SeededNumberDictionary> slow_element_dictionary = | 2881 Handle<SeededNumberDictionary> slow_element_dictionary = |
| 2938 SeededNumberDictionary::New(isolate(), 0, TENURED); | 2882 SeededNumberDictionary::New(isolate(), 0, TENURED); |
| 2939 slow_element_dictionary->set_requires_slow_elements(); | 2883 slow_element_dictionary->set_requires_slow_elements(); |
| 2940 set_empty_slow_element_dictionary(*slow_element_dictionary); | 2884 set_empty_slow_element_dictionary(*slow_element_dictionary); |
| 2941 | 2885 |
| 2942 set_materialized_objects(*factory->NewFixedArray(0, TENURED)); | 2886 set_materialized_objects(*factory->NewFixedArray(0, TENURED)); |
| 2943 | 2887 |
| 2944 // Handling of script id generation is in Factory::NewScript. | 2888 // Handling of script id generation is in Factory::NewScript. |
| 2945 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); | 2889 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); |
| 2946 | 2890 |
| 2947 set_allocation_sites_scratchpad(*factory->NewFixedArray( | 2891 set_allocation_sites_scratchpad( |
| 2948 kAllocationSiteScratchpadSize, TENURED)); | 2892 *factory->NewFixedArray(kAllocationSiteScratchpadSize, TENURED)); |
| 2949 InitializeAllocationSitesScratchpad(); | 2893 InitializeAllocationSitesScratchpad(); |
| 2950 | 2894 |
| 2951 // Initialize keyed lookup cache. | 2895 // Initialize keyed lookup cache. |
| 2952 isolate_->keyed_lookup_cache()->Clear(); | 2896 isolate_->keyed_lookup_cache()->Clear(); |
| 2953 | 2897 |
| 2954 // Initialize context slot cache. | 2898 // Initialize context slot cache. |
| 2955 isolate_->context_slot_cache()->Clear(); | 2899 isolate_->context_slot_cache()->Clear(); |
| 2956 | 2900 |
| 2957 // Initialize descriptor cache. | 2901 // Initialize descriptor cache. |
| 2958 isolate_->descriptor_lookup_cache()->Clear(); | 2902 isolate_->descriptor_lookup_cache()->Clear(); |
| 2959 | 2903 |
| 2960 // Initialize compilation cache. | 2904 // Initialize compilation cache. |
| 2961 isolate_->compilation_cache()->Clear(); | 2905 isolate_->compilation_cache()->Clear(); |
| 2962 } | 2906 } |
| 2963 | 2907 |
| 2964 | 2908 |
| 2965 bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) { | 2909 bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) { |
| 2966 RootListIndex writable_roots[] = { | 2910 RootListIndex writable_roots[] = { |
| 2967 kStoreBufferTopRootIndex, | 2911 kStoreBufferTopRootIndex, |
| 2968 kStackLimitRootIndex, | 2912 kStackLimitRootIndex, |
| 2969 kNumberStringCacheRootIndex, | 2913 kNumberStringCacheRootIndex, |
| 2970 kInstanceofCacheFunctionRootIndex, | 2914 kInstanceofCacheFunctionRootIndex, |
| 2971 kInstanceofCacheMapRootIndex, | 2915 kInstanceofCacheMapRootIndex, |
| 2972 kInstanceofCacheAnswerRootIndex, | 2916 kInstanceofCacheAnswerRootIndex, |
| 2973 kCodeStubsRootIndex, | 2917 kCodeStubsRootIndex, |
| 2974 kNonMonomorphicCacheRootIndex, | 2918 kNonMonomorphicCacheRootIndex, |
| 2975 kPolymorphicCodeCacheRootIndex, | 2919 kPolymorphicCodeCacheRootIndex, |
| 2976 kLastScriptIdRootIndex, | 2920 kLastScriptIdRootIndex, |
| 2977 kEmptyScriptRootIndex, | 2921 kEmptyScriptRootIndex, |
| 2978 kRealStackLimitRootIndex, | 2922 kRealStackLimitRootIndex, |
| 2979 kArgumentsAdaptorDeoptPCOffsetRootIndex, | 2923 kArgumentsAdaptorDeoptPCOffsetRootIndex, |
| 2980 kConstructStubDeoptPCOffsetRootIndex, | 2924 kConstructStubDeoptPCOffsetRootIndex, |
| 2981 kGetterStubDeoptPCOffsetRootIndex, | 2925 kGetterStubDeoptPCOffsetRootIndex, |
| 2982 kSetterStubDeoptPCOffsetRootIndex, | 2926 kSetterStubDeoptPCOffsetRootIndex, |
| 2983 kStringTableRootIndex, | 2927 kStringTableRootIndex, |
| 2984 }; | 2928 }; |
| 2985 | 2929 |
| 2986 for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) { | 2930 for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) { |
| 2987 if (root_index == writable_roots[i]) | 2931 if (root_index == writable_roots[i]) return true; |
| 2988 return true; | |
| 2989 } | 2932 } |
| 2990 return false; | 2933 return false; |
| 2991 } | 2934 } |
| 2992 | 2935 |
| 2993 | 2936 |
| 2994 bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) { | 2937 bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) { |
| 2995 return !RootCanBeWrittenAfterInitialization(root_index) && | 2938 return !RootCanBeWrittenAfterInitialization(root_index) && |
| 2996 !InNewSpace(roots_array_start()[root_index]); | 2939 !InNewSpace(roots_array_start()[root_index]); |
| 2997 } | 2940 } |
| 2998 | 2941 |
| 2999 | 2942 |
| 3000 Object* RegExpResultsCache::Lookup(Heap* heap, | 2943 Object* RegExpResultsCache::Lookup(Heap* heap, String* key_string, |
| 3001 String* key_string, | 2944 Object* key_pattern, ResultsCacheType type) { |
| 3002 Object* key_pattern, | |
| 3003 ResultsCacheType type) { | |
| 3004 FixedArray* cache; | 2945 FixedArray* cache; |
| 3005 if (!key_string->IsInternalizedString()) return Smi::FromInt(0); | 2946 if (!key_string->IsInternalizedString()) return Smi::FromInt(0); |
| 3006 if (type == STRING_SPLIT_SUBSTRINGS) { | 2947 if (type == STRING_SPLIT_SUBSTRINGS) { |
| 3007 DCHECK(key_pattern->IsString()); | 2948 DCHECK(key_pattern->IsString()); |
| 3008 if (!key_pattern->IsInternalizedString()) return Smi::FromInt(0); | 2949 if (!key_pattern->IsInternalizedString()) return Smi::FromInt(0); |
| 3009 cache = heap->string_split_cache(); | 2950 cache = heap->string_split_cache(); |
| 3010 } else { | 2951 } else { |
| 3011 DCHECK(type == REGEXP_MULTIPLE_INDICES); | 2952 DCHECK(type == REGEXP_MULTIPLE_INDICES); |
| 3012 DCHECK(key_pattern->IsFixedArray()); | 2953 DCHECK(key_pattern->IsFixedArray()); |
| 3013 cache = heap->regexp_multiple_cache(); | 2954 cache = heap->regexp_multiple_cache(); |
| 3014 } | 2955 } |
| 3015 | 2956 |
| 3016 uint32_t hash = key_string->Hash(); | 2957 uint32_t hash = key_string->Hash(); |
| 3017 uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) & | 2958 uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) & |
| 3018 ~(kArrayEntriesPerCacheEntry - 1)); | 2959 ~(kArrayEntriesPerCacheEntry - 1)); |
| 3019 if (cache->get(index + kStringOffset) == key_string && | 2960 if (cache->get(index + kStringOffset) == key_string && |
| 3020 cache->get(index + kPatternOffset) == key_pattern) { | 2961 cache->get(index + kPatternOffset) == key_pattern) { |
| 3021 return cache->get(index + kArrayOffset); | 2962 return cache->get(index + kArrayOffset); |
| 3022 } | 2963 } |
| 3023 index = | 2964 index = |
| 3024 ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1)); | 2965 ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1)); |
| 3025 if (cache->get(index + kStringOffset) == key_string && | 2966 if (cache->get(index + kStringOffset) == key_string && |
| 3026 cache->get(index + kPatternOffset) == key_pattern) { | 2967 cache->get(index + kPatternOffset) == key_pattern) { |
| 3027 return cache->get(index + kArrayOffset); | 2968 return cache->get(index + kArrayOffset); |
| 3028 } | 2969 } |
| 3029 return Smi::FromInt(0); | 2970 return Smi::FromInt(0); |
| 3030 } | 2971 } |
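
Aside: Lookup (and Enter below) probe the cache at two entry-aligned indices derived from the string hash. Assuming, as the code does, that kRegExpResultsCacheSize is a power of two and that each logical entry occupies kArrayEntriesPerCacheEntry consecutive array slots, the index arithmetic looks like the following sketch; the constants are illustrative, not V8's actual values.

// Toy sketch only: the primary probe is the hash masked into the table and
// rounded down to an entry boundary; the secondary probe is the next entry,
// wrapping around at the end of the table.
#include <cstdint>
#include <cstdio>

const uint32_t kCacheSize = 256;           // assumed to be a power of two
const uint32_t kEntriesPerCacheEntry = 4;  // array slots per logical entry

uint32_t PrimaryIndex(uint32_t hash) {
  return (hash & (kCacheSize - 1)) & ~(kEntriesPerCacheEntry - 1);
}

uint32_t SecondaryIndex(uint32_t primary) {
  return (primary + kEntriesPerCacheEntry) & (kCacheSize - 1);
}

int main() {
  uint32_t hash = 0x12345678u;
  uint32_t p = PrimaryIndex(hash);
  std::printf("primary=%u secondary=%u\n", p, SecondaryIndex(p));
  return 0;
}
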
| 3031 | 2972 |
| 3032 | 2973 |
| 3033 void RegExpResultsCache::Enter(Isolate* isolate, | 2974 void RegExpResultsCache::Enter(Isolate* isolate, Handle<String> key_string, |
| 3034 Handle<String> key_string, | |
| 3035 Handle<Object> key_pattern, | 2975 Handle<Object> key_pattern, |
| 3036 Handle<FixedArray> value_array, | 2976 Handle<FixedArray> value_array, |
| 3037 ResultsCacheType type) { | 2977 ResultsCacheType type) { |
| 3038 Factory* factory = isolate->factory(); | 2978 Factory* factory = isolate->factory(); |
| 3039 Handle<FixedArray> cache; | 2979 Handle<FixedArray> cache; |
| 3040 if (!key_string->IsInternalizedString()) return; | 2980 if (!key_string->IsInternalizedString()) return; |
| 3041 if (type == STRING_SPLIT_SUBSTRINGS) { | 2981 if (type == STRING_SPLIT_SUBSTRINGS) { |
| 3042 DCHECK(key_pattern->IsString()); | 2982 DCHECK(key_pattern->IsString()); |
| 3043 if (!key_pattern->IsInternalizedString()) return; | 2983 if (!key_pattern->IsInternalizedString()) return; |
| 3044 cache = factory->string_split_cache(); | 2984 cache = factory->string_split_cache(); |
| 3045 } else { | 2985 } else { |
| 3046 DCHECK(type == REGEXP_MULTIPLE_INDICES); | 2986 DCHECK(type == REGEXP_MULTIPLE_INDICES); |
| 3047 DCHECK(key_pattern->IsFixedArray()); | 2987 DCHECK(key_pattern->IsFixedArray()); |
| 3048 cache = factory->regexp_multiple_cache(); | 2988 cache = factory->regexp_multiple_cache(); |
| 3049 } | 2989 } |
| 3050 | 2990 |
| 3051 uint32_t hash = key_string->Hash(); | 2991 uint32_t hash = key_string->Hash(); |
| 3052 uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) & | 2992 uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) & |
| 3053 ~(kArrayEntriesPerCacheEntry - 1)); | 2993 ~(kArrayEntriesPerCacheEntry - 1)); |
| 3054 if (cache->get(index + kStringOffset) == Smi::FromInt(0)) { | 2994 if (cache->get(index + kStringOffset) == Smi::FromInt(0)) { |
| 3055 cache->set(index + kStringOffset, *key_string); | 2995 cache->set(index + kStringOffset, *key_string); |
| 3056 cache->set(index + kPatternOffset, *key_pattern); | 2996 cache->set(index + kPatternOffset, *key_pattern); |
| 3057 cache->set(index + kArrayOffset, *value_array); | 2997 cache->set(index + kArrayOffset, *value_array); |
| 3058 } else { | 2998 } else { |
| 3059 uint32_t index2 = | 2999 uint32_t index2 = |
| 3060 ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1)); | 3000 ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1)); |
| 3061 if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) { | 3001 if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) { |
| 3062 cache->set(index2 + kStringOffset, *key_string); | 3002 cache->set(index2 + kStringOffset, *key_string); |
| 3063 cache->set(index2 + kPatternOffset, *key_pattern); | 3003 cache->set(index2 + kPatternOffset, *key_pattern); |
| (...skipping 66 matching lines...) |
| 3130 } | 3070 } |
| 3131 } | 3071 } |
| 3132 | 3072 |
| 3133 | 3073 |
| 3134 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site, | 3074 void Heap::AddAllocationSiteToScratchpad(AllocationSite* site, |
| 3135 ScratchpadSlotMode mode) { | 3075 ScratchpadSlotMode mode) { |
| 3136 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { | 3076 if (allocation_sites_scratchpad_length_ < kAllocationSiteScratchpadSize) { |
| 3137 // We cannot use the normal write-barrier because slots need to be | 3077 // We cannot use the normal write-barrier because slots need to be |
| 3138 // recorded with non-incremental marking as well. We have to explicitly | 3078 // recorded with non-incremental marking as well. We have to explicitly |
| 3139 // record the slot to take evacuation candidates into account. | 3079 // record the slot to take evacuation candidates into account. |
| 3140 allocation_sites_scratchpad()->set( | 3080 allocation_sites_scratchpad()->set(allocation_sites_scratchpad_length_, |
| 3141 allocation_sites_scratchpad_length_, site, SKIP_WRITE_BARRIER); | 3081 site, SKIP_WRITE_BARRIER); |
| 3142 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt( | 3082 Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt( |
| 3143 allocation_sites_scratchpad_length_); | 3083 allocation_sites_scratchpad_length_); |
| 3144 | 3084 |
| 3145 if (mode == RECORD_SCRATCHPAD_SLOT) { | 3085 if (mode == RECORD_SCRATCHPAD_SLOT) { |
| 3146 // We need to allow slots buffer overflow here since the evacuation | 3086 // We need to allow slots buffer overflow here since the evacuation |
| 3147 // candidates are not part of the global list of old space pages and | 3087 // candidates are not part of the global list of old space pages and |
| 3148 // releasing an evacuation candidate due to a slots buffer overflow | 3088 // releasing an evacuation candidate due to a slots buffer overflow |
| 3149 // results in lost pages. | 3089 // results in lost pages. |
| 3150 mark_compact_collector()->RecordSlot( | 3090 mark_compact_collector()->RecordSlot(slot, slot, *slot, |
| 3151 slot, slot, *slot, SlotsBuffer::IGNORE_OVERFLOW); | 3091 SlotsBuffer::IGNORE_OVERFLOW); |
| 3152 } | 3092 } |
| 3153 allocation_sites_scratchpad_length_++; | 3093 allocation_sites_scratchpad_length_++; |
| 3154 } | 3094 } |
| 3155 } | 3095 } |
| 3156 | 3096 |
| 3157 | 3097 |
| 3158 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { | 3098 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { |
| 3159 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); | 3099 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); |
| 3160 } | 3100 } |
| 3161 | 3101 |
| 3162 | 3102 |
| 3163 Heap::RootListIndex Heap::RootIndexForExternalArrayType( | 3103 Heap::RootListIndex Heap::RootIndexForExternalArrayType( |
| 3164 ExternalArrayType array_type) { | 3104 ExternalArrayType array_type) { |
| 3165 switch (array_type) { | 3105 switch (array_type) { |
| 3166 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ | 3106 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ |
| 3167 case kExternal##Type##Array: \ | 3107 case kExternal##Type##Array: \ |
| 3168 return kExternal##Type##ArrayMapRootIndex; | 3108 return kExternal##Type##ArrayMapRootIndex; |
| 3169 | 3109 |
| 3170 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX) | 3110 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX) |
| 3171 #undef ARRAY_TYPE_TO_ROOT_INDEX | 3111 #undef ARRAY_TYPE_TO_ROOT_INDEX |
| 3172 | 3112 |
| 3173 default: | 3113 default: |
| 3174 UNREACHABLE(); | 3114 UNREACHABLE(); |
| 3175 return kUndefinedValueRootIndex; | 3115 return kUndefinedValueRootIndex; |
| 3176 } | 3116 } |
| 3177 } | 3117 } |
| 3178 | 3118 |
| 3179 | 3119 |
| 3180 Map* Heap::MapForFixedTypedArray(ExternalArrayType array_type) { | 3120 Map* Heap::MapForFixedTypedArray(ExternalArrayType array_type) { |
| 3181 return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]); | 3121 return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]); |
| 3182 } | 3122 } |
| 3183 | 3123 |
| 3184 | 3124 |
| 3185 Heap::RootListIndex Heap::RootIndexForFixedTypedArray( | 3125 Heap::RootListIndex Heap::RootIndexForFixedTypedArray( |
| 3186 ExternalArrayType array_type) { | 3126 ExternalArrayType array_type) { |
| 3187 switch (array_type) { | 3127 switch (array_type) { |
| 3188 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ | 3128 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ |
| 3189 case kExternal##Type##Array: \ | 3129 case kExternal##Type##Array: \ |
| 3190 return kFixed##Type##ArrayMapRootIndex; | 3130 return kFixed##Type##ArrayMapRootIndex; |
| 3191 | 3131 |
| 3192 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX) | 3132 TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX) |
| 3193 #undef ARRAY_TYPE_TO_ROOT_INDEX | 3133 #undef ARRAY_TYPE_TO_ROOT_INDEX |
| 3194 | 3134 |
| 3195 default: | 3135 default: |
| 3196 UNREACHABLE(); | 3136 UNREACHABLE(); |
| 3197 return kUndefinedValueRootIndex; | 3137 return kUndefinedValueRootIndex; |
| 3198 } | 3138 } |
| 3199 } | 3139 } |
| 3200 | 3140 |
| 3201 | 3141 |
| 3202 Heap::RootListIndex Heap::RootIndexForEmptyExternalArray( | 3142 Heap::RootListIndex Heap::RootIndexForEmptyExternalArray( |
| 3203 ElementsKind elementsKind) { | 3143 ElementsKind elementsKind) { |
| 3204 switch (elementsKind) { | 3144 switch (elementsKind) { |
| 3205 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ | 3145 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ |
| 3206 case EXTERNAL_##TYPE##_ELEMENTS: \ | 3146 case EXTERNAL_##TYPE##_ELEMENTS: \ |
| 3207 return kEmptyExternal##Type##ArrayRootIndex; | 3147 return kEmptyExternal##Type##ArrayRootIndex; |
| 3208 | 3148 |
| 3209 TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX) | 3149 TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX) |
| 3210 #undef ELEMENT_KIND_TO_ROOT_INDEX | 3150 #undef ELEMENT_KIND_TO_ROOT_INDEX |
| 3211 | 3151 |
| 3212 default: | 3152 default: |
| 3213 UNREACHABLE(); | 3153 UNREACHABLE(); |
| 3214 return kUndefinedValueRootIndex; | 3154 return kUndefinedValueRootIndex; |
| 3215 } | 3155 } |
| 3216 } | 3156 } |
| 3217 | 3157 |
| 3218 | 3158 |
| 3219 Heap::RootListIndex Heap::RootIndexForEmptyFixedTypedArray( | 3159 Heap::RootListIndex Heap::RootIndexForEmptyFixedTypedArray( |
| 3220 ElementsKind elementsKind) { | 3160 ElementsKind elementsKind) { |
| 3221 switch (elementsKind) { | 3161 switch (elementsKind) { |
| 3222 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ | 3162 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \ |
| 3223 case TYPE##_ELEMENTS: \ | 3163 case TYPE##_ELEMENTS: \ |
| 3224 return kEmptyFixed##Type##ArrayRootIndex; | 3164 return kEmptyFixed##Type##ArrayRootIndex; |
| 3225 | 3165 |
| 3226 TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX) | 3166 TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX) |
| 3227 #undef ELEMENT_KIND_TO_ROOT_INDEX | 3167 #undef ELEMENT_KIND_TO_ROOT_INDEX |
| 3228 default: | 3168 default: |
| 3229 UNREACHABLE(); | 3169 UNREACHABLE(); |
| 3230 return kUndefinedValueRootIndex; | 3170 return kUndefinedValueRootIndex; |
| 3231 } | 3171 } |
| 3232 } | 3172 } |
| 3233 | 3173 |
| 3234 | 3174 |
| (...skipping 22 matching lines...) |
| 3257 } | 3197 } |
| 3258 | 3198 |
| 3259 | 3199 |
| 3260 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) { | 3200 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) { |
| 3261 if (length < 0 || length > ByteArray::kMaxLength) { | 3201 if (length < 0 || length > ByteArray::kMaxLength) { |
| 3262 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 3202 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
| 3263 } | 3203 } |
| 3264 int size = ByteArray::SizeFor(length); | 3204 int size = ByteArray::SizeFor(length); |
| 3265 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3205 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3266 HeapObject* result; | 3206 HeapObject* result; |
| 3267 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3207 { |
| 3208 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3268 if (!allocation.To(&result)) return allocation; | 3209 if (!allocation.To(&result)) return allocation; |
| 3269 } | 3210 } |
| 3270 | 3211 |
| 3271 result->set_map_no_write_barrier(byte_array_map()); | 3212 result->set_map_no_write_barrier(byte_array_map()); |
| 3272 ByteArray::cast(result)->set_length(length); | 3213 ByteArray::cast(result)->set_length(length); |
| 3273 return result; | 3214 return result; |
| 3274 } | 3215 } |
| 3275 | 3216 |
| 3276 | 3217 |
| 3277 void Heap::CreateFillerObjectAt(Address addr, int size) { | 3218 void Heap::CreateFillerObjectAt(Address addr, int size) { |
| (...skipping 35 matching lines...) |
| 3313 if (mode == FROM_GC) { | 3254 if (mode == FROM_GC) { |
| 3314 MemoryChunk::IncrementLiveBytesFromGC(address, by); | 3255 MemoryChunk::IncrementLiveBytesFromGC(address, by); |
| 3315 } else { | 3256 } else { |
| 3316 MemoryChunk::IncrementLiveBytesFromMutator(address, by); | 3257 MemoryChunk::IncrementLiveBytesFromMutator(address, by); |
| 3317 } | 3258 } |
| 3318 } | 3259 } |
| 3319 } | 3260 } |
| 3320 | 3261 |
| 3321 | 3262 |
| 3322 AllocationResult Heap::AllocateExternalArray(int length, | 3263 AllocationResult Heap::AllocateExternalArray(int length, |
| 3323 ExternalArrayType array_type, | 3264 ExternalArrayType array_type, |
| 3324 void* external_pointer, | 3265 void* external_pointer, |
| 3325 PretenureFlag pretenure) { | 3266 PretenureFlag pretenure) { |
| 3326 int size = ExternalArray::kAlignedSize; | 3267 int size = ExternalArray::kAlignedSize; |
| 3327 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3268 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3328 HeapObject* result; | 3269 HeapObject* result; |
| 3329 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3270 { |
| 3271 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3330 if (!allocation.To(&result)) return allocation; | 3272 if (!allocation.To(&result)) return allocation; |
| 3331 } | 3273 } |
| 3332 | 3274 |
| 3333 result->set_map_no_write_barrier( | 3275 result->set_map_no_write_barrier(MapForExternalArrayType(array_type)); |
| 3334 MapForExternalArrayType(array_type)); | |
| 3335 ExternalArray::cast(result)->set_length(length); | 3276 ExternalArray::cast(result)->set_length(length); |
| 3336 ExternalArray::cast(result)->set_external_pointer(external_pointer); | 3277 ExternalArray::cast(result)->set_external_pointer(external_pointer); |
| 3337 return result; | 3278 return result; |
| 3338 } | 3279 } |
| 3339 | 3280 |
| 3340 static void ForFixedTypedArray(ExternalArrayType array_type, | 3281 static void ForFixedTypedArray(ExternalArrayType array_type, int* element_size, |
| 3341 int* element_size, | |
| 3342 ElementsKind* element_kind) { | 3282 ElementsKind* element_kind) { |
| 3343 switch (array_type) { | 3283 switch (array_type) { |
| 3344 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ | 3284 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ |
| 3345 case kExternal##Type##Array: \ | 3285 case kExternal##Type##Array: \ |
| 3346 *element_size = size; \ | 3286 *element_size = size; \ |
| 3347 *element_kind = TYPE##_ELEMENTS; \ | 3287 *element_kind = TYPE##_ELEMENTS; \ |
| 3348 return; | 3288 return; |
| 3349 | 3289 |
| 3350 TYPED_ARRAYS(TYPED_ARRAY_CASE) | 3290 TYPED_ARRAYS(TYPED_ARRAY_CASE) |
| 3351 #undef TYPED_ARRAY_CASE | 3291 #undef TYPED_ARRAY_CASE |
| 3352 | 3292 |
| 3353 default: | 3293 default: |
| 3354 *element_size = 0; // Bogus | 3294 *element_size = 0; // Bogus |
| 3355 *element_kind = UINT8_ELEMENTS; // Bogus | 3295 *element_kind = UINT8_ELEMENTS; // Bogus |
| 3356 UNREACHABLE(); | 3296 UNREACHABLE(); |
| 3357 } | 3297 } |
| 3358 } | 3298 } |
| 3359 | 3299 |
| 3360 | 3300 |
| 3361 AllocationResult Heap::AllocateFixedTypedArray(int length, | 3301 AllocationResult Heap::AllocateFixedTypedArray(int length, |
| 3362 ExternalArrayType array_type, | 3302 ExternalArrayType array_type, |
| 3363 PretenureFlag pretenure) { | 3303 PretenureFlag pretenure) { |
| 3364 int element_size; | 3304 int element_size; |
| 3365 ElementsKind elements_kind; | 3305 ElementsKind elements_kind; |
| 3366 ForFixedTypedArray(array_type, &element_size, &elements_kind); | 3306 ForFixedTypedArray(array_type, &element_size, &elements_kind); |
| 3367 int size = OBJECT_POINTER_ALIGN( | 3307 int size = OBJECT_POINTER_ALIGN(length * element_size + |
| 3368 length * element_size + FixedTypedArrayBase::kDataOffset); | 3308 FixedTypedArrayBase::kDataOffset); |
| 3369 #ifndef V8_HOST_ARCH_64_BIT | 3309 #ifndef V8_HOST_ARCH_64_BIT |
| 3370 if (array_type == kExternalFloat64Array) { | 3310 if (array_type == kExternalFloat64Array) { |
| 3371 size += kPointerSize; | 3311 size += kPointerSize; |
| 3372 } | 3312 } |
| 3373 #endif | 3313 #endif |
| 3374 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3314 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3375 | 3315 |
| 3376 HeapObject* object; | 3316 HeapObject* object; |
| 3377 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3317 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3378 if (!allocation.To(&object)) return allocation; | 3318 if (!allocation.To(&object)) return allocation; |
| (...skipping 29 matching lines...) |
| 3408 // be moved. | 3348 // be moved. |
| 3409 CreateFillerObjectAt(result->address(), object_size); | 3349 CreateFillerObjectAt(result->address(), object_size); |
| 3410 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); | 3350 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); |
| 3411 if (!allocation.To(&result)) return allocation; | 3351 if (!allocation.To(&result)) return allocation; |
| 3412 OnAllocationEvent(result, object_size); | 3352 OnAllocationEvent(result, object_size); |
| 3413 } | 3353 } |
| 3414 } | 3354 } |
| 3415 | 3355 |
| 3416 result->set_map_no_write_barrier(code_map()); | 3356 result->set_map_no_write_barrier(code_map()); |
| 3417 Code* code = Code::cast(result); | 3357 Code* code = Code::cast(result); |
| 3418 DCHECK(isolate_->code_range() == NULL || | 3358 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || |
| 3419 !isolate_->code_range()->valid() || | |
| 3420 isolate_->code_range()->contains(code->address())); | 3359 isolate_->code_range()->contains(code->address())); |
| 3421 code->set_gc_metadata(Smi::FromInt(0)); | 3360 code->set_gc_metadata(Smi::FromInt(0)); |
| 3422 code->set_ic_age(global_ic_age_); | 3361 code->set_ic_age(global_ic_age_); |
| 3423 return code; | 3362 return code; |
| 3424 } | 3363 } |
| 3425 | 3364 |
| 3426 | 3365 |
| 3427 AllocationResult Heap::CopyCode(Code* code) { | 3366 AllocationResult Heap::CopyCode(Code* code) { |
| 3428 AllocationResult allocation; | 3367 AllocationResult allocation; |
| 3429 HeapObject* new_constant_pool; | 3368 HeapObject* new_constant_pool; |
| (...skipping 16 matching lines...) |
| 3446 // Copy code object. | 3385 // Copy code object. |
| 3447 Address old_addr = code->address(); | 3386 Address old_addr = code->address(); |
| 3448 Address new_addr = result->address(); | 3387 Address new_addr = result->address(); |
| 3449 CopyBlock(new_addr, old_addr, obj_size); | 3388 CopyBlock(new_addr, old_addr, obj_size); |
| 3450 Code* new_code = Code::cast(result); | 3389 Code* new_code = Code::cast(result); |
| 3451 | 3390 |
| 3452 // Update the constant pool. | 3391 // Update the constant pool. |
| 3453 new_code->set_constant_pool(new_constant_pool); | 3392 new_code->set_constant_pool(new_constant_pool); |
| 3454 | 3393 |
| 3455 // Relocate the copy. | 3394 // Relocate the copy. |
| 3456 DCHECK(isolate_->code_range() == NULL || | 3395 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || |
| 3457 !isolate_->code_range()->valid() || | |
| 3458 isolate_->code_range()->contains(code->address())); | 3396 isolate_->code_range()->contains(code->address())); |
| 3459 new_code->Relocate(new_addr - old_addr); | 3397 new_code->Relocate(new_addr - old_addr); |
| 3460 return new_code; | 3398 return new_code; |
| 3461 } | 3399 } |
| 3462 | 3400 |
| 3463 | 3401 |
| 3464 AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) { | 3402 AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) { |
| 3465 // Allocate ByteArray and ConstantPoolArray before the Code object, so that we | 3403 // Allocate ByteArray and ConstantPoolArray before the Code object, so that we |
| 3466 // do not risk leaving uninitialized Code object (and breaking the heap). | 3404 // do not risk leaving uninitialized Code object (and breaking the heap). |
| 3467 ByteArray* reloc_info_array; | 3405 ByteArray* reloc_info_array; |
| 3468 { AllocationResult allocation = | 3406 { |
| 3407 AllocationResult allocation = |
| 3469 AllocateByteArray(reloc_info.length(), TENURED); | 3408 AllocateByteArray(reloc_info.length(), TENURED); |
| 3470 if (!allocation.To(&reloc_info_array)) return allocation; | 3409 if (!allocation.To(&reloc_info_array)) return allocation; |
| 3471 } | 3410 } |
| 3472 HeapObject* new_constant_pool; | 3411 HeapObject* new_constant_pool; |
| 3473 if (FLAG_enable_ool_constant_pool && | 3412 if (FLAG_enable_ool_constant_pool && |
| 3474 code->constant_pool() != empty_constant_pool_array()) { | 3413 code->constant_pool() != empty_constant_pool_array()) { |
| 3475 // Copy the constant pool, since edits to the copied code may modify | 3414 // Copy the constant pool, since edits to the copied code may modify |
| 3476 // the constant pool. | 3415 // the constant pool. |
| 3477 AllocationResult allocation = | 3416 AllocationResult allocation = CopyConstantPoolArray(code->constant_pool()); |
| 3478 CopyConstantPoolArray(code->constant_pool()); | |
| 3479 if (!allocation.To(&new_constant_pool)) return allocation; | 3417 if (!allocation.To(&new_constant_pool)) return allocation; |
| 3480 } else { | 3418 } else { |
| 3481 new_constant_pool = empty_constant_pool_array(); | 3419 new_constant_pool = empty_constant_pool_array(); |
| 3482 } | 3420 } |
| 3483 | 3421 |
| 3484 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); | 3422 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); |
| 3485 | 3423 |
| 3486 int new_obj_size = Code::SizeFor(new_body_size); | 3424 int new_obj_size = Code::SizeFor(new_body_size); |
| 3487 | 3425 |
| 3488 Address old_addr = code->address(); | 3426 Address old_addr = code->address(); |
| (...skipping 12 matching lines...) |
| 3501 // Copy header and instructions. | 3439 // Copy header and instructions. |
| 3502 CopyBytes(new_addr, old_addr, relocation_offset); | 3440 CopyBytes(new_addr, old_addr, relocation_offset); |
| 3503 | 3441 |
| 3504 Code* new_code = Code::cast(result); | 3442 Code* new_code = Code::cast(result); |
| 3505 new_code->set_relocation_info(reloc_info_array); | 3443 new_code->set_relocation_info(reloc_info_array); |
| 3506 | 3444 |
| 3507 // Update constant pool. | 3445 // Update constant pool. |
| 3508 new_code->set_constant_pool(new_constant_pool); | 3446 new_code->set_constant_pool(new_constant_pool); |
| 3509 | 3447 |
| 3510 // Copy patched rinfo. | 3448 // Copy patched rinfo. |
| 3511 CopyBytes(new_code->relocation_start(), | 3449 CopyBytes(new_code->relocation_start(), reloc_info.start(), |
| 3512 reloc_info.start(), | |
| 3513 static_cast<size_t>(reloc_info.length())); | 3450 static_cast<size_t>(reloc_info.length())); |
| 3514 | 3451 |
| 3515 // Relocate the copy. | 3452 // Relocate the copy. |
| 3516 DCHECK(isolate_->code_range() == NULL || | 3453 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || |
| 3517 !isolate_->code_range()->valid() || | |
| 3518 isolate_->code_range()->contains(code->address())); | 3454 isolate_->code_range()->contains(code->address())); |
| 3519 new_code->Relocate(new_addr - old_addr); | 3455 new_code->Relocate(new_addr - old_addr); |
| 3520 | 3456 |
| 3521 #ifdef VERIFY_HEAP | 3457 #ifdef VERIFY_HEAP |
| 3522 if (FLAG_verify_heap) code->ObjectVerify(); | 3458 if (FLAG_verify_heap) code->ObjectVerify(); |
| 3523 #endif | 3459 #endif |
| 3524 return new_code; | 3460 return new_code; |
| 3525 } | 3461 } |
| 3526 | 3462 |
| 3527 | 3463 |
| 3528 void Heap::InitializeAllocationMemento(AllocationMemento* memento, | 3464 void Heap::InitializeAllocationMemento(AllocationMemento* memento, |
| 3529 AllocationSite* allocation_site) { | 3465 AllocationSite* allocation_site) { |
| 3530 memento->set_map_no_write_barrier(allocation_memento_map()); | 3466 memento->set_map_no_write_barrier(allocation_memento_map()); |
| 3531 DCHECK(allocation_site->map() == allocation_site_map()); | 3467 DCHECK(allocation_site->map() == allocation_site_map()); |
| 3532 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); | 3468 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); |
| 3533 if (FLAG_allocation_site_pretenuring) { | 3469 if (FLAG_allocation_site_pretenuring) { |
| 3534 allocation_site->IncrementMementoCreateCount(); | 3470 allocation_site->IncrementMementoCreateCount(); |
| 3535 } | 3471 } |
| 3536 } | 3472 } |
| 3537 | 3473 |
| 3538 | 3474 |
| 3539 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, | 3475 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, |
| 3540 AllocationSite* allocation_site) { | 3476 AllocationSite* allocation_site) { |
| 3541 DCHECK(gc_state_ == NOT_IN_GC); | 3477 DCHECK(gc_state_ == NOT_IN_GC); |
| 3542 DCHECK(map->instance_type() != MAP_TYPE); | 3478 DCHECK(map->instance_type() != MAP_TYPE); |
| 3543 // If allocation failures are disallowed, we may allocate in a different | 3479 // If allocation failures are disallowed, we may allocate in a different |
| 3544 // space when new space is full and the object is not a large object. | 3480 // space when new space is full and the object is not a large object. |
| 3545 AllocationSpace retry_space = | 3481 AllocationSpace retry_space = |
| 3546 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 3482 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| 3547 int size = map->instance_size(); | 3483 int size = map->instance_size(); |
| 3548 if (allocation_site != NULL) { | 3484 if (allocation_site != NULL) { |
| 3549 size += AllocationMemento::kSize; | 3485 size += AllocationMemento::kSize; |
| 3550 } | 3486 } |
| 3551 HeapObject* result; | 3487 HeapObject* result; |
| 3552 AllocationResult allocation = AllocateRaw(size, space, retry_space); | 3488 AllocationResult allocation = AllocateRaw(size, space, retry_space); |
| 3553 if (!allocation.To(&result)) return allocation; | 3489 if (!allocation.To(&result)) return allocation; |
| 3554 // No need for write barrier since object is white and map is in old space. | 3490 // No need for write barrier since object is white and map is in old space. |
| 3555 result->set_map_no_write_barrier(map); | 3491 result->set_map_no_write_barrier(map); |
| 3556 if (allocation_site != NULL) { | 3492 if (allocation_site != NULL) { |
| 3557 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 3493 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
| 3558 reinterpret_cast<Address>(result) + map->instance_size()); | 3494 reinterpret_cast<Address>(result) + map->instance_size()); |
| 3559 InitializeAllocationMemento(alloc_memento, allocation_site); | 3495 InitializeAllocationMemento(alloc_memento, allocation_site); |
| 3560 } | 3496 } |
| 3561 return result; | 3497 return result; |
| 3562 } | 3498 } |
| 3563 | 3499 |
| 3564 | 3500 |
| 3565 void Heap::InitializeJSObjectFromMap(JSObject* obj, | 3501 void Heap::InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties, |
| 3566 FixedArray* properties, | |
| 3567 Map* map) { | 3502 Map* map) { |
| 3568 obj->set_properties(properties); | 3503 obj->set_properties(properties); |
| 3569 obj->initialize_elements(); | 3504 obj->initialize_elements(); |
| 3570 // TODO(1240798): Initialize the object's body using valid initial values | 3505 // TODO(1240798): Initialize the object's body using valid initial values |
| 3571 // according to the object's initial map. For example, if the map's | 3506 // according to the object's initial map. For example, if the map's |
| 3572 // instance type is JS_ARRAY_TYPE, the length field should be initialized | 3507 // instance type is JS_ARRAY_TYPE, the length field should be initialized |
| 3573 // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a | 3508 // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a |
| 3574 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object | 3509 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object |
| 3575 // verification code has to cope with (temporarily) invalid objects. See | 3510 // verification code has to cope with (temporarily) invalid objects. See |
| 3576 // for example, JSArray::JSArrayVerify). | 3511 // for example, JSArray::JSArrayVerify). |
| 3577 Object* filler; | 3512 Object* filler; |
| 3578 // We cannot always fill with one_pointer_filler_map because objects | 3513 // We cannot always fill with one_pointer_filler_map because objects |
| 3579 // created from API functions expect their internal fields to be initialized | 3514 // created from API functions expect their internal fields to be initialized |
| 3580 // with undefined_value. | 3515 // with undefined_value. |
| 3581 // Pre-allocated fields need to be initialized with undefined_value as well | 3516 // Pre-allocated fields need to be initialized with undefined_value as well |
| 3582 // so that object accesses before the constructor completes (e.g. in the | 3517 // so that object accesses before the constructor completes (e.g. in the |
| 3583 // debugger) will not cause a crash. | 3518 // debugger) will not cause a crash. |
| 3584 if (map->constructor()->IsJSFunction() && | 3519 if (map->constructor()->IsJSFunction() && |
| 3585 JSFunction::cast(map->constructor())-> | 3520 JSFunction::cast(map->constructor()) |
| 3586 IsInobjectSlackTrackingInProgress()) { | 3521 ->IsInobjectSlackTrackingInProgress()) { |
| 3587 // We might want to shrink the object later. | 3522 // We might want to shrink the object later. |
| 3588 DCHECK(obj->GetInternalFieldCount() == 0); | 3523 DCHECK(obj->GetInternalFieldCount() == 0); |
| 3589 filler = Heap::one_pointer_filler_map(); | 3524 filler = Heap::one_pointer_filler_map(); |
| 3590 } else { | 3525 } else { |
| 3591 filler = Heap::undefined_value(); | 3526 filler = Heap::undefined_value(); |
| 3592 } | 3527 } |
| 3593 obj->InitializeBody(map, Heap::undefined_value(), filler); | 3528 obj->InitializeBody(map, Heap::undefined_value(), filler); |
| 3594 } | 3529 } |
| 3595 | 3530 |
| 3596 | 3531 |
| 3597 AllocationResult Heap::AllocateJSObjectFromMap( | 3532 AllocationResult Heap::AllocateJSObjectFromMap( |
| 3598 Map* map, | 3533 Map* map, PretenureFlag pretenure, bool allocate_properties, |
| 3599 PretenureFlag pretenure, | |
| 3600 bool allocate_properties, | |
| 3601 AllocationSite* allocation_site) { | 3534 AllocationSite* allocation_site) { |
| 3602 // JSFunctions should be allocated using AllocateFunction to be | 3535 // JSFunctions should be allocated using AllocateFunction to be |
| 3603 // properly initialized. | 3536 // properly initialized. |
| 3604 DCHECK(map->instance_type() != JS_FUNCTION_TYPE); | 3537 DCHECK(map->instance_type() != JS_FUNCTION_TYPE); |
| 3605 | 3538 |
| 3606 // Both types of global objects should be allocated using | 3539 // Both types of global objects should be allocated using |
| 3607 // AllocateGlobalObject to be properly initialized. | 3540 // AllocateGlobalObject to be properly initialized. |
| 3608 DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); | 3541 DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); |
| 3609 DCHECK(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); | 3542 DCHECK(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); |
| 3610 | 3543 |
| 3611 // Allocate the backing storage for the properties. | 3544 // Allocate the backing storage for the properties. |
| 3612 FixedArray* properties; | 3545 FixedArray* properties; |
| 3613 if (allocate_properties) { | 3546 if (allocate_properties) { |
| 3614 int prop_size = map->InitialPropertiesLength(); | 3547 int prop_size = map->InitialPropertiesLength(); |
| 3615 DCHECK(prop_size >= 0); | 3548 DCHECK(prop_size >= 0); |
| 3616 { AllocationResult allocation = AllocateFixedArray(prop_size, pretenure); | 3549 { |
| 3550 AllocationResult allocation = AllocateFixedArray(prop_size, pretenure); |
| 3617 if (!allocation.To(&properties)) return allocation; | 3551 if (!allocation.To(&properties)) return allocation; |
| 3618 } | 3552 } |
| 3619 } else { | 3553 } else { |
| 3620 properties = empty_fixed_array(); | 3554 properties = empty_fixed_array(); |
| 3621 } | 3555 } |
| 3622 | 3556 |
| 3623 // Allocate the JSObject. | 3557 // Allocate the JSObject. |
| 3624 int size = map->instance_size(); | 3558 int size = map->instance_size(); |
| 3625 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 3559 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| 3626 JSObject* js_obj; | 3560 JSObject* js_obj; |
| 3627 AllocationResult allocation = Allocate(map, space, allocation_site); | 3561 AllocationResult allocation = Allocate(map, space, allocation_site); |
| 3628 if (!allocation.To(&js_obj)) return allocation; | 3562 if (!allocation.To(&js_obj)) return allocation; |
| 3629 | 3563 |
| 3630 // Initialize the JSObject. | 3564 // Initialize the JSObject. |
| 3631 InitializeJSObjectFromMap(js_obj, properties, map); | 3565 InitializeJSObjectFromMap(js_obj, properties, map); |
| 3632 DCHECK(js_obj->HasFastElements() || | 3566 DCHECK(js_obj->HasFastElements() || js_obj->HasExternalArrayElements() || |
| 3633 js_obj->HasExternalArrayElements() || | |
| 3634 js_obj->HasFixedTypedArrayElements()); | 3567 js_obj->HasFixedTypedArrayElements()); |
| 3635 return js_obj; | 3568 return js_obj; |
| 3636 } | 3569 } |
| 3637 | 3570 |
| 3638 | 3571 |
| 3639 AllocationResult Heap::AllocateJSObject(JSFunction* constructor, | 3572 AllocationResult Heap::AllocateJSObject(JSFunction* constructor, |
| 3640 PretenureFlag pretenure, | 3573 PretenureFlag pretenure, |
| 3641 AllocationSite* allocation_site) { | 3574 AllocationSite* allocation_site) { |
| 3642 DCHECK(constructor->has_initial_map()); | 3575 DCHECK(constructor->has_initial_map()); |
| 3643 | 3576 |
| (...skipping 19 matching lines...) |
| 3663 int object_size = map->instance_size(); | 3596 int object_size = map->instance_size(); |
| 3664 HeapObject* clone; | 3597 HeapObject* clone; |
| 3665 | 3598 |
| 3666 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type())); | 3599 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type())); |
| 3667 | 3600 |
| 3668 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; | 3601 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| 3669 | 3602 |
| 3670 // If we're forced to always allocate, we use the general allocation | 3603 // If we're forced to always allocate, we use the general allocation |
| 3671 // functions which may leave us with an object in old space. | 3604 // functions which may leave us with an object in old space. |
| 3672 if (always_allocate()) { | 3605 if (always_allocate()) { |
| 3673 { AllocationResult allocation = | 3606 { |
| 3607 AllocationResult allocation = |
| 3674 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); | 3608 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); |
| 3675 if (!allocation.To(&clone)) return allocation; | 3609 if (!allocation.To(&clone)) return allocation; |
| 3676 } | 3610 } |
| 3677 Address clone_address = clone->address(); | 3611 Address clone_address = clone->address(); |
| 3678 CopyBlock(clone_address, | 3612 CopyBlock(clone_address, source->address(), object_size); |
| 3679 source->address(), | |
| 3680 object_size); | |
| 3681 // Update write barrier for all fields that lie beyond the header. | 3613 // Update write barrier for all fields that lie beyond the header. |
| 3682 RecordWrites(clone_address, | 3614 RecordWrites(clone_address, JSObject::kHeaderSize, |
| 3683 JSObject::kHeaderSize, | |
| 3684 (object_size - JSObject::kHeaderSize) / kPointerSize); | 3615 (object_size - JSObject::kHeaderSize) / kPointerSize); |
| 3685 } else { | 3616 } else { |
| 3686 wb_mode = SKIP_WRITE_BARRIER; | 3617 wb_mode = SKIP_WRITE_BARRIER; |
| 3687 | 3618 |
| 3688 { int adjusted_object_size = site != NULL | 3619 { |
| 3689 ? object_size + AllocationMemento::kSize | 3620 int adjusted_object_size = |
| 3690 : object_size; | 3621 site != NULL ? object_size + AllocationMemento::kSize : object_size; |
| 3691 AllocationResult allocation = | 3622 AllocationResult allocation = |
| 3692 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); | 3623 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); |
| 3693 if (!allocation.To(&clone)) return allocation; | 3624 if (!allocation.To(&clone)) return allocation; |
| 3694 } | 3625 } |
| 3695 SLOW_DCHECK(InNewSpace(clone)); | 3626 SLOW_DCHECK(InNewSpace(clone)); |
| 3696 // Since we know the clone is allocated in new space, we can copy | 3627 // Since we know the clone is allocated in new space, we can copy |
| 3697 // the contents without worrying about updating the write barrier. | 3628 // the contents without worrying about updating the write barrier. |
| 3698 CopyBlock(clone->address(), | 3629 CopyBlock(clone->address(), source->address(), object_size); |
| 3699 source->address(), | |
| 3700 object_size); | |
| 3701 | 3630 |
| 3702 if (site != NULL) { | 3631 if (site != NULL) { |
| 3703 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 3632 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
| 3704 reinterpret_cast<Address>(clone) + object_size); | 3633 reinterpret_cast<Address>(clone) + object_size); |
| 3705 InitializeAllocationMemento(alloc_memento, site); | 3634 InitializeAllocationMemento(alloc_memento, site); |
| 3706 } | 3635 } |
| 3707 } | 3636 } |
| 3708 | 3637 |
| 3709 SLOW_DCHECK( | 3638 SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() == |
| 3710 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 3639 source->GetElementsKind()); |
| 3711 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 3640 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
| 3712 FixedArray* properties = FixedArray::cast(source->properties()); | 3641 FixedArray* properties = FixedArray::cast(source->properties()); |
| 3713 // Update elements if necessary. | 3642 // Update elements if necessary. |
| 3714 if (elements->length() > 0) { | 3643 if (elements->length() > 0) { |
| 3715 FixedArrayBase* elem; | 3644 FixedArrayBase* elem; |
| 3716 { AllocationResult allocation; | 3645 { |
| 3646 AllocationResult allocation; |
| 3717 if (elements->map() == fixed_cow_array_map()) { | 3647 if (elements->map() == fixed_cow_array_map()) { |
| 3718 allocation = FixedArray::cast(elements); | 3648 allocation = FixedArray::cast(elements); |
| 3719 } else if (source->HasFastDoubleElements()) { | 3649 } else if (source->HasFastDoubleElements()) { |
| 3720 allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); | 3650 allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); |
| 3721 } else { | 3651 } else { |
| 3722 allocation = CopyFixedArray(FixedArray::cast(elements)); | 3652 allocation = CopyFixedArray(FixedArray::cast(elements)); |
| 3723 } | 3653 } |
| 3724 if (!allocation.To(&elem)) return allocation; | 3654 if (!allocation.To(&elem)) return allocation; |
| 3725 } | 3655 } |
| 3726 JSObject::cast(clone)->set_elements(elem, wb_mode); | 3656 JSObject::cast(clone)->set_elements(elem, wb_mode); |
| 3727 } | 3657 } |
| 3728 // Update properties if necessary. | 3658 // Update properties if necessary. |
| 3729 if (properties->length() > 0) { | 3659 if (properties->length() > 0) { |
| 3730 FixedArray* prop; | 3660 FixedArray* prop; |
| 3731 { AllocationResult allocation = CopyFixedArray(properties); | 3661 { |
| 3662 AllocationResult allocation = CopyFixedArray(properties); |
| 3732 if (!allocation.To(&prop)) return allocation; | 3663 if (!allocation.To(&prop)) return allocation; |
| 3733 } | 3664 } |
| 3734 JSObject::cast(clone)->set_properties(prop, wb_mode); | 3665 JSObject::cast(clone)->set_properties(prop, wb_mode); |
| 3735 } | 3666 } |
| 3736 // Return the new clone. | 3667 // Return the new clone. |
| 3737 return clone; | 3668 return clone; |
| 3738 } | 3669 } |
| 3739 | 3670 |
| 3740 | 3671 |
| 3741 static inline void WriteOneByteData(Vector<const char> vector, | 3672 static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars, |
| 3742 uint8_t* chars, | |
| 3743 int len) { | 3673 int len) { |
| 3744 // Only works for ascii. | 3674 // Only works for ascii. |
| 3745 DCHECK(vector.length() == len); | 3675 DCHECK(vector.length() == len); |
| 3746 MemCopy(chars, vector.start(), len); | 3676 MemCopy(chars, vector.start(), len); |
| 3747 } | 3677 } |
| 3748 | 3678 |
| 3749 static inline void WriteTwoByteData(Vector<const char> vector, | 3679 static inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars, |
| 3750 uint16_t* chars, | |
| 3751 int len) { | 3680 int len) { |
| 3752 const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start()); | 3681 const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start()); |
| 3753 unsigned stream_length = vector.length(); | 3682 unsigned stream_length = vector.length(); |
| 3754 while (stream_length != 0) { | 3683 while (stream_length != 0) { |
| 3755 unsigned consumed = 0; | 3684 unsigned consumed = 0; |
| 3756 uint32_t c = unibrow::Utf8::ValueOf(stream, stream_length, &consumed); | 3685 uint32_t c = unibrow::Utf8::ValueOf(stream, stream_length, &consumed); |
| 3757 DCHECK(c != unibrow::Utf8::kBadChar); | 3686 DCHECK(c != unibrow::Utf8::kBadChar); |
| 3758 DCHECK(consumed <= stream_length); | 3687 DCHECK(consumed <= stream_length); |
| 3759 stream_length -= consumed; | 3688 stream_length -= consumed; |
| 3760 stream += consumed; | 3689 stream += consumed; |
| (...skipping 18 matching lines...) Expand all Loading... |
| 3779 String::WriteToFlat(s, chars, 0, len); | 3708 String::WriteToFlat(s, chars, 0, len); |
| 3780 } | 3709 } |
| 3781 | 3710 |
| 3782 | 3711 |
| 3783 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { | 3712 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { |
| 3784 DCHECK(s->length() == len); | 3713 DCHECK(s->length() == len); |
| 3785 String::WriteToFlat(s, chars, 0, len); | 3714 String::WriteToFlat(s, chars, 0, len); |
| 3786 } | 3715 } |
| 3787 | 3716 |
| 3788 | 3717 |
| 3789 template<bool is_one_byte, typename T> | 3718 template <bool is_one_byte, typename T> |
| 3790 AllocationResult Heap::AllocateInternalizedStringImpl( | 3719 AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars, |
| 3791 T t, int chars, uint32_t hash_field) { | 3720 uint32_t hash_field) { |
| 3792 DCHECK(chars >= 0); | 3721 DCHECK(chars >= 0); |
| 3793 // Compute map and object size. | 3722 // Compute map and object size. |
| 3794 int size; | 3723 int size; |
| 3795 Map* map; | 3724 Map* map; |
| 3796 | 3725 |
| 3797 DCHECK_LE(0, chars); | 3726 DCHECK_LE(0, chars); |
| 3798 DCHECK_GE(String::kMaxLength, chars); | 3727 DCHECK_GE(String::kMaxLength, chars); |
| 3799 if (is_one_byte) { | 3728 if (is_one_byte) { |
| 3800 map = ascii_internalized_string_map(); | 3729 map = ascii_internalized_string_map(); |
| 3801 size = SeqOneByteString::SizeFor(chars); | 3730 size = SeqOneByteString::SizeFor(chars); |
| 3802 } else { | 3731 } else { |
| 3803 map = internalized_string_map(); | 3732 map = internalized_string_map(); |
| 3804 size = SeqTwoByteString::SizeFor(chars); | 3733 size = SeqTwoByteString::SizeFor(chars); |
| 3805 } | 3734 } |
| 3806 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); | 3735 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
| 3807 | 3736 |
| 3808 // Allocate string. | 3737 // Allocate string. |
| 3809 HeapObject* result; | 3738 HeapObject* result; |
| 3810 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3739 { |
| 3740 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3811 if (!allocation.To(&result)) return allocation; | 3741 if (!allocation.To(&result)) return allocation; |
| 3812 } | 3742 } |
| 3813 | 3743 |
| 3814 result->set_map_no_write_barrier(map); | 3744 result->set_map_no_write_barrier(map); |
| 3815 // Set length and hash fields of the allocated string. | 3745 // Set length and hash fields of the allocated string. |
| 3816 String* answer = String::cast(result); | 3746 String* answer = String::cast(result); |
| 3817 answer->set_length(chars); | 3747 answer->set_length(chars); |
| 3818 answer->set_hash_field(hash_field); | 3748 answer->set_hash_field(hash_field); |
| 3819 | 3749 |
| 3820 DCHECK_EQ(size, answer->Size()); | 3750 DCHECK_EQ(size, answer->Size()); |
| 3821 | 3751 |
| 3822 if (is_one_byte) { | 3752 if (is_one_byte) { |
| 3823 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars); | 3753 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars); |
| 3824 } else { | 3754 } else { |
| 3825 WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars); | 3755 WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars); |
| 3826 } | 3756 } |
| 3827 return answer; | 3757 return answer; |
| 3828 } | 3758 } |
| 3829 | 3759 |
| 3830 | 3760 |
| 3831 // Need explicit instantiations. | 3761 // Need explicit instantiations. |
| 3832 template | 3762 template AllocationResult Heap::AllocateInternalizedStringImpl<true>(String*, |
| 3833 AllocationResult Heap::AllocateInternalizedStringImpl<true>( | 3763 int, |
| 3834 String*, int, uint32_t); | 3764 uint32_t); |
| 3835 template | 3765 template AllocationResult Heap::AllocateInternalizedStringImpl<false>(String*, |
| 3836 AllocationResult Heap::AllocateInternalizedStringImpl<false>( | 3766 int, |
| 3837 String*, int, uint32_t); | 3767 uint32_t); |
| 3838 template | 3768 template AllocationResult Heap::AllocateInternalizedStringImpl<false>( |
| 3839 AllocationResult Heap::AllocateInternalizedStringImpl<false>( | |
| 3840 Vector<const char>, int, uint32_t); | 3769 Vector<const char>, int, uint32_t); |
| 3841 | 3770 |
| 3842 | 3771 |
| 3843 AllocationResult Heap::AllocateRawOneByteString(int length, | 3772 AllocationResult Heap::AllocateRawOneByteString(int length, |
| 3844 PretenureFlag pretenure) { | 3773 PretenureFlag pretenure) { |
| 3845 DCHECK_LE(0, length); | 3774 DCHECK_LE(0, length); |
| 3846 DCHECK_GE(String::kMaxLength, length); | 3775 DCHECK_GE(String::kMaxLength, length); |
| 3847 int size = SeqOneByteString::SizeFor(length); | 3776 int size = SeqOneByteString::SizeFor(length); |
| 3848 DCHECK(size <= SeqOneByteString::kMaxSize); | 3777 DCHECK(size <= SeqOneByteString::kMaxSize); |
| 3849 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3778 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3850 | 3779 |
| 3851 HeapObject* result; | 3780 HeapObject* result; |
| 3852 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3781 { |
| 3782 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3853 if (!allocation.To(&result)) return allocation; | 3783 if (!allocation.To(&result)) return allocation; |
| 3854 } | 3784 } |
| 3855 | 3785 |
| 3856 // Partially initialize the object. | 3786 // Partially initialize the object. |
| 3857 result->set_map_no_write_barrier(ascii_string_map()); | 3787 result->set_map_no_write_barrier(ascii_string_map()); |
| 3858 String::cast(result)->set_length(length); | 3788 String::cast(result)->set_length(length); |
| 3859 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3789 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 3860 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3790 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
| 3861 | 3791 |
| 3862 return result; | 3792 return result; |
| 3863 } | 3793 } |
| 3864 | 3794 |
| 3865 | 3795 |
| 3866 AllocationResult Heap::AllocateRawTwoByteString(int length, | 3796 AllocationResult Heap::AllocateRawTwoByteString(int length, |
| 3867 PretenureFlag pretenure) { | 3797 PretenureFlag pretenure) { |
| 3868 DCHECK_LE(0, length); | 3798 DCHECK_LE(0, length); |
| 3869 DCHECK_GE(String::kMaxLength, length); | 3799 DCHECK_GE(String::kMaxLength, length); |
| 3870 int size = SeqTwoByteString::SizeFor(length); | 3800 int size = SeqTwoByteString::SizeFor(length); |
| 3871 DCHECK(size <= SeqTwoByteString::kMaxSize); | 3801 DCHECK(size <= SeqTwoByteString::kMaxSize); |
| 3872 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3802 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3873 | 3803 |
| 3874 HeapObject* result; | 3804 HeapObject* result; |
| 3875 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3805 { |
| 3806 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3876 if (!allocation.To(&result)) return allocation; | 3807 if (!allocation.To(&result)) return allocation; |
| 3877 } | 3808 } |
| 3878 | 3809 |
| 3879 // Partially initialize the object. | 3810 // Partially initialize the object. |
| 3880 result->set_map_no_write_barrier(string_map()); | 3811 result->set_map_no_write_barrier(string_map()); |
| 3881 String::cast(result)->set_length(length); | 3812 String::cast(result)->set_length(length); |
| 3882 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3813 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 3883 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3814 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
| 3884 return result; | 3815 return result; |
| 3885 } | 3816 } |
| 3886 | 3817 |
| 3887 | 3818 |
| 3888 AllocationResult Heap::AllocateEmptyFixedArray() { | 3819 AllocationResult Heap::AllocateEmptyFixedArray() { |
| 3889 int size = FixedArray::SizeFor(0); | 3820 int size = FixedArray::SizeFor(0); |
| 3890 HeapObject* result; | 3821 HeapObject* result; |
| 3891 { AllocationResult allocation = | 3822 { |
| 3823 AllocationResult allocation = |
| 3892 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); | 3824 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); |
| 3893 if (!allocation.To(&result)) return allocation; | 3825 if (!allocation.To(&result)) return allocation; |
| 3894 } | 3826 } |
| 3895 // Initialize the object. | 3827 // Initialize the object. |
| 3896 result->set_map_no_write_barrier(fixed_array_map()); | 3828 result->set_map_no_write_barrier(fixed_array_map()); |
| 3897 FixedArray::cast(result)->set_length(0); | 3829 FixedArray::cast(result)->set_length(0); |
| 3898 return result; | 3830 return result; |
| 3899 } | 3831 } |
| 3900 | 3832 |
| 3901 | 3833 |
| 3902 AllocationResult Heap::AllocateEmptyExternalArray( | 3834 AllocationResult Heap::AllocateEmptyExternalArray( |
| 3903 ExternalArrayType array_type) { | 3835 ExternalArrayType array_type) { |
| 3904 return AllocateExternalArray(0, array_type, NULL, TENURED); | 3836 return AllocateExternalArray(0, array_type, NULL, TENURED); |
| 3905 } | 3837 } |
| 3906 | 3838 |
| 3907 | 3839 |
| 3908 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { | 3840 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { |
| 3909 if (!InNewSpace(src)) { | 3841 if (!InNewSpace(src)) { |
| 3910 return src; | 3842 return src; |
| 3911 } | 3843 } |
| 3912 | 3844 |
| 3913 int len = src->length(); | 3845 int len = src->length(); |
| 3914 HeapObject* obj; | 3846 HeapObject* obj; |
| 3915 { AllocationResult allocation = AllocateRawFixedArray(len, TENURED); | 3847 { |
| 3848 AllocationResult allocation = AllocateRawFixedArray(len, TENURED); |
| 3916 if (!allocation.To(&obj)) return allocation; | 3849 if (!allocation.To(&obj)) return allocation; |
| 3917 } | 3850 } |
| 3918 obj->set_map_no_write_barrier(fixed_array_map()); | 3851 obj->set_map_no_write_barrier(fixed_array_map()); |
| 3919 FixedArray* result = FixedArray::cast(obj); | 3852 FixedArray* result = FixedArray::cast(obj); |
| 3920 result->set_length(len); | 3853 result->set_length(len); |
| 3921 | 3854 |
| 3922 // Copy the content | 3855 // Copy the content |
| 3923 DisallowHeapAllocation no_gc; | 3856 DisallowHeapAllocation no_gc; |
| 3924 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 3857 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
| 3925 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); | 3858 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); |
| 3926 | 3859 |
| 3927 // TODO(mvstanton): The map is set twice because of protection against calling | 3860 // TODO(mvstanton): The map is set twice because of protection against calling |
| 3928 // set() on a COW FixedArray. Issue v8:3221 created to track this, and | 3861 // set() on a COW FixedArray. Issue v8:3221 created to track this, and |
| 3929 // we might then be able to remove this whole method. | 3862 // we might then be able to remove this whole method. |
| 3930 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map()); | 3863 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map()); |
| 3931 return result; | 3864 return result; |
| 3932 } | 3865 } |
| 3933 | 3866 |
| 3934 | 3867 |
| 3935 AllocationResult Heap::AllocateEmptyFixedTypedArray( | 3868 AllocationResult Heap::AllocateEmptyFixedTypedArray( |
| 3936 ExternalArrayType array_type) { | 3869 ExternalArrayType array_type) { |
| 3937 return AllocateFixedTypedArray(0, array_type, TENURED); | 3870 return AllocateFixedTypedArray(0, array_type, TENURED); |
| 3938 } | 3871 } |
| 3939 | 3872 |
| 3940 | 3873 |
| 3941 AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { | 3874 AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { |
| 3942 int len = src->length(); | 3875 int len = src->length(); |
| 3943 HeapObject* obj; | 3876 HeapObject* obj; |
| 3944 { AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); | 3877 { |
| 3878 AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); |
| 3945 if (!allocation.To(&obj)) return allocation; | 3879 if (!allocation.To(&obj)) return allocation; |
| 3946 } | 3880 } |
| 3947 if (InNewSpace(obj)) { | 3881 if (InNewSpace(obj)) { |
| 3948 obj->set_map_no_write_barrier(map); | 3882 obj->set_map_no_write_barrier(map); |
| 3949 CopyBlock(obj->address() + kPointerSize, | 3883 CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize, |
| 3950 src->address() + kPointerSize, | |
| 3951 FixedArray::SizeFor(len) - kPointerSize); | 3884 FixedArray::SizeFor(len) - kPointerSize); |
| 3952 return obj; | 3885 return obj; |
| 3953 } | 3886 } |
| 3954 obj->set_map_no_write_barrier(map); | 3887 obj->set_map_no_write_barrier(map); |
| 3955 FixedArray* result = FixedArray::cast(obj); | 3888 FixedArray* result = FixedArray::cast(obj); |
| 3956 result->set_length(len); | 3889 result->set_length(len); |
| 3957 | 3890 |
| 3958 // Copy the content | 3891 // Copy the content |
| 3959 DisallowHeapAllocation no_gc; | 3892 DisallowHeapAllocation no_gc; |
| 3960 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 3893 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
| 3961 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); | 3894 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); |
| 3962 return result; | 3895 return result; |
| 3963 } | 3896 } |
| 3964 | 3897 |
| 3965 | 3898 |
| 3966 AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, | 3899 AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, |
| 3967 Map* map) { | 3900 Map* map) { |
| 3968 int len = src->length(); | 3901 int len = src->length(); |
| 3969 HeapObject* obj; | 3902 HeapObject* obj; |
| 3970 { AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); | 3903 { |
| 3904 AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); |
| 3971 if (!allocation.To(&obj)) return allocation; | 3905 if (!allocation.To(&obj)) return allocation; |
| 3972 } | 3906 } |
| 3973 obj->set_map_no_write_barrier(map); | 3907 obj->set_map_no_write_barrier(map); |
| 3974 CopyBlock( | 3908 CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset, |
| 3975 obj->address() + FixedDoubleArray::kLengthOffset, | 3909 src->address() + FixedDoubleArray::kLengthOffset, |
| 3976 src->address() + FixedDoubleArray::kLengthOffset, | 3910 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
| 3977 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | |
| 3978 return obj; | 3911 return obj; |
| 3979 } | 3912 } |
| 3980 | 3913 |
| 3981 | 3914 |
| 3982 AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, | 3915 AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, |
| 3983 Map* map) { | 3916 Map* map) { |
| 3984 HeapObject* obj; | 3917 HeapObject* obj; |
| 3985 if (src->is_extended_layout()) { | 3918 if (src->is_extended_layout()) { |
| 3986 ConstantPoolArray::NumberOfEntries small(src, | 3919 ConstantPoolArray::NumberOfEntries small(src, |
| 3987 ConstantPoolArray::SMALL_SECTION); | 3920 ConstantPoolArray::SMALL_SECTION); |
| 3988 ConstantPoolArray::NumberOfEntries extended(src, | 3921 ConstantPoolArray::NumberOfEntries extended( |
| 3989 ConstantPoolArray::EXTENDED_SECTION); | 3922 src, ConstantPoolArray::EXTENDED_SECTION); |
| 3990 AllocationResult allocation = | 3923 AllocationResult allocation = |
| 3991 AllocateExtendedConstantPoolArray(small, extended); | 3924 AllocateExtendedConstantPoolArray(small, extended); |
| 3992 if (!allocation.To(&obj)) return allocation; | 3925 if (!allocation.To(&obj)) return allocation; |
| 3993 } else { | 3926 } else { |
| 3994 ConstantPoolArray::NumberOfEntries small(src, | 3927 ConstantPoolArray::NumberOfEntries small(src, |
| 3995 ConstantPoolArray::SMALL_SECTION); | 3928 ConstantPoolArray::SMALL_SECTION); |
| 3996 AllocationResult allocation = AllocateConstantPoolArray(small); | 3929 AllocationResult allocation = AllocateConstantPoolArray(small); |
| 3997 if (!allocation.To(&obj)) return allocation; | 3930 if (!allocation.To(&obj)) return allocation; |
| 3998 } | 3931 } |
| 3999 obj->set_map_no_write_barrier(map); | 3932 obj->set_map_no_write_barrier(map); |
| 4000 CopyBlock( | 3933 CopyBlock(obj->address() + ConstantPoolArray::kFirstEntryOffset, |
| 4001 obj->address() + ConstantPoolArray::kFirstEntryOffset, | 3934 src->address() + ConstantPoolArray::kFirstEntryOffset, |
| 4002 src->address() + ConstantPoolArray::kFirstEntryOffset, | 3935 src->size() - ConstantPoolArray::kFirstEntryOffset); |
| 4003 src->size() - ConstantPoolArray::kFirstEntryOffset); | |
| 4004 return obj; | 3936 return obj; |
| 4005 } | 3937 } |
| 4006 | 3938 |
| 4007 | 3939 |
| 4008 AllocationResult Heap::AllocateRawFixedArray(int length, | 3940 AllocationResult Heap::AllocateRawFixedArray(int length, |
| 4009 PretenureFlag pretenure) { | 3941 PretenureFlag pretenure) { |
| 4010 if (length < 0 || length > FixedArray::kMaxLength) { | 3942 if (length < 0 || length > FixedArray::kMaxLength) { |
| 4011 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 3943 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
| 4012 } | 3944 } |
| 4013 int size = FixedArray::SizeFor(length); | 3945 int size = FixedArray::SizeFor(length); |
| 4014 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 3946 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| 4015 | 3947 |
| 4016 return AllocateRaw(size, space, OLD_POINTER_SPACE); | 3948 return AllocateRaw(size, space, OLD_POINTER_SPACE); |
| 4017 } | 3949 } |
| 4018 | 3950 |
| 4019 | 3951 |
| 4020 AllocationResult Heap::AllocateFixedArrayWithFiller(int length, | 3952 AllocationResult Heap::AllocateFixedArrayWithFiller(int length, |
| 4021 PretenureFlag pretenure, | 3953 PretenureFlag pretenure, |
| 4022 Object* filler) { | 3954 Object* filler) { |
| 4023 DCHECK(length >= 0); | 3955 DCHECK(length >= 0); |
| 4024 DCHECK(empty_fixed_array()->IsFixedArray()); | 3956 DCHECK(empty_fixed_array()->IsFixedArray()); |
| 4025 if (length == 0) return empty_fixed_array(); | 3957 if (length == 0) return empty_fixed_array(); |
| 4026 | 3958 |
| 4027 DCHECK(!InNewSpace(filler)); | 3959 DCHECK(!InNewSpace(filler)); |
| 4028 HeapObject* result; | 3960 HeapObject* result; |
| 4029 { AllocationResult allocation = AllocateRawFixedArray(length, pretenure); | 3961 { |
| 3962 AllocationResult allocation = AllocateRawFixedArray(length, pretenure); |
| 4030 if (!allocation.To(&result)) return allocation; | 3963 if (!allocation.To(&result)) return allocation; |
| 4031 } | 3964 } |
| 4032 | 3965 |
| 4033 result->set_map_no_write_barrier(fixed_array_map()); | 3966 result->set_map_no_write_barrier(fixed_array_map()); |
| 4034 FixedArray* array = FixedArray::cast(result); | 3967 FixedArray* array = FixedArray::cast(result); |
| 4035 array->set_length(length); | 3968 array->set_length(length); |
| 4036 MemsetPointer(array->data_start(), filler, length); | 3969 MemsetPointer(array->data_start(), filler, length); |
| 4037 return array; | 3970 return array; |
| 4038 } | 3971 } |
| 4039 | 3972 |
| 4040 | 3973 |
| 4041 AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { | 3974 AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { |
| 4042 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); | 3975 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); |
| 4043 } | 3976 } |
| 4044 | 3977 |
| 4045 | 3978 |
| 4046 AllocationResult Heap::AllocateUninitializedFixedArray(int length) { | 3979 AllocationResult Heap::AllocateUninitializedFixedArray(int length) { |
| 4047 if (length == 0) return empty_fixed_array(); | 3980 if (length == 0) return empty_fixed_array(); |
| 4048 | 3981 |
| 4049 HeapObject* obj; | 3982 HeapObject* obj; |
| 4050 { AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); | 3983 { |
| 3984 AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); |
| 4051 if (!allocation.To(&obj)) return allocation; | 3985 if (!allocation.To(&obj)) return allocation; |
| 4052 } | 3986 } |
| 4053 | 3987 |
| 4054 obj->set_map_no_write_barrier(fixed_array_map()); | 3988 obj->set_map_no_write_barrier(fixed_array_map()); |
| 4055 FixedArray::cast(obj)->set_length(length); | 3989 FixedArray::cast(obj)->set_length(length); |
| 4056 return obj; | 3990 return obj; |
| 4057 } | 3991 } |
| 4058 | 3992 |
| 4059 | 3993 |
| 4060 AllocationResult Heap::AllocateUninitializedFixedDoubleArray( | 3994 AllocationResult Heap::AllocateUninitializedFixedDoubleArray( |
| 4061 int length, | 3995 int length, PretenureFlag pretenure) { |
| 4062 PretenureFlag pretenure) { | |
| 4063 if (length == 0) return empty_fixed_array(); | 3996 if (length == 0) return empty_fixed_array(); |
| 4064 | 3997 |
| 4065 HeapObject* elements; | 3998 HeapObject* elements; |
| 4066 AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); | 3999 AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); |
| 4067 if (!allocation.To(&elements)) return allocation; | 4000 if (!allocation.To(&elements)) return allocation; |
| 4068 | 4001 |
| 4069 elements->set_map_no_write_barrier(fixed_double_array_map()); | 4002 elements->set_map_no_write_barrier(fixed_double_array_map()); |
| 4070 FixedDoubleArray::cast(elements)->set_length(length); | 4003 FixedDoubleArray::cast(elements)->set_length(length); |
| 4071 return elements; | 4004 return elements; |
| 4072 } | 4005 } |
| 4073 | 4006 |
| 4074 | 4007 |
| 4075 AllocationResult Heap::AllocateRawFixedDoubleArray(int length, | 4008 AllocationResult Heap::AllocateRawFixedDoubleArray(int length, |
| 4076 PretenureFlag pretenure) { | 4009 PretenureFlag pretenure) { |
| 4077 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 4010 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
| 4078 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 4011 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
| 4079 } | 4012 } |
| 4080 int size = FixedDoubleArray::SizeFor(length); | 4013 int size = FixedDoubleArray::SizeFor(length); |
| 4081 #ifndef V8_HOST_ARCH_64_BIT | 4014 #ifndef V8_HOST_ARCH_64_BIT |
| 4082 size += kPointerSize; | 4015 size += kPointerSize; |
| 4083 #endif | 4016 #endif |
| 4084 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 4017 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 4085 | 4018 |
| 4086 HeapObject* object; | 4019 HeapObject* object; |
| 4087 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 4020 { |
| 4021 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 4088 if (!allocation.To(&object)) return allocation; | 4022 if (!allocation.To(&object)) return allocation; |
| 4089 } | 4023 } |
| 4090 | 4024 |
| 4091 return EnsureDoubleAligned(this, object, size); | 4025 return EnsureDoubleAligned(this, object, size); |
| 4092 } | 4026 } |
| 4093 | 4027 |
| 4094 | 4028 |
| 4095 AllocationResult Heap::AllocateConstantPoolArray( | 4029 AllocationResult Heap::AllocateConstantPoolArray( |
| 4096 const ConstantPoolArray::NumberOfEntries& small) { | 4030 const ConstantPoolArray::NumberOfEntries& small) { |
| 4097 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); | 4031 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); |
| 4098 int size = ConstantPoolArray::SizeFor(small); | 4032 int size = ConstantPoolArray::SizeFor(small); |
| 4099 #ifndef V8_HOST_ARCH_64_BIT | 4033 #ifndef V8_HOST_ARCH_64_BIT |
| 4100 size += kPointerSize; | 4034 size += kPointerSize; |
| 4101 #endif | 4035 #endif |
| 4102 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); | 4036 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); |
| 4103 | 4037 |
| 4104 HeapObject* object; | 4038 HeapObject* object; |
| 4105 { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); | 4039 { |
| 4040 AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); |
| 4106 if (!allocation.To(&object)) return allocation; | 4041 if (!allocation.To(&object)) return allocation; |
| 4107 } | 4042 } |
| 4108 object = EnsureDoubleAligned(this, object, size); | 4043 object = EnsureDoubleAligned(this, object, size); |
| 4109 object->set_map_no_write_barrier(constant_pool_array_map()); | 4044 object->set_map_no_write_barrier(constant_pool_array_map()); |
| 4110 | 4045 |
| 4111 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); | 4046 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); |
| 4112 constant_pool->Init(small); | 4047 constant_pool->Init(small); |
| 4113 constant_pool->ClearPtrEntries(isolate()); | 4048 constant_pool->ClearPtrEntries(isolate()); |
| 4114 return constant_pool; | 4049 return constant_pool; |
| 4115 } | 4050 } |
| 4116 | 4051 |
| 4117 | 4052 |
| 4118 AllocationResult Heap::AllocateExtendedConstantPoolArray( | 4053 AllocationResult Heap::AllocateExtendedConstantPoolArray( |
| 4119 const ConstantPoolArray::NumberOfEntries& small, | 4054 const ConstantPoolArray::NumberOfEntries& small, |
| 4120 const ConstantPoolArray::NumberOfEntries& extended) { | 4055 const ConstantPoolArray::NumberOfEntries& extended) { |
| 4121 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); | 4056 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); |
| 4122 CHECK(extended.are_in_range(0, kMaxInt)); | 4057 CHECK(extended.are_in_range(0, kMaxInt)); |
| 4123 int size = ConstantPoolArray::SizeForExtended(small, extended); | 4058 int size = ConstantPoolArray::SizeForExtended(small, extended); |
| 4124 #ifndef V8_HOST_ARCH_64_BIT | 4059 #ifndef V8_HOST_ARCH_64_BIT |
| 4125 size += kPointerSize; | 4060 size += kPointerSize; |
| 4126 #endif | 4061 #endif |
| 4127 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); | 4062 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); |
| 4128 | 4063 |
| 4129 HeapObject* object; | 4064 HeapObject* object; |
| 4130 { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); | 4065 { |
| 4066 AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); |
| 4131 if (!allocation.To(&object)) return allocation; | 4067 if (!allocation.To(&object)) return allocation; |
| 4132 } | 4068 } |
| 4133 object = EnsureDoubleAligned(this, object, size); | 4069 object = EnsureDoubleAligned(this, object, size); |
| 4134 object->set_map_no_write_barrier(constant_pool_array_map()); | 4070 object->set_map_no_write_barrier(constant_pool_array_map()); |
| 4135 | 4071 |
| 4136 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); | 4072 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); |
| 4137 constant_pool->InitExtended(small, extended); | 4073 constant_pool->InitExtended(small, extended); |
| 4138 constant_pool->ClearPtrEntries(isolate()); | 4074 constant_pool->ClearPtrEntries(isolate()); |
| 4139 return constant_pool; | 4075 return constant_pool; |
| 4140 } | 4076 } |
| 4141 | 4077 |
| 4142 | 4078 |
| 4143 AllocationResult Heap::AllocateEmptyConstantPoolArray() { | 4079 AllocationResult Heap::AllocateEmptyConstantPoolArray() { |
| 4144 ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0); | 4080 ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0); |
| 4145 int size = ConstantPoolArray::SizeFor(small); | 4081 int size = ConstantPoolArray::SizeFor(small); |
| 4146 HeapObject* result; | 4082 HeapObject* result; |
| 4147 { AllocationResult allocation = | 4083 { |
| 4084 AllocationResult allocation = |
| 4148 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); | 4085 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); |
| 4149 if (!allocation.To(&result)) return allocation; | 4086 if (!allocation.To(&result)) return allocation; |
| 4150 } | 4087 } |
| 4151 result->set_map_no_write_barrier(constant_pool_array_map()); | 4088 result->set_map_no_write_barrier(constant_pool_array_map()); |
| 4152 ConstantPoolArray::cast(result)->Init(small); | 4089 ConstantPoolArray::cast(result)->Init(small); |
| 4153 return result; | 4090 return result; |
| 4154 } | 4091 } |
| 4155 | 4092 |
| 4156 | 4093 |
| 4157 AllocationResult Heap::AllocateSymbol() { | 4094 AllocationResult Heap::AllocateSymbol() { |
| 4158 // Statically ensure that it is safe to allocate symbols in paged spaces. | 4095 // Statically ensure that it is safe to allocate symbols in paged spaces. |
| 4159 STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); | 4096 STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); |
| 4160 | 4097 |
| 4161 HeapObject* result; | 4098 HeapObject* result; |
| 4162 AllocationResult allocation = | 4099 AllocationResult allocation = |
| 4163 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); | 4100 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); |
| 4164 if (!allocation.To(&result)) return allocation; | 4101 if (!allocation.To(&result)) return allocation; |
| 4165 | 4102 |
| 4166 result->set_map_no_write_barrier(symbol_map()); | 4103 result->set_map_no_write_barrier(symbol_map()); |
| 4167 | 4104 |
| 4168 // Generate a random hash value. | 4105 // Generate a random hash value. |
| 4169 int hash; | 4106 int hash; |
| 4170 int attempts = 0; | 4107 int attempts = 0; |
| 4171 do { | 4108 do { |
| 4172 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; | 4109 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; |
| 4173 attempts++; | 4110 attempts++; |
| 4174 } while (hash == 0 && attempts < 30); | 4111 } while (hash == 0 && attempts < 30); |
| 4175 if (hash == 0) hash = 1; // never return 0 | 4112 if (hash == 0) hash = 1; // never return 0 |
| 4176 | 4113 |
| 4177 Symbol::cast(result)->set_hash_field( | 4114 Symbol::cast(result) |
| 4178 Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); | 4115 ->set_hash_field(Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); |
| 4179 Symbol::cast(result)->set_name(undefined_value()); | 4116 Symbol::cast(result)->set_name(undefined_value()); |
| 4180 Symbol::cast(result)->set_flags(Smi::FromInt(0)); | 4117 Symbol::cast(result)->set_flags(Smi::FromInt(0)); |
| 4181 | 4118 |
| 4182 DCHECK(!Symbol::cast(result)->is_private()); | 4119 DCHECK(!Symbol::cast(result)->is_private()); |
| 4183 return result; | 4120 return result; |
| 4184 } | 4121 } |
| 4185 | 4122 |
| 4186 | 4123 |
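Editor's note: AllocateSymbol above keeps drawing random hash values until it gets a non-zero one, with a bounded number of attempts, because a zero hash field is reserved. A standalone sketch of the same retry pattern, assuming a plain std::mt19937 in place of the isolate's random number generator and an illustrative mask in place of Name::kHashBitMask:

    #include <cstdint>
    #include <random>

    constexpr uint32_t kHashBitMask = (1u << 30) - 1;  // stand-in for Name::kHashBitMask

    uint32_t NewSymbolHash(std::mt19937& rng) {
      uint32_t hash = 0;
      int attempts = 0;
      do {
        hash = rng() & kHashBitMask;  // keep only the hash bits
        attempts++;
      } while (hash == 0 && attempts < 30);
      if (hash == 0) hash = 1;  // give up on randomness rather than return 0
      return hash;
    }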
| 4187 AllocationResult Heap::AllocateStruct(InstanceType type) { | 4124 AllocationResult Heap::AllocateStruct(InstanceType type) { |
| 4188 Map* map; | 4125 Map* map; |
| 4189 switch (type) { | 4126 switch (type) { |
| 4190 #define MAKE_CASE(NAME, Name, name) \ | 4127 #define MAKE_CASE(NAME, Name, name) \ |
| 4191 case NAME##_TYPE: map = name##_map(); break; | 4128 case NAME##_TYPE: \ |
| 4192 STRUCT_LIST(MAKE_CASE) | 4129 map = name##_map(); \ |
| 4130 break; |
| 4131 STRUCT_LIST(MAKE_CASE) |
| 4193 #undef MAKE_CASE | 4132 #undef MAKE_CASE |
| 4194 default: | 4133 default: |
| 4195 UNREACHABLE(); | 4134 UNREACHABLE(); |
| 4196 return exception(); | 4135 return exception(); |
| 4197 } | 4136 } |
| 4198 int size = map->instance_size(); | 4137 int size = map->instance_size(); |
| 4199 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); | 4138 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); |
| 4200 Struct* result; | 4139 Struct* result; |
| 4201 { AllocationResult allocation = Allocate(map, space); | 4140 { |
| 4141 AllocationResult allocation = Allocate(map, space); |
| 4202 if (!allocation.To(&result)) return allocation; | 4142 if (!allocation.To(&result)) return allocation; |
| 4203 } | 4143 } |
| 4204 result->InitializeBody(size); | 4144 result->InitializeBody(size); |
| 4205 return result; | 4145 return result; |
| 4206 } | 4146 } |
| 4207 | 4147 |
| 4208 | 4148 |
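Editor's note: the MAKE_CASE/STRUCT_LIST expansion in AllocateStruct is an X-macro: one list macro is instantiated with a per-entry macro to generate all the switch cases. A small self-contained sketch of the pattern with an illustrative list (not V8's STRUCT_LIST):

    // SHAPE_LIST plays the role of STRUCT_LIST: each V(...) entry supplies the
    // upper-case, CamelCase and lower-case spellings of one item.
    #define SHAPE_LIST(V)       \
      V(CIRCLE, Circle, circle) \
      V(SQUARE, Square, square)

    enum ShapeType { CIRCLE_TYPE, SQUARE_TYPE };

    const char* ShapeName(ShapeType type) {
      switch (type) {
    #define MAKE_CASE(NAME, Name, name) \
      case NAME##_TYPE:                 \
        return #name;
        SHAPE_LIST(MAKE_CASE)
    #undef MAKE_CASE
      }
      return "unknown";
    }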
| 4209 bool Heap::IsHeapIterable() { | 4149 bool Heap::IsHeapIterable() { |
| 4210 // TODO(hpayer): This function is not correct. Allocation folding in old | 4150 // TODO(hpayer): This function is not correct. Allocation folding in old |
| 4211 // space breaks the iterability. | 4151 // space breaks the iterability. |
| (...skipping 45 matching lines...) |
| 4257 // Hints greater than this value indicate that | 4197 // Hints greater than this value indicate that |
| 4258 // the embedder is requesting a lot of GC work. | 4198 // the embedder is requesting a lot of GC work. |
| 4259 const int kMaxHint = 1000; | 4199 const int kMaxHint = 1000; |
| 4260 const int kMinHintForIncrementalMarking = 10; | 4200 const int kMinHintForIncrementalMarking = 10; |
| 4261 // Minimal hint that allows a full GC. | 4201 // Minimal hint that allows a full GC. |
| 4262 const int kMinHintForFullGC = 100; | 4202 const int kMinHintForFullGC = 100; |
| 4263 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; | 4203 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; |
| 4264 // The size factor is in range [5..250]. The numbers here are chosen from | 4204 // The size factor is in range [5..250]. The numbers here are chosen from |
| 4265 // experiments. If you change them, make sure to test with | 4205 // experiments. If you change them, make sure to test with |
| 4266 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* | 4206 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* |
| 4267 intptr_t step_size = | 4207 intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold; |
| 4268 size_factor * IncrementalMarking::kAllocatedThreshold; | |
| 4269 | 4208 |
| 4270 isolate()->counters()->gc_idle_time_allotted_in_ms()->AddSample(hint); | 4209 isolate()->counters()->gc_idle_time_allotted_in_ms()->AddSample(hint); |
| 4271 HistogramTimerScope idle_notification_scope( | 4210 HistogramTimerScope idle_notification_scope( |
| 4272 isolate_->counters()->gc_idle_notification()); | 4211 isolate_->counters()->gc_idle_notification()); |
| 4273 | 4212 |
| 4274 if (contexts_disposed_ > 0) { | 4213 if (contexts_disposed_ > 0) { |
| 4275 contexts_disposed_ = 0; | 4214 contexts_disposed_ = 0; |
| 4276 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); | 4215 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); |
| 4277 if (hint >= mark_sweep_time && !FLAG_expose_gc && | 4216 if (hint >= mark_sweep_time && !FLAG_expose_gc && |
| 4278 incremental_marking()->IsStopped()) { | 4217 incremental_marking()->IsStopped()) { |
| (...skipping 20 matching lines...) |
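Editor's note: the step-size computation a few lines above clamps the embedder's idle-time hint into [20, 1000] and divides by 4, which is where the quoted [5..250] range for size_factor comes from. A worked example under those constants; the threshold value below is an illustrative stand-in, not necessarily IncrementalMarking::kAllocatedThreshold:

    #include <algorithm>
    #include <cstdint>

    intptr_t IdleStepSize(int hint) {
      const int kMaxHint = 1000;
      const intptr_t kAllocatedThreshold = 65536;  // illustrative stand-in
      intptr_t size_factor = std::min(std::max(hint, 20), kMaxHint) / 4;
      // hint = 40   -> size_factor = 10  -> step = 10 * 65536 bytes
      // hint = 5    -> size_factor = 5   (clamped at the low end)
      // hint = 4000 -> size_factor = 250 (clamped at kMaxHint)
      return size_factor * kAllocatedThreshold;
    }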
| 4299 // Use mark-sweep-compact events to count incremental GCs in a round. | 4238 // Use mark-sweep-compact events to count incremental GCs in a round. |
| 4300 | 4239 |
| 4301 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) { | 4240 if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) { |
| 4302 if (EnoughGarbageSinceLastIdleRound()) { | 4241 if (EnoughGarbageSinceLastIdleRound()) { |
| 4303 StartIdleRound(); | 4242 StartIdleRound(); |
| 4304 } else { | 4243 } else { |
| 4305 return true; | 4244 return true; |
| 4306 } | 4245 } |
| 4307 } | 4246 } |
| 4308 | 4247 |
| 4309 int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound - | 4248 int remaining_mark_sweeps = |
| 4310 mark_sweeps_since_idle_round_started_; | 4249 kMaxMarkSweepsInIdleRound - mark_sweeps_since_idle_round_started_; |
| 4311 | 4250 |
| 4312 if (incremental_marking()->IsStopped()) { | 4251 if (incremental_marking()->IsStopped()) { |
| 4313 // If there are no more than two GCs left in this idle round and we are | 4252 // If there are no more than two GCs left in this idle round and we are |
| 4314 // allowed to do a full GC, then make those GCs full in order to compact | 4253 // allowed to do a full GC, then make those GCs full in order to compact |
| 4315 // the code space. | 4254 // the code space. |
| 4316 // TODO(ulan): Once we enable code compaction for incremental marking, | 4255 // TODO(ulan): Once we enable code compaction for incremental marking, |
| 4317 // we can get rid of this special case and always start incremental marking. | 4256 // we can get rid of this special case and always start incremental marking. |
| 4318 if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) { | 4257 if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) { |
| 4319 CollectAllGarbage(kReduceMemoryFootprintMask, | 4258 CollectAllGarbage(kReduceMemoryFootprintMask, |
| 4320 "idle notification: finalize idle round"); | 4259 "idle notification: finalize idle round"); |
| (...skipping 44 matching lines...) |
| 4365 lo_space_->CollectCodeStatistics(); | 4304 lo_space_->CollectCodeStatistics(); |
| 4366 PagedSpace::ReportCodeStatistics(isolate()); | 4305 PagedSpace::ReportCodeStatistics(isolate()); |
| 4367 } | 4306 } |
| 4368 | 4307 |
| 4369 | 4308 |
| 4370 // This function expects that NewSpace's allocated objects histogram is | 4309 // This function expects that NewSpace's allocated objects histogram is |
| 4371 // populated (via a call to CollectStatistics or else as a side effect of a | 4310 // populated (via a call to CollectStatistics or else as a side effect of a |
| 4372 // just-completed scavenge collection). | 4311 // just-completed scavenge collection). |
| 4373 void Heap::ReportHeapStatistics(const char* title) { | 4312 void Heap::ReportHeapStatistics(const char* title) { |
| 4374 USE(title); | 4313 USE(title); |
| 4375 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", | 4314 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", title, |
| 4376 title, gc_count_); | 4315 gc_count_); |
| 4377 PrintF("old_generation_allocation_limit_ %" V8_PTR_PREFIX "d\n", | 4316 PrintF("old_generation_allocation_limit_ %" V8_PTR_PREFIX "d\n", |
| 4378 old_generation_allocation_limit_); | 4317 old_generation_allocation_limit_); |
| 4379 | 4318 |
| 4380 PrintF("\n"); | 4319 PrintF("\n"); |
| 4381 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); | 4320 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); |
| 4382 isolate_->global_handles()->PrintStats(); | 4321 isolate_->global_handles()->PrintStats(); |
| 4383 PrintF("\n"); | 4322 PrintF("\n"); |
| 4384 | 4323 |
| 4385 PrintF("Heap statistics : "); | 4324 PrintF("Heap statistics : "); |
| 4386 isolate_->memory_allocator()->ReportStatistics(); | 4325 isolate_->memory_allocator()->ReportStatistics(); |
| (...skipping 11 matching lines...) |
| 4398 cell_space_->ReportStatistics(); | 4337 cell_space_->ReportStatistics(); |
| 4399 PrintF("PropertyCell space : "); | 4338 PrintF("PropertyCell space : "); |
| 4400 property_cell_space_->ReportStatistics(); | 4339 property_cell_space_->ReportStatistics(); |
| 4401 PrintF("Large object space : "); | 4340 PrintF("Large object space : "); |
| 4402 lo_space_->ReportStatistics(); | 4341 lo_space_->ReportStatistics(); |
| 4403 PrintF(">>>>>> ========================================= >>>>>>\n"); | 4342 PrintF(">>>>>> ========================================= >>>>>>\n"); |
| 4404 } | 4343 } |
| 4405 | 4344 |
| 4406 #endif // DEBUG | 4345 #endif // DEBUG |
| 4407 | 4346 |
| 4408 bool Heap::Contains(HeapObject* value) { | 4347 bool Heap::Contains(HeapObject* value) { return Contains(value->address()); } |
| 4409 return Contains(value->address()); | |
| 4410 } | |
| 4411 | 4348 |
| 4412 | 4349 |
| 4413 bool Heap::Contains(Address addr) { | 4350 bool Heap::Contains(Address addr) { |
| 4414 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false; | 4351 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false; |
| 4415 return HasBeenSetUp() && | 4352 return HasBeenSetUp() && |
| 4416 (new_space_.ToSpaceContains(addr) || | 4353 (new_space_.ToSpaceContains(addr) || |
| 4417 old_pointer_space_->Contains(addr) || | 4354 old_pointer_space_->Contains(addr) || |
| 4418 old_data_space_->Contains(addr) || | 4355 old_data_space_->Contains(addr) || code_space_->Contains(addr) || |
| 4419 code_space_->Contains(addr) || | 4356 map_space_->Contains(addr) || cell_space_->Contains(addr) || |
| 4420 map_space_->Contains(addr) || | 4357 property_cell_space_->Contains(addr) || |
| 4421 cell_space_->Contains(addr) || | 4358 lo_space_->SlowContains(addr)); |
| 4422 property_cell_space_->Contains(addr) || | |
| 4423 lo_space_->SlowContains(addr)); | |
| 4424 } | 4359 } |
| 4425 | 4360 |
| 4426 | 4361 |
| 4427 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { | 4362 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { |
| 4428 return InSpace(value->address(), space); | 4363 return InSpace(value->address(), space); |
| 4429 } | 4364 } |
| 4430 | 4365 |
| 4431 | 4366 |
| 4432 bool Heap::InSpace(Address addr, AllocationSpace space) { | 4367 bool Heap::InSpace(Address addr, AllocationSpace space) { |
| 4433 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false; | 4368 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false; |
| (...skipping 57 matching lines...) |
| 4491 } | 4426 } |
| 4492 #endif | 4427 #endif |
| 4493 | 4428 |
| 4494 | 4429 |
| 4495 void Heap::ZapFromSpace() { | 4430 void Heap::ZapFromSpace() { |
| 4496 NewSpacePageIterator it(new_space_.FromSpaceStart(), | 4431 NewSpacePageIterator it(new_space_.FromSpaceStart(), |
| 4497 new_space_.FromSpaceEnd()); | 4432 new_space_.FromSpaceEnd()); |
| 4498 while (it.has_next()) { | 4433 while (it.has_next()) { |
| 4499 NewSpacePage* page = it.next(); | 4434 NewSpacePage* page = it.next(); |
| 4500 for (Address cursor = page->area_start(), limit = page->area_end(); | 4435 for (Address cursor = page->area_start(), limit = page->area_end(); |
| 4501 cursor < limit; | 4436 cursor < limit; cursor += kPointerSize) { |
| 4502 cursor += kPointerSize) { | |
| 4503 Memory::Address_at(cursor) = kFromSpaceZapValue; | 4437 Memory::Address_at(cursor) = kFromSpaceZapValue; |
| 4504 } | 4438 } |
| 4505 } | 4439 } |
| 4506 } | 4440 } |
| 4507 | 4441 |
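Editor's note: ZapFromSpace overwrites every pointer-sized slot of the evacuated from-space with a recognizable marker so stale reads show up clearly in a debugger. A minimal sketch of the idea; the zap constant below is only a stand-in for kFromSpaceZapValue:

    #include <cstdint>

    // Fill a dead region with a distinctive pattern, one pointer-sized slot at
    // a time, mirroring the cursor loop above.
    void ZapRegion(uintptr_t* start, uintptr_t* end) {
      const uintptr_t kZapValue = 0xdeadbeef;  // stand-in for kFromSpaceZapValue
      for (uintptr_t* cursor = start; cursor < end; ++cursor) {
        *cursor = kZapValue;
      }
    }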
| 4508 | 4442 |
| 4509 void Heap::IterateAndMarkPointersToFromSpace(Address start, | 4443 void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end, |
| 4510 Address end, | |
| 4511 ObjectSlotCallback callback) { | 4444 ObjectSlotCallback callback) { |
| 4512 Address slot_address = start; | 4445 Address slot_address = start; |
| 4513 | 4446 |
| 4514 // We are not collecting slots on new space objects during mutation | 4447 // We are not collecting slots on new space objects during mutation |
| 4515 // thus we have to scan for pointers to evacuation candidates when we | 4448 // thus we have to scan for pointers to evacuation candidates when we |
| 4516 // promote objects. But we should not record any slots in non-black | 4449 // promote objects. But we should not record any slots in non-black |
| 4517 // objects. Grey object's slots would be rescanned. | 4450 // objects. Grey object's slots would be rescanned. |
| 4518 // A white object might not survive until the end of the collection, so | 4451 // A white object might not survive until the end of the collection, so |
| 4519 // it would be a violation of the invariant to record its slots. | 4452 // it would be a violation of the invariant to record its slots. |
| 4520 bool record_slots = false; | 4453 bool record_slots = false; |
| (...skipping 36 matching lines...) |
| 4557 | 4490 |
| 4558 | 4491 |
| 4559 bool IsAMapPointerAddress(Object** addr) { | 4492 bool IsAMapPointerAddress(Object** addr) { |
| 4560 uintptr_t a = reinterpret_cast<uintptr_t>(addr); | 4493 uintptr_t a = reinterpret_cast<uintptr_t>(addr); |
| 4561 int mod = a % Map::kSize; | 4494 int mod = a % Map::kSize; |
| 4562 return mod >= Map::kPointerFieldsBeginOffset && | 4495 return mod >= Map::kPointerFieldsBeginOffset && |
| 4563 mod < Map::kPointerFieldsEndOffset; | 4496 mod < Map::kPointerFieldsEndOffset; |
| 4564 } | 4497 } |
| 4565 | 4498 |
| 4566 | 4499 |
| 4567 bool EverythingsAPointer(Object** addr) { | 4500 bool EverythingsAPointer(Object** addr) { return true; } |
| 4568 return true; | |
| 4569 } | |
| 4570 | 4501 |
| 4571 | 4502 |
| 4572 static void CheckStoreBuffer(Heap* heap, | 4503 static void CheckStoreBuffer(Heap* heap, Object** current, Object** limit, |
| 4573 Object** current, | |
| 4574 Object** limit, | |
| 4575 Object**** store_buffer_position, | 4504 Object**** store_buffer_position, |
| 4576 Object*** store_buffer_top, | 4505 Object*** store_buffer_top, |
| 4577 CheckStoreBufferFilter filter, | 4506 CheckStoreBufferFilter filter, |
| 4578 Address special_garbage_start, | 4507 Address special_garbage_start, |
| 4579 Address special_garbage_end) { | 4508 Address special_garbage_end) { |
| 4580 Map* free_space_map = heap->free_space_map(); | 4509 Map* free_space_map = heap->free_space_map(); |
| 4581 for ( ; current < limit; current++) { | 4510 for (; current < limit; current++) { |
| 4582 Object* o = *current; | 4511 Object* o = *current; |
| 4583 Address current_address = reinterpret_cast<Address>(current); | 4512 Address current_address = reinterpret_cast<Address>(current); |
| 4584 // Skip free space. | 4513 // Skip free space. |
| 4585 if (o == free_space_map) { | 4514 if (o == free_space_map) { |
| 4586 Address current_address = reinterpret_cast<Address>(current); | 4515 Address current_address = reinterpret_cast<Address>(current); |
| 4587 FreeSpace* free_space = | 4516 FreeSpace* free_space = |
| 4588 FreeSpace::cast(HeapObject::FromAddress(current_address)); | 4517 FreeSpace::cast(HeapObject::FromAddress(current_address)); |
| 4589 int skip = free_space->Size(); | 4518 int skip = free_space->Size(); |
| 4590 DCHECK(current_address + skip <= reinterpret_cast<Address>(limit)); | 4519 DCHECK(current_address + skip <= reinterpret_cast<Address>(limit)); |
| 4591 DCHECK(skip > 0); | 4520 DCHECK(skip > 0); |
| (...skipping 44 matching lines...) |
| 4636 while (pages.has_next()) { | 4565 while (pages.has_next()) { |
| 4637 Page* page = pages.next(); | 4566 Page* page = pages.next(); |
| 4638 Object** current = reinterpret_cast<Object**>(page->area_start()); | 4567 Object** current = reinterpret_cast<Object**>(page->area_start()); |
| 4639 | 4568 |
| 4640 Address end = page->area_end(); | 4569 Address end = page->area_end(); |
| 4641 | 4570 |
| 4642 Object*** store_buffer_position = store_buffer()->Start(); | 4571 Object*** store_buffer_position = store_buffer()->Start(); |
| 4643 Object*** store_buffer_top = store_buffer()->Top(); | 4572 Object*** store_buffer_top = store_buffer()->Top(); |
| 4644 | 4573 |
| 4645 Object** limit = reinterpret_cast<Object**>(end); | 4574 Object** limit = reinterpret_cast<Object**>(end); |
| 4646 CheckStoreBuffer(this, | 4575 CheckStoreBuffer(this, current, limit, &store_buffer_position, |
| 4647 current, | 4576 store_buffer_top, &EverythingsAPointer, space->top(), |
| 4648 limit, | |
| 4649 &store_buffer_position, | |
| 4650 store_buffer_top, | |
| 4651 &EverythingsAPointer, | |
| 4652 space->top(), | |
| 4653 space->limit()); | 4577 space->limit()); |
| 4654 } | 4578 } |
| 4655 } | 4579 } |
| 4656 | 4580 |
| 4657 | 4581 |
| 4658 void Heap::MapSpaceCheckStoreBuffer() { | 4582 void Heap::MapSpaceCheckStoreBuffer() { |
| 4659 MapSpace* space = map_space(); | 4583 MapSpace* space = map_space(); |
| 4660 PageIterator pages(space); | 4584 PageIterator pages(space); |
| 4661 | 4585 |
| 4662 store_buffer()->SortUniq(); | 4586 store_buffer()->SortUniq(); |
| 4663 | 4587 |
| 4664 while (pages.has_next()) { | 4588 while (pages.has_next()) { |
| 4665 Page* page = pages.next(); | 4589 Page* page = pages.next(); |
| 4666 Object** current = reinterpret_cast<Object**>(page->area_start()); | 4590 Object** current = reinterpret_cast<Object**>(page->area_start()); |
| 4667 | 4591 |
| 4668 Address end = page->area_end(); | 4592 Address end = page->area_end(); |
| 4669 | 4593 |
| 4670 Object*** store_buffer_position = store_buffer()->Start(); | 4594 Object*** store_buffer_position = store_buffer()->Start(); |
| 4671 Object*** store_buffer_top = store_buffer()->Top(); | 4595 Object*** store_buffer_top = store_buffer()->Top(); |
| 4672 | 4596 |
| 4673 Object** limit = reinterpret_cast<Object**>(end); | 4597 Object** limit = reinterpret_cast<Object**>(end); |
| 4674 CheckStoreBuffer(this, | 4598 CheckStoreBuffer(this, current, limit, &store_buffer_position, |
| 4675 current, | 4599 store_buffer_top, &IsAMapPointerAddress, space->top(), |
| 4676 limit, | |
| 4677 &store_buffer_position, | |
| 4678 store_buffer_top, | |
| 4679 &IsAMapPointerAddress, | |
| 4680 space->top(), | |
| 4681 space->limit()); | 4600 space->limit()); |
| 4682 } | 4601 } |
| 4683 } | 4602 } |
| 4684 | 4603 |
| 4685 | 4604 |
| 4686 void Heap::LargeObjectSpaceCheckStoreBuffer() { | 4605 void Heap::LargeObjectSpaceCheckStoreBuffer() { |
| 4687 LargeObjectIterator it(lo_space()); | 4606 LargeObjectIterator it(lo_space()); |
| 4688 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 4607 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 4689 // We only have code, sequential strings, or fixed arrays in large | 4608 // We only have code, sequential strings, or fixed arrays in large |
| 4690 // object space, and only fixed arrays can possibly contain pointers to | 4609 // object space, and only fixed arrays can possibly contain pointers to |
| 4691 // the young generation. | 4610 // the young generation. |
| 4692 if (object->IsFixedArray()) { | 4611 if (object->IsFixedArray()) { |
| 4693 Object*** store_buffer_position = store_buffer()->Start(); | 4612 Object*** store_buffer_position = store_buffer()->Start(); |
| 4694 Object*** store_buffer_top = store_buffer()->Top(); | 4613 Object*** store_buffer_top = store_buffer()->Top(); |
| 4695 Object** current = reinterpret_cast<Object**>(object->address()); | 4614 Object** current = reinterpret_cast<Object**>(object->address()); |
| 4696 Object** limit = | 4615 Object** limit = |
| 4697 reinterpret_cast<Object**>(object->address() + object->Size()); | 4616 reinterpret_cast<Object**>(object->address() + object->Size()); |
| 4698 CheckStoreBuffer(this, | 4617 CheckStoreBuffer(this, current, limit, &store_buffer_position, |
| 4699 current, | 4618 store_buffer_top, &EverythingsAPointer, NULL, NULL); |
| 4700 limit, | |
| 4701 &store_buffer_position, | |
| 4702 store_buffer_top, | |
| 4703 &EverythingsAPointer, | |
| 4704 NULL, | |
| 4705 NULL); | |
| 4706 } | 4619 } |
| 4707 } | 4620 } |
| 4708 } | 4621 } |
| 4709 #endif | 4622 #endif |
| 4710 | 4623 |
| 4711 | 4624 |
| 4712 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 4625 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
| 4713 IterateStrongRoots(v, mode); | 4626 IterateStrongRoots(v, mode); |
| 4714 IterateWeakRoots(v, mode); | 4627 IterateWeakRoots(v, mode); |
| 4715 } | 4628 } |
| 4716 | 4629 |
| 4717 | 4630 |
| 4718 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { | 4631 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { |
| 4719 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex])); | 4632 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex])); |
| 4720 v->Synchronize(VisitorSynchronization::kStringTable); | 4633 v->Synchronize(VisitorSynchronization::kStringTable); |
| 4721 if (mode != VISIT_ALL_IN_SCAVENGE && | 4634 if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
| 4722 mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { | |
| 4723 // Scavenge collections have special processing for this. | 4635 // Scavenge collections have special processing for this. |
| 4724 external_string_table_.Iterate(v); | 4636 external_string_table_.Iterate(v); |
| 4725 } | 4637 } |
| 4726 v->Synchronize(VisitorSynchronization::kExternalStringsTable); | 4638 v->Synchronize(VisitorSynchronization::kExternalStringsTable); |
| 4727 } | 4639 } |
| 4728 | 4640 |
| 4729 | 4641 |
| 4730 void Heap::IterateSmiRoots(ObjectVisitor* v) { | 4642 void Heap::IterateSmiRoots(ObjectVisitor* v) { |
| 4731 // Acquire execution access since we are going to read stack limit values. | 4643 // Acquire execution access since we are going to read stack limit values. |
| 4732 ExecutionAccess access(isolate()); | 4644 ExecutionAccess access(isolate()); |
| (...skipping 75 matching lines...) |
| 4808 // We don't do a v->Synchronize call here, because in debug mode that will | 4720 // We don't do a v->Synchronize call here, because in debug mode that will |
| 4809 // output a flag to the snapshot. However at this point the serializer and | 4721 // output a flag to the snapshot. However at this point the serializer and |
| 4810 // deserializer are deliberately a little unsynchronized (see above) so the | 4722 // deserializer are deliberately a little unsynchronized (see above) so the |
| 4811 // checking of the sync flag in the snapshot would fail. | 4723 // checking of the sync flag in the snapshot would fail. |
| 4812 } | 4724 } |
| 4813 | 4725 |
| 4814 | 4726 |
| 4815 // TODO(1236194): Since the heap size is configurable on the command line | 4727 // TODO(1236194): Since the heap size is configurable on the command line |
| 4816 // and through the API, we should gracefully handle the case that the heap | 4728 // and through the API, we should gracefully handle the case that the heap |
| 4817 // size is not big enough to fit all the initial objects. | 4729 // size is not big enough to fit all the initial objects. |
| 4818 bool Heap::ConfigureHeap(int max_semi_space_size, | 4730 bool Heap::ConfigureHeap(int max_semi_space_size, int max_old_space_size, |
| 4819 int max_old_space_size, | 4731 int max_executable_size, size_t code_range_size) { |
| 4820 int max_executable_size, | |
| 4821 size_t code_range_size) { | |
| 4822 if (HasBeenSetUp()) return false; | 4732 if (HasBeenSetUp()) return false; |
| 4823 | 4733 |
| 4824 // Overwrite default configuration. | 4734 // Overwrite default configuration. |
| 4825 if (max_semi_space_size > 0) { | 4735 if (max_semi_space_size > 0) { |
| 4826 max_semi_space_size_ = max_semi_space_size * MB; | 4736 max_semi_space_size_ = max_semi_space_size * MB; |
| 4827 } | 4737 } |
| 4828 if (max_old_space_size > 0) { | 4738 if (max_old_space_size > 0) { |
| 4829 max_old_generation_size_ = max_old_space_size * MB; | 4739 max_old_generation_size_ = max_old_space_size * MB; |
| 4830 } | 4740 } |
| 4831 if (max_executable_size > 0) { | 4741 if (max_executable_size > 0) { |
| (...skipping 44 matching lines...) |
| 4876 // The new space size must be a power of two to support single-bit testing | 4786 // The new space size must be a power of two to support single-bit testing |
| 4877 // for containment. | 4787 // for containment. |
| 4878 max_semi_space_size_ = RoundUpToPowerOf2(max_semi_space_size_); | 4788 max_semi_space_size_ = RoundUpToPowerOf2(max_semi_space_size_); |
| 4879 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_); | 4789 reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_); |
| 4880 | 4790 |
| 4881 if (FLAG_min_semi_space_size > 0) { | 4791 if (FLAG_min_semi_space_size > 0) { |
| 4882 int initial_semispace_size = FLAG_min_semi_space_size * MB; | 4792 int initial_semispace_size = FLAG_min_semi_space_size * MB; |
| 4883 if (initial_semispace_size > max_semi_space_size_) { | 4793 if (initial_semispace_size > max_semi_space_size_) { |
| 4884 initial_semispace_size_ = max_semi_space_size_; | 4794 initial_semispace_size_ = max_semi_space_size_; |
| 4885 if (FLAG_trace_gc) { | 4795 if (FLAG_trace_gc) { |
| 4886 PrintPID("Min semi-space size cannot be more than the maximum" | 4796 PrintPID( |
| 4887 "semi-space size of %d MB\n", max_semi_space_size_); | 4797 "Min semi-space size cannot be more than the maximum" |
| 4798 "semi-space size of %d MB\n", |
| 4799 max_semi_space_size_); |
| 4888 } | 4800 } |
| 4889 } else { | 4801 } else { |
| 4890 initial_semispace_size_ = initial_semispace_size; | 4802 initial_semispace_size_ = initial_semispace_size; |
| 4891 } | 4803 } |
| 4892 } | 4804 } |
| 4893 | 4805 |
| 4894 initial_semispace_size_ = Min(initial_semispace_size_, max_semi_space_size_); | 4806 initial_semispace_size_ = Min(initial_semispace_size_, max_semi_space_size_); |
| 4895 | 4807 |
| 4896 // The old generation is paged and needs at least one page for each space. | 4808 // The old generation is paged and needs at least one page for each space. |
| 4897 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; | 4809 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; |
| 4898 max_old_generation_size_ = | 4810 max_old_generation_size_ = |
| 4899 Max(static_cast<intptr_t>(paged_space_count * Page::kPageSize), | 4811 Max(static_cast<intptr_t>(paged_space_count * Page::kPageSize), |
| 4900 max_old_generation_size_); | 4812 max_old_generation_size_); |
| 4901 | 4813 |
| 4902 // We rely on being able to allocate new arrays in paged spaces. | 4814 // We rely on being able to allocate new arrays in paged spaces. |
| 4903 DCHECK(Page::kMaxRegularHeapObjectSize >= | 4815 DCHECK(Page::kMaxRegularHeapObjectSize >= |
| 4904 (JSArray::kSize + | 4816 (JSArray::kSize + |
| 4905 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + | 4817 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + |
| 4906 AllocationMemento::kSize)); | 4818 AllocationMemento::kSize)); |
| 4907 | 4819 |
| 4908 code_range_size_ = code_range_size * MB; | 4820 code_range_size_ = code_range_size * MB; |
| 4909 | 4821 |
| 4910 configured_ = true; | 4822 configured_ = true; |
| 4911 return true; | 4823 return true; |
| 4912 } | 4824 } |
| 4913 | 4825 |
| 4914 | 4826 |
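Editor's note: ConfigureHeap rounds the semispace sizes up to a power of two because, as the comment above notes, containment in a power-of-two-sized, suitably aligned space can be tested with a single mask-and-compare rather than two range checks. A sketch of that test under an assumed alignment; the names and sizes are illustrative:

    #include <cstdint>

    constexpr uintptr_t kSpaceSize = uintptr_t{1} << 22;  // e.g. a 4 MB semispace
    constexpr uintptr_t kSpaceMask = ~(kSpaceSize - 1);   // clears the offset bits

    // Assumes the space starts at a kSpaceSize-aligned base address.
    bool InSpace(uintptr_t base, uintptr_t addr) {
      return (addr & kSpaceMask) == base;
    }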
| 4915 bool Heap::ConfigureHeapDefault() { | 4827 bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); } |
| 4916 return ConfigureHeap(0, 0, 0, 0); | |
| 4917 } | |
| 4918 | 4828 |
| 4919 | 4829 |
| 4920 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { | 4830 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { |
| 4921 *stats->start_marker = HeapStats::kStartMarker; | 4831 *stats->start_marker = HeapStats::kStartMarker; |
| 4922 *stats->end_marker = HeapStats::kEndMarker; | 4832 *stats->end_marker = HeapStats::kEndMarker; |
| 4923 *stats->new_space_size = new_space_.SizeAsInt(); | 4833 *stats->new_space_size = new_space_.SizeAsInt(); |
| 4924 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); | 4834 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); |
| 4925 *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects(); | 4835 *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects(); |
| 4926 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity(); | 4836 *stats->old_pointer_space_capacity = old_pointer_space_->Capacity(); |
| 4927 *stats->old_data_space_size = old_data_space_->SizeOfObjects(); | 4837 *stats->old_data_space_size = old_data_space_->SizeOfObjects(); |
| 4928 *stats->old_data_space_capacity = old_data_space_->Capacity(); | 4838 *stats->old_data_space_capacity = old_data_space_->Capacity(); |
| 4929 *stats->code_space_size = code_space_->SizeOfObjects(); | 4839 *stats->code_space_size = code_space_->SizeOfObjects(); |
| 4930 *stats->code_space_capacity = code_space_->Capacity(); | 4840 *stats->code_space_capacity = code_space_->Capacity(); |
| 4931 *stats->map_space_size = map_space_->SizeOfObjects(); | 4841 *stats->map_space_size = map_space_->SizeOfObjects(); |
| 4932 *stats->map_space_capacity = map_space_->Capacity(); | 4842 *stats->map_space_capacity = map_space_->Capacity(); |
| 4933 *stats->cell_space_size = cell_space_->SizeOfObjects(); | 4843 *stats->cell_space_size = cell_space_->SizeOfObjects(); |
| 4934 *stats->cell_space_capacity = cell_space_->Capacity(); | 4844 *stats->cell_space_capacity = cell_space_->Capacity(); |
| 4935 *stats->property_cell_space_size = property_cell_space_->SizeOfObjects(); | 4845 *stats->property_cell_space_size = property_cell_space_->SizeOfObjects(); |
| 4936 *stats->property_cell_space_capacity = property_cell_space_->Capacity(); | 4846 *stats->property_cell_space_capacity = property_cell_space_->Capacity(); |
| 4937 *stats->lo_space_size = lo_space_->Size(); | 4847 *stats->lo_space_size = lo_space_->Size(); |
| 4938 isolate_->global_handles()->RecordStats(stats); | 4848 isolate_->global_handles()->RecordStats(stats); |
| 4939 *stats->memory_allocator_size = isolate()->memory_allocator()->Size(); | 4849 *stats->memory_allocator_size = isolate()->memory_allocator()->Size(); |
| 4940 *stats->memory_allocator_capacity = | 4850 *stats->memory_allocator_capacity = |
| 4941 isolate()->memory_allocator()->Size() + | 4851 isolate()->memory_allocator()->Size() + |
| 4942 isolate()->memory_allocator()->Available(); | 4852 isolate()->memory_allocator()->Available(); |
| 4943 *stats->os_error = base::OS::GetLastError(); | 4853 *stats->os_error = base::OS::GetLastError(); |
| 4944 isolate()->memory_allocator()->Available(); | 4854 isolate()->memory_allocator()->Available(); |
| 4945 if (take_snapshot) { | 4855 if (take_snapshot) { |
| 4946 HeapIterator iterator(this); | 4856 HeapIterator iterator(this); |
| 4947 for (HeapObject* obj = iterator.next(); | 4857 for (HeapObject* obj = iterator.next(); obj != NULL; |
| 4948 obj != NULL; | |
| 4949 obj = iterator.next()) { | 4858 obj = iterator.next()) { |
| 4950 InstanceType type = obj->map()->instance_type(); | 4859 InstanceType type = obj->map()->instance_type(); |
| 4951 DCHECK(0 <= type && type <= LAST_TYPE); | 4860 DCHECK(0 <= type && type <= LAST_TYPE); |
| 4952 stats->objects_per_type[type]++; | 4861 stats->objects_per_type[type]++; |
| 4953 stats->size_per_type[type] += obj->Size(); | 4862 stats->size_per_type[type] += obj->Size(); |
| 4954 } | 4863 } |
| 4955 } | 4864 } |
| 4956 } | 4865 } |
| 4957 | 4866 |
| 4958 | 4867 |
| 4959 intptr_t Heap::PromotedSpaceSizeOfObjects() { | 4868 intptr_t Heap::PromotedSpaceSizeOfObjects() { |
| 4960 return old_pointer_space_->SizeOfObjects() | 4869 return old_pointer_space_->SizeOfObjects() + |
| 4961 + old_data_space_->SizeOfObjects() | 4870 old_data_space_->SizeOfObjects() + code_space_->SizeOfObjects() + |
| 4962 + code_space_->SizeOfObjects() | 4871 map_space_->SizeOfObjects() + cell_space_->SizeOfObjects() + |
| 4963 + map_space_->SizeOfObjects() | 4872 property_cell_space_->SizeOfObjects() + lo_space_->SizeOfObjects(); |
| 4964 + cell_space_->SizeOfObjects() | |
| 4965 + property_cell_space_->SizeOfObjects() | |
| 4966 + lo_space_->SizeOfObjects(); | |
| 4967 } | 4873 } |
| 4968 | 4874 |
| 4969 | 4875 |
| 4970 int64_t Heap::PromotedExternalMemorySize() { | 4876 int64_t Heap::PromotedExternalMemorySize() { |
| 4971 if (amount_of_external_allocated_memory_ | 4877 if (amount_of_external_allocated_memory_ <= |
| 4972 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; | 4878 amount_of_external_allocated_memory_at_last_global_gc_) |
| 4973 return amount_of_external_allocated_memory_ | 4879 return 0; |
| 4974 - amount_of_external_allocated_memory_at_last_global_gc_; | 4880 return amount_of_external_allocated_memory_ - |
| 4881 amount_of_external_allocated_memory_at_last_global_gc_; |
| 4975 } | 4882 } |
| 4976 | 4883 |
| 4977 | 4884 |
| 4978 intptr_t Heap::OldGenerationAllocationLimit(intptr_t old_gen_size, | 4885 intptr_t Heap::OldGenerationAllocationLimit(intptr_t old_gen_size, |
| 4979 int freed_global_handles) { | 4886 int freed_global_handles) { |
| 4980 const int kMaxHandles = 1000; | 4887 const int kMaxHandles = 1000; |
| 4981 const int kMinHandles = 100; | 4888 const int kMinHandles = 100; |
| 4982 double min_factor = 1.1; | 4889 double min_factor = 1.1; |
| 4983 double max_factor = 4; | 4890 double max_factor = 4; |
| 4984 // We set the old generation growing factor to 2 to grow the heap slower on | 4891 // We set the old generation growing factor to 2 to grow the heap slower on |
| 4985 // memory-constrained devices. | 4892 // memory-constrained devices. |
| 4986 if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice) { | 4893 if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice) { |
| 4987 max_factor = 2; | 4894 max_factor = 2; |
| 4988 } | 4895 } |
| 4989 // If there are many freed global handles, then the next full GC will | 4896 // If there are many freed global handles, then the next full GC will |
| 4990 // likely collect a lot of garbage. Choose the heap growing factor | 4897 // likely collect a lot of garbage. Choose the heap growing factor |
| 4991 // depending on freed global handles. | 4898 // depending on freed global handles. |
| 4992 // TODO(ulan, hpayer): Take into account mutator utilization. | 4899 // TODO(ulan, hpayer): Take into account mutator utilization. |
| 4993 double factor; | 4900 double factor; |
| 4994 if (freed_global_handles <= kMinHandles) { | 4901 if (freed_global_handles <= kMinHandles) { |
| 4995 factor = max_factor; | 4902 factor = max_factor; |
| 4996 } else if (freed_global_handles >= kMaxHandles) { | 4903 } else if (freed_global_handles >= kMaxHandles) { |
| 4997 factor = min_factor; | 4904 factor = min_factor; |
| 4998 } else { | 4905 } else { |
| 4999 // Compute factor using linear interpolation between points | 4906 // Compute factor using linear interpolation between points |
| 5000 // (kMinHandles, max_factor) and (kMaxHandles, min_factor). | 4907 // (kMinHandles, max_factor) and (kMaxHandles, min_factor). |
| 5001 factor = max_factor - | 4908 factor = max_factor - |
| 5002 (freed_global_handles - kMinHandles) * (max_factor - min_factor) / | 4909 (freed_global_handles - kMinHandles) * (max_factor - min_factor) / |
| 5003 (kMaxHandles - kMinHandles); | 4910 (kMaxHandles - kMinHandles); |
| 5004 } | 4911 } |
| 5005 | 4912 |
| 5006 if (FLAG_stress_compaction || | 4913 if (FLAG_stress_compaction || |
| 5007 mark_compact_collector()->reduce_memory_footprint_) { | 4914 mark_compact_collector()->reduce_memory_footprint_) { |
| 5008 factor = min_factor; | 4915 factor = min_factor; |
| 5009 } | 4916 } |
| 5010 | 4917 |
| 5011 intptr_t limit = static_cast<intptr_t>(old_gen_size * factor); | 4918 intptr_t limit = static_cast<intptr_t>(old_gen_size * factor); |
| 5012 limit = Max(limit, kMinimumOldGenerationAllocationLimit); | 4919 limit = Max(limit, kMinimumOldGenerationAllocationLimit); |
| 5013 limit += new_space_.Capacity(); | 4920 limit += new_space_.Capacity(); |
| (...skipping 13 matching lines...) |
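Editor's note: the heap-growing factor above is a linear interpolation between (kMinHandles, max_factor) and (kMaxHandles, min_factor): many freed global handles suggest the next full GC will reclaim a lot, so the heap is allowed to grow less. A self-contained sketch with the same constants and one worked value:

    // For freed_global_handles = 550:
    //   factor = 4.0 - (550 - 100) * (4.0 - 1.1) / (1000 - 100)
    //          = 4.0 - 450 * 2.9 / 900 = 4.0 - 1.45 = 2.55
    double GrowingFactor(int freed_global_handles) {
      const int kMaxHandles = 1000;
      const int kMinHandles = 100;
      const double min_factor = 1.1;
      const double max_factor = 4;
      if (freed_global_handles <= kMinHandles) return max_factor;
      if (freed_global_handles >= kMaxHandles) return min_factor;
      return max_factor - (freed_global_handles - kMinHandles) *
                              (max_factor - min_factor) /
                              (kMaxHandles - kMinHandles);
    }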
| 5027 | 4934 |
| 5028 void Heap::DisableInlineAllocation() { | 4935 void Heap::DisableInlineAllocation() { |
| 5029 if (inline_allocation_disabled_) return; | 4936 if (inline_allocation_disabled_) return; |
| 5030 inline_allocation_disabled_ = true; | 4937 inline_allocation_disabled_ = true; |
| 5031 | 4938 |
| 5032 // Update inline allocation limit for new space. | 4939 // Update inline allocation limit for new space. |
| 5033 new_space()->UpdateInlineAllocationLimit(0); | 4940 new_space()->UpdateInlineAllocationLimit(0); |
| 5034 | 4941 |
| 5035 // Update inline allocation limit for old spaces. | 4942 // Update inline allocation limit for old spaces. |
| 5036 PagedSpaces spaces(this); | 4943 PagedSpaces spaces(this); |
| 5037 for (PagedSpace* space = spaces.next(); | 4944 for (PagedSpace* space = spaces.next(); space != NULL; |
| 5038 space != NULL; | |
| 5039 space = spaces.next()) { | 4945 space = spaces.next()) { |
| 5040 space->EmptyAllocationInfo(); | 4946 space->EmptyAllocationInfo(); |
| 5041 } | 4947 } |
| 5042 } | 4948 } |
| 5043 | 4949 |
| 5044 | 4950 |
| 5045 V8_DECLARE_ONCE(initialize_gc_once); | 4951 V8_DECLARE_ONCE(initialize_gc_once); |
| 5046 | 4952 |
| 5047 static void InitializeGCOnce() { | 4953 static void InitializeGCOnce() { |
| 5048 InitializeScavengingVisitorsTables(); | 4954 InitializeScavengingVisitorsTables(); |
| (...skipping 18 matching lines...) |
| 5067 if (!configured_) { | 4973 if (!configured_) { |
| 5068 if (!ConfigureHeapDefault()) return false; | 4974 if (!ConfigureHeapDefault()) return false; |
| 5069 } | 4975 } |
| 5070 | 4976 |
| 5071 base::CallOnce(&initialize_gc_once, &InitializeGCOnce); | 4977 base::CallOnce(&initialize_gc_once, &InitializeGCOnce); |
| 5072 | 4978 |
| 5073 MarkMapPointersAsEncoded(false); | 4979 MarkMapPointersAsEncoded(false); |
| 5074 | 4980 |
| 5075 // Set up memory allocator. | 4981 // Set up memory allocator. |
| 5076 if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize())) | 4982 if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize())) |
| 5077 return false; | 4983 return false; |
| 5078 | 4984 |
| 5079 // Set up new space. | 4985 // Set up new space. |
| 5080 if (!new_space_.SetUp(reserved_semispace_size_, max_semi_space_size_)) { | 4986 if (!new_space_.SetUp(reserved_semispace_size_, max_semi_space_size_)) { |
| 5081 return false; | 4987 return false; |
| 5082 } | 4988 } |
| 5083 new_space_top_after_last_gc_ = new_space()->top(); | 4989 new_space_top_after_last_gc_ = new_space()->top(); |
| 5084 | 4990 |
| 5085 // Initialize old pointer space. | 4991 // Initialize old pointer space. |
| 5086 old_pointer_space_ = | 4992 old_pointer_space_ = new OldSpace(this, max_old_generation_size_, |
| 5087 new OldSpace(this, | 4993 OLD_POINTER_SPACE, NOT_EXECUTABLE); |
| 5088 max_old_generation_size_, | |
| 5089 OLD_POINTER_SPACE, | |
| 5090 NOT_EXECUTABLE); | |
| 5091 if (old_pointer_space_ == NULL) return false; | 4994 if (old_pointer_space_ == NULL) return false; |
| 5092 if (!old_pointer_space_->SetUp()) return false; | 4995 if (!old_pointer_space_->SetUp()) return false; |
| 5093 | 4996 |
| 5094 // Initialize old data space. | 4997 // Initialize old data space. |
| 5095 old_data_space_ = | 4998 old_data_space_ = new OldSpace(this, max_old_generation_size_, OLD_DATA_SPACE, |
| 5096 new OldSpace(this, | 4999 NOT_EXECUTABLE); |
| 5097 max_old_generation_size_, | |
| 5098 OLD_DATA_SPACE, | |
| 5099 NOT_EXECUTABLE); | |
| 5100 if (old_data_space_ == NULL) return false; | 5000 if (old_data_space_ == NULL) return false; |
| 5101 if (!old_data_space_->SetUp()) return false; | 5001 if (!old_data_space_->SetUp()) return false; |
| 5102 | 5002 |
| 5103 if (!isolate_->code_range()->SetUp(code_range_size_)) return false; | 5003 if (!isolate_->code_range()->SetUp(code_range_size_)) return false; |
| 5104 | 5004 |
| 5105 // Initialize the code space, set its maximum capacity to the old | 5005 // Initialize the code space, set its maximum capacity to the old |
| 5106 // generation size. It needs executable memory. | 5006 // generation size. It needs executable memory. |
| 5107 code_space_ = | 5007 code_space_ = |
| 5108 new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE); | 5008 new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE); |
| 5109 if (code_space_ == NULL) return false; | 5009 if (code_space_ == NULL) return false; |
| (...skipping 62 matching lines...) |
| 5172 | 5072 |
| 5173 | 5073 |
| 5174 void Heap::SetStackLimits() { | 5074 void Heap::SetStackLimits() { |
| 5175 DCHECK(isolate_ != NULL); | 5075 DCHECK(isolate_ != NULL); |
| 5176 DCHECK(isolate_ == isolate()); | 5076 DCHECK(isolate_ == isolate()); |
| 5177 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5077 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5178 // something that looks like an out of range Smi to the GC. | 5078 // something that looks like an out of range Smi to the GC. |
| 5179 | 5079 |
| 5180 // Set up the special root array entries containing the stack limits. | 5080 // Set up the special root array entries containing the stack limits. |
| 5181 // These are actually addresses, but the tag makes the GC ignore them. | 5081 // These are actually addresses, but the tag makes the GC ignore them. |
| 5182 roots_[kStackLimitRootIndex] = | 5082 roots_[kStackLimitRootIndex] = reinterpret_cast<Object*>( |
| 5183 reinterpret_cast<Object*>( | 5083 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag); |
| 5184 (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag); | 5084 roots_[kRealStackLimitRootIndex] = reinterpret_cast<Object*>( |
| 5185 roots_[kRealStackLimitRootIndex] = | 5085 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag); |
| 5186 reinterpret_cast<Object*>( | |
| 5187 (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag); | |
| 5188 } | 5086 } |
| 5189 | 5087 |
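Editor's note: SetStackLimits stores raw stack-limit addresses in the root array after clearing the Smi tag bits and or-ing in kSmiTag, so the entries look like Smis and the GC never tries to follow them as heap pointers. A sketch of the tagging trick, assuming the usual one-bit Smi tag scheme (kSmiTag == 0, kSmiTagMask == 1):

    #include <cstdint>

    constexpr uintptr_t kSmiTagMask = 1;
    constexpr uintptr_t kSmiTag = 0;

    // Clearing the low bit makes any address pass an "is this a Smi?" tag
    // check, so the GC skips it instead of treating it as a heap pointer.
    uintptr_t DisguiseAsSmi(uintptr_t stack_limit) {
      return (stack_limit & ~kSmiTagMask) | kSmiTag;
    }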
| 5190 | 5088 |
| 5191 void Heap::TearDown() { | 5089 void Heap::TearDown() { |
| 5192 #ifdef VERIFY_HEAP | 5090 #ifdef VERIFY_HEAP |
| 5193 if (FLAG_verify_heap) { | 5091 if (FLAG_verify_heap) { |
| 5194 Verify(); | 5092 Verify(); |
| 5195 } | 5093 } |
| 5196 #endif | 5094 #endif |
| 5197 | 5095 |
| 5198 UpdateMaximumCommitted(); | 5096 UpdateMaximumCommitted(); |
| 5199 | 5097 |
| 5200 if (FLAG_print_cumulative_gc_stat) { | 5098 if (FLAG_print_cumulative_gc_stat) { |
| 5201 PrintF("\n"); | 5099 PrintF("\n"); |
| 5202 PrintF("gc_count=%d ", gc_count_); | 5100 PrintF("gc_count=%d ", gc_count_); |
| 5203 PrintF("mark_sweep_count=%d ", ms_count_); | 5101 PrintF("mark_sweep_count=%d ", ms_count_); |
| 5204 PrintF("max_gc_pause=%.1f ", get_max_gc_pause()); | 5102 PrintF("max_gc_pause=%.1f ", get_max_gc_pause()); |
| 5205 PrintF("total_gc_time=%.1f ", total_gc_time_ms_); | 5103 PrintF("total_gc_time=%.1f ", total_gc_time_ms_); |
| 5206 PrintF("min_in_mutator=%.1f ", get_min_in_mutator()); | 5104 PrintF("min_in_mutator=%.1f ", get_min_in_mutator()); |
| 5207 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", | 5105 PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", get_max_alive_after_gc()); |
| 5208 get_max_alive_after_gc()); | |
| 5209 PrintF("total_marking_time=%.1f ", tracer_.cumulative_sweeping_duration()); | 5106 PrintF("total_marking_time=%.1f ", tracer_.cumulative_sweeping_duration()); |
| 5210 PrintF("total_sweeping_time=%.1f ", tracer_.cumulative_sweeping_duration()); | 5107 PrintF("total_sweeping_time=%.1f ", tracer_.cumulative_sweeping_duration()); |
| 5211 PrintF("\n\n"); | 5108 PrintF("\n\n"); |
| 5212 } | 5109 } |
| 5213 | 5110 |
| 5214 if (FLAG_print_max_heap_committed) { | 5111 if (FLAG_print_max_heap_committed) { |
| 5215 PrintF("\n"); | 5112 PrintF("\n"); |
| 5216 PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ", | 5113 PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ", |
| 5217 MaximumCommittedMemory()); | 5114 MaximumCommittedMemory()); |
| 5218 PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ", | 5115 PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ", |
| 5219 new_space_.MaximumCommittedMemory()); | 5116 new_space_.MaximumCommittedMemory()); |
| 5220 PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ", | 5117 PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ", |
| 5221 old_data_space_->MaximumCommittedMemory()); | 5118 old_data_space_->MaximumCommittedMemory()); |
| 5222 PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ", | 5119 PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ", |
| 5223 old_pointer_space_->MaximumCommittedMemory()); | 5120 old_pointer_space_->MaximumCommittedMemory()); |
| 5224 PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ", | 5121 PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ", |
| 5225 old_pointer_space_->MaximumCommittedMemory()); | 5122 old_pointer_space_->MaximumCommittedMemory()); |
| 5226 PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ", | 5123 PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ", |
| 5227 code_space_->MaximumCommittedMemory()); | 5124 code_space_->MaximumCommittedMemory()); |
| 5228 PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ", | 5125 PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ", |
| 5229 map_space_->MaximumCommittedMemory()); | 5126 map_space_->MaximumCommittedMemory()); |
| 5230 PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ", | 5127 PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ", |
| 5231 cell_space_->MaximumCommittedMemory()); | 5128 cell_space_->MaximumCommittedMemory()); |
| 5232 PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ", | 5129 PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ", |
| 5233 property_cell_space_->MaximumCommittedMemory()); | 5130 property_cell_space_->MaximumCommittedMemory()); |
| 5234 PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ", | 5131 PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ", |
| 5235 lo_space_->MaximumCommittedMemory()); | 5132 lo_space_->MaximumCommittedMemory()); |
| 5236 PrintF("\n\n"); | 5133 PrintF("\n\n"); |
| 5237 } | 5134 } |
| 5238 | 5135 |
| 5239 if (FLAG_verify_predictable) { | 5136 if (FLAG_verify_predictable) { |
| 5240 PrintAlloctionsHash(); | 5137 PrintAlloctionsHash(); |
| 5241 } | 5138 } |
| 5242 | 5139 |
| 5243 TearDownArrayBuffers(); | 5140 TearDownArrayBuffers(); |
| 5244 | 5141 |
| 5245 isolate_->global_handles()->TearDown(); | 5142 isolate_->global_handles()->TearDown(); |
| (...skipping 47 matching lines...) |
| 5293 } | 5190 } |
| 5294 | 5191 |
| 5295 store_buffer()->TearDown(); | 5192 store_buffer()->TearDown(); |
| 5296 incremental_marking()->TearDown(); | 5193 incremental_marking()->TearDown(); |
| 5297 | 5194 |
| 5298 isolate_->memory_allocator()->TearDown(); | 5195 isolate_->memory_allocator()->TearDown(); |
| 5299 } | 5196 } |
| 5300 | 5197 |
| 5301 | 5198 |
| 5302 void Heap::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, | 5199 void Heap::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, |
| 5303 GCType gc_type, | 5200 GCType gc_type, bool pass_isolate) { |
| 5304 bool pass_isolate) { | |
| 5305 DCHECK(callback != NULL); | 5201 DCHECK(callback != NULL); |
| 5306 GCPrologueCallbackPair pair(callback, gc_type, pass_isolate); | 5202 GCPrologueCallbackPair pair(callback, gc_type, pass_isolate); |
| 5307 DCHECK(!gc_prologue_callbacks_.Contains(pair)); | 5203 DCHECK(!gc_prologue_callbacks_.Contains(pair)); |
| 5308 return gc_prologue_callbacks_.Add(pair); | 5204 return gc_prologue_callbacks_.Add(pair); |
| 5309 } | 5205 } |
| 5310 | 5206 |
| 5311 | 5207 |
| 5312 void Heap::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) { | 5208 void Heap::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) { |
| 5313 DCHECK(callback != NULL); | 5209 DCHECK(callback != NULL); |
| 5314 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { | 5210 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { |
| 5315 if (gc_prologue_callbacks_[i].callback == callback) { | 5211 if (gc_prologue_callbacks_[i].callback == callback) { |
| 5316 gc_prologue_callbacks_.Remove(i); | 5212 gc_prologue_callbacks_.Remove(i); |
| 5317 return; | 5213 return; |
| 5318 } | 5214 } |
| 5319 } | 5215 } |
| 5320 UNREACHABLE(); | 5216 UNREACHABLE(); |
| 5321 } | 5217 } |
| 5322 | 5218 |
| 5323 | 5219 |
| 5324 void Heap::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, | 5220 void Heap::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, |
| 5325 GCType gc_type, | 5221 GCType gc_type, bool pass_isolate) { |
| 5326 bool pass_isolate) { | |
| 5327 DCHECK(callback != NULL); | 5222 DCHECK(callback != NULL); |
| 5328 GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate); | 5223 GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate); |
| 5329 DCHECK(!gc_epilogue_callbacks_.Contains(pair)); | 5224 DCHECK(!gc_epilogue_callbacks_.Contains(pair)); |
| 5330 return gc_epilogue_callbacks_.Add(pair); | 5225 return gc_epilogue_callbacks_.Add(pair); |
| 5331 } | 5226 } |
| 5332 | 5227 |
| 5333 | 5228 |
| 5334 void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) { | 5229 void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) { |
| 5335 DCHECK(callback != NULL); | 5230 DCHECK(callback != NULL); |
| 5336 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { | 5231 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { |
| (...skipping 28 matching lines...) |
| 5365 | 5260 |
| 5366 DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<Object> obj) { | 5261 DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<Object> obj) { |
| 5367 Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj); | 5262 Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj); |
| 5368 if (dep->IsDependentCode()) return DependentCode::cast(dep); | 5263 if (dep->IsDependentCode()) return DependentCode::cast(dep); |
| 5369 return DependentCode::cast(empty_fixed_array()); | 5264 return DependentCode::cast(empty_fixed_array()); |
| 5370 } | 5265 } |
| 5371 | 5266 |
| 5372 | 5267 |
| 5373 void Heap::EnsureWeakObjectToCodeTable() { | 5268 void Heap::EnsureWeakObjectToCodeTable() { |
| 5374 if (!weak_object_to_code_table()->IsHashTable()) { | 5269 if (!weak_object_to_code_table()->IsHashTable()) { |
| 5375 set_weak_object_to_code_table(*WeakHashTable::New( | 5270 set_weak_object_to_code_table( |
| 5376 isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY, TENURED)); | 5271 *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY, |
| 5272 TENURED)); |
| 5377 } | 5273 } |
| 5378 } | 5274 } |
| 5379 | 5275 |
| 5380 | 5276 |
| 5381 void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) { | 5277 void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) { |
| 5382 v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot); | 5278 v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot); |
| 5383 } | 5279 } |
| 5384 | 5280 |
| 5385 #ifdef DEBUG | 5281 #ifdef DEBUG |
| 5386 | 5282 |
| 5387 class PrintHandleVisitor: public ObjectVisitor { | 5283 class PrintHandleVisitor : public ObjectVisitor { |
| 5388 public: | 5284 public: |
| 5389 void VisitPointers(Object** start, Object** end) { | 5285 void VisitPointers(Object** start, Object** end) { |
| 5390 for (Object** p = start; p < end; p++) | 5286 for (Object** p = start; p < end; p++) |
| 5391 PrintF(" handle %p to %p\n", | 5287 PrintF(" handle %p to %p\n", reinterpret_cast<void*>(p), |
| 5392 reinterpret_cast<void*>(p), | |
| 5393 reinterpret_cast<void*>(*p)); | 5288 reinterpret_cast<void*>(*p)); |
| 5394 } | 5289 } |
| 5395 }; | 5290 }; |
| 5396 | 5291 |
| 5397 | 5292 |
| 5398 void Heap::PrintHandles() { | 5293 void Heap::PrintHandles() { |
| 5399 PrintF("Handles:\n"); | 5294 PrintF("Handles:\n"); |
| 5400 PrintHandleVisitor v; | 5295 PrintHandleVisitor v; |
| 5401 isolate_->handle_scope_implementer()->Iterate(&v); | 5296 isolate_->handle_scope_implementer()->Iterate(&v); |
| 5402 } | 5297 } |
| (...skipping 38 matching lines...) |
| 5441 case CELL_SPACE: | 5336 case CELL_SPACE: |
| 5442 return heap_->cell_space(); | 5337 return heap_->cell_space(); |
| 5443 case PROPERTY_CELL_SPACE: | 5338 case PROPERTY_CELL_SPACE: |
| 5444 return heap_->property_cell_space(); | 5339 return heap_->property_cell_space(); |
| 5445 default: | 5340 default: |
| 5446 return NULL; | 5341 return NULL; |
| 5447 } | 5342 } |
| 5448 } | 5343 } |
| 5449 | 5344 |
| 5450 | 5345 |
| 5451 | |
| 5452 OldSpace* OldSpaces::next() { | 5346 OldSpace* OldSpaces::next() { |
| 5453 switch (counter_++) { | 5347 switch (counter_++) { |
| 5454 case OLD_POINTER_SPACE: | 5348 case OLD_POINTER_SPACE: |
| 5455 return heap_->old_pointer_space(); | 5349 return heap_->old_pointer_space(); |
| 5456 case OLD_DATA_SPACE: | 5350 case OLD_DATA_SPACE: |
| 5457 return heap_->old_data_space(); | 5351 return heap_->old_data_space(); |
| 5458 case CODE_SPACE: | 5352 case CODE_SPACE: |
| 5459 return heap_->code_space(); | 5353 return heap_->code_space(); |
| 5460 default: | 5354 default: |
| 5461 return NULL; | 5355 return NULL; |
| 5462 } | 5356 } |
| 5463 } | 5357 } |
| 5464 | 5358 |
| 5465 | 5359 |
| 5466 SpaceIterator::SpaceIterator(Heap* heap) | 5360 SpaceIterator::SpaceIterator(Heap* heap) |
| 5467 : heap_(heap), | 5361 : heap_(heap), |
| 5468 current_space_(FIRST_SPACE), | 5362 current_space_(FIRST_SPACE), |
| 5469 iterator_(NULL), | 5363 iterator_(NULL), |
| 5470 size_func_(NULL) { | 5364 size_func_(NULL) {} |
| 5471 } | |
| 5472 | 5365 |
| 5473 | 5366 |
| 5474 SpaceIterator::SpaceIterator(Heap* heap, HeapObjectCallback size_func) | 5367 SpaceIterator::SpaceIterator(Heap* heap, HeapObjectCallback size_func) |
| 5475 : heap_(heap), | 5368 : heap_(heap), |
| 5476 current_space_(FIRST_SPACE), | 5369 current_space_(FIRST_SPACE), |
| 5477 iterator_(NULL), | 5370 iterator_(NULL), |
| 5478 size_func_(size_func) { | 5371 size_func_(size_func) {} |
| 5479 } | |
| 5480 | 5372 |
| 5481 | 5373 |
| 5482 SpaceIterator::~SpaceIterator() { | 5374 SpaceIterator::~SpaceIterator() { |
| 5483 // Delete active iterator if any. | 5375 // Delete active iterator if any. |
| 5484 delete iterator_; | 5376 delete iterator_; |
| 5485 } | 5377 } |
| 5486 | 5378 |
| 5487 | 5379 |
| 5488 bool SpaceIterator::has_next() { | 5380 bool SpaceIterator::has_next() { |
| 5489 // Iterate until no more spaces. | 5381 // Iterate until no more spaces. |
| (...skipping 35 matching lines...) |
| 5525 case CODE_SPACE: | 5417 case CODE_SPACE: |
| 5526 iterator_ = new HeapObjectIterator(heap_->code_space(), size_func_); | 5418 iterator_ = new HeapObjectIterator(heap_->code_space(), size_func_); |
| 5527 break; | 5419 break; |
| 5528 case MAP_SPACE: | 5420 case MAP_SPACE: |
| 5529 iterator_ = new HeapObjectIterator(heap_->map_space(), size_func_); | 5421 iterator_ = new HeapObjectIterator(heap_->map_space(), size_func_); |
| 5530 break; | 5422 break; |
| 5531 case CELL_SPACE: | 5423 case CELL_SPACE: |
| 5532 iterator_ = new HeapObjectIterator(heap_->cell_space(), size_func_); | 5424 iterator_ = new HeapObjectIterator(heap_->cell_space(), size_func_); |
| 5533 break; | 5425 break; |
| 5534 case PROPERTY_CELL_SPACE: | 5426 case PROPERTY_CELL_SPACE: |
| 5535 iterator_ = new HeapObjectIterator(heap_->property_cell_space(), | 5427 iterator_ = |
| 5536 size_func_); | 5428 new HeapObjectIterator(heap_->property_cell_space(), size_func_); |
| 5537 break; | 5429 break; |
| 5538 case LO_SPACE: | 5430 case LO_SPACE: |
| 5539 iterator_ = new LargeObjectIterator(heap_->lo_space(), size_func_); | 5431 iterator_ = new LargeObjectIterator(heap_->lo_space(), size_func_); |
| 5540 break; | 5432 break; |
| 5541 } | 5433 } |
| 5542 | 5434 |
| 5543 // Return the newly allocated iterator. | 5435 // Return the newly allocated iterator. |
| 5544 DCHECK(iterator_ != NULL); | 5436 DCHECK(iterator_ != NULL); |
| 5545 return iterator_; | 5437 return iterator_; |
| 5546 } | 5438 } |
| (...skipping 74 matching lines...) |
| 5621 HeapIterator::HeapObjectsFiltering filtering) | 5513 HeapIterator::HeapObjectsFiltering filtering) |
| 5622 : make_heap_iterable_helper_(heap), | 5514 : make_heap_iterable_helper_(heap), |
| 5623 no_heap_allocation_(), | 5515 no_heap_allocation_(), |
| 5624 heap_(heap), | 5516 heap_(heap), |
| 5625 filtering_(filtering), | 5517 filtering_(filtering), |
| 5626 filter_(NULL) { | 5518 filter_(NULL) { |
| 5627 Init(); | 5519 Init(); |
| 5628 } | 5520 } |
| 5629 | 5521 |
| 5630 | 5522 |
| 5631 HeapIterator::~HeapIterator() { | 5523 HeapIterator::~HeapIterator() { Shutdown(); } |
| 5632 Shutdown(); | |
| 5633 } | |
| 5634 | 5524 |
| 5635 | 5525 |
| 5636 void HeapIterator::Init() { | 5526 void HeapIterator::Init() { |
| 5637 // Start the iteration. | 5527 // Start the iteration. |
| 5638 space_iterator_ = new SpaceIterator(heap_); | 5528 space_iterator_ = new SpaceIterator(heap_); |
| 5639 switch (filtering_) { | 5529 switch (filtering_) { |
| 5640 case kFilterUnreachable: | 5530 case kFilterUnreachable: |
| 5641 filter_ = new UnreachableObjectsFilter(heap_); | 5531 filter_ = new UnreachableObjectsFilter(heap_); |
| 5642 break; | 5532 break; |
| 5643 default: | 5533 default: |
| (...skipping 55 matching lines...) |
| 5699 // Restart the iterator. | 5589 // Restart the iterator. |
| 5700 Shutdown(); | 5590 Shutdown(); |
| 5701 Init(); | 5591 Init(); |
| 5702 } | 5592 } |
| 5703 | 5593 |
| 5704 | 5594 |
| 5705 #ifdef DEBUG | 5595 #ifdef DEBUG |
| 5706 | 5596 |
| 5707 Object* const PathTracer::kAnyGlobalObject = NULL; | 5597 Object* const PathTracer::kAnyGlobalObject = NULL; |
| 5708 | 5598 |
| 5709 class PathTracer::MarkVisitor: public ObjectVisitor { | 5599 class PathTracer::MarkVisitor : public ObjectVisitor { |
| 5710 public: | 5600 public: |
| 5711 explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {} | 5601 explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {} |
| 5712 void VisitPointers(Object** start, Object** end) { | 5602 void VisitPointers(Object** start, Object** end) { |
| 5713 // Scan all HeapObject pointers in [start, end) | 5603 // Scan all HeapObject pointers in [start, end) |
| 5714 for (Object** p = start; !tracer_->found() && (p < end); p++) { | 5604 for (Object** p = start; !tracer_->found() && (p < end); p++) { |
| 5715 if ((*p)->IsHeapObject()) | 5605 if ((*p)->IsHeapObject()) tracer_->MarkRecursively(p, this); |
| 5716 tracer_->MarkRecursively(p, this); | |
| 5717 } | 5606 } |
| 5718 } | 5607 } |
| 5719 | 5608 |
| 5720 private: | 5609 private: |
| 5721 PathTracer* tracer_; | 5610 PathTracer* tracer_; |
| 5722 }; | 5611 }; |
| 5723 | 5612 |
| 5724 | 5613 |
| 5725 class PathTracer::UnmarkVisitor: public ObjectVisitor { | 5614 class PathTracer::UnmarkVisitor : public ObjectVisitor { |
| 5726 public: | 5615 public: |
| 5727 explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {} | 5616 explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {} |
| 5728 void VisitPointers(Object** start, Object** end) { | 5617 void VisitPointers(Object** start, Object** end) { |
| 5729 // Scan all HeapObject pointers in [start, end) | 5618 // Scan all HeapObject pointers in [start, end) |
| 5730 for (Object** p = start; p < end; p++) { | 5619 for (Object** p = start; p < end; p++) { |
| 5731 if ((*p)->IsHeapObject()) | 5620 if ((*p)->IsHeapObject()) tracer_->UnmarkRecursively(p, this); |
| 5732 tracer_->UnmarkRecursively(p, this); | |
| 5733 } | 5621 } |
| 5734 } | 5622 } |
| 5735 | 5623 |
| 5736 private: | 5624 private: |
| 5737 PathTracer* tracer_; | 5625 PathTracer* tracer_; |
| 5738 }; | 5626 }; |
| 5739 | 5627 |
| 5740 | 5628 |
| 5741 void PathTracer::VisitPointers(Object** start, Object** end) { | 5629 void PathTracer::VisitPointers(Object** start, Object** end) { |
| 5742 bool done = ((what_to_find_ == FIND_FIRST) && found_target_); | 5630 bool done = ((what_to_find_ == FIND_FIRST) && found_target_); |
| (...skipping 56 matching lines...) |
| 5799 // not visited yet | 5687 // not visited yet |
| 5800 Map* map = Map::cast(map_word.ToMap()); | 5688 Map* map = Map::cast(map_word.ToMap()); |
| 5801 | 5689 |
| 5802 MapWord marked_map_word = | 5690 MapWord marked_map_word = |
| 5803 MapWord::FromRawValue(obj->map_word().ToRawValue() + kMarkTag); | 5691 MapWord::FromRawValue(obj->map_word().ToRawValue() + kMarkTag); |
| 5804 obj->set_map_word(marked_map_word); | 5692 obj->set_map_word(marked_map_word); |
| 5805 | 5693 |
| 5806 // Scan the object body. | 5694 // Scan the object body. |
| 5807 if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) { | 5695 if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) { |
| 5809 // This is specialized to scan Contexts properly. | 5696 // This is specialized to scan Contexts properly. |
| 5809 Object** start = reinterpret_cast<Object**>(obj->address() + | 5697 Object** start = |
| 5810 Context::kHeaderSize); | 5698 reinterpret_cast<Object**>(obj->address() + Context::kHeaderSize); |
| 5811 Object** end = reinterpret_cast<Object**>(obj->address() + | 5699 Object** end = |
| 5812 Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize); | 5700 reinterpret_cast<Object**>(obj->address() + Context::kHeaderSize + |
| 5701 Context::FIRST_WEAK_SLOT * kPointerSize); |
| 5813 mark_visitor->VisitPointers(start, end); | 5702 mark_visitor->VisitPointers(start, end); |
| 5814 } else { | 5703 } else { |
| 5815 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), mark_visitor); | 5704 obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), mark_visitor); |
| 5816 } | 5705 } |
| 5817 | 5706 |
| 5818 // Scan the map after the body because the body is a lot more interesting | 5707 // Scan the map after the body because the body is a lot more interesting |
| 5819 // when doing leak detection. | 5708 // when doing leak detection. |
| 5820 MarkRecursively(reinterpret_cast<Object**>(&map), mark_visitor); | 5709 MarkRecursively(reinterpret_cast<Object**>(&map), mark_visitor); |
| 5821 | 5710 |
| 5822 if (!found_target_in_trace_) { // don't pop if found the target | 5711 if (!found_target_in_trace_) { // don't pop if found the target |
| (...skipping 53 matching lines...) |
| 5876 void Heap::TracePathToObject(Object* target) { | 5765 void Heap::TracePathToObject(Object* target) { |
| 5877 PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL); | 5766 PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL); |
| 5878 IterateRoots(&tracer, VISIT_ONLY_STRONG); | 5767 IterateRoots(&tracer, VISIT_ONLY_STRONG); |
| 5879 } | 5768 } |
| 5880 | 5769 |
| 5881 | 5770 |
| 5882 // Triggers a depth-first traversal of reachable objects from roots | 5771 // Triggers a depth-first traversal of reachable objects from roots |
| 5883 // and finds a path to any global object and prints it. Useful for | 5772 // and finds a path to any global object and prints it. Useful for |
| 5884 // determining the source for leaks of global objects. | 5773 // determining the source for leaks of global objects. |
| 5885 void Heap::TracePathToGlobal() { | 5774 void Heap::TracePathToGlobal() { |
| 5886 PathTracer tracer(PathTracer::kAnyGlobalObject, | 5775 PathTracer tracer(PathTracer::kAnyGlobalObject, PathTracer::FIND_ALL, |
| 5887 PathTracer::FIND_ALL, | |
| 5888 VISIT_ALL); | 5776 VISIT_ALL); |
| 5889 IterateRoots(&tracer, VISIT_ONLY_STRONG); | 5777 IterateRoots(&tracer, VISIT_ONLY_STRONG); |
| 5890 } | 5778 } |
| 5891 #endif | 5779 #endif |
| 5892 | 5780 |
| 5893 | 5781 |
| 5894 void Heap::UpdateCumulativeGCStatistics(double duration, | 5782 void Heap::UpdateCumulativeGCStatistics(double duration, |
| 5895 double spent_in_mutator, | 5783 double spent_in_mutator, |
| 5896 double marking_time) { | 5784 double marking_time) { |
| 5897 if (FLAG_print_cumulative_gc_stat) { | 5785 if (FLAG_print_cumulative_gc_stat) { |
| (...skipping 24 matching lines...) |
| 5922 for (int i = 0; i < kEntriesPerBucket; i++) { | 5810 for (int i = 0; i < kEntriesPerBucket; i++) { |
| 5923 Key& key = keys_[index + i]; | 5811 Key& key = keys_[index + i]; |
| 5924 if ((key.map == *map) && key.name->Equals(*name)) { | 5812 if ((key.map == *map) && key.name->Equals(*name)) { |
| 5925 return field_offsets_[index + i]; | 5813 return field_offsets_[index + i]; |
| 5926 } | 5814 } |
| 5927 } | 5815 } |
| 5928 return kNotFound; | 5816 return kNotFound; |
| 5929 } | 5817 } |
| 5930 | 5818 |
| 5931 | 5819 |
| 5932 void KeyedLookupCache::Update(Handle<Map> map, | 5820 void KeyedLookupCache::Update(Handle<Map> map, Handle<Name> name, |
| 5933 Handle<Name> name, | |
| 5934 int field_offset) { | 5821 int field_offset) { |
| 5935 DisallowHeapAllocation no_gc; | 5822 DisallowHeapAllocation no_gc; |
| 5936 if (!name->IsUniqueName()) { | 5823 if (!name->IsUniqueName()) { |
| 5937 if (!StringTable::InternalizeStringIfExists(name->GetIsolate(), | 5824 if (!StringTable::InternalizeStringIfExists( |
| 5938 Handle<String>::cast(name)). | 5825 name->GetIsolate(), Handle<String>::cast(name)).ToHandle(&name)) { |
| 5939 ToHandle(&name)) { | |
| 5940 return; | 5826 return; |
| 5941 } | 5827 } |
| 5942 } | 5828 } |
| 5943 // This cache is cleared only between mark compact passes, so we expect the | 5829 // This cache is cleared only between mark compact passes, so we expect the |
| 5944 // cache to only contain old space names. | 5830 // cache to only contain old space names. |
| 5945 DCHECK(!map->GetIsolate()->heap()->InNewSpace(*name)); | 5831 DCHECK(!map->GetIsolate()->heap()->InNewSpace(*name)); |
| 5946 | 5832 |
| 5947 int index = (Hash(map, name) & kHashMask); | 5833 int index = (Hash(map, name) & kHashMask); |
| 5948 // After a GC there will be free slots, so we use them in order (this may | 5834 // After a GC there will be free slots, so we use them in order (this may |
| 5949 // help to get the most frequently used one in position 0). | 5835 // help to get the most frequently used one in position 0). |
| 5950 for (int i = 0; i< kEntriesPerBucket; i++) { | 5836 for (int i = 0; i < kEntriesPerBucket; i++) { |
| 5951 Key& key = keys_[index]; | 5837 Key& key = keys_[index]; |
| 5952 Object* free_entry_indicator = NULL; | 5838 Object* free_entry_indicator = NULL; |
| 5953 if (key.map == free_entry_indicator) { | 5839 if (key.map == free_entry_indicator) { |
| 5954 key.map = *map; | 5840 key.map = *map; |
| 5955 key.name = *name; | 5841 key.name = *name; |
| 5956 field_offsets_[index + i] = field_offset; | 5842 field_offsets_[index + i] = field_offset; |
| 5957 return; | 5843 return; |
| 5958 } | 5844 } |
| 5959 } | 5845 } |
| 5960 // No free entry found in this bucket, so we move them all down one and | 5846 // No free entry found in this bucket, so we move them all down one and |
| (...skipping 88 matching lines...) |
| 6049 // StoreBuffer::Filter relies on MemoryChunk::FromAnyPointerAddress. | 5935 // StoreBuffer::Filter relies on MemoryChunk::FromAnyPointerAddress. |
| 6050 // If FromAnyPointerAddress encounters a slot that belongs to a large | 5936 // If FromAnyPointerAddress encounters a slot that belongs to a large |
| 6051 // chunk queued for deletion it will fail to find the chunk because | 5937 // chunk queued for deletion it will fail to find the chunk because |
| 6052 // it tries to perform a search in the list of pages owned by the large | 5938 // it tries to perform a search in the list of pages owned by the large |
| 6053 // object space and queued chunks were detached from that list. | 5939 // object space and queued chunks were detached from that list. |
| 6054 // To work around this we split the large chunk into normal kPageSize aligned | 5940 // To work around this we split the large chunk into normal kPageSize aligned |
| 6055 // pieces and initialize size, owner and flags field of every piece. | 5941 // pieces and initialize size, owner and flags field of every piece. |
| 6056 // If FromAnyPointerAddress encounters a slot that belongs to one of | 5942 // If FromAnyPointerAddress encounters a slot that belongs to one of |
| 6057 // these smaller pieces it will treat it as a slot on a normal Page. | 5943 // these smaller pieces it will treat it as a slot on a normal Page. |
| 6058 Address chunk_end = chunk->address() + chunk->size(); | 5944 Address chunk_end = chunk->address() + chunk->size(); |
| 6059 MemoryChunk* inner = MemoryChunk::FromAddress( | 5945 MemoryChunk* inner = |
| 6060 chunk->address() + Page::kPageSize); | 5946 MemoryChunk::FromAddress(chunk->address() + Page::kPageSize); |
| 6061 MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1); | 5947 MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1); |
| 6062 while (inner <= inner_last) { | 5948 while (inner <= inner_last) { |
| 6063 // Size of a large chunk is always a multiple of | 5949 // Size of a large chunk is always a multiple of |
| 6064 // OS::AllocateAlignment() so there is always | 5950 // OS::AllocateAlignment() so there is always |
| 6065 // enough space for a fake MemoryChunk header. | 5951 // enough space for a fake MemoryChunk header. |
| 6066 Address area_end = Min(inner->address() + Page::kPageSize, chunk_end); | 5952 Address area_end = Min(inner->address() + Page::kPageSize, chunk_end); |
| 6067 // Guard against overflow. | 5953 // Guard against overflow. |
| 6068 if (area_end < inner->address()) area_end = chunk_end; | 5954 if (area_end < inner->address()) area_end = chunk_end; |
| 6069 inner->SetArea(inner->address(), area_end); | 5955 inner->SetArea(inner->address(), area_end); |
| 6070 inner->set_size(Page::kPageSize); | 5956 inner->set_size(Page::kPageSize); |
| 6071 inner->set_owner(lo_space()); | 5957 inner->set_owner(lo_space()); |
| 6072 inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED); | 5958 inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED); |
| 6073 inner = MemoryChunk::FromAddress( | 5959 inner = MemoryChunk::FromAddress(inner->address() + Page::kPageSize); |
| 6074 inner->address() + Page::kPageSize); | |
| 6075 } | 5960 } |
| 6076 } | 5961 } |
| 6077 } | 5962 } |
| 6078 isolate_->heap()->store_buffer()->Compact(); | 5963 isolate_->heap()->store_buffer()->Compact(); |
| 6079 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 5964 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
| 6080 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 5965 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
| 6081 next = chunk->next_chunk(); | 5966 next = chunk->next_chunk(); |
| 6082 isolate_->memory_allocator()->Free(chunk); | 5967 isolate_->memory_allocator()->Free(chunk); |
| 6083 } | 5968 } |
| 6084 chunks_queued_for_free_ = NULL; | 5969 chunks_queued_for_free_ = NULL; |
| (...skipping 25 matching lines...) |
| 6110 } | 5995 } |
| 6111 | 5996 |
| 6112 | 5997 |
| 6113 static base::LazyMutex checkpoint_object_stats_mutex = LAZY_MUTEX_INITIALIZER; | 5998 static base::LazyMutex checkpoint_object_stats_mutex = LAZY_MUTEX_INITIALIZER; |
| 6114 | 5999 |
| 6115 | 6000 |
| 6116 void Heap::CheckpointObjectStats() { | 6001 void Heap::CheckpointObjectStats() { |
| 6117 base::LockGuard<base::Mutex> lock_guard( | 6002 base::LockGuard<base::Mutex> lock_guard( |
| 6118 checkpoint_object_stats_mutex.Pointer()); | 6003 checkpoint_object_stats_mutex.Pointer()); |
| 6119 Counters* counters = isolate()->counters(); | 6004 Counters* counters = isolate()->counters(); |
| 6120 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ | 6005 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ |
| 6121 counters->count_of_##name()->Increment( \ | 6006 counters->count_of_##name()->Increment( \ |
| 6122 static_cast<int>(object_counts_[name])); \ | 6007 static_cast<int>(object_counts_[name])); \ |
| 6123 counters->count_of_##name()->Decrement( \ | 6008 counters->count_of_##name()->Decrement( \ |
| 6124 static_cast<int>(object_counts_last_time_[name])); \ | 6009 static_cast<int>(object_counts_last_time_[name])); \ |
| 6125 counters->size_of_##name()->Increment( \ | 6010 counters->size_of_##name()->Increment( \ |
| 6126 static_cast<int>(object_sizes_[name])); \ | 6011 static_cast<int>(object_sizes_[name])); \ |
| 6127 counters->size_of_##name()->Decrement( \ | 6012 counters->size_of_##name()->Decrement( \ |
| 6128 static_cast<int>(object_sizes_last_time_[name])); | 6013 static_cast<int>(object_sizes_last_time_[name])); |
| 6129 INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 6014 INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6130 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6015 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6131 int index; | 6016 int index; |
| 6132 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ | 6017 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \ |
| 6133 index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \ | 6018 index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \ |
| 6134 counters->count_of_CODE_TYPE_##name()->Increment( \ | 6019 counters->count_of_CODE_TYPE_##name()->Increment( \ |
| 6135 static_cast<int>(object_counts_[index])); \ | 6020 static_cast<int>(object_counts_[index])); \ |
| 6136 counters->count_of_CODE_TYPE_##name()->Decrement( \ | 6021 counters->count_of_CODE_TYPE_##name()->Decrement( \ |
| 6137 static_cast<int>(object_counts_last_time_[index])); \ | 6022 static_cast<int>(object_counts_last_time_[index])); \ |
| (...skipping 26 matching lines...) |
| 6164 static_cast<int>(object_sizes_[index])); \ | 6049 static_cast<int>(object_sizes_[index])); \ |
| 6165 counters->size_of_CODE_AGE_##name()->Decrement( \ | 6050 counters->size_of_CODE_AGE_##name()->Decrement( \ |
| 6166 static_cast<int>(object_sizes_last_time_[index])); | 6051 static_cast<int>(object_sizes_last_time_[index])); |
| 6167 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6052 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6168 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6053 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6169 | 6054 |
| 6170 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6055 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6171 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6056 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6172 ClearObjectStats(); | 6057 ClearObjectStats(); |
| 6173 } | 6058 } |
| 6174 | 6059 } |
| 6175 } } // namespace v8::internal | 6060 } // namespace v8::internal |
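Note: the Heap::AddGCPrologueCallback / AddGCEpilogueCallback pair in this patch backs the embedder-facing registration API on v8::Isolate. The following is a minimal usage sketch only, assuming the public v8::Isolate::AddGCPrologueCallback entry points of this V8 revision; the callback name and body are illustrative, not part of the patch.

// Sketch: how an embedder ends up in Heap::AddGCPrologueCallback via the
// public API. OnGCPrologue and RegisterGCHooks are hypothetical names.
#include "include/v8.h"

// Invoked before every collection whose type matches the registered filter.
static void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
                         v8::GCCallbackFlags flags) {
  // e.g. record a timestamp or bump an embedder-side counter here.
}

void RegisterGCHooks(v8::Isolate* isolate) {
  // Only interested in full (mark-compact) collections in this sketch.
  isolate->AddGCPrologueCallback(OnGCPrologue, v8::kGCTypeMarkSweepCompact);
  // Later, unregister with the same function pointer:
  // isolate->RemoveGCPrologueCallback(OnGCPrologue);
}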