OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/scavenger.h" | 5 #include "src/heap/scavenger.h" |
6 | 6 |
7 #include "src/contexts.h" | 7 #include "src/contexts.h" |
8 #include "src/heap/heap.h" | 8 #include "src/heap/heap.h" |
9 #include "src/heap/objects-visiting-inl.h" | 9 #include "src/heap/objects-visiting-inl.h" |
10 #include "src/heap/scavenger-inl.h" | 10 #include "src/heap/scavenger-inl.h" |
(...skipping 182 matching lines...) | |
193 heap->promotion_queue()->insert( | 193 heap->promotion_queue()->insert( |
194 target, object_size, | 194 target, object_size, |
195 Marking::IsBlack(Marking::MarkBitFrom(object))); | 195 Marking::IsBlack(Marking::MarkBitFrom(object))); |
196 } | 196 } |
197 heap->IncrementPromotedObjectsSize(object_size); | 197 heap->IncrementPromotedObjectsSize(object_size); |
198 return true; | 198 return true; |
199 } | 199 } |
200 return false; | 200 return false; |
201 } | 201 } |
202 | 202 |
203 | |
204 template <ObjectContents object_contents, AllocationAlignment alignment> | 203 template <ObjectContents object_contents, AllocationAlignment alignment> |
205 static inline void EvacuateObject(Map* map, HeapObject** slot, | 204 static inline void EvacuateObject(Map* map, HeapObject** slot, |
206 HeapObject* object, int object_size) { | 205 HeapObject* object, int object_size, |
206 PromotionMode promotion_mode) { | |
207 SLOW_DCHECK(object_size <= Page::kAllocatableMemory); | 207 SLOW_DCHECK(object_size <= Page::kAllocatableMemory); |
208 SLOW_DCHECK(object->Size() == object_size); | 208 SLOW_DCHECK(object->Size() == object_size); |
209 Heap* heap = map->GetHeap(); | 209 Heap* heap = map->GetHeap(); |
210 | 210 |
211 if (!heap->ShouldBePromoted(object->address(), object_size)) { | 211 if (promotion_mode != FORCE_PROMOTION && |
212 !heap->ShouldBePromoted(object->address(), object_size)) { | |
212 // A semi-space copy may fail due to fragmentation. In that case, we | 213 // A semi-space copy may fail due to fragmentation. In that case, we |
213 // try to promote the object. | 214 // try to promote the object. |
214 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 215 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { |
215 return; | 216 return; |
216 } | 217 } |
217 } | 218 } |
218 | 219 |
219 if (PromoteObject<object_contents, alignment>(map, slot, object, | 220 if (PromoteObject<object_contents, alignment>(map, slot, object, |
220 object_size)) { | 221 object_size)) { |
221 return; | 222 return; |
ulan, 2016/05/23 13:04:22: if (promotion_mode == FORCE_PROMOTION) FatalProce
ahaas, 2016/05/23 13:31:48: Done.
(A hedged sketch of what this suggestion likely resolves to follows after the diff.)
| |
222 } | 223 } |
223 | 224 |
224 // If promotion failed, we try to copy the object to the other semi-space | 225 // If promotion failed, we try to copy the object to the other semi-space |
225 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; | 226 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; |
226 | 227 |
227 FatalProcessOutOfMemory("Scavenger: semi-space copy\n"); | 228 FatalProcessOutOfMemory("Scavenger: semi-space copy\n"); |
228 } | 229 } |
229 | 230 |
230 | |
231 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, | 231 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
232 HeapObject* object) { | 232 HeapObject* object, |
233 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); | 233 PromotionMode promotion_mode) { |
234 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object, | |
235 promotion_mode); | |
234 | 236 |
235 if (marks_handling == IGNORE_MARKS) return; | 237 if (marks_handling == IGNORE_MARKS) return; |
236 | 238 |
237 MapWord map_word = object->map_word(); | 239 MapWord map_word = object->map_word(); |
238 DCHECK(map_word.IsForwardingAddress()); | 240 DCHECK(map_word.IsForwardingAddress()); |
239 HeapObject* target = map_word.ToForwardingAddress(); | 241 HeapObject* target = map_word.ToForwardingAddress(); |
240 | 242 |
241 MarkBit mark_bit = Marking::MarkBitFrom(target); | 243 MarkBit mark_bit = Marking::MarkBitFrom(target); |
242 if (Marking::IsBlack(mark_bit)) { | 244 if (Marking::IsBlack(mark_bit)) { |
243 // This object is black and it might not be rescanned by marker. | 245 // This object is black and it might not be rescanned by marker. |
244 // We should explicitly record code entry slot for compaction because | 246 // We should explicitly record code entry slot for compaction because |
245 // promotion queue processing (IteratePromotedObjectPointers) will | 247 // promotion queue processing (IteratePromotedObjectPointers) will |
246 // miss it as it is not HeapObject-tagged. | 248 // miss it as it is not HeapObject-tagged. |
247 Address code_entry_slot = | 249 Address code_entry_slot = |
248 target->address() + JSFunction::kCodeEntryOffset; | 250 target->address() + JSFunction::kCodeEntryOffset; |
249 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 251 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
250 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 252 map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( |
251 target, code_entry_slot, code); | 253 target, code_entry_slot, code); |
252 } | 254 } |
253 } | 255 } |
254 | 256 |
255 | |
256 static inline void EvacuateFixedArray(Map* map, HeapObject** slot, | 257 static inline void EvacuateFixedArray(Map* map, HeapObject** slot, |
257 HeapObject* object) { | 258 HeapObject* object, |
259 PromotionMode promotion_mode) { | |
258 int length = reinterpret_cast<FixedArray*>(object)->synchronized_length(); | 260 int length = reinterpret_cast<FixedArray*>(object)->synchronized_length(); |
259 int object_size = FixedArray::SizeFor(length); | 261 int object_size = FixedArray::SizeFor(length); |
260 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 262 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, object_size, |
261 object_size); | 263 promotion_mode); |
262 } | 264 } |
263 | 265 |
264 | |
265 static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot, | 266 static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot, |
266 HeapObject* object) { | 267 HeapObject* object, |
268 PromotionMode promotion_mode) { | |
267 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); | 269 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); |
268 int object_size = FixedDoubleArray::SizeFor(length); | 270 int object_size = FixedDoubleArray::SizeFor(length); |
269 EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size); | 271 EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size, |
272 promotion_mode); | |
270 } | 273 } |
271 | 274 |
272 | |
273 static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot, | 275 static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot, |
274 HeapObject* object) { | 276 HeapObject* object, |
277 PromotionMode promotion_mode) { | |
275 int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); | 278 int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); |
276 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 279 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, object_size, |
277 object_size); | 280 promotion_mode); |
278 } | 281 } |
279 | 282 |
280 | |
281 static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot, | 283 static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot, |
282 HeapObject* object) { | 284 HeapObject* object, |
285 PromotionMode promotion_mode) { | |
283 int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); | 286 int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); |
284 EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object, | 287 EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object, |
285 object_size); | 288 object_size, promotion_mode); |
286 } | 289 } |
287 | 290 |
288 | |
289 static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot, | 291 static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot, |
290 HeapObject* object) { | 292 HeapObject* object, |
291 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object); | 293 PromotionMode promotion_mode) { |
294 ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object, | |
295 promotion_mode); | |
292 } | 296 } |
293 | 297 |
294 | |
295 static inline void EvacuateByteArray(Map* map, HeapObject** slot, | 298 static inline void EvacuateByteArray(Map* map, HeapObject** slot, |
296 HeapObject* object) { | 299 HeapObject* object, |
300 PromotionMode promotion_mode) { | |
297 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); | 301 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); |
298 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size); | 302 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size, |
303 promotion_mode); | |
299 } | 304 } |
300 | 305 |
301 | |
302 static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot, | 306 static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot, |
303 HeapObject* object) { | 307 HeapObject* object, |
308 PromotionMode promotion_mode) { | |
304 int object_size = SeqOneByteString::cast(object) | 309 int object_size = SeqOneByteString::cast(object) |
305 ->SeqOneByteStringSize(map->instance_type()); | 310 ->SeqOneByteStringSize(map->instance_type()); |
306 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size); | 311 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size, |
312 promotion_mode); | |
307 } | 313 } |
308 | 314 |
309 | |
310 static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot, | 315 static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot, |
311 HeapObject* object) { | 316 HeapObject* object, |
317 PromotionMode promotion_mode) { | |
312 int object_size = SeqTwoByteString::cast(object) | 318 int object_size = SeqTwoByteString::cast(object) |
313 ->SeqTwoByteStringSize(map->instance_type()); | 319 ->SeqTwoByteStringSize(map->instance_type()); |
314 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size); | 320 EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size, |
321 promotion_mode); | |
315 } | 322 } |
316 | 323 |
317 | |
318 static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot, | 324 static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot, |
319 HeapObject* object) { | 325 HeapObject* object, |
326 PromotionMode promotion_mode) { | |
320 DCHECK(IsShortcutCandidate(map->instance_type())); | 327 DCHECK(IsShortcutCandidate(map->instance_type())); |
321 | 328 |
322 Heap* heap = map->GetHeap(); | 329 Heap* heap = map->GetHeap(); |
323 | 330 |
324 if (marks_handling == IGNORE_MARKS && | 331 if (marks_handling == IGNORE_MARKS && |
325 ConsString::cast(object)->unchecked_second() == heap->empty_string()) { | 332 ConsString::cast(object)->unchecked_second() == heap->empty_string()) { |
326 HeapObject* first = | 333 HeapObject* first = |
327 HeapObject::cast(ConsString::cast(object)->unchecked_first()); | 334 HeapObject::cast(ConsString::cast(object)->unchecked_first()); |
328 | 335 |
329 *slot = first; | 336 *slot = first; |
330 | 337 |
331 if (!heap->InNewSpace(first)) { | 338 if (!heap->InNewSpace(first)) { |
332 object->set_map_word(MapWord::FromForwardingAddress(first)); | 339 object->set_map_word(MapWord::FromForwardingAddress(first)); |
333 return; | 340 return; |
334 } | 341 } |
335 | 342 |
336 MapWord first_word = first->map_word(); | 343 MapWord first_word = first->map_word(); |
337 if (first_word.IsForwardingAddress()) { | 344 if (first_word.IsForwardingAddress()) { |
338 HeapObject* target = first_word.ToForwardingAddress(); | 345 HeapObject* target = first_word.ToForwardingAddress(); |
339 | 346 |
340 *slot = target; | 347 *slot = target; |
341 object->set_map_word(MapWord::FromForwardingAddress(target)); | 348 object->set_map_word(MapWord::FromForwardingAddress(target)); |
342 return; | 349 return; |
343 } | 350 } |
344 | 351 |
345 Scavenger::ScavengeObjectSlow(slot, first); | 352 Scavenger::ScavengeObjectSlow(slot, first, promotion_mode); |
346 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 353 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
347 return; | 354 return; |
348 } | 355 } |
349 | 356 |
350 int object_size = ConsString::kSize; | 357 int object_size = ConsString::kSize; |
351 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, | 358 EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object, object_size, |
352 object_size); | 359 promotion_mode); |
353 } | 360 } |
354 | 361 |
355 template <ObjectContents object_contents> | 362 template <ObjectContents object_contents> |
356 class ObjectEvacuationStrategy { | 363 class ObjectEvacuationStrategy { |
357 public: | 364 public: |
358 template <int object_size> | 365 template <int object_size> |
359 static inline void VisitSpecialized(Map* map, HeapObject** slot, | 366 static inline void VisitSpecialized(Map* map, HeapObject** slot, |
360 HeapObject* object) { | 367 HeapObject* object, |
361 EvacuateObject<object_contents, kWordAligned>(map, slot, object, | 368 PromotionMode promotion_mode) { |
362 object_size); | 369 EvacuateObject<object_contents, kWordAligned>( |
370 map, slot, object, object_size, promotion_mode); | |
363 } | 371 } |
364 | 372 |
365 static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) { | 373 static inline void Visit(Map* map, HeapObject** slot, HeapObject* object, |
374 PromotionMode promotion_mode) { | |
366 int object_size = map->instance_size(); | 375 int object_size = map->instance_size(); |
367 EvacuateObject<object_contents, kWordAligned>(map, slot, object, | 376 EvacuateObject<object_contents, kWordAligned>( |
368 object_size); | 377 map, slot, object, object_size, promotion_mode); |
369 } | 378 } |
370 }; | 379 }; |
371 | 380 |
372 static VisitorDispatchTable<ScavengingCallback> table_; | 381 static VisitorDispatchTable<ScavengingCallback> table_; |
373 }; | 382 }; |
374 | 383 |
375 | 384 |
376 template <MarksHandling marks_handling, | 385 template <MarksHandling marks_handling, |
377 LoggingAndProfiling logging_and_profiling_mode> | 386 LoggingAndProfiling logging_and_profiling_mode> |
378 VisitorDispatchTable<ScavengingCallback> | 387 VisitorDispatchTable<ScavengingCallback> |
379 ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_; | 388 ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_; |
380 | 389 |
381 | 390 |
382 // static | 391 // static |
383 void Scavenger::Initialize() { | 392 void Scavenger::Initialize() { |
384 ScavengingVisitor<TRANSFER_MARKS, | 393 ScavengingVisitor<TRANSFER_MARKS, |
385 LOGGING_AND_PROFILING_DISABLED>::Initialize(); | 394 LOGGING_AND_PROFILING_DISABLED>::Initialize(); |
386 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize(); | 395 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize(); |
387 ScavengingVisitor<TRANSFER_MARKS, | 396 ScavengingVisitor<TRANSFER_MARKS, |
388 LOGGING_AND_PROFILING_ENABLED>::Initialize(); | 397 LOGGING_AND_PROFILING_ENABLED>::Initialize(); |
389 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize(); | 398 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize(); |
390 } | 399 } |
391 | 400 |
392 | 401 |
393 // static | 402 // static |
394 void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { | 403 void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object, |
404 PromotionMode promotion_mode) { | |
395 SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object)); | 405 SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object)); |
396 MapWord first_word = object->map_word(); | 406 MapWord first_word = object->map_word(); |
397 SLOW_DCHECK(!first_word.IsForwardingAddress()); | 407 SLOW_DCHECK(!first_word.IsForwardingAddress()); |
398 Map* map = first_word.ToMap(); | 408 Map* map = first_word.ToMap(); |
399 Scavenger* scavenger = map->GetHeap()->scavenge_collector_; | 409 Scavenger* scavenger = map->GetHeap()->scavenge_collector_; |
400 scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object); | 410 scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object, |
411 promotion_mode); | |
401 } | 412 } |
402 | 413 |
403 | 414 |
404 void Scavenger::SelectScavengingVisitorsTable() { | 415 void Scavenger::SelectScavengingVisitorsTable() { |
405 bool logging_and_profiling = | 416 bool logging_and_profiling = |
406 FLAG_verify_predictable || isolate()->logger()->is_logging() || | 417 FLAG_verify_predictable || isolate()->logger()->is_logging() || |
407 isolate()->cpu_profiler()->is_profiling() || | 418 isolate()->cpu_profiler()->is_profiling() || |
408 (isolate()->heap_profiler() != NULL && | 419 (isolate()->heap_profiler() != NULL && |
409 isolate()->heap_profiler()->is_tracking_object_moves()); | 420 isolate()->heap_profiler()->is_tracking_object_moves()); |
410 | 421 |
(...skipping 46 matching lines...) | |
457 | 468 |
458 void ScavengeVisitor::ScavengePointer(Object** p) { | 469 void ScavengeVisitor::ScavengePointer(Object** p) { |
459 Object* object = *p; | 470 Object* object = *p; |
460 if (!heap_->InNewSpace(object)) return; | 471 if (!heap_->InNewSpace(object)) return; |
461 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), | 472 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), |
462 reinterpret_cast<HeapObject*>(object)); | 473 reinterpret_cast<HeapObject*>(object)); |
463 } | 474 } |
464 | 475 |
465 } // namespace internal | 476 } // namespace internal |
466 } // namespace v8 | 477 } // namespace v8 |
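
Note on the review thread above: the patch threads a `PromotionMode` argument through every evacuation callback, and ulan's (truncated) comment asks that a failed promotion under `FORCE_PROMOTION` be made fatal rather than falling back to a semi-space copy; ahaas replied "Done." The sketch below is a minimal, self-contained illustration of that control flow, not V8's real code: the `SemiSpaceCopyObject`, `PromoteObject`, and `ShouldBePromoted` stand-ins and the "forced promotion" message are placeholders, and completing `FatalProce` to `FatalProcessOutOfMemory` is an assumption based on the call already present at new line 228.

```cpp
// Hypothetical, simplified control flow of EvacuateObject() after the review
// comment is addressed. All helpers below are stand-ins for illustration only.
#include <cstdio>
#include <cstdlib>

enum PromotionMode { DEFAULT_PROMOTION, FORCE_PROMOTION };

// Placeholder decision hooks; in V8 these are the semi-space copy and
// promotion paths operating on real heap objects.
static bool SemiSpaceCopyObject(int object_size) { return object_size < 64; }
static bool PromoteObject(int object_size) { return object_size < 4096; }
static bool ShouldBePromoted(int object_size) { return object_size >= 256; }

static void FatalProcessOutOfMemory(const char* msg) {
  std::fprintf(stderr, "Fatal: %s", msg);
  std::abort();
}

static void EvacuateObject(int object_size, PromotionMode promotion_mode) {
  // Under FORCE_PROMOTION the semi-space copy fast path is skipped entirely.
  if (promotion_mode != FORCE_PROMOTION && !ShouldBePromoted(object_size)) {
    // A semi-space copy may fail due to fragmentation; in that case fall
    // through and try to promote the object instead.
    if (SemiSpaceCopyObject(object_size)) return;
  }

  if (PromoteObject(object_size)) return;

  // This is what ulan's comment appears to ask for: when promotion was
  // mandatory and it failed, there is no legal fallback, so give up.
  if (promotion_mode == FORCE_PROMOTION) {
    FatalProcessOutOfMemory("Scavenger: forced promotion\n");
  }

  // Otherwise retry the copy into the other semi-space before giving up.
  if (SemiSpaceCopyObject(object_size)) return;
  FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
}

int main() {
  EvacuateObject(32, DEFAULT_PROMOTION);   // small object: copied within new space
  EvacuateObject(512, FORCE_PROMOTION);    // must be promoted; failure would be fatal
  return 0;
}
```

The real patch of course keeps V8's signatures (`map`, `slot`, `object`, `object_size`, `promotion_mode`); only the ordering of the decisions above is what the comment concerns.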