Index: src/heap/scavenger.cc
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index 40aeb74aa95641039587ebec0cfa00be0a8fedaf..3f532ead6265f0f07d8530a1696c7f5f35bf5d43 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -186,7 +186,9 @@ class ScavengingVisitor : public StaticVisitorBase {
       *slot = target;

       if (object_contents == POINTER_OBJECT) {
-        heap->promotion_queue()->insert(target, object_size);
+        heap->promotion_queue()->insert(
+            target, object_size,
+            Marking::IsBlack(Marking::MarkBitFrom(object)));
       }
       heap->IncrementPromotedObjectsSize(object_size);
       return true;
@@ -236,7 +238,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     if (Marking::IsBlack(mark_bit)) {
       // This object is black and it might not be rescanned by marker.
       // We should explicitly record code entry slot for compaction because
-      // promotion queue processing (IterateAndMarkPointersToFromSpace) will
+      // promotion queue processing (IteratePromotedObjectPointers) will
      // miss it as it is not HeapObject-tagged.
       Address code_entry_slot =
           target->address() + JSFunction::kCodeEntryOffset;
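
The first hunk extends the promotion queue's insert() so that each queued entry also records whether the promoted object was already marked black, sampled once at promotion time via Marking::IsBlack(Marking::MarkBitFrom(object)). Below is a minimal, self-contained sketch of that idea, not V8's actual PromotionQueue: the Entry layout, the DrainTo() helper, and all names are illustrative assumptions.

    // Sketch (not V8's real PromotionQueue) of a promotion queue whose
    // entries remember whether the promoted object was already black when
    // it left new space, mirroring the three-argument insert() in the diff.
    #include <cstdint>
    #include <iostream>
    #include <vector>

    class PromotionQueue {
     public:
      struct Entry {
        uintptr_t address;      // where the object now lives in old space
        int size;               // object size in bytes
        bool was_marked_black;  // marking state captured at promotion time
      };

      // The caller samples the mark bit once, at promotion time, and the
      // flag travels with the entry instead of being looked up again later.
      void insert(uintptr_t address, int size, bool was_marked_black) {
        entries_.push_back({address, size, was_marked_black});
      }

      // Drains the queue; a consumer could treat black entries specially,
      // e.g. re-scan them for pointers into new space, since the marker
      // will not visit them again.
      template <typename Visitor>
      void DrainTo(Visitor visit) {
        for (const Entry& e : entries_) visit(e);
        entries_.clear();
      }

     private:
      std::vector<Entry> entries_;
    };

    int main() {
      PromotionQueue queue;
      queue.insert(0x1000, 64, /*was_marked_black=*/true);
      queue.insert(0x2000, 32, /*was_marked_black=*/false);
      queue.DrainTo([](const PromotionQueue::Entry& e) {
        std::cout << std::hex << e.address << std::dec << " size=" << e.size
                  << " black=" << e.was_marked_black << "\n";
      });
      return 0;
    }

One design note: capturing the mark bit at insert time means the drain phase does not need to consult the mark bitmap per entry, and it records the object's state as of promotion rather than whatever it is when the queue is processed.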
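The second hunk's comment hinges on pointer tagging: a slot scan such as IteratePromotedObjectPointers only visits words that look like tagged HeapObject pointers, and a JSFunction's code entry is a raw address, so it is invisible to that scan and must be recorded in a slots buffer explicitly. The following is a rough sketch of that filtering under a simplified tag scheme; the field layout and helper below are assumptions for illustration, not V8's real object layout.

    // Sketch of why an untagged slot is skipped by a scan that recognizes
    // only tagged heap pointers. Here the low bit set marks a tagged
    // pointer; a raw code entry address has the low bit clear.
    #include <cstdint>
    #include <iostream>
    #include <vector>

    constexpr uintptr_t kHeapObjectTag = 1;  // low bit set => tagged pointer

    bool IsTaggedHeapPointer(uintptr_t word) {
      return (word & kHeapObjectTag) == kHeapObjectTag;
    }

    int main() {
      // Pretend object body: one tagged pointer field, one raw code entry.
      std::vector<uintptr_t> object_fields = {
          0x12340 | kHeapObjectTag,  // tagged pointer: the scan visits this
          0x56780,                   // raw code entry address: low bit clear
      };

      std::vector<uintptr_t> visited;
      for (uintptr_t word : object_fields) {
        if (IsTaggedHeapPointer(word)) visited.push_back(word);  // slot scan
      }

      // Only the tagged field was visited; the code entry slot was skipped
      // and would have to be recorded by hand, which is why the diff records
      // code_entry_slot explicitly for black objects.
      std::cout << "visited " << visited.size() << " of "
                << object_fields.size() << " fields\n";
      return 0;
    }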