OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1871 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1882 while (!promotion_queue()->is_empty()) { | 1882 while (!promotion_queue()->is_empty()) { |
1883 HeapObject* target; | 1883 HeapObject* target; |
1884 int size; | 1884 int size; |
1885 promotion_queue()->remove(&target, &size); | 1885 promotion_queue()->remove(&target, &size); |
1886 | 1886 |
1887 // Promoted object might already be partially visited | 1887 // Promoted object might already be partially visited |
1888 // during old space pointer iteration. Thus we search specifically | 1888 // during old space pointer iteration. Thus we search specifically |
1889 // for pointers to from semispace instead of looking for pointers | 1889 // for pointers to from semispace instead of looking for pointers |
1890 // to new space. | 1890 // to new space. |
1891 DCHECK(!target->IsMap()); | 1891 DCHECK(!target->IsMap()); |
1892 Address obj_address = target->address(); | |
1893 | 1892 |
1894 // We are not collecting slots on new space objects during mutation | 1893 IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject); |
1895 // thus we have to scan for pointers to evacuation candidates when we | |
1896 // promote objects. But we should not record any slots in non-black | |
1897 // objects. Grey object's slots would be rescanned. | |
1898 // White object might not survive until the end of collection | |
1899 // it would be a violation of the invariant to record its slots. | |
1900 bool record_slots = false; | |
1901 if (incremental_marking()->IsCompacting()) { | |
1902 MarkBit mark_bit = Marking::MarkBitFrom(target); | |
1903 record_slots = Marking::IsBlack(mark_bit); | |
1904 } | |
1905 #if V8_DOUBLE_FIELDS_UNBOXING | |
1906 LayoutDescriptorHelper helper(target->map()); | |
1907 bool has_only_tagged_fields = helper.all_fields_tagged(); | |
1908 | |
1909 if (!has_only_tagged_fields) { | |
1910 for (int offset = 0; offset < size;) { | |
1911 int end_of_region_offset; | |
1912 if (helper.IsTagged(offset, size, &end_of_region_offset)) { | |
1913 IterateAndMarkPointersToFromSpace( | |
1914 target, obj_address + offset, | |
1915 obj_address + end_of_region_offset, record_slots, | |
1916 &Scavenger::ScavengeObject); | |
1917 } | |
1918 offset = end_of_region_offset; | |
1919 } | |
1920 } else { | |
1921 #endif | |
1922 IterateAndMarkPointersToFromSpace(target, obj_address, | |
1923 obj_address + size, record_slots, | |
1924 &Scavenger::ScavengeObject); | |
1925 #if V8_DOUBLE_FIELDS_UNBOXING | |
1926 } | |
1927 #endif | |
1928 } | 1894 } |
1929 } | 1895 } |
1930 | 1896 |
1931 // Take another spin if there are now unswept objects in new space | 1897 // Take another spin if there are now unswept objects in new space |
1932 // (there are currently no more unswept promoted objects). | 1898 // (there are currently no more unswept promoted objects). |
1933 } while (new_space_front != new_space_.top()); | 1899 } while (new_space_front != new_space_.top()); |
1934 | 1900 |
1935 return new_space_front; | 1901 return new_space_front; |
1936 } | 1902 } |
1937 | 1903 |
(...skipping 2529 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4467 } else if (record_slots && | 4433 } else if (record_slots && |
4468 MarkCompactCollector::IsOnEvacuationCandidate(target)) { | 4434 MarkCompactCollector::IsOnEvacuationCandidate(target)) { |
4469 mark_compact_collector()->RecordSlot(object, slot, target); | 4435 mark_compact_collector()->RecordSlot(object, slot, target); |
4470 } | 4436 } |
4471 } | 4437 } |
4472 slot_address += kPointerSize; | 4438 slot_address += kPointerSize; |
4473 } | 4439 } |
4474 } | 4440 } |
4475 | 4441 |
4476 | 4442 |
| 4443 void Heap::IteratePointersToFromSpace(HeapObject* target, int size, |
| 4444 ObjectSlotCallback callback) { |
| 4445 Address obj_address = target->address(); |
| 4446 |
| 4447 // We are not collecting slots on new space objects during mutation |
| 4448 // thus we have to scan for pointers to evacuation candidates when we |
| 4449 // promote objects. But we should not record any slots in non-black |
| 4450 // objects. Grey object's slots would be rescanned. |
| 4451 // White object might not survive until the end of collection |
| 4452 // it would be a violation of the invariant to record its slots. |
| 4453 bool record_slots = false; |
| 4454 if (incremental_marking()->IsCompacting()) { |
| 4455 MarkBit mark_bit = Marking::MarkBitFrom(target); |
| 4456 record_slots = Marking::IsBlack(mark_bit); |
| 4457 } |
| 4458 |
| 4459 // Do not scavenge JSArrayBuffer's contents |
| 4460 switch (target->ContentType()) { |
| 4461 case HeapObjectContents::kTaggedValues: { |
| 4462 IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size, |
| 4463 record_slots, callback); |
| 4464 break; |
| 4465 } |
| 4466 case HeapObjectContents::kMixedValues: { |
| 4467 if (target->IsFixedTypedArrayBase()) { |
| 4468 IterateAndMarkPointersToFromSpace( |
| 4469 target, obj_address + FixedTypedArrayBase::kBasePointerOffset, |
| 4470 obj_address + FixedTypedArrayBase::kHeaderSize, record_slots, |
| 4471 callback); |
| 4472 } else if (target->IsBytecodeArray()) { |
| 4473 IterateAndMarkPointersToFromSpace( |
| 4474 target, obj_address + BytecodeArray::kConstantPoolOffset, |
| 4475 obj_address + BytecodeArray::kHeaderSize, record_slots, callback); |
| 4476 } else if (target->IsJSArrayBuffer()) { |
| 4477 IterateAndMarkPointersToFromSpace( |
| 4478 target, obj_address, |
| 4479 obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize, |
| 4480 record_slots, callback); |
| 4481 IterateAndMarkPointersToFromSpace( |
| 4482 target, obj_address + JSArrayBuffer::kSize, obj_address + size, |
| 4483 record_slots, callback); |
| 4484 #if V8_DOUBLE_FIELDS_UNBOXING |
| 4485 } else if (FLAG_unbox_double_fields) { |
| 4486 LayoutDescriptorHelper helper(target->map()); |
| 4487 DCHECK(!helper.all_fields_tagged()); |
| 4488 |
| 4489 for (int offset = 0; offset < size;) { |
| 4490 int end_of_region_offset; |
| 4491 if (helper.IsTagged(offset, size, &end_of_region_offset)) { |
| 4492 IterateAndMarkPointersToFromSpace( |
| 4493 target, obj_address + offset, |
| 4494 obj_address + end_of_region_offset, record_slots, callback); |
| 4495 } |
| 4496 offset = end_of_region_offset; |
| 4497 } |
| 4498 #endif |
| 4499 } |
| 4500 break; |
| 4501 } |
| 4502 case HeapObjectContents::kRawValues: { |
| 4503 break; |
| 4504 } |
| 4505 } |
| 4506 } |
| 4507 |
| 4508 |
4477 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 4509 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
4478 IterateStrongRoots(v, mode); | 4510 IterateStrongRoots(v, mode); |
4479 IterateWeakRoots(v, mode); | 4511 IterateWeakRoots(v, mode); |
4480 } | 4512 } |
4481 | 4513 |
4482 | 4514 |
4483 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { | 4515 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { |
4484 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex])); | 4516 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex])); |
4485 v->Synchronize(VisitorSynchronization::kStringTable); | 4517 v->Synchronize(VisitorSynchronization::kStringTable); |
4486 if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { | 4518 if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) { |
(...skipping 1633 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6120 } | 6152 } |
6121 | 6153 |
6122 | 6154 |
6123 // static | 6155 // static |
6124 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6156 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6125 return StaticVisitorBase::GetVisitorId(map); | 6157 return StaticVisitorBase::GetVisitorId(map); |
6126 } | 6158 } |
6127 | 6159 |
6128 } // namespace internal | 6160 } // namespace internal |
6129 } // namespace v8 | 6161 } // namespace v8 |
OLD | NEW |