OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_H_ | 5 #ifndef V8_HEAP_SPACES_H_ |
6 #define V8_HEAP_SPACES_H_ | 6 #define V8_HEAP_SPACES_H_ |
7 | 7 |
8 #include <list> | 8 #include <list> |
9 #include <memory> | 9 #include <memory> |
| 10 #include <unordered_set> |
10 | 11 |
11 #include "src/allocation.h" | 12 #include "src/allocation.h" |
12 #include "src/base/atomic-utils.h" | 13 #include "src/base/atomic-utils.h" |
13 #include "src/base/atomicops.h" | 14 #include "src/base/atomicops.h" |
14 #include "src/base/bits.h" | 15 #include "src/base/bits.h" |
15 #include "src/base/hashmap.h" | 16 #include "src/base/hashmap.h" |
16 #include "src/base/platform/mutex.h" | 17 #include "src/base/platform/mutex.h" |
17 #include "src/flags.h" | 18 #include "src/flags.h" |
18 #include "src/heap/marking.h" | 19 #include "src/heap/marking.h" |
19 #include "src/list.h" | 20 #include "src/list.h" |
(...skipping 323 matching lines...)
343 kWriteBarrierCounterOffset + | 344 kWriteBarrierCounterOffset + |
344 kIntptrSize // intptr_t write_barrier_counter_ | 345 kIntptrSize // intptr_t write_barrier_counter_ |
345 + kPointerSize // AtomicValue high_water_mark_ | 346 + kPointerSize // AtomicValue high_water_mark_ |
346 + kPointerSize // base::Mutex* mutex_ | 347 + kPointerSize // base::Mutex* mutex_ |
347 + kPointerSize // base::AtomicWord concurrent_sweeping_ | 348 + kPointerSize // base::AtomicWord concurrent_sweeping_ |
348 + 2 * kPointerSize // AtomicNumber free-list statistics | 349 + 2 * kPointerSize // AtomicNumber free-list statistics |
349 + kPointerSize // AtomicValue next_chunk_ | 350 + kPointerSize // AtomicValue next_chunk_ |
350 + kPointerSize // AtomicValue prev_chunk_ | 351 + kPointerSize // AtomicValue prev_chunk_ |
351 // FreeListCategory categories_[kNumberOfCategories] | 352 // FreeListCategory categories_[kNumberOfCategories] |
352 + FreeListCategory::kSize * kNumberOfCategories + | 353 + FreeListCategory::kSize * kNumberOfCategories + |
353 kPointerSize; // LocalArrayBufferTracker* local_tracker_; | 354 kPointerSize // LocalArrayBufferTracker* local_tracker_; |
| 355 // std::unordered_set<Address>* black_area_end_marker_map_ |
| 356 + kPointerSize; |
354 | 357 |
355 // We add some more space to the computed header size to account for missing | 358 // We add some more space to the computed header size to account for missing |
356 // alignment requirements in our computation. | 359 // alignment requirements in our computation. |
357 // Try to get kHeaderSize properly aligned on 32-bit and 64-bit machines. | 360 // Try to get kHeaderSize properly aligned on 32-bit and 64-bit machines. |
358 static const size_t kHeaderSize = kMinHeaderSize; | 361 static const size_t kHeaderSize = kMinHeaderSize; |
359 | 362 |
360 static const int kBodyOffset = | 363 static const int kBodyOffset = |
361 CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize); | 364 CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize); |
362 | 365 |
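For reference, the header-size bookkeeping above is straightforward field accounting: each member contributes its size to kMinHeaderSize (the new black_area_end_marker_map_ pointer adds one kPointerSize), and kBodyOffset then rounds the result up so the object body starts at a code-pointer-aligned offset. A minimal sketch of that round-up step, assuming a power-of-two alignment; the names and the 32-byte alignment below are illustrative, not V8's actual constants or the definition of CODE_POINTER_ALIGN:

#include <cstddef>

// Round `size` up to the next multiple of `alignment`.
// `alignment` must be a power of two.
constexpr size_t RoundUpToAlignment(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}

// With an illustrative 32-byte code alignment, an unaligned header of
// 200 bytes rounds up to 224, and adding one more pointer-sized field
// happens to land in the same aligned slot for these example numbers.
static_assert(RoundUpToAlignment(200, 32) == 224, "rounds up");
static_assert(RoundUpToAlignment(200 + sizeof(void*), 32) == 224,
              "one extra pointer still rounds to the same offset here");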
363 // The start offset of the object area in a page. Aligned to both maps and | 366 // The start offset of the object area in a page. Aligned to both maps and |
(...skipping 221 matching lines...)
585 owner_ = reinterpret_cast<Address>(space) + kPageHeaderTag; | 588 owner_ = reinterpret_cast<Address>(space) + kPageHeaderTag; |
586 DCHECK((reinterpret_cast<intptr_t>(owner_) & kPageHeaderTagMask) == | 589 DCHECK((reinterpret_cast<intptr_t>(owner_) & kPageHeaderTagMask) == |
587 kPageHeaderTag); | 590 kPageHeaderTag); |
588 } | 591 } |
589 | 592 |
590 bool HasPageHeader() { return owner() != nullptr; } | 593 bool HasPageHeader() { return owner() != nullptr; } |
591 | 594 |
592 void InsertAfter(MemoryChunk* other); | 595 void InsertAfter(MemoryChunk* other); |
593 void Unlink(); | 596 void Unlink(); |
594 | 597 |
| 598 void ReleaseBlackAreaEndMarkerMap() { |
| 599 if (black_area_end_marker_map_) { |
| 600 delete black_area_end_marker_map_; |
| 601 black_area_end_marker_map_ = nullptr; |
| 602 } |
| 603 } |
| 604 |
| 605 bool IsBlackAreaEndMarker(Address address) { |
| 606 if (black_area_end_marker_map_) { |
| 607 return black_area_end_marker_map_->find(address) != |
| 608 black_area_end_marker_map_->end(); |
| 609 } |
| 610 return false; |
| 611 } |
| 612 |
| 613 void AddBlackAreaEndMarker(Address address) { |
| 614 if (!black_area_end_marker_map_) { |
| 615 black_area_end_marker_map_ = new std::unordered_set<Address>(); |
| 616 } |
| 617 auto ret = black_area_end_marker_map_->insert(address); |
| 618 USE(ret); |
| 619 // Check that we inserted a new black area end marker. |
| 620 DCHECK(ret.second); |
| 621 } |
| 622 |
| 623 bool HasBlackAreas() { return black_area_end_marker_map_ != nullptr; } |
| 624 |
595 protected: | 625 protected: |
596 static MemoryChunk* Initialize(Heap* heap, Address base, size_t size, | 626 static MemoryChunk* Initialize(Heap* heap, Address base, size_t size, |
597 Address area_start, Address area_end, | 627 Address area_start, Address area_end, |
598 Executability executable, Space* owner, | 628 Executability executable, Space* owner, |
599 base::VirtualMemory* reservation); | 629 base::VirtualMemory* reservation); |
600 | 630 |
601 // Should be called when memory chunk is about to be freed. | 631 // Should be called when memory chunk is about to be freed. |
602 void ReleaseAllocatedMemory(); | 632 void ReleaseAllocatedMemory(); |
603 | 633 |
604 base::VirtualMemory* reserved_memory() { return &reservation_; } | 634 base::VirtualMemory* reserved_memory() { return &reservation_; } |
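The black-area bookkeeping added above is a lazily allocated per-chunk set of end addresses: incremental marking records where a black-allocated area ends via AddBlackAreaEndMarker(), later page walks ask IsBlackAreaEndMarker(), and ReleaseBlackAreaEndMarkerMap() drops the whole set once it is no longer needed. A standalone sketch of that call pattern follows; the ChunkSketch type, the plain uintptr_t addresses, and the call sites are illustrative stand-ins, not the real marking or sweeping code:

#include <cstdint>
#include <unordered_set>

typedef uintptr_t Address;  // simplified stand-in for V8's Address

class ChunkSketch {
 public:
  ChunkSketch() : black_area_end_marker_map_(nullptr) {}
  ~ChunkSketch() { ReleaseBlackAreaEndMarkerMap(); }

  // Marking records where a black-allocated area ends.
  void AddBlackAreaEndMarker(Address end) {
    if (!black_area_end_marker_map_) {
      black_area_end_marker_map_ = new std::unordered_set<Address>();
    }
    black_area_end_marker_map_->insert(end);
  }

  // A later walk over the page asks whether an address marks such an end.
  bool IsBlackAreaEndMarker(Address address) const {
    return black_area_end_marker_map_ != nullptr &&
           black_area_end_marker_map_->count(address) > 0;
  }

  bool HasBlackAreas() const { return black_area_end_marker_map_ != nullptr; }

  // Once the markers are no longer needed, the whole set is dropped.
  void ReleaseBlackAreaEndMarkerMap() {
    delete black_area_end_marker_map_;
    black_area_end_marker_map_ = nullptr;
  }

 private:
  std::unordered_set<Address>* black_area_end_marker_map_;
};

int main() {
  ChunkSketch chunk;
  chunk.AddBlackAreaEndMarker(0x1040);
  return (chunk.HasBlackAreas() && chunk.IsBlackAreaEndMarker(0x1040)) ? 0 : 1;
}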
(...skipping 48 matching lines...)
653 | 683 |
654 // next_chunk_ holds a pointer of type MemoryChunk | 684 // next_chunk_ holds a pointer of type MemoryChunk |
655 base::AtomicValue<MemoryChunk*> next_chunk_; | 685 base::AtomicValue<MemoryChunk*> next_chunk_; |
656 // prev_chunk_ holds a pointer of type MemoryChunk | 686 // prev_chunk_ holds a pointer of type MemoryChunk |
657 base::AtomicValue<MemoryChunk*> prev_chunk_; | 687 base::AtomicValue<MemoryChunk*> prev_chunk_; |
658 | 688 |
659 FreeListCategory categories_[kNumberOfCategories]; | 689 FreeListCategory categories_[kNumberOfCategories]; |
660 | 690 |
661 LocalArrayBufferTracker* local_tracker_; | 691 LocalArrayBufferTracker* local_tracker_; |
662 | 692 |
| 693 // Stores the end addresses of black areas. |
| 694 std::unordered_set<Address>* black_area_end_marker_map_; |
| 695 |
663 private: | 696 private: |
664 void InitializeReservedMemory() { reservation_.Reset(); } | 697 void InitializeReservedMemory() { reservation_.Reset(); } |
665 | 698 |
666 friend class MemoryAllocator; | 699 friend class MemoryAllocator; |
667 friend class MemoryChunkValidator; | 700 friend class MemoryChunkValidator; |
668 }; | 701 }; |
669 | 702 |
670 // ----------------------------------------------------------------------------- | 703 // ----------------------------------------------------------------------------- |
671 // A page is a memory chunk of size 1MB. Large object pages may be larger. | 704 // A page is a memory chunk of size 1MB. Large object pages may be larger. |
672 // | 705 // |
(...skipping 801 matching lines...)
1474 | 1507 |
1475 | 1508 |
1476 // ----------------------------------------------------------------------------- | 1509 // ----------------------------------------------------------------------------- |
1477 // A space has a circular list of pages. The next page can be accessed via | 1510 // A space has a circular list of pages. The next page can be accessed via |
1478 // Page::next_page() call. | 1511 // Page::next_page() call. |
1479 | 1512 |
1480 // An abstraction of allocation and relocation pointers in a page-structured | 1513 // An abstraction of allocation and relocation pointers in a page-structured |
1481 // space. | 1514 // space. |
1482 class AllocationInfo { | 1515 class AllocationInfo { |
1483 public: | 1516 public: |
1484 AllocationInfo() : top_(nullptr), limit_(nullptr) {} | 1517 AllocationInfo() : original_top_(nullptr), top_(nullptr), limit_(nullptr) {} |
1485 AllocationInfo(Address top, Address limit) : top_(top), limit_(limit) {} | 1518 AllocationInfo(Address top, Address limit) |
| 1519 : original_top_(top), top_(top), limit_(limit) {} |
1486 | 1520 |
1487 void Reset(Address top, Address limit) { | 1521 void Reset(Address top, Address limit) { |
| 1522 original_top_ = top; |
1488 set_top(top); | 1523 set_top(top); |
1489 set_limit(limit); | 1524 set_limit(limit); |
1490 } | 1525 } |
1491 | 1526 |
| 1527 Address original_top() { |
| 1528 SLOW_DCHECK(original_top_ == NULL || |
| 1529 (reinterpret_cast<intptr_t>(original_top_) & kHeapObjectTagMask) == 0); |
| 1530 return original_top_; |
| 1531 } |
| 1532 |
1492 INLINE(void set_top(Address top)) { | 1533 INLINE(void set_top(Address top)) { |
1493 SLOW_DCHECK(top == NULL || | 1534 SLOW_DCHECK(top == NULL || |
1494 (reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0); | 1535 (reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0); |
1495 top_ = top; | 1536 top_ = top; |
1496 } | 1537 } |
1497 | 1538 |
1498 INLINE(Address top()) const { | 1539 INLINE(Address top()) const { |
1499 SLOW_DCHECK(top_ == NULL || | 1540 SLOW_DCHECK(top_ == NULL || |
1500 (reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0); | 1541 (reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0); |
1501 return top_; | 1542 return top_; |
(...skipping 13 matching lines...)
1515 | 1556 |
1516 #ifdef DEBUG | 1557 #ifdef DEBUG |
1517 bool VerifyPagedAllocation() { | 1558 bool VerifyPagedAllocation() { |
1518 return (Page::FromAllocationAreaAddress(top_) == | 1559 return (Page::FromAllocationAreaAddress(top_) == |
1519 Page::FromAllocationAreaAddress(limit_)) && | 1560 Page::FromAllocationAreaAddress(limit_)) && |
1520 (top_ <= limit_); | 1561 (top_ <= limit_); |
1521 } | 1562 } |
1522 #endif | 1563 #endif |
1523 | 1564 |
1524 private: | 1565 private: |
| 1566 // The original top address when the allocation info was initialized. |
| 1567 Address original_top_; |
1525 // Current allocation top. | 1568 // Current allocation top. |
1526 Address top_; | 1569 Address top_; |
1527 // Current allocation limit. | 1570 // Current allocation limit. |
1528 Address limit_; | 1571 Address limit_; |
1529 }; | 1572 }; |
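AllocationInfo is the usual bump-pointer pair: top_ advances towards limit_ on each allocation, and the newly added original_top_ remembers where the linear allocation area started at the last Reset(), which lets callers compute how much has been bump-allocated since then. A minimal sketch of that behaviour, using plain uintptr_t addresses instead of V8's Address type; the class and helper below are illustrative only:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Simplified linear allocation area: top_ bumps towards limit_,
// original_top_ keeps the value passed to the last Reset().
class AllocationInfoSketch {
 public:
  AllocationInfoSketch() : original_top_(0), top_(0), limit_(0) {}

  void Reset(uintptr_t top, uintptr_t limit) {
    original_top_ = top;
    top_ = top;
    limit_ = limit;
  }

  // Returns the start address of the new block, or 0 if it does not fit.
  uintptr_t Allocate(size_t size_in_bytes) {
    if (limit_ - top_ < size_in_bytes) return 0;
    uintptr_t result = top_;
    top_ += size_in_bytes;
    return result;
  }

  // Bytes handed out since the last Reset().
  size_t allocated_since_reset() const {
    return static_cast<size_t>(top_ - original_top_);
  }

 private:
  uintptr_t original_top_;
  uintptr_t top_;
  uintptr_t limit_;
};

int main() {
  AllocationInfoSketch info;
  info.Reset(0x1000, 0x2000);  // a 4 KiB linear allocation area
  info.Allocate(64);
  info.Allocate(128);
  std::printf("allocated since reset: %zu bytes\n",
              info.allocated_since_reset());  // prints 192
  return 0;
}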
1530 | 1573 |
1531 | 1574 |
1532 // An abstraction of the accounting statistics of a page-structured space. | 1575 // An abstraction of the accounting statistics of a page-structured space. |
1533 // | 1576 // |
1534 // The stats are only set by functions that ensure they stay balanced. These | 1577 // The stats are only set by functions that ensure they stay balanced. These |
(...skipping 1450 matching lines...)
2985 count = 0; | 3028 count = 0; |
2986 } | 3029 } |
2987 // Must be small, since an iteration is used for lookup. | 3030 // Must be small, since an iteration is used for lookup. |
2988 static const int kMaxComments = 64; | 3031 static const int kMaxComments = 64; |
2989 }; | 3032 }; |
2990 #endif | 3033 #endif |
2991 } // namespace internal | 3034 } // namespace internal |
2992 } // namespace v8 | 3035 } // namespace v8 |
2993 | 3036 |
2994 #endif // V8_HEAP_SPACES_H_ | 3037 #endif // V8_HEAP_SPACES_H_ |