| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 366 matching lines...) |
| 377 BasePage* next() const { return m_next; } | 377 BasePage* next() const { return m_next; } |
| 378 | 378 |
| 379 // virtual methods are slow. So performance-sensitive methods | 379 // virtual methods are slow. So performance-sensitive methods |
| 380 // should be defined as non-virtual methods on NormalPage and LargeObjectPage. | 380 // should be defined as non-virtual methods on NormalPage and LargeObjectPage. |
| 381 // The following methods are not performance-sensitive. | 381 // The following methods are not performance-sensitive. |
| 382 virtual size_t objectPayloadSizeForTesting() = 0; | 382 virtual size_t objectPayloadSizeForTesting() = 0; |
| 383 virtual bool isEmpty() = 0; | 383 virtual bool isEmpty() = 0; |
| 384 virtual void removeFromHeap() = 0; | 384 virtual void removeFromHeap() = 0; |
| 385 virtual void sweep() = 0; | 385 virtual void sweep() = 0; |
| 386 virtual void markUnmarkedObjectsDead() = 0; | 386 virtual void markUnmarkedObjectsDead() = 0; |
| 387 #if defined(ADDRESS_SANITIZER) |
| 388 virtual void poisonUnmarkedObjects() = 0; |
| 389 #endif |
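The comment above motivates why these pure virtuals stay off hot paths: performance-sensitive callers are expected to branch on the concrete page type once and then call non-virtual methods directly. A minimal sketch of that pattern, assuming BasePage exposes an isLargeObjectPage() predicate and that both page classes define a non-virtual payloadSize(); neither declaration is visible in the hunks shown here:

    // Sketch only: dispatch once on the page kind, then call the concrete,
    // non-virtual method; isLargeObjectPage() and payloadSize() are assumed.
    inline size_t pagePayloadSize(BasePage* page)
    {
        if (page->isLargeObjectPage())
            return static_cast<LargeObjectPage*>(page)->payloadSize();
        return static_cast<NormalPage*>(page)->payloadSize();
    }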
| 387 // Check if the given address points to an object in this | 390 // Check if the given address points to an object in this |
| 388 // heap page. If so, find the start of that object and mark it | 391 // heap page. If so, find the start of that object and mark it |
| 389 // using the given Visitor. Otherwise do nothing. The pointer must | 392 // using the given Visitor. Otherwise do nothing. The pointer must |
| 390 // be within the same aligned blinkPageSize as the this-pointer. | 393 // be within the same aligned blinkPageSize as the this-pointer. |
| 391 // | 394 // |
| 392 // This is used during conservative stack scanning to | 395 // This is used during conservative stack scanning to |
| 393 // conservatively mark all objects that could be referenced from | 396 // conservatively mark all objects that could be referenced from |
| 394 // the stack. | 397 // the stack. |
| 395 virtual void checkAndMarkPointer(Visitor*, Address) = 0; | 398 virtual void checkAndMarkPointer(Visitor*, Address) = 0; |
| 396 virtual void markOrphaned(); | 399 virtual void markOrphaned(); |
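checkAndMarkPointer() is the hook that conservative stack scanning relies on. A hedged sketch of how a scan loop might use it, assuming a pageFromAddress() lookup (hypothetical here) that maps an arbitrary address to the BasePage covering it, or null if it hits no heap page:

    // Every word on the stack is treated as a potential pointer: resolve it to
    // the page that covers it and let the page decide whether it points into a
    // live object that should be marked. Addresses outside the heap are skipped.
    void conservativelyScanStack(Visitor* visitor, Address* start, Address* end)
    {
        for (Address* slot = start; slot < end; ++slot) {
            if (BasePage* page = pageFromAddress(*slot)) // hypothetical lookup
                page->checkAndMarkPointer(visitor, *slot);
        }
    }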
| (...skipping 64 matching lines...) |
| 461 bool containedInObjectPayload(Address address) | 464 bool containedInObjectPayload(Address address) |
| 462 { | 465 { |
| 463 return payload() <= address && address < payloadEnd(); | 466 return payload() <= address && address < payloadEnd(); |
| 464 } | 467 } |
| 465 | 468 |
| 466 virtual size_t objectPayloadSizeForTesting() override; | 469 virtual size_t objectPayloadSizeForTesting() override; |
| 467 virtual bool isEmpty() override; | 470 virtual bool isEmpty() override; |
| 468 virtual void removeFromHeap() override; | 471 virtual void removeFromHeap() override; |
| 469 virtual void sweep() override; | 472 virtual void sweep() override; |
| 470 virtual void markUnmarkedObjectsDead() override; | 473 virtual void markUnmarkedObjectsDead() override; |
| 474 #if defined(ADDRESS_SANITIZER) |
| 475 virtual void poisonUnmarkedObjects() override; |
| 476 #endif |
| 471 virtual void checkAndMarkPointer(Visitor*, Address) override; | 477 virtual void checkAndMarkPointer(Visitor*, Address) override; |
| 472 virtual void markOrphaned() override; | 478 virtual void markOrphaned() override; |
| 473 #if ENABLE(GC_PROFILING) | 479 #if ENABLE(GC_PROFILING) |
| 474 const GCInfo* findGCInfo(Address) override; | 480 const GCInfo* findGCInfo(Address) override; |
| 475 void snapshot(TracedValue*, ThreadState::SnapshotInfo*) override; | 481 void snapshot(TracedValue*, ThreadState::SnapshotInfo*) override; |
| 476 void incrementMarkedObjectsAge() override; | 482 void incrementMarkedObjectsAge() override; |
| 477 void countMarkedObjects(ClassAgeCountsMap&) override; | 483 void countMarkedObjects(ClassAgeCountsMap&) override; |
| 478 void countObjectsToSweep(ClassAgeCountsMap&) override; | 484 void countObjectsToSweep(ClassAgeCountsMap&) override; |
| 479 #endif | 485 #endif |
| 480 #if ENABLE(ASSERT) || ENABLE(GC_PROFILING) | 486 #if ENABLE(ASSERT) || ENABLE(GC_PROFILING) |
| 481 // Returns true for the whole blinkPageSize page that the page is on, even | 487 // Returns true for the whole blinkPageSize page that the page is on, even |
| 482 // for the header, and the unmapped guard page at the start. That ensures | 488 // for the header, and the unmapped guard page at the start. That ensures |
| 483 // the result can be used to populate the negative page cache. | 489 // the result can be used to populate the negative page cache. |
| 484 virtual bool contains(Address) override; | 490 virtual bool contains(Address) override; |
| 485 #endif | 491 #endif |
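The contains() contract described above claims the entire aligned blinkPageSize region, page header and leading guard page included, so a negative result is safe to cache for any address on that aligned page. A sketch of what such a test could look like, assuming roundToBlinkPageStart() is the page-alignment helper defined elsewhere in Heap.h; this is not necessarily the CL's actual implementation:

    // Sketch under that assumption: answer for the whole aligned blink page,
    // which makes the result usable for the negative page cache.
    bool NormalPage::contains(Address addr)
    {
        Address blinkPageStart = roundToBlinkPageStart(address());
        return blinkPageStart <= addr && addr < blinkPageStart + blinkPageSize;
    }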
| 486 virtual size_t size() override { return blinkPageSize; } | 492 virtual size_t size() override { return blinkPageSize; } |
| 487 static size_t pageHeaderSize() | 493 static size_t pageHeaderSize() |
| 488 { | 494 { |
| 489 // Compute the amount of padding we have to add to a header to make | 495 // Compute the amount of padding we have to add to a header to make |
| 490 // the size of the header plus the padding a multiple of 8 bytes. | 496 // the size of the header plus the padding a multiple of 8 bytes. |
| 491 size_t paddingSize = (sizeof(NormalPage) + allocationGranularity - (sizeof(HeapObjectHeader) % allocationGranularity)) % allocationGranularity; | 497 size_t paddingSize = (sizeof(NormalPage) + allocationGranularity - (sizeof(HeapObjectHeader) % allocationGranularity)) % allocationGranularity; |
| 492 return sizeof(NormalPage) + paddingSize; | 498 return sizeof(NormalPage) + paddingSize; |
| 493 } | 499 } |
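pageHeaderSize() is what separates a page's metadata from its payload. As a hedged illustration only (the accessor definitions are elided from this diff), the payload() referenced by containedInObjectPayload() above would typically be derived from it like so, assuming address() returns the start of the page:

    // Illustrative sketch: the payload begins right after the padded header.
    Address NormalPage::payload()
    {
        return address() + pageHeaderSize();
    }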
| 494 | 500 |
| 495 | 501 |
| 496 NormalPageHeap* heapForNormalPage(); | 502 NormalPageHeap* heapForNormalPage(); |
| 497 void clearObjectStartBitMap(); | 503 void clearObjectStartBitMap(); |
| 498 | 504 |
| 499 #if defined(ADDRESS_SANITIZER) | |
| 500 void poisonUnmarkedObjects(); | |
| 501 #endif | |
| 502 | |
| 503 private: | 505 private: |
| 504 HeapObjectHeader* findHeaderFromAddress(Address); | 506 HeapObjectHeader* findHeaderFromAddress(Address); |
| 505 void populateObjectStartBitMap(); | 507 void populateObjectStartBitMap(); |
| 506 bool isObjectStartBitMapComputed() { return m_objectStartBitMapComputed; } | 508 bool isObjectStartBitMapComputed() { return m_objectStartBitMapComputed; } |
| 507 | 509 |
| 508 bool m_objectStartBitMapComputed; | 510 bool m_objectStartBitMapComputed; |
| 509 uint8_t m_objectStartBitMap[reservedForObjectBitMap]; | 511 uint8_t m_objectStartBitMap[reservedForObjectBitMap]; |
| 510 }; | 512 }; |
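The new poisonUnmarkedObjects() override is the point of this change: under ASan, objects that survived marking stay accessible while unmarked objects are poisoned until they are swept. A minimal sketch of the idea, assuming HeapObjectHeader helpers such as isFree(), isMarked(), payload(), payloadSize(), and size(), and using the public ASan interface; this is not the CL's actual implementation:

    #include <sanitizer/asan_interface.h>

    // Sketch: walk the page's objects and poison the payload of every object
    // that was not marked live, so any access before sweeping trips ASan.
    void NormalPage::poisonUnmarkedObjects()
    {
        for (Address headerAddress = payload(); headerAddress < payloadEnd();) {
            HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress);
            if (!header->isFree() && !header->isMarked())
                ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize());
            headerAddress += header->size();
        }
    }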
| 511 | 513 |
| 512 // Large allocations are allocated as separate objects and linked in a list. | 514 // Large allocations are allocated as separate objects and linked in a list. |
| (...skipping 11 matching lines...) |
| 524 bool containedInObjectPayload(Address address) | 526 bool containedInObjectPayload(Address address) |
| 525 { | 527 { |
| 526 return payload() <= address && address < payloadEnd(); | 528 return payload() <= address && address < payloadEnd(); |
| 527 } | 529 } |
| 528 | 530 |
| 529 virtual size_t objectPayloadSizeForTesting() override; | 531 virtual size_t objectPayloadSizeForTesting() override; |
| 530 virtual bool isEmpty() override; | 532 virtual bool isEmpty() override; |
| 531 virtual void removeFromHeap() override; | 533 virtual void removeFromHeap() override; |
| 532 virtual void sweep() override; | 534 virtual void sweep() override; |
| 533 virtual void markUnmarkedObjectsDead() override; | 535 virtual void markUnmarkedObjectsDead() override; |
| 536 #if defined(ADDRESS_SANITIZER) |
| 537 virtual void poisonUnmarkedObjects() override; |
| 538 #endif |
| 534 virtual void checkAndMarkPointer(Visitor*, Address) override; | 539 virtual void checkAndMarkPointer(Visitor*, Address) override; |
| 535 virtual void markOrphaned() override; | 540 virtual void markOrphaned() override; |
| 536 | 541 |
| 537 #if ENABLE(GC_PROFILING) | 542 #if ENABLE(GC_PROFILING) |
| 538 const GCInfo* findGCInfo(Address) override; | 543 const GCInfo* findGCInfo(Address) override; |
| 539 void snapshot(TracedValue*, ThreadState::SnapshotInfo*) override; | 544 void snapshot(TracedValue*, ThreadState::SnapshotInfo*) override; |
| 540 void incrementMarkedObjectsAge() override; | 545 void incrementMarkedObjectsAge() override; |
| 541 void countMarkedObjects(ClassAgeCountsMap&) override; | 546 void countMarkedObjects(ClassAgeCountsMap&) override; |
| 542 void countObjectsToSweep(ClassAgeCountsMap&) override; | 547 void countObjectsToSweep(ClassAgeCountsMap&) override; |
| 543 #endif | 548 #endif |
| (...skipping 190 matching lines...) |
| 734 #endif | 739 #endif |
| 735 | 740 |
| 736 virtual void clearFreeLists() { } | 741 virtual void clearFreeLists() { } |
| 737 void makeConsistentForSweeping(); | 742 void makeConsistentForSweeping(); |
| 738 #if ENABLE(ASSERT) | 743 #if ENABLE(ASSERT) |
| 739 virtual bool isConsistentForSweeping() = 0; | 744 virtual bool isConsistentForSweeping() = 0; |
| 740 #endif | 745 #endif |
| 741 size_t objectPayloadSizeForTesting(); | 746 size_t objectPayloadSizeForTesting(); |
| 742 void prepareHeapForTermination(); | 747 void prepareHeapForTermination(); |
| 743 void prepareForSweep(); | 748 void prepareForSweep(); |
| 749 #if defined(ADDRESS_SANITIZER) |
| 750 void poisonUnmarkedObjects(); |
| 751 #endif |
| 744 Address lazySweep(size_t, size_t gcInfoIndex); | 752 Address lazySweep(size_t, size_t gcInfoIndex); |
| 745 void sweepUnsweptPage(); | 753 void sweepUnsweptPage(); |
| 746 // Returns true if we have swept all pages within the deadline. | 754 // Returns true if we have swept all pages within the deadline. |
| 747 // Returns false otherwise. | 755 // Returns false otherwise. |
| 748 bool lazySweepWithDeadline(double deadlineSeconds); | 756 bool lazySweepWithDeadline(double deadlineSeconds); |
| 749 void completeSweep(); | 757 void completeSweep(); |
| 750 | 758 |
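lazySweepWithDeadline() is documented above only through its return value. A hedged sketch of the shape such a deadline-bounded loop usually takes, assuming an m_firstUnsweptPage member and WTF's monotonicallyIncreasingTime() clock (both assumptions, not shown in this diff):

    // Sketch: sweep one unswept page at a time and bail out once the deadline
    // has passed; the caller can resume later or fall back to completeSweep().
    bool BaseHeap::lazySweepWithDeadline(double deadlineSeconds)
    {
        while (m_firstUnsweptPage) {                              // assumed member
            sweepUnsweptPage();                                   // declared above
            if (monotonicallyIncreasingTime() > deadlineSeconds)  // WTF clock
                return !m_firstUnsweptPage;                       // true only if done
        }
        return true;
    }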
| 751 ThreadState* threadState() { return m_threadState; } | 759 ThreadState* threadState() { return m_threadState; } |
| 752 int heapIndex() const { return m_index; } | 760 int heapIndex() const { return m_index; } |
| 753 | 761 |
| (...skipping 1290 matching lines...) |
| 2044 Value* table = reinterpret_cast<Value*>(pointer); | 2052 Value* table = reinterpret_cast<Value*>(pointer); |
| 2045 for (unsigned i = 0; i < length; ++i) { | 2053 for (unsigned i = 0; i < length; ++i) { |
| 2046 if (!Table::isEmptyOrDeletedBucket(table[i])) | 2054 if (!Table::isEmptyOrDeletedBucket(table[i])) |
| 2047 table[i].~Value(); | 2055 table[i].~Value(); |
| 2048 } | 2056 } |
| 2049 } | 2057 } |
| 2050 | 2058 |
| 2051 } // namespace blink | 2059 } // namespace blink |
| 2052 | 2060 |
| 2053 #endif // Heap_h | 2061 #endif // Heap_h |