| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/metrics/persistent_histogram_allocator.h" | 5 #include "base/metrics/persistent_histogram_allocator.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 | 8 |
| 9 #include "base/atomicops.h" |
| 9 #include "base/files/file_path.h" | 10 #include "base/files/file_path.h" |
| 10 #include "base/files/file_util.h" | 11 #include "base/files/file_util.h" |
| 11 #include "base/files/important_file_writer.h" | 12 #include "base/files/important_file_writer.h" |
| 12 #include "base/files/memory_mapped_file.h" | 13 #include "base/files/memory_mapped_file.h" |
| 13 #include "base/lazy_instance.h" | 14 #include "base/lazy_instance.h" |
| 14 #include "base/logging.h" | 15 #include "base/logging.h" |
| 15 #include "base/memory/ptr_util.h" | 16 #include "base/memory/ptr_util.h" |
| 16 #include "base/metrics/histogram.h" | 17 #include "base/metrics/histogram.h" |
| 17 #include "base/metrics/histogram_base.h" | 18 #include "base/metrics/histogram_base.h" |
| 18 #include "base/metrics/histogram_samples.h" | 19 #include "base/metrics/histogram_samples.h" |
| (...skipping 18 matching lines...) Expand all Loading... |
| 37 enum : uint32_t { | 38 enum : uint32_t { |
| 38 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1 | 39 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1 |
| 39 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1 | 40 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1 |
| 40 }; | 41 }; |
| 41 | 42 |
| 42 // The current globally-active persistent allocator for all new histograms. | 43 // The current globally-active persistent allocator for all new histograms. |
| 43 // The object held here will obviously not be destructed at process exit | 44 // The object held here will obviously not be destructed at process exit |
| 44 // but that's best since PersistentMemoryAllocator objects (that underlie | 45 // but that's best since PersistentMemoryAllocator objects (that underlie |
| 45 // GlobalHistogramAllocator objects) are explicitly forbidden from doing | 46 // GlobalHistogramAllocator objects) are explicitly forbidden from doing |
| 46 // anything essential at exit anyway due to the fact that they depend on data | 47 // anything essential at exit anyway due to the fact that they depend on data |
| 47 // managed elsewhere and which could be destructed first. | 48 // managed elsewhere and which could be destructed first. An AtomicWord is |
| 48 GlobalHistogramAllocator* g_allocator = nullptr; | 49 // used instead of std::atomic because the latter can create global ctors |
| 50 // and dtors. |
| 51 subtle::AtomicWord g_allocator = 0; |
| 49 | 52 |
| 50 // Take an array of range boundaries and create a proper BucketRanges object | 53 // Take an array of range boundaries and create a proper BucketRanges object |
| 51 // which is returned to the caller. A return of nullptr indicates that the | 54 // which is returned to the caller. A return of nullptr indicates that the |
| 52 // passed boundaries are invalid. | 55 // passed boundaries are invalid. |
| 53 std::unique_ptr<BucketRanges> CreateRangesFromData( | 56 std::unique_ptr<BucketRanges> CreateRangesFromData( |
| 54 HistogramBase::Sample* ranges_data, | 57 HistogramBase::Sample* ranges_data, |
| 55 uint32_t ranges_checksum, | 58 uint32_t ranges_checksum, |
| 56 size_t count) { | 59 size_t count) { |
| 57 // To avoid racy destruction at shutdown, the following may be leaked. | 60 // To avoid racy destruction at shutdown, the following may be leaked. |
| 58 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count)); | 61 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count)); |
| (...skipping 436 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 495 base::subtle::Acquire_Load(&atomic_histogram_pointer)); | 498 base::subtle::Acquire_Load(&atomic_histogram_pointer)); |
| 496 if (!histogram_pointer) { | 499 if (!histogram_pointer) { |
| 497 // It's possible for multiple threads to make it here in parallel but | 500 // It's possible for multiple threads to make it here in parallel but |
| 498 // they'll always return the same result as there is a mutex in the Get. | 501 // they'll always return the same result as there is a mutex in the Get. |
| 499 // The purpose of the "initialized" variable is just to ensure that | 502 // The purpose of the "initialized" variable is just to ensure that |
| 500 // the same thread doesn't recurse which is also why it doesn't have | 503 // the same thread doesn't recurse which is also why it doesn't have |
| 501 // to be atomic. | 504 // to be atomic. |
| 502 static bool initialized = false; | 505 static bool initialized = false; |
| 503 if (!initialized) { | 506 if (!initialized) { |
| 504 initialized = true; | 507 initialized = true; |
| 505 if (g_allocator) { | 508 if (GlobalHistogramAllocator::Get()) { |
| 506 DVLOG(1) << "Creating the results-histogram inside persistent" | 509 DVLOG(1) << "Creating the results-histogram inside persistent" |
| 507 << " memory can cause future allocations to crash if" | 510 << " memory can cause future allocations to crash if" |
| 508 << " that memory is ever released (for testing)."; | 511 << " that memory is ever released (for testing)."; |
| 509 } | 512 } |
| 510 | 513 |
| 511 histogram_pointer = LinearHistogram::FactoryGet( | 514 histogram_pointer = LinearHistogram::FactoryGet( |
| 512 kResultHistogram, 1, CREATE_HISTOGRAM_MAX, CREATE_HISTOGRAM_MAX + 1, | 515 kResultHistogram, 1, CREATE_HISTOGRAM_MAX, CREATE_HISTOGRAM_MAX + 1, |
| 513 HistogramBase::kUmaTargetedHistogramFlag); | 516 HistogramBase::kUmaTargetedHistogramFlag); |
| 514 base::subtle::Release_Store( | 517 base::subtle::Release_Store( |
| 515 &atomic_histogram_pointer, | 518 &atomic_histogram_pointer, |
| (...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 643 } | 646 } |
| 644 | 647 |
| 645 return histogram; | 648 return histogram; |
| 646 } | 649 } |
| 647 | 650 |
| 648 HistogramBase* | 651 HistogramBase* |
| 649 PersistentHistogramAllocator::GetOrCreateStatisticsRecorderHistogram( | 652 PersistentHistogramAllocator::GetOrCreateStatisticsRecorderHistogram( |
| 650 const HistogramBase* histogram) { | 653 const HistogramBase* histogram) { |
| 651 // This should never be called on the global histogram allocator as objects | 654 // This should never be called on the global histogram allocator as objects |
| 652 // created there are already within the global statistics recorder. | 655 // created there are already within the global statistics recorder. |
| 653 DCHECK_NE(g_allocator, this); | 656 DCHECK_NE(GlobalHistogramAllocator::Get(), this); |
| 654 DCHECK(histogram); | 657 DCHECK(histogram); |
| 655 | 658 |
| 656 HistogramBase* existing = | 659 HistogramBase* existing = |
| 657 StatisticsRecorder::FindHistogram(histogram->histogram_name()); | 660 StatisticsRecorder::FindHistogram(histogram->histogram_name()); |
| 658 if (existing) | 661 if (existing) |
| 659 return existing; | 662 return existing; |
| 660 | 663 |
| 661 // Adding the passed histogram to the SR would cause a problem if the | 664 // Adding the passed histogram to the SR would cause a problem if the |
| 662 // allocator that holds it eventually goes away. Instead, create a new | 665 // allocator that holds it eventually goes away. Instead, create a new |
| 663 // one from a serialized version. | 666 // one from a serialized version. |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 812 new GlobalHistogramAllocator(MakeUnique<SharedPersistentMemoryAllocator>( | 815 new GlobalHistogramAllocator(MakeUnique<SharedPersistentMemoryAllocator>( |
| 813 std::move(shm), 0, StringPiece(), /*readonly=*/false)))); | 816 std::move(shm), 0, StringPiece(), /*readonly=*/false)))); |
| 814 } | 817 } |
| 815 | 818 |
| 816 // static | 819 // static |
| 817 void GlobalHistogramAllocator::Set( | 820 void GlobalHistogramAllocator::Set( |
| 818 std::unique_ptr<GlobalHistogramAllocator> allocator) { | 821 std::unique_ptr<GlobalHistogramAllocator> allocator) { |
| 819 // Releasing or changing an allocator is extremely dangerous because it | 822 // Releasing or changing an allocator is extremely dangerous because it |
| 820 // likely has histograms stored within it. If the backing memory is also | 823 // likely has histograms stored within it. If the backing memory is also |
| 821 // released, future accesses to those histograms will seg-fault. | 824 // released, future accesses to those histograms will seg-fault. |
| 822 CHECK(!g_allocator); | 825 CHECK(!subtle::NoBarrier_Load(&g_allocator)); |
| 823 g_allocator = allocator.release(); | 826 subtle::NoBarrier_Store(&g_allocator, |
| 827 reinterpret_cast<uintptr_t>(allocator.release())); |
| 824 size_t existing = StatisticsRecorder::GetHistogramCount(); | 828 size_t existing = StatisticsRecorder::GetHistogramCount(); |
| 825 | 829 |
| 826 DVLOG_IF(1, existing) | 830 DVLOG_IF(1, existing) |
| 827 << existing << " histograms were created before persistence was enabled."; | 831 << existing << " histograms were created before persistence was enabled."; |
| 828 } | 832 } |
| 829 | 833 |
| 830 // static | 834 // static |
| 831 GlobalHistogramAllocator* GlobalHistogramAllocator::Get() { | 835 GlobalHistogramAllocator* GlobalHistogramAllocator::Get() { |
| 832 return g_allocator; | 836 return reinterpret_cast<GlobalHistogramAllocator*>( |
| 837 subtle::NoBarrier_Load(&g_allocator)); |
| 833 } | 838 } |
| 834 | 839 |
| 835 // static | 840 // static |
| 836 std::unique_ptr<GlobalHistogramAllocator> | 841 std::unique_ptr<GlobalHistogramAllocator> |
| 837 GlobalHistogramAllocator::ReleaseForTesting() { | 842 GlobalHistogramAllocator::ReleaseForTesting() { |
| 838 GlobalHistogramAllocator* histogram_allocator = g_allocator; | 843 GlobalHistogramAllocator* histogram_allocator = Get(); |
| 839 if (!histogram_allocator) | 844 if (!histogram_allocator) |
| 840 return nullptr; | 845 return nullptr; |
| 841 PersistentMemoryAllocator* memory_allocator = | 846 PersistentMemoryAllocator* memory_allocator = |
| 842 histogram_allocator->memory_allocator(); | 847 histogram_allocator->memory_allocator(); |
| 843 | 848 |
| 844 // Before releasing the memory, it's necessary to have the Statistics- | 849 // Before releasing the memory, it's necessary to have the Statistics- |
| 845 // Recorder forget about the histograms contained therein; otherwise, | 850 // Recorder forget about the histograms contained therein; otherwise, |
| 846 // some operations will try to access them and the released memory. | 851 // some operations will try to access them and the released memory. |
| 847 PersistentMemoryAllocator::Iterator iter(memory_allocator); | 852 PersistentMemoryAllocator::Iterator iter(memory_allocator); |
| 848 const PersistentHistogramData* data; | 853 const PersistentHistogramData* data; |
| 849 while ((data = iter.GetNextOfObject<PersistentHistogramData>()) != nullptr) { | 854 while ((data = iter.GetNextOfObject<PersistentHistogramData>()) != nullptr) { |
| 850 StatisticsRecorder::ForgetHistogramForTesting(data->name); | 855 StatisticsRecorder::ForgetHistogramForTesting(data->name); |
| 851 | 856 |
| 852 // If a test breaks here then a memory region containing a histogram | 857 // If a test breaks here then a memory region containing a histogram |
| 853 // actively used by this code is being released back to the test. | 858 // actively used by this code is being released back to the test. |
| 854 // If that memory segment were to be deleted, future calls to create | 859 // If that memory segment were to be deleted, future calls to create |
| 855 // persistent histograms would crash. To avoid this, have the test call | 860 // persistent histograms would crash. To avoid this, have the test call |
| 856 // the method GetCreateHistogramResultHistogram() *before* setting | 861 // the method GetCreateHistogramResultHistogram() *before* setting |
| 857 // the (temporary) memory allocator via SetGlobalAllocator() so that | 862 // the (temporary) memory allocator via SetGlobalAllocator() so that |
| 858 // histogram is instead allocated from the process heap. | 863 // histogram is instead allocated from the process heap. |
| 859 DCHECK_NE(kResultHistogram, data->name); | 864 DCHECK_NE(kResultHistogram, data->name); |
| 860 } | 865 } |
| 861 | 866 |
| 862 g_allocator = nullptr; | 867 subtle::NoBarrier_Store(&g_allocator, 0); |
| 863 return WrapUnique(histogram_allocator); | 868 return WrapUnique(histogram_allocator); |
| 864 } | 869 } |
| 865 | 870 |
| 866 void GlobalHistogramAllocator::SetPersistentLocation(const FilePath& location) { | 871 void GlobalHistogramAllocator::SetPersistentLocation(const FilePath& location) { |
| 867 persistent_location_ = location; | 872 persistent_location_ = location; |
| 868 } | 873 } |
| 869 | 874 |
| 870 const FilePath& GlobalHistogramAllocator::GetPersistentLocation() const { | 875 const FilePath& GlobalHistogramAllocator::GetPersistentLocation() const { |
| 871 return persistent_location_; | 876 return persistent_location_; |
| 872 } | 877 } |
| (...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 936 while (true) { | 941 while (true) { |
| 937 std::unique_ptr<HistogramBase> histogram = | 942 std::unique_ptr<HistogramBase> histogram = |
| 938 import_iterator_.GetNextWithIgnore(record_to_ignore); | 943 import_iterator_.GetNextWithIgnore(record_to_ignore); |
| 939 if (!histogram) | 944 if (!histogram) |
| 940 break; | 945 break; |
| 941 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release()); | 946 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release()); |
| 942 } | 947 } |
| 943 } | 948 } |
| 944 | 949 |
| 945 } // namespace base | 950 } // namespace base |
| OLD | NEW |