Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(31)

Side by Side Diff: base/metrics/persistent_histogram_allocator.cc

Issue 2655673002: Use atomic ops for managing g_allocator. (Closed)
Patch Set: use Get() instead of g_allocator Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2016 The Chromium Authors. All rights reserved. 1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "base/metrics/persistent_histogram_allocator.h" 5 #include "base/metrics/persistent_histogram_allocator.h"
6 6
7 #include <memory> 7 #include <memory>
8 8
9 #include "base/atomicops.h"
9 #include "base/files/file_path.h" 10 #include "base/files/file_path.h"
10 #include "base/files/file_util.h" 11 #include "base/files/file_util.h"
11 #include "base/files/important_file_writer.h" 12 #include "base/files/important_file_writer.h"
12 #include "base/files/memory_mapped_file.h" 13 #include "base/files/memory_mapped_file.h"
13 #include "base/lazy_instance.h" 14 #include "base/lazy_instance.h"
14 #include "base/logging.h" 15 #include "base/logging.h"
15 #include "base/memory/ptr_util.h" 16 #include "base/memory/ptr_util.h"
16 #include "base/metrics/histogram.h" 17 #include "base/metrics/histogram.h"
17 #include "base/metrics/histogram_base.h" 18 #include "base/metrics/histogram_base.h"
18 #include "base/metrics/histogram_samples.h" 19 #include "base/metrics/histogram_samples.h"
(...skipping 18 matching lines...) Expand all
37 enum : uint32_t { 38 enum : uint32_t {
38 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1 39 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1
39 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1 40 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1
40 }; 41 };
41 42
42 // The current globally-active persistent allocator for all new histograms. 43 // The current globally-active persistent allocator for all new histograms.
43 // The object held here will obviously not be destructed at process exit 44 // The object held here will obviously not be destructed at process exit
44 // but that's best since PersistentMemoryAllocator objects (that underlie 45 // but that's best since PersistentMemoryAllocator objects (that underlie
45 // GlobalHistogramAllocator objects) are explicitly forbidden from doing 46 // GlobalHistogramAllocator objects) are explicitly forbidden from doing
46 // anything essential at exit anyway due to the fact that they depend on data 47 // anything essential at exit anyway due to the fact that they depend on data
47 // managed elsewhere and which could be destructed first. 48 // managed elsewhere and which could be destructed first. An AtomicWord is
48 GlobalHistogramAllocator* g_allocator = nullptr; 49 // used instead of std::atomic because the latter can create global ctors
50 // and dtors.
51 subtle::AtomicWord g_allocator = 0;
49 52
50 // Take an array of range boundaries and create a proper BucketRanges object 53 // Take an array of range boundaries and create a proper BucketRanges object
51 // which is returned to the caller. A return of nullptr indicates that the 54 // which is returned to the caller. A return of nullptr indicates that the
52 // passed boundaries are invalid. 55 // passed boundaries are invalid.
53 std::unique_ptr<BucketRanges> CreateRangesFromData( 56 std::unique_ptr<BucketRanges> CreateRangesFromData(
54 HistogramBase::Sample* ranges_data, 57 HistogramBase::Sample* ranges_data,
55 uint32_t ranges_checksum, 58 uint32_t ranges_checksum,
56 size_t count) { 59 size_t count) {
57 // To avoid racy destruction at shutdown, the following may be leaked. 60 // To avoid racy destruction at shutdown, the following may be leaked.
58 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count)); 61 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count));
(...skipping 436 matching lines...) Expand 10 before | Expand all | Expand 10 after
495 base::subtle::Acquire_Load(&atomic_histogram_pointer)); 498 base::subtle::Acquire_Load(&atomic_histogram_pointer));
496 if (!histogram_pointer) { 499 if (!histogram_pointer) {
497 // It's possible for multiple threads to make it here in parallel but 500 // It's possible for multiple threads to make it here in parallel but
498 // they'll always return the same result as there is a mutex in the Get. 501 // they'll always return the same result as there is a mutex in the Get.
499 // The purpose of the "initialized" variable is just to ensure that 502 // The purpose of the "initialized" variable is just to ensure that
500 // the same thread doesn't recurse which is also why it doesn't have 503 // the same thread doesn't recurse which is also why it doesn't have
501 // to be atomic. 504 // to be atomic.
502 static bool initialized = false; 505 static bool initialized = false;
503 if (!initialized) { 506 if (!initialized) {
504 initialized = true; 507 initialized = true;
505 if (g_allocator) { 508 if (GlobalHistogramAllocator::Get()) {
506 DVLOG(1) << "Creating the results-histogram inside persistent" 509 DVLOG(1) << "Creating the results-histogram inside persistent"
507 << " memory can cause future allocations to crash if" 510 << " memory can cause future allocations to crash if"
508 << " that memory is ever released (for testing)."; 511 << " that memory is ever released (for testing).";
509 } 512 }
510 513
511 histogram_pointer = LinearHistogram::FactoryGet( 514 histogram_pointer = LinearHistogram::FactoryGet(
512 kResultHistogram, 1, CREATE_HISTOGRAM_MAX, CREATE_HISTOGRAM_MAX + 1, 515 kResultHistogram, 1, CREATE_HISTOGRAM_MAX, CREATE_HISTOGRAM_MAX + 1,
513 HistogramBase::kUmaTargetedHistogramFlag); 516 HistogramBase::kUmaTargetedHistogramFlag);
514 base::subtle::Release_Store( 517 base::subtle::Release_Store(
515 &atomic_histogram_pointer, 518 &atomic_histogram_pointer,
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
643 } 646 }
644 647
645 return histogram; 648 return histogram;
646 } 649 }
647 650
648 HistogramBase* 651 HistogramBase*
649 PersistentHistogramAllocator::GetOrCreateStatisticsRecorderHistogram( 652 PersistentHistogramAllocator::GetOrCreateStatisticsRecorderHistogram(
650 const HistogramBase* histogram) { 653 const HistogramBase* histogram) {
651 // This should never be called on the global histogram allocator as objects 654 // This should never be called on the global histogram allocator as objects
652 // created there are already within the global statistics recorder. 655 // created there are already within the global statistics recorder.
653 DCHECK_NE(g_allocator, this); 656 DCHECK_NE(GlobalHistogramAllocator::Get(), this);
654 DCHECK(histogram); 657 DCHECK(histogram);
655 658
656 HistogramBase* existing = 659 HistogramBase* existing =
657 StatisticsRecorder::FindHistogram(histogram->histogram_name()); 660 StatisticsRecorder::FindHistogram(histogram->histogram_name());
658 if (existing) 661 if (existing)
659 return existing; 662 return existing;
660 663
661 // Adding the passed histogram to the SR would cause a problem if the 664 // Adding the passed histogram to the SR would cause a problem if the
662 // allocator that holds it eventually goes away. Instead, create a new 665 // allocator that holds it eventually goes away. Instead, create a new
663 // one from a serialized version. Deserialization calls the appropriate 666 // one from a serialized version. Deserialization calls the appropriate
(...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after
814 new GlobalHistogramAllocator(MakeUnique<SharedPersistentMemoryAllocator>( 817 new GlobalHistogramAllocator(MakeUnique<SharedPersistentMemoryAllocator>(
815 std::move(shm), 0, StringPiece(), /*readonly=*/false)))); 818 std::move(shm), 0, StringPiece(), /*readonly=*/false))));
816 } 819 }
817 820
818 // static 821 // static
819 void GlobalHistogramAllocator::Set( 822 void GlobalHistogramAllocator::Set(
820 std::unique_ptr<GlobalHistogramAllocator> allocator) { 823 std::unique_ptr<GlobalHistogramAllocator> allocator) {
821 // Releasing or changing an allocator is extremely dangerous because it 824 // Releasing or changing an allocator is extremely dangerous because it
822 // likely has histograms stored within it. If the backing memory is also 825 // likely has histograms stored within it. If the backing memory is also
823 // released, future accesses to those histograms will seg-fault. 826 // released, future accesses to those histograms will seg-fault.
824 CHECK(!g_allocator); 827 CHECK(!subtle::NoBarrier_Load(&g_allocator));
825 g_allocator = allocator.release(); 828 subtle::NoBarrier_Store(&g_allocator,
829 reinterpret_cast<uintptr_t>(allocator.release()));
826 size_t existing = StatisticsRecorder::GetHistogramCount(); 830 size_t existing = StatisticsRecorder::GetHistogramCount();
827 831
828 DVLOG_IF(1, existing) 832 DVLOG_IF(1, existing)
829 << existing << " histograms were created before persistence was enabled."; 833 << existing << " histograms were created before persistence was enabled.";
830 } 834 }
831 835
832 // static 836 // static
833 GlobalHistogramAllocator* GlobalHistogramAllocator::Get() { 837 GlobalHistogramAllocator* GlobalHistogramAllocator::Get() {
834 return g_allocator; 838 return reinterpret_cast<GlobalHistogramAllocator*>(
839 subtle::NoBarrier_Load(&g_allocator));
835 } 840 }
836 841
837 // static 842 // static
838 std::unique_ptr<GlobalHistogramAllocator> 843 std::unique_ptr<GlobalHistogramAllocator>
839 GlobalHistogramAllocator::ReleaseForTesting() { 844 GlobalHistogramAllocator::ReleaseForTesting() {
840 GlobalHistogramAllocator* histogram_allocator = g_allocator; 845 GlobalHistogramAllocator* histogram_allocator = Get();
841 if (!histogram_allocator) 846 if (!histogram_allocator)
842 return nullptr; 847 return nullptr;
843 PersistentMemoryAllocator* memory_allocator = 848 PersistentMemoryAllocator* memory_allocator =
844 histogram_allocator->memory_allocator(); 849 histogram_allocator->memory_allocator();
845 850
846 // Before releasing the memory, it's necessary to have the Statistics- 851 // Before releasing the memory, it's necessary to have the Statistics-
847 // Recorder forget about the histograms contained therein; otherwise, 852 // Recorder forget about the histograms contained therein; otherwise,
848 // some operations will try to access them and the released memory. 853 // some operations will try to access them and the released memory.
849 PersistentMemoryAllocator::Iterator iter(memory_allocator); 854 PersistentMemoryAllocator::Iterator iter(memory_allocator);
850 const PersistentHistogramData* data; 855 const PersistentHistogramData* data;
851 while ((data = iter.GetNextOfObject<PersistentHistogramData>()) != nullptr) { 856 while ((data = iter.GetNextOfObject<PersistentHistogramData>()) != nullptr) {
852 StatisticsRecorder::ForgetHistogramForTesting(data->name); 857 StatisticsRecorder::ForgetHistogramForTesting(data->name);
853 858
854 // If a test breaks here then a memory region containing a histogram 859 // If a test breaks here then a memory region containing a histogram
855 // actively used by this code is being released back to the test. 860 // actively used by this code is being released back to the test.
856 // If that memory segment were to be deleted, future calls to create 861 // If that memory segment were to be deleted, future calls to create
857 // persistent histograms would crash. To avoid this, have the test call 862 // persistent histograms would crash. To avoid this, have the test call
858 // the method GetCreateHistogramResultHistogram() *before* setting 863 // the method GetCreateHistogramResultHistogram() *before* setting
859 // the (temporary) memory allocator via SetGlobalAllocator() so that 864 // the (temporary) memory allocator via SetGlobalAllocator() so that
860 // histogram is instead allocated from the process heap. 865 // histogram is instead allocated from the process heap.
861 DCHECK_NE(kResultHistogram, data->name); 866 DCHECK_NE(kResultHistogram, data->name);
862 } 867 }
863 868
864 g_allocator = nullptr; 869 subtle::NoBarrier_Store(&g_allocator, 0);
865 return WrapUnique(histogram_allocator); 870 return WrapUnique(histogram_allocator);
866 }; 871 };
867 872
868 void GlobalHistogramAllocator::SetPersistentLocation(const FilePath& location) { 873 void GlobalHistogramAllocator::SetPersistentLocation(const FilePath& location) {
869 persistent_location_ = location; 874 persistent_location_ = location;
870 } 875 }
871 876
872 const FilePath& GlobalHistogramAllocator::GetPersistentLocation() const { 877 const FilePath& GlobalHistogramAllocator::GetPersistentLocation() const {
873 return persistent_location_; 878 return persistent_location_;
874 } 879 }
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
938 while (true) { 943 while (true) {
939 std::unique_ptr<HistogramBase> histogram = 944 std::unique_ptr<HistogramBase> histogram =
940 import_iterator_.GetNextWithIgnore(record_to_ignore); 945 import_iterator_.GetNextWithIgnore(record_to_ignore);
941 if (!histogram) 946 if (!histogram)
942 break; 947 break;
943 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release()); 948 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release());
944 } 949 }
945 } 950 }
946 951
947 } // namespace base 952 } // namespace base
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698