| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/metrics/persistent_histogram_allocator.h" | 5 #include "base/metrics/persistent_histogram_allocator.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 | 8 |
| 9 #include "base/lazy_instance.h" | 9 #include "base/lazy_instance.h" |
| 10 #include "base/logging.h" | 10 #include "base/logging.h" |
| (...skipping 23 matching lines...) Expand all Loading... |
| 34 // will be safely ignored. | 34 // will be safely ignored. |
| 35 enum : uint32_t { | 35 enum : uint32_t { |
| 36 kTypeIdHistogram = 0xF1645910 + 2, // SHA1(Histogram) v2 | 36 kTypeIdHistogram = 0xF1645910 + 2, // SHA1(Histogram) v2 |
| 37 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1 | 37 kTypeIdRangesArray = 0xBCEA225A + 1, // SHA1(RangesArray) v1 |
| 38 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1 | 38 kTypeIdCountsArray = 0x53215530 + 1, // SHA1(CountsArray) v1 |
| 39 }; | 39 }; |
| 40 | 40 |
| 41 // The current globally-active persistent allocator for all new histograms. | 41 // The current globally-active persistent allocator for all new histograms. |
| 42 // The object held here will obviously not be destructed at process exit | 42 // The object held here will obviously not be destructed at process exit |
| 43 // but that's best since PersistentMemoryAllocator objects (that underlie | 43 // but that's best since PersistentMemoryAllocator objects (that underlie |
| 44 // PersistentHistogramAllocator objects) are explicitly forbidden from doing | 44 // GlobalHistogramAllocator objects) are explicitly forbidden from doing |
| 45 // anything essential at exit anyway due to the fact that they depend on data | 45 // anything essential at exit anyway due to the fact that they depend on data |
| 46 // managed elsewhere and which could be destructed first. | 46 // managed elsewhere and which could be destructed first. |
| 47 PersistentHistogramAllocator* g_allocator; | 47 GlobalHistogramAllocator* g_allocator; |
| 48 | 48 |
| 49 // Take an array of range boundaries and create a proper BucketRanges object | 49 // Take an array of range boundaries and create a proper BucketRanges object |
| 50 // which is returned to the caller. A return of nullptr indicates that the | 50 // which is returned to the caller. A return of nullptr indicates that the |
| 51 // passed boundaries are invalid. | 51 // passed boundaries are invalid. |
| 52 std::unique_ptr<BucketRanges> CreateRangesFromData( | 52 std::unique_ptr<BucketRanges> CreateRangesFromData( |
| 53 HistogramBase::Sample* ranges_data, | 53 HistogramBase::Sample* ranges_data, |
| 54 uint32_t ranges_checksum, | 54 uint32_t ranges_checksum, |
| 55 size_t count) { | 55 size_t count) { |
| 56 // To avoid racy destruction at shutdown, the following may be leaked. | 56 // To avoid racy destruction at shutdown, the following may be leaked. |
| 57 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count)); | 57 std::unique_ptr<BucketRanges> ranges(new BucketRanges(count)); |
| (...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 168 | 168 |
| 169 // static | 169 // static |
| 170 void PersistentHistogramAllocator::RecordCreateHistogramResult( | 170 void PersistentHistogramAllocator::RecordCreateHistogramResult( |
| 171 CreateHistogramResultType result) { | 171 CreateHistogramResultType result) { |
| 172 HistogramBase* result_histogram = GetCreateHistogramResultHistogram(); | 172 HistogramBase* result_histogram = GetCreateHistogramResultHistogram(); |
| 173 if (result_histogram) | 173 if (result_histogram) |
| 174 result_histogram->Add(result); | 174 result_histogram->Add(result); |
| 175 } | 175 } |
| 176 | 176 |
| 177 // static | 177 // static |
| 178 void PersistentHistogramAllocator::SetGlobalAllocator( | |
| 179 std::unique_ptr<PersistentHistogramAllocator> allocator) { | |
| 180 // Releasing or changing an allocator is extremely dangerous because it | |
| 181 // likely has histograms stored within it. If the backing memory is also | |
| 182 // also released, future accesses to those histograms will seg-fault. | |
| 183 CHECK(!g_allocator); | |
| 184 g_allocator = allocator.release(); | |
| 185 | |
| 186 size_t existing = StatisticsRecorder::GetHistogramCount(); | |
| 187 DLOG_IF(WARNING, existing) | |
| 188 << existing | |
| 189 << " histograms were created before persistence was enabled."; | |
| 190 } | |
| 191 | |
| 192 // static | |
| 193 PersistentHistogramAllocator* | |
| 194 PersistentHistogramAllocator::GetGlobalAllocator() { | |
| 195 return g_allocator; | |
| 196 } | |
| 197 | |
| 198 // static | |
| 199 std::unique_ptr<PersistentHistogramAllocator> | |
| 200 PersistentHistogramAllocator::ReleaseGlobalAllocatorForTesting() { | |
| 201 PersistentHistogramAllocator* histogram_allocator = g_allocator; | |
| 202 if (!histogram_allocator) | |
| 203 return nullptr; | |
| 204 PersistentMemoryAllocator* memory_allocator = | |
| 205 histogram_allocator->memory_allocator(); | |
| 206 | |
| 207 // Before releasing the memory, it's necessary to have the Statistics- | |
| 208 // Recorder forget about the histograms contained therein; otherwise, | |
| 209 // some operations will try to access them and the released memory. | |
| 210 PersistentMemoryAllocator::Iterator iter; | |
| 211 PersistentMemoryAllocator::Reference ref; | |
| 212 uint32_t type_id; | |
| 213 memory_allocator->CreateIterator(&iter); | |
| 214 while ((ref = memory_allocator->GetNextIterable(&iter, &type_id)) != 0) { | |
| 215 if (type_id == kTypeIdHistogram) { | |
| 216 PersistentHistogramData* histogram_data = | |
| 217 memory_allocator->GetAsObject<PersistentHistogramData>( | |
| 218 ref, kTypeIdHistogram); | |
| 219 DCHECK(histogram_data); | |
| 220 StatisticsRecorder::ForgetHistogramForTesting(histogram_data->name); | |
| 221 | |
| 222 // If a test breaks here then a memory region containing a histogram | |
| 223 // actively used by this code is being released back to the test. | |
| 224 // If that memory segment were to be deleted, future calls to create | |
| 225 // persistent histograms would crash. To avoid this, have the test call | |
| 226 // the method GetCreateHistogramResultHistogram() *before* setting | |
| 227 // the (temporary) memory allocator via SetGlobalAllocator() so that | |
| 228 // histogram is instead allocated from the process heap. | |
| 229 DCHECK_NE(kResultHistogram, histogram_data->name); | |
| 230 } | |
| 231 } | |
| 232 | |
| 233 g_allocator = nullptr; | |
| 234 return WrapUnique(histogram_allocator); | |
| 235 }; | |
| 236 | |
| 237 // static | |
| 238 void PersistentHistogramAllocator::CreateGlobalAllocatorOnPersistentMemory( | |
| 239 void* base, | |
| 240 size_t size, | |
| 241 size_t page_size, | |
| 242 uint64_t id, | |
| 243 StringPiece name) { | |
| 244 SetGlobalAllocator(WrapUnique(new PersistentHistogramAllocator( | |
| 245 WrapUnique(new PersistentMemoryAllocator(base, size, page_size, id, | |
| 246 name, false))))); | |
| 247 } | |
| 248 | |
| 249 // static | |
| 250 void PersistentHistogramAllocator::CreateGlobalAllocatorOnLocalMemory( | |
| 251 size_t size, | |
| 252 uint64_t id, | |
| 253 StringPiece name) { | |
| 254 SetGlobalAllocator(WrapUnique(new PersistentHistogramAllocator( | |
| 255 WrapUnique(new LocalPersistentMemoryAllocator(size, id, name))))); | |
| 256 } | |
| 257 | |
| 258 // static | |
| 259 void PersistentHistogramAllocator::CreateGlobalAllocatorOnSharedMemory( | |
| 260 size_t size, | |
| 261 const SharedMemoryHandle& handle) { | |
| 262 std::unique_ptr<SharedMemory> shm( | |
| 263 new SharedMemory(handle, /*readonly=*/false)); | |
| 264 if (!shm->Map(size)) { | |
| 265 NOTREACHED(); | |
| 266 return; | |
| 267 } | |
| 268 | |
| 269 SetGlobalAllocator(WrapUnique(new PersistentHistogramAllocator( | |
| 270 WrapUnique(new SharedPersistentMemoryAllocator( | |
| 271 std::move(shm), 0, StringPiece(), /*readonly=*/false))))); | |
| 272 } | |
| 273 | |
| 274 // static | |
| 275 std::unique_ptr<HistogramBase> PersistentHistogramAllocator::CreateHistogram( | 178 std::unique_ptr<HistogramBase> PersistentHistogramAllocator::CreateHistogram( |
| 276 PersistentHistogramData* histogram_data_ptr) { | 179 PersistentHistogramData* histogram_data_ptr) { |
| 277 if (!histogram_data_ptr) { | 180 if (!histogram_data_ptr) { |
| 278 RecordCreateHistogramResult(CREATE_HISTOGRAM_INVALID_METADATA_POINTER); | 181 RecordCreateHistogramResult(CREATE_HISTOGRAM_INVALID_METADATA_POINTER); |
| 279 NOTREACHED(); | 182 NOTREACHED(); |
| 280 return nullptr; | 183 return nullptr; |
| 281 } | 184 } |
| 282 | 185 |
| 283 // Sparse histograms are quite different so handle them as a special case. | 186 // Sparse histograms are quite different so handle them as a special case. |
| 284 if (histogram_data_ptr->histogram_type == SPARSE_HISTOGRAM) { | 187 if (histogram_data_ptr->histogram_type == SPARSE_HISTOGRAM) { |
| (...skipping 240 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 525 // using what is already known above but avoids duplicating the switch | 428 // using what is already known above but avoids duplicating the switch |
| 526 // statement here and serves as a double-check that everything is | 429 // statement here and serves as a double-check that everything is |
| 527 // correct before committing the new histogram to persistent space. | 430 // correct before committing the new histogram to persistent space. |
| 528 std::unique_ptr<HistogramBase> histogram = CreateHistogram(histogram_data); | 431 std::unique_ptr<HistogramBase> histogram = CreateHistogram(histogram_data); |
| 529 DCHECK(histogram); | 432 DCHECK(histogram); |
| 530 if (ref_ptr != nullptr) | 433 if (ref_ptr != nullptr) |
| 531 *ref_ptr = histogram_ref; | 434 *ref_ptr = histogram_ref; |
| 532 | 435 |
| 533 // By storing the reference within the allocator to this histogram, the | 436 // By storing the reference within the allocator to this histogram, the |
| 534 // next import (which will happen before the next histogram creation) | 437 // next import (which will happen before the next histogram creation) |
| 535 // will know to skip it. See also the comment in ImportGlobalHistograms(). | 438 // will know to skip it. |
| 439 // See also the comment in ImportHistogramsToStatisticsRecorder(). |
| 536 subtle::NoBarrier_Store(&last_created_, histogram_ref); | 440 subtle::NoBarrier_Store(&last_created_, histogram_ref); |
| 537 return histogram; | 441 return histogram; |
| 538 } | 442 } |
| 539 | 443 |
| 540 CreateHistogramResultType result; | 444 CreateHistogramResultType result; |
| 541 if (memory_allocator_->IsCorrupt()) { | 445 if (memory_allocator_->IsCorrupt()) { |
| 542 RecordCreateHistogramResult(CREATE_HISTOGRAM_ALLOCATOR_NEWLY_CORRUPT); | 446 RecordCreateHistogramResult(CREATE_HISTOGRAM_ALLOCATOR_NEWLY_CORRUPT); |
| 543 result = CREATE_HISTOGRAM_ALLOCATOR_CORRUPT; | 447 result = CREATE_HISTOGRAM_ALLOCATOR_CORRUPT; |
| 544 } else if (memory_allocator_->IsFull()) { | 448 } else if (memory_allocator_->IsFull()) { |
| 545 result = CREATE_HISTOGRAM_ALLOCATOR_FULL; | 449 result = CREATE_HISTOGRAM_ALLOCATOR_FULL; |
| 546 } else { | 450 } else { |
| 547 result = CREATE_HISTOGRAM_ALLOCATOR_ERROR; | 451 result = CREATE_HISTOGRAM_ALLOCATOR_ERROR; |
| 548 } | 452 } |
| 549 RecordCreateHistogramResult(result); | 453 RecordCreateHistogramResult(result); |
| 550 NOTREACHED() << "error=" << result; | 454 NOTREACHED() << "error=" << result; |
| 551 | 455 |
| 552 return nullptr; | 456 return nullptr; |
| 553 } | 457 } |
| 554 | 458 |
| 459 GlobalHistogramAllocator::~GlobalHistogramAllocator() {} |
| 460 |
| 555 // static | 461 // static |
| 556 void PersistentHistogramAllocator::ImportGlobalHistograms() { | 462 void GlobalHistogramAllocator::CreateWithPersistentMemory( |
| 557 // The lock protects against concurrent access to the iterator and is created | 463 void* base, |
| 558 // in a thread-safe manner when needed. | 464 size_t size, |
| 559 static base::LazyInstance<base::Lock>::Leaky lock = LAZY_INSTANCE_INITIALIZER; | 465 size_t page_size, |
| 466 uint64_t id, |
| 467 StringPiece name) { |
| 468 Set(WrapUnique(new GlobalHistogramAllocator( |
| 469 WrapUnique(new PersistentMemoryAllocator( |
| 470 base, size, page_size, id, name, false))))); |
| 471 } |
| 560 | 472 |
| 561 if (g_allocator) { | 473 // static |
| 562 // TODO(bcwhite): Investigate a lock-free, thread-safe iterator. | 474 void GlobalHistogramAllocator::CreateWithLocalMemory( |
| 563 base::AutoLock auto_lock(lock.Get()); | 475 size_t size, |
| 476 uint64_t id, |
| 477 StringPiece name) { |
| 478 Set(WrapUnique(new GlobalHistogramAllocator( |
| 479 WrapUnique(new LocalPersistentMemoryAllocator(size, id, name))))); |
| 480 } |
| 564 | 481 |
| 565 // Each call resumes from where it last left off so a persistant iterator | 482 // static |
| 566 // is needed. This class has a constructor so even the definition has to | 483 void GlobalHistogramAllocator::CreateWithSharedMemory( |
| 567 // be protected by the lock in order to be thread-safe. | 484 std::unique_ptr<SharedMemory> memory, |
| 568 static Iterator iter; | 485 size_t size, |
| 569 if (iter.is_clear()) | 486 uint64_t id, |
| 570 g_allocator->CreateIterator(&iter); | 487 StringPiece name) { |
| 488 if (!memory->memory() && !memory->Map(size)) |
| 489 NOTREACHED(); |
| 571 | 490 |
| 572 // Skip the import if it's the histogram that was last created. Should a | 491 if (memory->memory()) { |
| 573 // race condition cause the "last created" to be overwritten before it | 492 DCHECK_LE(memory->mapped_size(), size); |
| 574 // is recognized here then the histogram will be created and be ignored | 493 Set(WrapUnique(new GlobalHistogramAllocator( |
| 575 // when it is detected as a duplicate by the statistics-recorder. This | 494 WrapUnique(new SharedPersistentMemoryAllocator( |
| 576 // simple check reduces the time of creating persistent histograms by | 495 std::move(memory), 0, StringPiece(), /*readonly=*/false))))); |
| 577 // about 40%. | 496 } |
| 578 Reference last_created = | 497 } |
| 579 subtle::NoBarrier_Load(&g_allocator->last_created_); | |
| 580 | 498 |
| 581 while (true) { | 499 // static |
| 582 std::unique_ptr<HistogramBase> histogram = | 500 void GlobalHistogramAllocator::CreateWithSharedMemoryHandle( |
| 583 g_allocator->GetNextHistogramWithIgnore(&iter, last_created); | 501 const SharedMemoryHandle& handle, |
| 584 if (!histogram) | 502 size_t size) { |
| 585 break; | 503 std::unique_ptr<SharedMemory> shm( |
| 586 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release()); | 504 new SharedMemory(handle, /*readonly=*/false)); |
| 505 if (!shm->Map(size)) { |
| 506 NOTREACHED(); |
| 507 return; |
| 508 } |
| 509 |
| 510 Set(WrapUnique(new GlobalHistogramAllocator( |
| 511 WrapUnique(new SharedPersistentMemoryAllocator( |
| 512 std::move(shm), 0, StringPiece(), /*readonly=*/false))))); |
| 513 } |
| 514 |
| 515 // static |
| 516 void GlobalHistogramAllocator::Set( |
| 517 std::unique_ptr<GlobalHistogramAllocator> allocator) { |
| 518 // Releasing or changing an allocator is extremely dangerous because it |
| 519 // likely has histograms stored within it. If the backing memory is also |
| 520 // released, future accesses to those histograms will seg-fault. |
| 521 CHECK(!g_allocator); |
| 522 g_allocator = allocator.release(); |
| 523 size_t existing = StatisticsRecorder::GetHistogramCount(); |
| 524 |
| 525 DLOG_IF(WARNING, existing) |
| 526 << existing << " histograms were created before persistence was enabled."; |
| 527 } |
| 528 |
| 529 // static |
| 530 GlobalHistogramAllocator* GlobalHistogramAllocator::Get() { |
| 531 return g_allocator; |
| 532 } |
| 533 |
| 534 // static |
| 535 std::unique_ptr<GlobalHistogramAllocator> |
| 536 GlobalHistogramAllocator::ReleaseForTesting() { |
| 537 GlobalHistogramAllocator* histogram_allocator = g_allocator; |
| 538 if (!histogram_allocator) |
| 539 return nullptr; |
| 540 PersistentMemoryAllocator* memory_allocator = |
| 541 histogram_allocator->memory_allocator(); |
| 542 |
| 543 // Before releasing the memory, it's necessary to have the Statistics- |
| 544 // Recorder forget about the histograms contained therein; otherwise, |
| 545 // some operations will try to access them and the released memory. |
| 546 PersistentMemoryAllocator::Iterator iter; |
| 547 PersistentMemoryAllocator::Reference ref; |
| 548 uint32_t type_id; |
| 549 memory_allocator->CreateIterator(&iter); |
| 550 while ((ref = memory_allocator->GetNextIterable(&iter, &type_id)) != 0) { |
| 551 if (type_id == kTypeIdHistogram) { |
| 552 PersistentHistogramData* histogram_data = |
| 553 memory_allocator->GetAsObject<PersistentHistogramData>( |
| 554 ref, kTypeIdHistogram); |
| 555 DCHECK(histogram_data); |
| 556 StatisticsRecorder::ForgetHistogramForTesting(histogram_data->name); |
| 557 |
| 558 // If a test breaks here then a memory region containing a histogram |
| 559 // actively used by this code is being released back to the test. |
| 560 // If that memory segment were to be deleted, future calls to create |
| 561 // persistent histograms would crash. To avoid this, have the test call |
| 562 // the method GetCreateHistogramResultHistogram() *before* setting |
| 563 // the (temporary) memory allocator via Set() so that that |
| 564 // histogram is instead allocated from the process heap. |
| 565 DCHECK_NE(kResultHistogram, histogram_data->name); |
| 587 } | 566 } |
| 588 } | 567 } |
| 568 |
| 569 g_allocator = nullptr; |
| 570 return WrapUnique(histogram_allocator); |
| 571 }; |
| 572 |
| 573 GlobalHistogramAllocator::GlobalHistogramAllocator( |
| 574 std::unique_ptr<PersistentMemoryAllocator> memory) |
| 575 : PersistentHistogramAllocator(std::move(memory)) { |
| 576 CreateIterator(&import_iterator_); |
| 577 } |
| 578 |
| 579 void GlobalHistogramAllocator::ImportHistogramsToStatisticsRecorder() { |
| 580 // Skip the import if it's the histogram that was last created. Should a |
| 581 // race condition cause the "last created" to be overwritten before it |
| 582 // is recognized here then the histogram will be created and be ignored |
| 583 // when it is detected as a duplicate by the statistics-recorder. This |
| 584 // simple check reduces the time of creating persistent histograms by |
| 585 // about 40%. |
| 586 Reference record_to_ignore = last_created(); |
| 587 |
| 588 // There is no lock on this because it's expected to be called only by |
| 589 // the StatisticsRecorder which has its own lock. |
| 590 while (true) { |
| 591 std::unique_ptr<HistogramBase> histogram = |
| 592 GetNextHistogramWithIgnore(&import_iterator_, record_to_ignore); |
| 593 if (!histogram) |
| 594 break; |
| 595 StatisticsRecorder::RegisterOrDeleteDuplicate(histogram.release()); |
| 596 } |
| 589 } | 597 } |
| 590 | 598 |
| 591 } // namespace base | 599 } // namespace base |
| OLD | NEW |