OLD | NEW |
(Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "base/trace_event/category_registry.h" |
| 6 |
| 7 #include <string.h> |
| 8 |
| 9 #include <type_traits> |
| 10 |
| 11 #include "base/atomicops.h" |
| 12 #include "base/debug/leak_annotations.h" |
| 13 #include "base/lazy_instance.h" |
| 14 #include "base/logging.h" |
| 15 #include "base/synchronization/lock.h" |
| 16 #include "base/third_party/dynamic_annotations/dynamic_annotations.h" |
| 17 #include "base/trace_event/trace_category.h" |
| 18 |
| 19 namespace base { |
| 20 namespace trace_event { |
| 21 |
| 22 namespace { |
| 23 |
| 24 constexpr size_t kMaxCategories = 200; |
| 25 const int kNumBuiltinCategories = 4; |
| 26 |
| 27 // |g_categories| might end up causing creating dynamic initializers if not POD. |
| 28 static_assert(std::is_pod<TraceCategory>::value, "TraceCategory must be POD"); |
| 29 |
| 30 // These entries must be kept consistent with the kCategory* consts below. |
| 31 TraceCategory g_categories[kMaxCategories] = { |
| 32 {0, 0, "tracing categories exhausted; must increase kMaxCategories"}, |
| 33 {0, 0, "tracing already shutdown"}, // See kCategoryAlreadyShutdown below. |
| 34 {0, 0, "__metadata"}, // See kCategoryMetadata below. |
| 35 {0, 0, "toplevel"}, // Warmup the toplevel category. |
| 36 }; |
| 37 |
| 38 base::subtle::AtomicWord g_category_index = kNumBuiltinCategories; |
| 39 |
| 40 base::LazyInstance<base::Lock>::Leaky g_category_lock = |
| 41 LAZY_INSTANCE_INITIALIZER; |
| 42 |
| 43 bool IsValidCategoryPtr(const TraceCategory* category) { |
| 44 // If any of these are hit, something has cached a corrupt category pointer. |
| 45 uintptr_t ptr = reinterpret_cast<uintptr_t>(category); |
| 46 return ptr % sizeof(void*) == 0 && |
| 47 ptr >= reinterpret_cast<uintptr_t>(&g_categories[0]) && |
| 48 ptr <= reinterpret_cast<uintptr_t>(&g_categories[kMaxCategories - 1]); |
| 49 } |
| 50 |
| 51 } // namespace |
| 52 |
| 53 // static |
// Sentinel category pointers. These alias the corresponding pre-registered
// entries in the initializer list of |g_categories| above.
TraceCategory* const CategoryRegistry::kCategoryExhausted = &g_categories[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &g_categories[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &g_categories[2];
| 58 |
| 59 // static |
| 60 void CategoryRegistry::Initialize() { |
| 61 // Trace is enabled or disabled on one thread while other threads are |
| 62 // accessing the enabled flag. We don't care whether edge-case events are |
| 63 // traced or not, so we allow races on the enabled flag to keep the trace |
| 64 // macros fast. |
| 65 for (size_t i = 0; i < kMaxCategories; ++i) { |
| 66 ANNOTATE_BENIGN_RACE(g_categories[i].state_ptr(), |
| 67 "trace_event category enabled"); |
| 68 // If this DCHECK is hit in a test it means that ResetForTesting() is not |
| 69 // called and the categories state leaks between test fixtures. |
| 70 DCHECK(!g_categories[i].is_enabled()); |
| 71 } |
| 72 } |
| 73 |
| 74 // static |
| 75 void CategoryRegistry::ResetForTesting() { |
| 76 AutoLock lock(g_category_lock.Get()); |
| 77 for (size_t i = 0; i < kMaxCategories; ++i) |
| 78 g_categories[i].reset_for_testing(); |
| 79 } |
| 80 |
| 81 // static |
// Looks up |category_name| in |g_categories|, appending a new entry if it is
// not already registered. On return, *category points at the registry entry
// (or at kCategoryExhausted if the registry is full). Returns true iff a new
// category was created, false if it already existed.
bool CategoryRegistry::GetOrCreateCategoryByName(const char* category_name,
                                                 TraceCategory** category) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quote";

  // The g_categories is append only, avoid using a lock for the fast path.
  size_t category_index = base::subtle::Acquire_Load(&g_category_index);

  // Search for pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(g_categories[i].name(), category_name) == 0) {
      *category = &g_categories[i];
      return false;
    }
  }

  // This is the slow path: the lock is not held in the case above, so more
  // than one thread could have reached here trying to add the same category.
  // Only hold the lock when actually appending a new category, and check the
  // categories groups again.
  // TODO(primiano): there should be no need for the acquire/release semantics
  // on g_category_index below, the outer lock implies that. Remove once the
  // tracing refactoring reaches a quieter state and we can afford the risk.
  AutoLock lock(g_category_lock.Get());
  // Re-load under the lock: another thread may have appended the same name
  // between the lock-free scan above and acquiring the lock.
  category_index = base::subtle::Acquire_Load(&g_category_index);
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(g_categories[i].name(), category_name) == 0) {
      *category = &g_categories[i];
      return false;
    }
  }

  // Create a new category.
  if (category_index >= kMaxCategories) {
    NOTREACHED() << "must increase kMaxCategories";
    *category = kCategoryExhausted;
    return false;
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);

  *category = &g_categories[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);

  // Update the max index now. The Release_Store pairs with the lock-free
  // Acquire_Load on the fast path: the entry's name must become visible to
  // other threads before the incremented index does.
  base::subtle::Release_Store(&g_category_index, category_index + 1);
  return true;
}
| 136 |
| 137 // static |
| 138 const TraceCategory* CategoryRegistry::GetCategoryByStatePtr( |
| 139 const uint8_t* category_state) { |
| 140 const TraceCategory* category = TraceCategory::FromStatePtr(category_state); |
| 141 DCHECK(IsValidCategoryPtr(category)); |
| 142 return category; |
| 143 } |
| 144 |
| 145 // static |
| 146 bool CategoryRegistry::IsBuiltinCategory(const TraceCategory* category) { |
| 147 DCHECK(IsValidCategoryPtr(category)); |
| 148 return category < &g_categories[kNumBuiltinCategories]; |
| 149 } |
| 150 |
| 151 // static |
| 152 CategoryRegistry::Range CategoryRegistry::GetAllCategories() { |
| 153 // The |g_categories| array is append only. We have to only guarantee to |
| 154 // not return an index to a category which is being initialized by |
| 155 // GetOrCreateCategoryByName(). |
| 156 size_t category_index = base::subtle::Acquire_Load(&g_category_index); |
| 157 return CategoryRegistry::Range(&g_categories[0], |
| 158 &g_categories[category_index]); |
| 159 } |
| 160 |
| 161 } // namespace trace_event |
| 162 } // namespace base |
OLD | NEW |