Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "base/trace_event/category_registry.h" | |
| 6 | |
| 7 #include <string.h> | |
| 8 | |
| 9 #include "base/atomicops.h" | |
| 10 #include "base/lazy_instance.h" | |
| 11 #include "base/logging.h" | |
| 12 #include "base/synchronization/lock.h" | |
| 13 #include "base/third_party/dynamic_annotations/dynamic_annotations.h" | |
| 14 #include "base/trace_event/trace_category.h" | |
| 15 | |
| 16 namespace base { | |
| 17 namespace trace_event { | |
| 18 | |
| 19 namespace { | |
| 20 | |
| 21 constexpr size_t kMaxCategories = 200; | |
| 22 const int kNumBuiltinCategories = 4; | |
| 23 | |
| 24 // These entries must be kept consistent with the kCategory* consts below. | |
| 25 TraceCategory g_categories[kMaxCategories] = { | |
| 26 {"tracing categories exhausted; must increase kMaxCategories"}, | |
| 27 {"tracing already shutdown"}, // See kCategoryAlreadyShutdown below. | |
| 28 {"__metadata"}, // See kCategoryMetadata below. | |
| 29 {"toplevel"}, // Warmup the toplevel category. | |
| 30 }; | |
| 31 | |
| 32 base::subtle::AtomicWord g_category_index = kNumBuiltinCategories; | |
| 33 | |
| 34 base::LazyInstance<base::Lock>::Leaky g_category_lock = | |
|
oystein (OOO til 10th of July)
2016/10/28 06:28:00
Lazily created locks scare me :). Are we sure ther
Primiano Tucci (use gerrit)
2016/10/28 15:28:39
This is a quite established pattern:
https://cs.ch
| |
| 35 LAZY_INSTANCE_INITIALIZER; | |
| 36 | |
| 37 void DCheckIsValidCategoryPtr(const TraceCategory* category) { | |
|
oystein (OOO til 10th of July)
2016/10/28 06:28:00
#if DCHECK_IS_ON() rather than "DCheck" as part of
Primiano Tucci (use gerrit)
2016/10/28 15:28:39
Actually thinking more I think the right pattern i
| |
| 38 // If any of these are hit, something has cached a corrupt category pointer. | |
| 39 uintptr_t ptr = reinterpret_cast<uintptr_t>(category); | |
| 40 DCHECK_EQ(0u, ptr % sizeof(void*)); | |
| 41 DCHECK(ptr >= reinterpret_cast<uintptr_t>(&g_categories[0]) && | |
| 42 ptr <= reinterpret_cast<uintptr_t>(&g_categories[kMaxCategories - 1])); | |
| 43 } | |
| 44 | |
| 45 } // namespace | |
| 46 | |
// static
// Definitions of class statics declared in the header. They live outside the
// anonymous namespace (unlike |g_categories| itself) because other
// translation units reference them through CategoryRegistry. They must stay
// in sync with the first entries of |g_categories| above.
TraceCategory* const CategoryRegistry::kCategoryExhausted = &g_categories[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &g_categories[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &g_categories[2];
| 52 | |
| 53 // static | |
| 54 void CategoryRegistry::Initialize() { | |
| 55 // Trace is enabled or disabled on one thread while other threads are | |
| 56 // accessing the enabled flag. We don't care whether edge-case events are | |
| 57 // traced or not, so we allow races on the enabled flag to keep the trace | |
| 58 // macros fast. | |
| 59 for (size_t i = 0; i < kMaxCategories; ++i) { | |
| 60 ANNOTATE_BENIGN_RACE(g_categories[i].state_ptr(), | |
| 61 "trace_event category enabled"); | |
| 62 // If this DCHECK is hit in a test it means that ResetForTesting() is not | |
| 63 // called and the categories state leaks between test fixtures. | |
| 64 DCHECK(!g_categories[i].is_enabled()); | |
| 65 } | |
| 66 } | |
| 67 | |
| 68 // static | |
| 69 void CategoryRegistry::ResetForTesting() { | |
| 70 AutoLock lock(g_category_lock.Get()); | |
| 71 for (size_t i = 0; i < kMaxCategories; ++i) | |
| 72 g_categories[i].reset_for_testing(); | |
| 73 } | |
| 74 | |
// static
// Finds the category named |category_name|, appending a new entry if it does
// not exist yet. On return |*category| points at the matching entry, or at
// kCategoryExhausted if the registry is full. Returns true iff a brand-new
// category was created by this call.
bool CategoryRegistry::GetOrCreateCategoryByName(const char* category_name,
                                                 TraceCategory** category) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quote";

  // The g_categories is append only, avoid using a lock for the fast path.
  size_t category_index = base::subtle::Acquire_Load(&g_category_index);

  // Search for pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(g_categories[i].name(), category_name) == 0) {
      *category = &g_categories[i];
      return false;
    }
  }

  // This is the slow path: the lock is not held in the case above, so more
  // than one thread could have reached here trying to add the same category.
  // Only hold the lock when actually appending a new category, and check the
  // categories groups again.
  // TODO(primiano): there should be no need for the acquire/release semantics
  // on g_category_index below, the outer lock implies that. Remove once the
  // tracing refactoring reaches a quieter state and we can afford the risk.
  AutoLock lock(g_category_lock.Get());
  // Re-load the index: another thread may have appended the same category
  // between the unlocked scan above and acquiring the lock.
  category_index = base::subtle::Acquire_Load(&g_category_index);
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(g_categories[i].name(), category_name) == 0) {
      *category = &g_categories[i];
      return false;
    }
  }

  // Create a new category.
  if (category_index >= kMaxCategories) {
    NOTREACHED() << "must increase kMaxCategories";
    *category = kCategoryExhausted;
    return false;
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);

  // Fill in the new slot *before* publishing its index below: lock-free
  // readers scan only up to g_category_index, so they must never observe a
  // half-initialized entry.
  *category = &g_categories[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);

  // Update the max index now.
  base::subtle::Release_Store(&g_category_index, category_index + 1);
  return true;
}
| 130 | |
// static
// Recovers the owning TraceCategory from a pointer to its enabled-state
// byte, as handed out to the trace macros.
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
    const uint8_t* category_state) {
  // the state_ptr() is guaranteed to be the first field in the Category. See
  // the static_assert in trace_category.h .
  const TraceCategory* category =
      reinterpret_cast<const TraceCategory*>(category_state);
  DCheckIsValidCategoryPtr(category);
  return category;
}
| 141 | |
| 142 // static | |
| 143 bool CategoryRegistry::IsBuiltinCategory(const TraceCategory* category) { | |
| 144 const uintptr_t p = reinterpret_cast<uintptr_t>(category); | |
| 145 DCheckIsValidCategoryPtr(category); | |
| 146 return p < reinterpret_cast<uintptr_t>(&g_categories[kNumBuiltinCategories]); | |
|
oystein (OOO til 10th of July)
2016/10/28 06:28:00
Hmm do we actually need all o the uintptr_t castin
Primiano Tucci (use gerrit)
2016/10/28 15:28:39
good point. you are right. done.
| |
| 147 } | |
| 148 | |
| 149 // static | |
| 150 CategoryRegistry::Range CategoryRegistry::GetAllCategories() { | |
| 151 // The |g_categories| array is append only. We have to only guarantee to | |
| 152 // not return an index to a category which is being initialized by | |
| 153 // GetOrCreateCategoryByName(). | |
| 154 size_t category_index = base::subtle::Acquire_Load(&g_category_index); | |
| 155 return CategoryRegistry::Range(&g_categories[0], | |
| 156 &g_categories[category_index]); | |
| 157 } | |
| 158 | |
| 159 } // namespace trace_event | |
| 160 } // namespace base | |
| OLD | NEW |