// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "gpu/common/gpu_trace_event.h"

#include "base/format_macros.h"
#include "base/process_util.h"
#include "base/stringprintf.h"
#include "base/time.h"
#include "base/utf_string_conversions.h"

#define USE_UNRELIABLE_NOW

using namespace base;

namespace gpu {

// Controls the number of trace events we will buffer in-memory
// before throwing them away.
#define TRACE_EVENT_BUFFER_SIZE 500000
#define TRACE_EVENT_BATCH_SIZE 1000

#define TRACE_EVENT_MAX_CATEGORIES 42

static TraceCategory g_categories[TRACE_EVENT_MAX_CATEGORIES];
static int g_category_index = 0;

////////////////////////////////////////////////////////////////////////////////
//
// TraceCategory
//
////////////////////////////////////////////////////////////////////////////////
TraceCategory::TraceCategory()
    : name_(NULL) {
  base::subtle::NoBarrier_Store(&enabled_,
      static_cast<base::subtle::Atomic32>(0));
}

TraceCategory::~TraceCategory() {
  base::subtle::NoBarrier_Store(&enabled_,
      static_cast<base::subtle::Atomic32>(0));
}

////////////////////////////////////////////////////////////////////////////////
//
// TraceValue
//
////////////////////////////////////////////////////////////////////////////////

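// Releases the string storage owned by this value (if any) and resets it to
// the undefined state with a zeroed payload.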
void TraceValue::Destroy() {
  if (type_ == TRACE_TYPE_STRING) {
    free(value_.as_string);
    value_.as_string = NULL;
  }
  type_ = TRACE_TYPE_UNDEFINED;
  value_.as_uint = 0ull;
}

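// Assignment makes a deep copy: string payloads are duplicated so each value
// owns its own buffer; every other type is copied as a raw 64-bit value.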
TraceValue& TraceValue::operator=(const TraceValue& rhs) {
  DCHECK(sizeof(value_) == sizeof(uint64));
  Destroy();
  type_ = rhs.type_;
  if (rhs.type_ == TRACE_TYPE_STRING) {
    value_.as_string = base::strdup(rhs.value_.as_string);
  } else {
    // Copy all 64 bits for all other types.
    value_.as_uint = rhs.value_.as_uint;
  }
  return *this;
}

bool TraceValue::operator==(const TraceValue& rhs) const {
  if (type_ != rhs.type())
    return false;
  if (rhs.type_ == TRACE_TYPE_STRING) {
    return (strcmp(value_.as_string, rhs.value_.as_string) == 0);
  } else {
    // Compare all 64 bits for all other types. Unused bits are set to zero
    // by the constructors of types that may be less than 64 bits.
    return (value_.as_uint == rhs.value_.as_uint);
  }
}

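// Appends this value to |out| as a fragment of JSON. Numbers and booleans are
// formatted directly; strings are appended with embedded double quotes
// rewritten to single quotes so they cannot terminate the surrounding quoted
// JSON value early.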
void TraceValue::AppendAsJSON(std::string* out) const {
  char temp_string[128];
  std::string::size_type start_pos;
  switch (type_) {
    case TRACE_TYPE_BOOL:
      *out += as_bool() ? "true" : "false";
      break;
    case TRACE_TYPE_UINT:
      base::snprintf(temp_string, sizeof(temp_string), "%llu",
                     static_cast<unsigned long long>(as_uint()));
      *out += temp_string;
      break;
    case TRACE_TYPE_INT:
      base::snprintf(temp_string, sizeof(temp_string), "%lld",
                     static_cast<long long>(as_int()));
      *out += temp_string;
      break;
    case TRACE_TYPE_DOUBLE:
      base::snprintf(temp_string, sizeof(temp_string), "%f", as_double());
      *out += temp_string;
      break;
    case TRACE_TYPE_POINTER:
      base::snprintf(temp_string, sizeof(temp_string), "%p", as_pointer());
      *out += temp_string;
      break;
    case TRACE_TYPE_STRING:
      start_pos = out->size();
      *out += as_string();
      // Replace each " character with ' so the string cannot break out of the
      // enclosing JSON quotes.
      while ((start_pos = out->find_first_of('\"', start_pos)) !=
             std::string::npos)
        (*out)[start_pos] = '\'';
      break;
    default:
      break;
  }
}

////////////////////////////////////////////////////////////////////////////////
//
// TraceEvent
//
////////////////////////////////////////////////////////////////////////////////

namespace {
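// Maps a TraceEventPhase to the single-character phase code emitted in the
// JSON output: "B" for begin, "E" for end, "I" for instant.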
const char* GetPhaseStr(TraceEventPhase phase) {
  if (phase == GPU_TRACE_EVENT_PHASE_BEGIN) {
    return "B";
  } else if (phase == GPU_TRACE_EVENT_PHASE_INSTANT) {
    return "I";
  } else if (phase == GPU_TRACE_EVENT_PHASE_END) {
    return "E";
  } else {
    DCHECK(false);
    return "?";
  }
}
}  // namespace

TraceEvent::TraceEvent()
    : processId(0),
      threadId(0),
      phase(GPU_TRACE_EVENT_PHASE_BEGIN),
      category(NULL),
      name(NULL) {
  memset(&argNames, 0, sizeof(argNames));
}

TraceEvent::~TraceEvent() {
}

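// Serializes up to |count| events from |events|, starting at |start|, as a
// JSON array. Used to emit the buffer in fixed-size batches.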
void TraceEvent::AppendAsJSON(std::string* out,
                              const std::vector<TraceEvent>& events,
                              size_t start,
                              size_t count) {
  *out += "[";
  for (size_t i = 0; i < count && start + i < events.size(); ++i) {
    if (i > 0)
      *out += ",";
    events[i + start].AppendAsJSON(out);
  }
  *out += "]";
}

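// Serializes this event as one JSON object carrying the category, process and
// thread ids, timestamp, phase code, name, and any named arguments.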
void TraceEvent::AppendAsJSON(std::string* out) const {
  int nargs = 0;
  for (int i = 0; i < TRACE_MAX_NUM_ARGS; ++i) {
    if (argNames[i] == NULL)
      break;
    nargs += 1;
  }

  const char* phaseStr = GetPhaseStr(phase);
  int64 time_int64 = timestamp.ToInternalValue();
  StringAppendF(out,
      "{\"cat\":\"%s\",\"pid\":%i,\"tid\":%i,\"ts\":%lld,"
      "\"ph\":\"%s\",\"name\":\"%s\",\"args\":{",
      category->name(),
      static_cast<int>(processId),
      static_cast<int>(threadId),
      static_cast<long long>(time_int64),
      phaseStr,
      name);
  for (int i = 0; i < nargs; ++i) {
    if (i > 0)
      *out += ",";
    *out += "\"";
    *out += argNames[i];
    *out += "\":\"";
    argValues[i].AppendAsJSON(out);
    *out += "\"";
  }
  *out += "}}";
}

////////////////////////////////////////////////////////////////////////////////
//
// TraceLog
//
////////////////////////////////////////////////////////////////////////////////

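// The trace log is a process-wide singleton, allocated in static memory via
// StaticMemorySingletonTraits.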
// static
TraceLog* TraceLog::GetInstance() {
  return Singleton<TraceLog, StaticMemorySingletonTraits<TraceLog> >::get();
}

TraceLog::TraceLog()
    : enabled_(false) {
  logged_events_.reserve(1024);
}

TraceLog::~TraceLog() {
}

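// Returns the category with the given name, registering it in the global
// table on first use. Pointers into the table stay valid for the lifetime of
// the process.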
TraceCategory* TraceLog::GetCategory(const char* name) {
  AutoLock lock(lock_);
  for (int i = 0; i < g_category_index; i++) {
    if (strcmp(g_categories[i].name(), name) == 0)
      return &g_categories[i];
  }
  CHECK(g_category_index < TRACE_EVENT_MAX_CATEGORIES) <<
      "must increase TRACE_EVENT_MAX_CATEGORIES";
  int new_index = g_category_index++;
  g_categories[new_index].set(name, enabled_);
  return &g_categories[new_index];
}

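// Turns tracing on or off for every registered category. Disabling also
// flushes whatever is currently buffered to the output callback.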
void TraceLog::SetEnabled(bool enabled) {
  AutoLock lock(lock_);
  if (enabled == enabled_)
    return;
  if (enabled) {
    // Enable all categories.
    enabled_ = true;
    for (int i = 0; i < g_category_index; i++) {
      base::subtle::NoBarrier_Store(&g_categories[i].enabled_,
          static_cast<base::subtle::Atomic32>(1));
    }
  } else {
    // Disable all categories.
    for (int i = 0; i < g_category_index; i++) {
      base::subtle::NoBarrier_Store(&g_categories[i].enabled_,
          static_cast<base::subtle::Atomic32>(0));
    }
    enabled_ = false;
    FlushWithLockAlreadyHeld();
  }
}

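// Returns how full the in-memory event buffer is, as a fraction in [0, 1].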
float TraceLog::GetBufferPercentFull() const {
  return static_cast<float>(
      static_cast<double>(logged_events_.size()) / TRACE_EVENT_BUFFER_SIZE);
}

void TraceLog::SetOutputCallback(TraceLog::OutputCallback* cb) {
  AutoLock lock(lock_);
  if (enabled_) {
    FlushWithLockAlreadyHeld();
  }
  output_callback_.reset(cb);
}

void TraceLog::SetBufferFullCallback(TraceLog::BufferFullCallback* cb) {
  AutoLock lock(lock_);
  buffer_full_callback_.reset(cb);
}

void TraceLog::AddRemotelyCollectedData(const std::string& json_events) {
  AutoLock lock(lock_);
  if (output_callback_.get())
    output_callback_->Run(json_events);
}

void TraceLog::Flush() {
  AutoLock lock(lock_);
  FlushWithLockAlreadyHeld();
}

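// Drains the buffer, handing events to the output callback as JSON in batches
// of TRACE_EVENT_BATCH_SIZE. The caller must already hold |lock_|.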
void TraceLog::FlushWithLockAlreadyHeld() {
  if (output_callback_.get() && logged_events_.size()) {
    for (size_t i = 0; i < logged_events_.size(); i += TRACE_EVENT_BATCH_SIZE) {
      std::string json_events;
      TraceEvent::AppendAsJSON(&json_events, logged_events_,
                               i, TRACE_EVENT_BATCH_SIZE);
      output_callback_->Run(json_events);
    }
  }
  logged_events_.clear();
}

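// Records a single trace event. The event is timestamped immediately but is
// dropped once the buffer holds TRACE_EVENT_BUFFER_SIZE events; the
// buffer-full callback fires when the last slot is consumed. The |file| and
// |line| parameters are only DCHECKed, not recorded.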
void TraceLog::AddTraceEvent(TraceEventPhase phase,
                             const char* file, int line,
                             TraceCategory* category,
                             const char* name,
                             const char* arg1name, TraceValue arg1val,
                             const char* arg2name, TraceValue arg2val) {
  DCHECK(file && name);
#ifdef USE_UNRELIABLE_NOW
  TimeTicks now = TimeTicks::HighResNow();
#else
  TimeTicks now = TimeTicks::Now();
#endif
  AutoLock lock(lock_);
  if (logged_events_.size() >= TRACE_EVENT_BUFFER_SIZE)
    return;
  logged_events_.push_back(TraceEvent());
  TraceEvent& event = logged_events_.back();
  event.processId = static_cast<unsigned long>(base::GetCurrentProcId());
  event.threadId = PlatformThread::CurrentId();
  event.timestamp = now;
  event.phase = phase;
  event.category = category;
  event.name = name;
  event.argNames[0] = arg1name;
  event.argValues[0] = arg1val;
  event.argNames[1] = arg2name;
  event.argValues[1] = arg2val;
  COMPILE_ASSERT(TRACE_MAX_NUM_ARGS == 2, TraceEvent_arg_count_out_of_sync);
  if (logged_events_.size() == TRACE_EVENT_BUFFER_SIZE &&
      buffer_full_callback_.get())
    buffer_full_callback_->Run();
}

}  // namespace gpu