| OLD | NEW |
| (Empty) |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 """Monitor tracing events on chrome via chrome remote debugging.""" | |
| 6 | |
| 7 import itertools | |
| 8 import logging | |
| 9 import operator | |
| 10 | |
| 11 import clovis_constants | |
| 12 import devtools_monitor | |
| 13 | |
| 14 | |
class TracingTrack(devtools_monitor.Track):
  """Grabs and processes trace event messages.

  See https://goo.gl/Qabkqk for details on the protocol.
  """
  def __init__(self, connection, categories, fetch_stream=False):
    """Initialize this TracingTrack.

    Args:
      connection: a DevToolsConnection, or None to build an offline track
        (as done by Filter() and FromJsonDict()).
      categories: ([str] or None) If set, a list of categories to enable or
        disable in Chrome tracing. Categories prefixed with '-' are
        disabled.
      fetch_stream: if true, use a websocket stream to fetch tracing data rather
        than dataCollected events. It appears based on very limited testing that
        a stream is slower than the default reporting as dataCollected events.
    """
    super(TracingTrack, self).__init__(connection)
    if connection:
      connection.RegisterListener('Tracing.dataCollected', self)

    # The docstring allows None; guard against set(None) raising TypeError.
    self._categories = set(categories) if categories is not None else set()
    params = {}
    params['categories'] = ','.join(self._categories)
    if fetch_stream:
      params['transferMode'] = 'ReturnAsStream'

    if connection:
      connection.SyncRequestNoResponse('Tracing.start', params)

    self._events = []           # [Event], in arrival order, unsorted.
    self._base_msec = None      # Smallest start_msec seen so far.
    self._interval_tree = None  # Lazily built _IntervalTree index.
    self._main_frame_id = None  # Cached by GetMainFrameID().

  def Handle(self, method, event):
    """Handles a Tracing.dataCollected devtools notification.

    Args:
      method: (str) The devtools method name (unused).
      event: (dict) The devtools message; event['params']['value'] holds the
        list of raw tracing events.
    """
    # Don't shadow the |event| parameter with the parsed events.
    for raw_event in event['params']['value']:
      parsed_event = Event(raw_event)
      self._events.append(parsed_event)
      if self._base_msec is None or parsed_event.start_msec < self._base_msec:
        self._base_msec = parsed_event.start_msec
      # Invalidate our index rather than trying to be fancy and incrementally
      # update.
      self._interval_tree = None

  def Categories(self):
    """Returns the set of categories in this trace."""
    return self._categories

  def GetFirstEventMillis(self):
    """Find the canonical start time for this track.

    Returns:
      The millisecond timestamp of the first request, or None if no event has
      been collected yet.
    """
    return self._base_msec

  def GetEvents(self):
    """Returns a list of tracing.Event. Not sorted."""
    return self._events

  def GetMatchingEvents(self, category, name):
    """Gets events matching |category| and |name|."""
    return [e for e in self.GetEvents() if e.Matches(category, name)]

  def GetMatchingMainFrameEvents(self, category, name):
    """Gets events matching |category| and |name| that occur in the main frame.

    Events without a 'frame' key in their |args| are discarded.
    """
    matching_events = self.GetMatchingEvents(category, name)
    return [e for e in matching_events
            if 'frame' in e.args and e.args['frame'] == self.GetMainFrameID()]

  def GetMainFrameRoutingID(self):
    """Returns the main frame routing ID.

    Raises:
      AssertionError: if no RenderFrameImpl::OnNavigate event is present.
    """
    for event in self.GetMatchingEvents(
        'navigation', 'RenderFrameImpl::OnNavigate'):
      return event.args['id']
    assert False, 'Missing RenderFrameImpl::OnNavigate event.'

  def GetMainFrameID(self):
    """Returns the main frame ID.

    Computed lazily from the earliest blink.user_timing navigationStart event,
    then cached.
    """
    if not self._main_frame_id:
      navigation_start_events = self.GetMatchingEvents(
          'blink.user_timing', 'navigationStart')
      first_event = min(navigation_start_events, key=lambda e: e.start_msec)
      self._main_frame_id = first_event.args['frame']

    return self._main_frame_id

  def SetMainFrameID(self, frame_id):
    """Set the main frame ID. Normally this is used only for testing."""
    self._main_frame_id = frame_id

  def EventsAt(self, msec):
    """Gets events active at a timestamp.

    Args:
      msec: tracing milliseconds to query. Tracing milliseconds appears to be
        since chrome startup (ie, arbitrary epoch).

    Returns:
      List of events active at that timestamp. Instantaneous (ie, instant,
      sample and counter) events are never included. Event end times are
      exclusive, so that an event ending at the usec parameter will not be
      returned.
    """
    self._IndexEvents()
    return self._interval_tree.EventsAt(msec)

  def Filter(self, pid=None, tid=None, categories=None):
    """Returns a new TracingTrack with a subset of the events.

    Args:
      pid: (int or None) Selects events from this PID.
      tid: (int or None) Selects events from this TID.
      categories: (set([str]) or None) Selects events belonging to one of the
        categories.

    Returns:
      A new offline TracingTrack owning its own (filtered) event list.
    """
    # List comprehensions instead of filter(): under Python 3 filter() returns
    # a one-shot iterator, which would make the resulting track unusable after
    # its first traversal.
    events = self._events
    if pid is not None:
      events = [e for e in events if e.tracing_event['pid'] == pid]
    if tid is not None:
      events = [e for e in events if e.tracing_event['tid'] == tid]
    if categories is not None:
      events = [e for e in events
                if set(e.category.split(',')).intersection(categories)]
    tracing_track = TracingTrack(None, clovis_constants.DEFAULT_CATEGORIES)
    # Copy, so that the new track never aliases our event list.
    tracing_track._events = list(events)
    tracing_track._categories = self._categories
    if categories is not None:
      tracing_track._categories = self._categories.intersection(categories)
    return tracing_track

  def ToJsonDict(self):
    """Serializes the track to a JSON-compatible dict."""
    return {'categories': list(self._categories),
            'events': [e.ToJsonDict() for e in self._events]}

  @classmethod
  def FromJsonDict(cls, json_dict):
    """Deserializes a track produced by ToJsonDict(). Returns None for falsy
    input."""
    if not json_dict:
      return None
    assert 'events' in json_dict
    events = [Event(e) for e in json_dict['events']]
    tracing_track = TracingTrack(None, clovis_constants.DEFAULT_CATEGORIES)
    tracing_track._categories = set(json_dict.get('categories', []))
    tracing_track._events = events
    tracing_track._base_msec = events[0].start_msec if events else 0
    for e in events[1:]:
      if e.type == 'M':
        continue  # No timestamp for metadata events.
      assert e.start_msec > 0
      if e.start_msec < tracing_track._base_msec:
        tracing_track._base_msec = e.start_msec
    return tracing_track

  def OverlappingEvents(self, start_msec, end_msec):
    """Returns the set of events overlapping [start_msec, end_msec)."""
    self._IndexEvents()
    return self._interval_tree.OverlappingEvents(start_msec, end_msec)

  def EventsEndingBetween(self, start_msec, end_msec):
    """Gets the list of events ending within an interval.

    Args:
      start_msec: the start of the range to query, in milliseconds, inclusive.
      end_msec: the end of the range to query, in milliseconds, inclusive.

    Returns:
      See OverlappingEvents() above.
    """
    overlapping_events = self.OverlappingEvents(start_msec, end_msec)
    return [e for e in overlapping_events
            if start_msec <= e.end_msec <= end_msec]

  def EventFromStep(self, step_event):
    """Returns the Event associated with a step event, or None.

    Args:
      step_event: (Event) Step event.

    Returns:
      an Event that matches the step event, or None.
    """
    self._IndexEvents()
    assert 'step' in step_event.args and step_event.tracing_event['ph'] == 'T'
    candidates = self._interval_tree.EventsAt(step_event.start_msec)
    for event in candidates:
      # IDs are only unique within a process (often they are pointers).
      if (event.pid == step_event.pid and event.tracing_event['ph'] != 'T'
          and event.name == step_event.name and event.id == step_event.id):
        return event
    return None

  def _IndexEvents(self, strict=False):
    """Builds the interval-tree index over complete events, if not built.

    Spanning (begin/end, async, object) events are matched into single
    closed events via _SpanningEvents before indexing.

    Args:
      strict: if True, raise when spanning events remain unmatched.
    """
    if self._interval_tree:
      return
    complete_events = []
    spanning_events = self._SpanningEvents()
    for event in self._events:
      if not event.IsIndexable():
        continue
      if event.IsComplete():
        complete_events.append(event)
        continue
      matched_event = spanning_events.Match(event, strict)
      if matched_event is not None:
        complete_events.append(matched_event)
    self._interval_tree = _IntervalTree.FromEvents(complete_events)

    if strict and spanning_events.HasPending():
      raise devtools_monitor.DevToolsConnectionException(
          'Pending spanning events: %s' %
          '\n'.join([str(e) for e in spanning_events.PendingEvents()]))

  def _GetEvents(self):
    """Returns the indexed (complete) events."""
    self._IndexEvents()
    return self._interval_tree.GetEvents()

  def HasLoadingSucceeded(self):
    """Returns whether the loading had succeeded at recording time."""
    main_frame_id = self.GetMainFrameRoutingID()
    for event in self.GetMatchingEvents(
        'navigation', 'RenderFrameImpl::didFailProvisionalLoad'):
      if event.args['id'] == main_frame_id:
        return False
    for event in self.GetMatchingEvents(
        'navigation', 'RenderFrameImpl::didFailLoad'):
      if event.args['id'] == main_frame_id:
        return False
    return True

  class _SpanningEvents(object):
    """Matches spanning open events (duration, async, object) with their
    closing counterparts, producing closed events."""
    def __init__(self):
      self._duration_stack = []
      self._async_stacks = {}
      self._objects = {}
      # Dispatch table keyed by tracing phase ('ph').
      self._MATCH_HANDLER = {
          'B': self._DurationBegin,
          'E': self._DurationEnd,
          'b': self._AsyncStart,
          'e': self._AsyncEnd,
          'S': self._AsyncStart,
          'F': self._AsyncEnd,
          'N': self._ObjectCreated,
          'D': self._ObjectDestroyed,
          'M': self._Ignore,
          'X': self._Ignore,
          'R': self._Ignore,
          'p': self._Ignore,
          '(': self._Ignore,  # Context events.
          ')': self._Ignore,  # Ditto.
          None: self._Ignore,
          }

    def Match(self, event, strict=False):
      """Feeds an event in; returns a closed event when |event| closes a
      pending opening event, otherwise None."""
      return self._MATCH_HANDLER.get(
          event.type, self._Unsupported)(event, strict)

    def HasPending(self):
      """True iff some opening events are still unmatched."""
      return (self._duration_stack or
              self._async_stacks or
              self._objects)

    def PendingEvents(self):
      """Iterates over all unmatched opening events."""
      # .values() (not the py2-only .itervalues()); also yield the pending
      # object creation events themselves rather than their ids, so the
      # strict-mode diagnostic is readable.
      return itertools.chain(
          self._duration_stack,
          self._objects.values(),
          itertools.chain.from_iterable(self._async_stacks.values()))

    def _AsyncKey(self, event, _):
      # Async events are matched per (category, id) pair.
      return (event.tracing_event['cat'], event.id)

    def _Ignore(self, _event, _):
      return None

    def _Unsupported(self, event, _):
      raise devtools_monitor.DevToolsConnectionException(
          'Unsupported spanning event type: %s' % event)

    def _DurationBegin(self, event, _):
      self._duration_stack.append(event)
      return None

    def _DurationEnd(self, event, _):
      if not self._duration_stack:
        raise devtools_monitor.DevToolsConnectionException(
            'Unmatched duration end: %s' % event)
      start = self._duration_stack.pop()
      start.SetClose(event)
      return start

    def _AsyncStart(self, event, strict):
      key = self._AsyncKey(event, strict)
      self._async_stacks.setdefault(key, []).append(event)
      return None

    def _AsyncEnd(self, event, strict):
      key = self._AsyncKey(event, strict)
      if key not in self._async_stacks:
        message = 'Unmatched async end %s: %s' % (key, event)
        if strict:
          raise devtools_monitor.DevToolsConnectionException(message)
        else:
          logging.warning(message)
        return None
      stack = self._async_stacks[key]
      start = stack.pop()
      if not stack:
        del self._async_stacks[key]
      start.SetClose(event)
      return start

    def _ObjectCreated(self, event, _):
      # The tracing event format has object deletion timestamps being
      # exclusive, that is the timestamp for a deletion may equal that of the
      # next create at the same address. This asserts that does not happen in
      # practice as it is inconvenient to handle that correctly here.
      if event.id in self._objects:
        raise devtools_monitor.DevToolsConnectionException(
            'Multiple objects at same address: %s, %s' %
            (event, self._objects[event.id]))
      self._objects[event.id] = event
      return None

    def _ObjectDestroyed(self, event, _):
      if event.id not in self._objects:
        raise devtools_monitor.DevToolsConnectionException(
            'Missing object creation for %s' % event)
      start = self._objects[event.id]
      del self._objects[event.id]
      start.SetClose(event)
      return start
| 350 | |
| 351 | |
class Event(object):
  """Wraps a tracing event."""
  # Maps each closing phase to the opening phase it terminates.
  CLOSING_EVENTS = {'E': 'B',
                    'e': 'b',
                    'F': 'S',
                    'D': 'N'}
  __slots__ = ('_tracing_event', 'start_msec', 'end_msec', '_synthetic')
  def __init__(self, tracing_event, synthetic=False):
    """Creates Event.

    Intended to be created only by TracingTrack.

    Args:
      tracing_event: JSON tracing event, as defined in https://goo.gl/Qabkqk.
      synthetic: True if the event is synthetic. This is only used for indexing
        internal to TracingTrack.

    Raises:
      devtools_monitor.DevToolsConnectionException: for flow events
        ('s', 't', 'f'), which are not supported.
    """
    if not synthetic and tracing_event['ph'] in ('s', 't', 'f'):
      raise devtools_monitor.DevToolsConnectionException(
          'Unsupported event: %s' % tracing_event)

    self._tracing_event = tracing_event
    self._synthetic = synthetic
    # Tracing event timestamps are microseconds; we keep milliseconds.
    self.start_msec = tracing_event['ts'] / 1000.0
    self.end_msec = None
    if self.type == 'X':
      # Complete events carry their own duration; some only report the
      # thread duration ('tdur').
      if 'dur' in tracing_event:
        duration = tracing_event['dur']
      else:
        duration = tracing_event['tdur']
      self.end_msec = self.start_msec + duration / 1000.0

  @property
  def type(self):
    """Tracing phase ('ph'), or None for synthetic events."""
    return None if self._synthetic else self._tracing_event['ph']

  @property
  def category(self):
    """Comma-separated category list of the event."""
    return self._tracing_event['cat']

  @property
  def pid(self):
    """Process id of the event."""
    return self._tracing_event['pid']

  @property
  def args(self):
    """Event arguments; an empty dict when absent."""
    return self._tracing_event.get('args', {})

  @property
  def id(self):
    """Event id, or None when absent."""
    return self._tracing_event.get('id')

  @property
  def name(self):
    """Event name."""
    return self._tracing_event['name']

  @property
  def tracing_event(self):
    """The raw underlying tracing event dict."""
    return self._tracing_event

  @property
  def synthetic(self):
    """True iff this event was synthesized for indexing."""
    return self._synthetic

  def __str__(self):
    return '{}[{},{}]'.format(
        self._tracing_event, self.start_msec, self.end_msec)

  def Matches(self, category, name):
    """Match tracing events.

    Args:
      category: a tracing category (event['cat']).
      name: the tracing event name (event['name']).

    Returns:
      True if the event matches and False otherwise.
    """
    return name == self.name and category in self.category.split(',')

  def IsIndexable(self):
    """True iff the event can be indexed by time."""
    if self._synthetic:
      return True
    return self.type not in (
        'I', 'P', 'c', 'C',
        'n', 'T', 'p',  # TODO(mattcary): ?? instant types of async events.
        'O',  # TODO(mattcary): ?? object snapshot
        'M'   # Metadata
        )

  def IsComplete(self):
    """True iff this is a complete ('X') event."""
    return self.type == 'X'

  def Synthesize(self):
    """Expand into synthetic events.

    Returns:
      A list of events, possibly some synthetic, whose start times are all
      interesting for purposes of indexing. If the event is not indexable the
      set may be empty.
    """
    if not self.IsIndexable():
      return []
    if not self.IsComplete():
      return [self]
    # Tracing event timestamps are microseconds!
    return [self, Event({'ts': self.end_msec * 1000}, synthetic=True)]

  def SetClose(self, closing):
    """Close a spanning event.

    Args:
      closing: The closing event.

    Raises:
      devtools_monitor.DevToolsConnectionException if closing can't properly
      close this event.
    """
    if self.CLOSING_EVENTS.get(closing.type) != self.type:
      raise devtools_monitor.DevToolsConnectionException(
          'Bad closing: %s --> %s' % (self, closing))
    if self.type in ('b', 'S') and (
        self.tracing_event['cat'] != closing.tracing_event['cat'] or
        self.id != closing.id):
      raise devtools_monitor.DevToolsConnectionException(
          'Bad async closing: %s --> %s' % (self, closing))
    self.end_msec = closing.start_msec
    # Fold the closing event's arguments into ours.
    if 'args' in closing.tracing_event:
      self.tracing_event.setdefault(
          'args', {}).update(closing.tracing_event['args'])

  def ToJsonDict(self):
    """Returns the raw tracing event as a JSON-compatible dict."""
    return self._tracing_event

  @classmethod
  def FromJsonDict(cls, json_dict):
    """Rebuilds an Event from a dict produced by ToJsonDict()."""
    return Event(json_dict)
| 493 | |
| 494 | |
class _IntervalTree(object):
  """Simple interval tree. This is not an optimal one, as the split is done with
  an equal number of events on each side, according to start time.
  """
  # Maximum number of events a leaf holds before it is divided.
  # (Name keeps its historical misspelling of "THRESHOLD".)
  _TRESHOLD = 100
  def __init__(self, start, end, events):
    """Builds an interval tree.

    Args:
      start: start timestamp of this node, in ms.
      end: end timestamp covered by this node, in ms.
      events: Iterable of objects having start_msec and end_msec fields. Has to
        be sorted by start_msec.
    """
    self.start = start
    self.end = end
    self._events = events
    self._left = self._right = None
    if len(self._events) > self._TRESHOLD:
      self._Divide()

  @classmethod
  def FromEvents(cls, events):
    """Returns an IntervalTree instance from a list of events.

    Events lacking a start or end timestamp are dropped. |events| must
    contain at least one event with both timestamps set.
    """
    filtered_events = [e for e in events
                       if e.start_msec is not None and e.end_msec is not None]
    filtered_events.sort(key=operator.attrgetter('start_msec'))
    start = min(event.start_msec for event in filtered_events)
    end = max(event.end_msec for event in filtered_events)
    return _IntervalTree(start, end, filtered_events)

  def OverlappingEvents(self, start, end):
    """Returns a set of events overlapping with [start, end)."""
    if min(end, self.end) - max(start, self.start) <= 0:
      return set()
    elif self._IsLeaf():
      result = set()
      for event in self._events:
        if self._Overlaps(event, start, end):
          result.add(event)
      return result
    else:
      return (self._left.OverlappingEvents(start, end)
              | self._right.OverlappingEvents(start, end))

  def EventsAt(self, timestamp):
    """Returns the set of events whose [start_msec, end_msec) interval
    contains |timestamp|."""
    result = set()
    if self._IsLeaf():
      for event in self._events:
        if event.start_msec <= timestamp < event.end_msec:
          result.add(event)
    else:
      if self._left.start <= timestamp < self._left.end:
        result |= self._left.EventsAt(timestamp)
      if self._right.start <= timestamp < self._right.end:
        result |= self._right.EventsAt(timestamp)
    return result

  def GetEvents(self):
    """Returns this node's events, sorted by start_msec."""
    return self._events

  def _Divide(self):
    """Splits this node into two children of equal event count."""
    # Floor division: '/' is true division under Python 3 and would produce a
    # float, which is not a valid slice index.
    middle = len(self._events) // 2
    left_events = self._events[:middle]
    right_events = self._events[middle:]
    left_end = max(e.end_msec for e in left_events)
    right_start = min(e.start_msec for e in right_events)
    self._left = _IntervalTree(self.start, left_end, left_events)
    self._right = _IntervalTree(right_start, self.end, right_events)

  def _IsLeaf(self):
    """True iff this node has not been divided."""
    return self._left is None

  @classmethod
  def _Overlaps(cls, event, start, end):
    return (min(end, event.end_msec) - max(start, event.start_msec) > 0
            or start <= event.start_msec < end)  # For instant events.
| OLD | NEW |