| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 import argparse | 6 import argparse |
| 7 import errno | 7 import errno |
| 8 import json |
| 8 import os | 9 import os |
| 9 import re | 10 import re |
| 10 import sys | 11 import sys |
| 11 import urllib | 12 import urllib |
| 12 import urllib2 | 13 import urllib2 |
| 13 | 14 |
| 14 # Where all the data lives. | 15 # Where all the data lives. |
| 15 ROOT_URL = "http://build.chromium.org/p/chromium.memory.fyi/builders" | 16 ROOT_URL = "http://build.chromium.org/p/chromium.memory.fyi/builders" |
| 16 | 17 |
| 17 # TODO(groby) - support multi-line search from the command line. Useful when | 18 # TODO(groby) - support multi-line search from the command line. Useful when |
| 18 # scanning for classes of failures; see below. | 19 # scanning for classes of failures; see below. |
| 19 SEARCH_STRING = """<p class=\"failure result\"> | 20 SEARCH_STRING = """<p class=\"failure result\"> |
| 20 Failed memory test: content | 21 Failed memory test: content |
| 21 </p>""" | 22 </p>""" |
| 22 | 23 |
| 23 # Location of the log cache. | 24 # Location of the log cache. |
| 24 CACHE_DIR = "buildlogs.tmp" | 25 CACHE_DIR = "buildlogs.tmp" |
| 25 | 26 |
| 26 # If we don't find anything after searching |CUTOFF| logs, we're probably done. | 27 # If we don't find anything after searching |CUTOFF| logs, we're probably done. |
| 27 CUTOFF = 100 | 28 CUTOFF = 200 |
| 28 | 29 |
| 29 def EnsurePath(path): | 30 def EnsurePath(path): |
| 30   """Ensures that |path| exists, creating it if it doesn't.""" | 31   """Ensures that |path| exists, creating it if it doesn't.""" |
| 31 try: | 32 try: |
| 32 os.makedirs(path) | 33 os.makedirs(path) |
| 33 except OSError as exception: | 34 except OSError as exception: |
| 34 if exception.errno != errno.EEXIST: | 35 if exception.errno != errno.EEXIST: |
| 35 raise | 36 raise |
| 36 | 37 |
| 37 | 38 |
| (...skipping 146 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 184 self._current = 0 | 185 self._current = 0 |
| 185 return False | 186 return False |
| 186 | 187 |
| 187 | 188 |
| 188 def main(argv): | 189 def main(argv): |
| 189 # Create argument parser. | 190 # Create argument parser. |
| 190 parser = argparse.ArgumentParser() | 191 parser = argparse.ArgumentParser() |
| 191 commands = parser.add_mutually_exclusive_group(required=True) | 192 commands = parser.add_mutually_exclusive_group(required=True) |
| 192 commands.add_argument("--update", action='store_true') | 193 commands.add_argument("--update", action='store_true') |
| 193 commands.add_argument("--find", metavar='search term') | 194 commands.add_argument("--find", metavar='search term') |
| 195 parser.add_argument("--json", action='store_true', |
| 196 help="Output in JSON format") |
| 194 args = parser.parse_args() | 197 args = parser.parse_args() |
| 195 | 198 |
| 196 path = os.path.abspath(os.path.dirname(argv[0])) | 199 path = os.path.abspath(os.path.dirname(argv[0])) |
| 197 cache_path = os.path.join(path, CACHE_DIR) | 200 cache_path = os.path.join(path, CACHE_DIR) |
| 198 | 201 |
| 199 fyi = Waterfall(ROOT_URL, cache_path) | 202 fyi = Waterfall(ROOT_URL, cache_path) |
| 200 | 203 |
| 201 if args.update: | 204 if args.update: |
| 202 fyi.Update() | 205 fyi.Update() |
| 203 for builder in fyi.Builders(): | 206 for builder in fyi.Builders(): |
| 204 print "Updating", builder.Name() | 207 print "Updating", builder.Name() |
| 205 builder.ScanLogs(lambda x:False) | 208 builder.ScanLogs(lambda x:False) |
| 206 | 209 |
| 207 if args.find: | 210 if args.find: |
| 211 result = [] |
| 208 tester = MultiLineChange(args.find.splitlines()) | 212 tester = MultiLineChange(args.find.splitlines()) |
| 209 fyi.FetchInfo() | 213 fyi.FetchInfo() |
| 210 | 214 |
| 211 print "SCANNING FOR ", args.find | 215 if not args.json: |
| 216 print "SCANNING FOR ", args.find |
| 212 for builder in fyi.Builders(): | 217 for builder in fyi.Builders(): |
| 213 print "Scanning", builder.Name() | 218 if not args.json: |
| 219 print "Scanning", builder.Name() |
| 214 occurrences = builder.ScanLogs(tester) | 220 occurrences = builder.ScanLogs(tester) |
| 215 if occurrences: | 221 if occurrences: |
| 216 min_build = min(occurrences) | 222 min_build = min(occurrences) |
| 217 path = builder.GetBuildPath(min_build) | 223 path = builder.GetBuildPath(min_build) |
| 218 print "Earliest occurrence in build %d" % min_build | 224 if args.json: |
| 219 print "Latest occurrence in build %d" % max(occurrences) | 225 data = {} |
| 220 print "Latest build: %d" % builder.LatestBuild() | 226 data['builder'] = builder.Name() |
| 221 print path | 227 data['first_affected'] = min_build |
| 222 print "%d total" % len(occurrences) | 228 data['last_affected'] = max(occurrences) |
| 223 | 229 data['last_build'] = builder.LatestBuild() |
| 230 data['frequency'] = ((int(builder.LatestBuild()) - int(min_build)) / |
| 231 len(occurrences)) |
| 232 data['total'] = len(occurrences) |
| 233 data['first_url'] = path |
| 234 result.append(data) |
| 235 else: |
| 236 print "Earliest occurrence in build %d" % min_build |
| 237 print "Latest occurrence in build %d" % max(occurrences) |
| 238 print "Latest build: %d" % builder.LatestBuild() |
| 239 print path |
| 240 print "%d total" % len(occurrences) |
| 241 if args.json: |
| 242 json.dump(result, sys.stdout, indent=2, sort_keys=True) |
| 224 | 243 |
| 225 if __name__ == "__main__": | 244 if __name__ == "__main__": |
| 226 sys.exit(main(sys.argv)) | 245 sys.exit(main(sys.argv)) |
| 227 | 246 |
| OLD | NEW |