Chromium Code Reviews
| Index: client/swarming.py |
| diff --git a/client/swarming.py b/client/swarming.py |
| index d67c2b358cf83e6dbfb5acfee76739a89317b241..bebb0c6de644f4afc0a09f8b722015b88504c983 100755 |
| --- a/client/swarming.py |
| +++ b/client/swarming.py |
| @@ -55,9 +55,7 @@ def default_task_name(options): |
| if not options.task_name: |
| task_name = u'%s/%s' % ( |
| options.user, |
| - '_'.join( |
| - '%s=%s' % (k, v) |
| - for k, v in sorted(options.dimensions.iteritems()))) |
| + '_'.join('%s=%s' % (k, v) for k, v in options.dimensions)) |
| if options.isolated: |
| task_name += u'/' + options.isolated |
| return task_name |
| @@ -162,12 +160,10 @@ def task_request_to_raw_request(task_request, hide_token): |
| # version of the server that doesn't know about 'service_account_token'. |
| if out['service_account_token'] in (None, 'none'): |
| out.pop('service_account_token') |
| - # Maps are not supported until protobuf v3. |
| out['properties']['dimensions'] = [ |
| {'key': k, 'value': v} |
| - for k, v in out['properties']['dimensions'].iteritems() |
| + for k, v in out['properties']['dimensions'] |
| ] |
| - out['properties']['dimensions'].sort(key=lambda x: x['key']) |
| out['properties']['env'] = [ |
| {'key': k, 'value': v} |
| for k, v in out['properties']['env'].iteritems() |
| @@ -815,6 +811,44 @@ def endpoints_api_discovery_apis(host): |
| return out |
| +def get_yielder(base_url, limit): |
| + """Returns the first query and a function that yields following items.""" |
| + CHUNK_SIZE = 250 |
| + |
| + url = base_url |
| + if limit: |
| + url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit)) |
| + data = net.url_read_json(url) |
| + if data is None: |
| + # TODO(maruel): Do basic diagnostic. |
| + raise Failure('Failed to access %s' % url) |
| + org_cursor = data.pop('cursor', None) |
| + org_total = len(data.get('items') or []) |
| + logging.info('get_yielder(%s) returning %d items', base_url, org_total) |
| + |
| + def yielder(): |
| + cursor = org_cursor |
| + total = org_total |
| + # Some items support cursors. Try to get automatically if cursors are needed |
| + # by looking at the 'cursor' items. |
| + while cursor and (not limit or total < limit): |
| + merge_char = '&' if '?' in base_url else '?' |
| + url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor)) |
| + if limit: |
| + url += '&limit=%d' % min(CHUNK_SIZE, limit - total) |
| + new = net.url_read_json(url) |
| + if new is None: |
| + raise Failure('Failed to access %s' % url) |
| + cursor = new.get('cursor') |
| + new_items = new.get('items') |
| + nb_items = len(new_items or []) |
| + total += nb_items |
| + logging.info('get_yielder(%s) yielding %d items', base_url, nb_items) |
| + yield new_items |
| + |
| + return data, yielder |
|
Vadim Sh.
2017/06/05 21:33:13
instead of returning data, you can yield data['items'] from the yielder as the first chunk
M-A Ruel
2017/06/07 15:36:03
It's because some queries may not return 'items'.
|
| + |
| + |
| ### Commands. |
| @@ -832,6 +866,22 @@ def add_filter_options(parser): |
| parser.add_option_group(parser.filter_group) |
| +def process_filter_options(parser, options): |
| + for key, value in options.dimensions: |
| + if ':' in key: |
| + parser.error('--dimension key cannot contain ":"') |
| + if key.strip() != key: |
| + parser.error('--dimension key has whitespace') |
| + if not key: |
| + parser.error('--dimension key is empty') |
| + |
| + if value.strip() != value: |
| + parser.error('--dimension value has whitespace') |
| + if not value: |
| + parser.error('--dimension value is empty') |
| + options.dimensions.sort() |
| + |
| + |
| def add_sharding_options(parser): |
| parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options') |
| parser.sharding_group.add_option( |
| @@ -923,7 +973,7 @@ def process_trigger_options(parser, options, args): |
| Generates service account tokens if necessary. |
| """ |
| - options.dimensions = dict(options.dimensions) |
| + process_filter_options(parser, options) |
| options.env = dict(options.env) |
| if args and args[0] == '--': |
| args = args[1:] |
| @@ -1089,65 +1139,72 @@ def CMDbots(parser, args): |
| add_filter_options(parser) |
| parser.filter_group.add_option( |
| '--dead-only', action='store_true', |
| - help='Only print dead bots, useful to reap them and reimage broken bots') |
| + help='Filter out bots alive, useful to reap them and reimage broken bots') |
| parser.filter_group.add_option( |
| '-k', '--keep-dead', action='store_true', |
| - help='Do not filter out dead bots') |
| + help='Keep both dead and alive bots') |
| + parser.filter_group.add_option( |
| + '--busy', action='store_true', help='Keep only busy bots') |
| + parser.filter_group.add_option( |
| + '--idle', action='store_true', help='Keep only idle bots') |
| + parser.filter_group.add_option( |
| + '--mp', action='store_true', |
| + help='Keep only Machine Provider managed bots') |
| + parser.filter_group.add_option( |
| + '--non-mp', action='store_true', |
| + help='Keep only non Machine Provider managed bots') |
| parser.filter_group.add_option( |
| '-b', '--bare', action='store_true', |
| help='Do not print out dimensions') |
| options, args = parser.parse_args(args) |
| + process_filter_options(parser, options) |
| if options.keep_dead and options.dead_only: |
| - parser.error('Use only one of --keep-dead and --dead-only') |
| - |
| - bots = [] |
| - cursor = None |
| - limit = 250 |
| - # Iterate via cursors. |
| - base_url = ( |
| - options.swarming + '/api/swarming/v1/bots/list?limit=%d' % limit) |
| - while True: |
| - url = base_url |
| - if cursor: |
| - url += '&cursor=%s' % urllib.quote(cursor) |
| - data = net.url_read_json(url) |
| - if data is None: |
| - print >> sys.stderr, 'Failed to access %s' % options.swarming |
| - return 1 |
| - bots.extend(data['items']) |
| - cursor = data.get('cursor') |
| - if not cursor: |
| - break |
| + parser.error('Use only one of --keep-dead or --dead-only') |
| + if options.busy and options.idle: |
| + parser.error('Use only one of --busy or --idle') |
| + if options.mp and options.non_mp: |
| + parser.error('Use only one of --mp or --non-mp') |
| + |
| + url = options.swarming + '/api/swarming/v1/bots/list' |
| + if options.dead_only: |
| + url += '?is_dead=TRUE' |
| + elif options.keep_dead: |
| + url += '?is_dead=NONE' |
| + else: |
| + url += '?is_dead=FALSE' |
| - for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']): |
| - if options.dead_only: |
| - if not bot.get('is_dead'): |
| - continue |
| - elif not options.keep_dead and bot.get('is_dead'): |
| - continue |
| + if options.busy: |
| + url += '&is_busy=TRUE' |
| + elif options.idle: |
| + url += '&is_busy=FALSE' |
| + else: |
| + url += '&is_busy=NONE' |
| - # If the user requested to filter on dimensions, ensure the bot has all the |
| - # dimensions requested. |
| - dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})} |
| - for key, value in options.dimensions: |
| - if key not in dimensions: |
| - break |
| - # A bot can have multiple value for a key, for example, |
| - # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will |
| - # be accepted. |
| - if isinstance(dimensions[key], list): |
| - if value not in dimensions[key]: |
| - break |
| - else: |
| - if value != dimensions[key]: |
| - break |
| - else: |
| - print bot['bot_id'] |
| - if not options.bare: |
| - print ' %s' % json.dumps(dimensions, sort_keys=True) |
| - if bot.get('task_id'): |
| - print ' task: %s' % bot['task_id'] |
| + if options.mp: |
| + url += '&is_mp=TRUE' |
| + elif options.non_mp: |
| + url += '&is_mp=FALSE' |
| + else: |
| + url += '&is_mp=NONE' |
| + |
| + for key, value in options.dimensions: |
| + url += '&dimensions=%s:%s' % (key, value) |
|
Vadim Sh.
2017/06/05 21:33:13
this needs url encoding
M-A Ruel
2017/06/07 15:36:03
Argh, thanks. Changed to encode everything systematically.
|
| + try: |
| + data, yielder = get_yielder(url, 0) |
| + bots = data.get('items') or [] |
| + for items in yielder(): |
| + bots.extend(items) |
| + except Failure as e: |
| + sys.stderr.write('\n%s\n' % e) |
| + return 1 |
| + for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']): |
| + print bot['bot_id'] |
| + if not options.bare: |
| + dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})} |
| + print ' %s' % json.dumps(dimensions, sort_keys=True) |
| + if bot.get('task_id'): |
| + print ' task: %s' % bot['task_id'] |
| return 0 |
| @@ -1266,8 +1323,6 @@ def CMDquery(parser, args): |
| swarming.py query -S server-url.com --limit 10 \\ |
| 'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome' |
| """ |
| - CHUNK_SIZE = 250 |
| - |
| parser.add_option( |
| '-L', '--limit', type='int', default=200, |
| help='Limit to enforce on limitless items (like number of tasks); ' |
| @@ -1283,44 +1338,21 @@ def CMDquery(parser, args): |
| 'Must specify only method name and optionally query args properly ' |
| 'escaped.') |
| base_url = options.swarming + '/api/swarming/v1/' + args[0] |
| - url = base_url |
| - if options.limit: |
| - # Check check, change if not working out. |
| - merge_char = '&' if '?' in url else '?' |
| - url += '%slimit=%d' % (merge_char, min(CHUNK_SIZE, options.limit)) |
| - data = net.url_read_json(url) |
| - if data is None: |
| - # TODO(maruel): Do basic diagnostic. |
| - print >> sys.stderr, 'Failed to access %s' % url |
| - return 1 |
| - |
| - # Some items support cursors. Try to get automatically if cursors are needed |
| - # by looking at the 'cursor' items. |
| - while ( |
| - data.get('cursor') and |
| - (not options.limit or len(data['items']) < options.limit)): |
| - merge_char = '&' if '?' in base_url else '?' |
| - url = base_url + '%scursor=%s' % (merge_char, urllib.quote(data['cursor'])) |
| - if options.limit: |
| - url += '&limit=%d' % min(CHUNK_SIZE, options.limit - len(data['items'])) |
| - if options.progress: |
| - sys.stdout.write('.') |
| - sys.stdout.flush() |
| - new = net.url_read_json(url) |
| - if new is None: |
| + try: |
| + data, yielder = get_yielder(base_url, options.limit) |
| + for items in yielder(): |
| + # Some items support cursors. Try to get automatically if cursors are |
|
Vadim Sh.
2017/06/05 21:33:13
this comment doesn't seem relevant any more
M-A Ruel
2017/06/07 15:36:03
Done.
|
| + # needed by looking at the 'cursor' items. |
| + data['items'].extend(items) |
| if options.progress: |
| - print('') |
| - print >> sys.stderr, 'Failed to access %s' % options.swarming |
| - return 1 |
| - data['items'].extend(new.get('items', [])) |
| - data['cursor'] = new.get('cursor') |
| - |
| + sys.stdout.write('.') |
| + sys.stdout.flush() |
| + except Failure as e: |
| + sys.stderr.write('\n%s\n' % e) |
| + return 1 |
| if options.progress: |
| - print('') |
| - if options.limit and len(data.get('items', [])) > options.limit: |
| - data['items'] = data['items'][:options.limit] |
| - data.pop('cursor', None) |
| - |
| + sys.stdout.write('\n') |
| + sys.stdout.flush() |
| if options.json: |
| options.json = unicode(os.path.abspath(options.json)) |
| tools.write_json(options.json, data, True) |