Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(5228)

Unified Diff: client/swarming.py

Issue 2923633003: client: Support repeated keys in task request. (Closed)
Patch Set: fixes Created 3 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: client/swarming.py
diff --git a/client/swarming.py b/client/swarming.py
index d67c2b358cf83e6dbfb5acfee76739a89317b241..6514eea5529f4257041aadea3f160ca8bc2bbc97 100755
--- a/client/swarming.py
+++ b/client/swarming.py
@@ -55,9 +55,7 @@ def default_task_name(options):
if not options.task_name:
task_name = u'%s/%s' % (
options.user,
- '_'.join(
- '%s=%s' % (k, v)
- for k, v in sorted(options.dimensions.iteritems())))
+ '_'.join('%s=%s' % (k, v) for k, v in options.dimensions))
if options.isolated:
task_name += u'/' + options.isolated
return task_name
@@ -162,12 +160,10 @@ def task_request_to_raw_request(task_request, hide_token):
# version of the server that doesn't know about 'service_account_token'.
if out['service_account_token'] in (None, 'none'):
out.pop('service_account_token')
- # Maps are not supported until protobuf v3.
out['properties']['dimensions'] = [
{'key': k, 'value': v}
- for k, v in out['properties']['dimensions'].iteritems()
+ for k, v in out['properties']['dimensions']
]
- out['properties']['dimensions'].sort(key=lambda x: x['key'])
out['properties']['env'] = [
{'key': k, 'value': v}
for k, v in out['properties']['env'].iteritems()
@@ -815,6 +811,47 @@ def endpoints_api_discovery_apis(host):
return out
+def get_yielder(base_url, limit):
+  """Returns the first query response and a generator that yields the following pages of items."""
+ CHUNK_SIZE = 250
+
+ url = base_url
+ if limit:
+ url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
+ data = net.url_read_json(url)
+ if data is None:
+ # TODO(maruel): Do basic diagnostic.
+ raise Failure('Failed to access %s' % url)
+ org_cursor = data.pop('cursor', None)
+ org_total = len(data.get('items') or [])
+ logging.info('get_yielder(%s) returning %d items', base_url, org_total)
+ if not org_cursor or not org_total:
+ # This is not an iterable resource.
+ return data, lambda: []
+
+ def yielder():
+ cursor = org_cursor
+ total = org_total
+    # Some resources are paginated via cursors. Keep fetching pages for as long
+    # as the response includes a 'cursor' and the item limit is not reached.
+ while cursor and (not limit or total < limit):
+ merge_char = '&' if '?' in base_url else '?'
+ url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
+ if limit:
+ url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
+ new = net.url_read_json(url)
+ if new is None:
+ raise Failure('Failed to access %s' % url)
+ cursor = new.get('cursor')
+ new_items = new.get('items')
+ nb_items = len(new_items or [])
+ total += nb_items
+ logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
+ yield new_items
+
+ return data, yielder
+
+
### Commands.
@@ -832,6 +869,22 @@ def add_filter_options(parser):
parser.add_option_group(parser.filter_group)
+def process_filter_options(parser, options):
+ for key, value in options.dimensions:
+ if ':' in key:
+ parser.error('--dimension key cannot contain ":"')
+ if key.strip() != key:
+ parser.error('--dimension key has whitespace')
+ if not key:
+ parser.error('--dimension key is empty')
+
+ if value.strip() != value:
+ parser.error('--dimension value has whitespace')
+ if not value:
+ parser.error('--dimension value is empty')
+ options.dimensions.sort()
+
+
def add_sharding_options(parser):
parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
parser.sharding_group.add_option(
@@ -923,7 +976,7 @@ def process_trigger_options(parser, options, args):
Generates service account tokens if necessary.
"""
- options.dimensions = dict(options.dimensions)
+ process_filter_options(parser, options)
options.env = dict(options.env)
if args and args[0] == '--':
args = args[1:]
@@ -1089,65 +1142,75 @@ def CMDbots(parser, args):
add_filter_options(parser)
parser.filter_group.add_option(
'--dead-only', action='store_true',
- help='Only print dead bots, useful to reap them and reimage broken bots')
+    help='Only show dead bots, useful to reap them and reimage broken bots')
parser.filter_group.add_option(
'-k', '--keep-dead', action='store_true',
- help='Do not filter out dead bots')
+ help='Keep both dead and alive bots')
+ parser.filter_group.add_option(
+ '--busy', action='store_true', help='Keep only busy bots')
+ parser.filter_group.add_option(
+ '--idle', action='store_true', help='Keep only idle bots')
+ parser.filter_group.add_option(
+ '--mp', action='store_true',
+ help='Keep only Machine Provider managed bots')
+ parser.filter_group.add_option(
+ '--non-mp', action='store_true',
+ help='Keep only non Machine Provider managed bots')
parser.filter_group.add_option(
'-b', '--bare', action='store_true',
help='Do not print out dimensions')
options, args = parser.parse_args(args)
+ process_filter_options(parser, options)
if options.keep_dead and options.dead_only:
- parser.error('Use only one of --keep-dead and --dead-only')
-
- bots = []
- cursor = None
- limit = 250
- # Iterate via cursors.
- base_url = (
- options.swarming + '/api/swarming/v1/bots/list?limit=%d' % limit)
- while True:
- url = base_url
- if cursor:
- url += '&cursor=%s' % urllib.quote(cursor)
- data = net.url_read_json(url)
- if data is None:
- print >> sys.stderr, 'Failed to access %s' % options.swarming
- return 1
- bots.extend(data['items'])
- cursor = data.get('cursor')
- if not cursor:
- break
+ parser.error('Use only one of --keep-dead or --dead-only')
+ if options.busy and options.idle:
+ parser.error('Use only one of --busy or --idle')
+ if options.mp and options.non_mp:
+ parser.error('Use only one of --mp or --non-mp')
+
+ url = options.swarming + '/api/swarming/v1/bots/list?'
+ values = []
+ if options.dead_only:
+ values.append(('is_dead', 'TRUE'))
+ elif options.keep_dead:
+ values.append(('is_dead', 'NONE'))
+ else:
+ values.append(('is_dead', 'FALSE'))
- for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
- if options.dead_only:
- if not bot.get('is_dead'):
- continue
- elif not options.keep_dead and bot.get('is_dead'):
- continue
+ if options.busy:
+ values.append(('is_busy', 'TRUE'))
+ elif options.idle:
+ values.append(('is_busy', 'FALSE'))
+ else:
+ values.append(('is_busy', 'NONE'))
- # If the user requested to filter on dimensions, ensure the bot has all the
- # dimensions requested.
- dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
- for key, value in options.dimensions:
- if key not in dimensions:
- break
- # A bot can have multiple value for a key, for example,
- # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
- # be accepted.
- if isinstance(dimensions[key], list):
- if value not in dimensions[key]:
- break
- else:
- if value != dimensions[key]:
- break
- else:
- print bot['bot_id']
- if not options.bare:
- print ' %s' % json.dumps(dimensions, sort_keys=True)
- if bot.get('task_id'):
- print ' task: %s' % bot['task_id']
+ if options.mp:
+ values.append(('is_mp', 'TRUE'))
+ elif options.non_mp:
+ values.append(('is_mp', 'FALSE'))
+ else:
+ values.append(('is_mp', 'NONE'))
+
+ for key, value in options.dimensions:
+ values.append(('dimensions', '%s:%s' % (key, value)))
+ url += urllib.urlencode(values)
+ try:
+ data, yielder = get_yielder(url, 0)
+ bots = data.get('items') or []
+ for items in yielder():
+ if items:
+ bots.extend(items)
+ except Failure as e:
+ sys.stderr.write('\n%s\n' % e)
+ return 1
+ for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
+ print bot['bot_id']
+ if not options.bare:
+ dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
+ print ' %s' % json.dumps(dimensions, sort_keys=True)
+ if bot.get('task_id'):
+ print ' task: %s' % bot['task_id']
return 0
@@ -1255,19 +1318,21 @@ def CMDquery(parser, args):
gather the list of API methods from the server.
Examples:
+ Raw task request and results:
+ swarming.py query -S server-url.com task/123456/request
+ swarming.py query -S server-url.com task/123456/result
+
Listing all bots:
swarming.py query -S server-url.com bots/list
- Listing last 10 tasks on a specific bot named 'swarm1':
- swarming.py query -S server-url.com --limit 10 bot/swarm1/tasks
+ Listing last 10 tasks on a specific bot named 'bot1':
+ swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
- Listing last 10 tasks with tags os:Ubuntu-12.04 and pool:Chrome. Note that
+ Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
quoting is important!:
swarming.py query -S server-url.com --limit 10 \\
- 'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome'
+ 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
"""
- CHUNK_SIZE = 250
-
parser.add_option(
'-L', '--limit', type='int', default=200,
help='Limit to enforce on limitless items (like number of tasks); '
@@ -1283,44 +1348,20 @@ def CMDquery(parser, args):
'Must specify only method name and optionally query args properly '
'escaped.')
base_url = options.swarming + '/api/swarming/v1/' + args[0]
- url = base_url
- if options.limit:
- # Check check, change if not working out.
- merge_char = '&' if '?' in url else '?'
- url += '%slimit=%d' % (merge_char, min(CHUNK_SIZE, options.limit))
- data = net.url_read_json(url)
- if data is None:
- # TODO(maruel): Do basic diagnostic.
- print >> sys.stderr, 'Failed to access %s' % url
- return 1
-
- # Some items support cursors. Try to get automatically if cursors are needed
- # by looking at the 'cursor' items.
- while (
- data.get('cursor') and
- (not options.limit or len(data['items']) < options.limit)):
- merge_char = '&' if '?' in base_url else '?'
- url = base_url + '%scursor=%s' % (merge_char, urllib.quote(data['cursor']))
- if options.limit:
- url += '&limit=%d' % min(CHUNK_SIZE, options.limit - len(data['items']))
- if options.progress:
- sys.stdout.write('.')
- sys.stdout.flush()
- new = net.url_read_json(url)
- if new is None:
+ try:
+ data, yielder = get_yielder(base_url, options.limit)
+ for items in yielder():
+ if items:
+ data['items'].extend(items)
if options.progress:
- print('')
- print >> sys.stderr, 'Failed to access %s' % options.swarming
- return 1
- data['items'].extend(new.get('items', []))
- data['cursor'] = new.get('cursor')
-
+ sys.stderr.write('.')
+ sys.stderr.flush()
+ except Failure as e:
+ sys.stderr.write('\n%s\n' % e)
+ return 1
if options.progress:
- print('')
- if options.limit and len(data.get('items', [])) > options.limit:
- data['items'] = data['items'][:options.limit]
- data.pop('cursor', None)
-
+ sys.stderr.write('\n')
+ sys.stderr.flush()
if options.json:
options.json = unicode(os.path.abspath(options.json))
tools.write_json(options.json, data, True)
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698