Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(60)

Side by Side Diff: client/swarming.py

Issue 2923633003: client: Support repeated keys in task request. (Closed)
Patch Set: Created 3 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 """Client tool to trigger tasks or retrieve results from a Swarming server.""" 6 """Client tool to trigger tasks or retrieve results from a Swarming server."""
7 7
8 __version__ = '0.9.1' 8 __version__ = '0.9.1'
9 9
10 import collections 10 import collections
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
class Failure(Exception):
  """Generic failure raised by swarming client operations."""
52 52
def default_task_name(options):
  """Returns a default task name if not specified."""
  if options.task_name:
    return options.task_name
  # Derive a name of the form "<user>/<k=v>_<k=v>[/<isolated>]" from the
  # requested dimensions (a list of (key, value) pairs, repeats allowed).
  pairs = ['%s=%s' % (k, v) for k, v in options.dimensions]
  task_name = u'%s/%s' % (options.user, '_'.join(pairs))
  if options.isolated:
    task_name += u'/' + options.isolated
  return task_name
65 63
66 64
67 ### Triggering. 65 ### Triggering.
68 66
69 67
70 # See ../appengine/swarming/swarming_rpcs.py. 68 # See ../appengine/swarming/swarming_rpcs.py.
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
155 This is for the v1 client Swarming API. 153 This is for the v1 client Swarming API.
156 """ 154 """
157 out = namedtuple_to_dict(task_request) 155 out = namedtuple_to_dict(task_request)
158 if hide_token: 156 if hide_token:
159 if out['service_account_token'] not in (None, 'bot', 'none'): 157 if out['service_account_token'] not in (None, 'bot', 'none'):
160 out['service_account_token'] = '<hidden>' 158 out['service_account_token'] = '<hidden>'
161 # Don't send 'service_account_token' if it is None to avoid confusing older 159 # Don't send 'service_account_token' if it is None to avoid confusing older
162 # version of the server that doesn't know about 'service_account_token'. 160 # version of the server that doesn't know about 'service_account_token'.
163 if out['service_account_token'] in (None, 'none'): 161 if out['service_account_token'] in (None, 'none'):
164 out.pop('service_account_token') 162 out.pop('service_account_token')
165 # Maps are not supported until protobuf v3.
166 out['properties']['dimensions'] = [ 163 out['properties']['dimensions'] = [
167 {'key': k, 'value': v} 164 {'key': k, 'value': v}
168 for k, v in out['properties']['dimensions'].iteritems() 165 for k, v in out['properties']['dimensions']
169 ] 166 ]
170 out['properties']['dimensions'].sort(key=lambda x: x['key'])
171 out['properties']['env'] = [ 167 out['properties']['env'] = [
172 {'key': k, 'value': v} 168 {'key': k, 'value': v}
173 for k, v in out['properties']['env'].iteritems() 169 for k, v in out['properties']['env'].iteritems()
174 ] 170 ]
175 out['properties']['env'].sort(key=lambda x: x['key']) 171 out['properties']['env'].sort(key=lambda x: x['key'])
176 return out 172 return out
177 173
178 174
179 def swarming_trigger(swarming, raw_request): 175 def swarming_trigger(swarming, raw_request):
180 """Triggers a request on the Swarming server and returns the json data. 176 """Triggers a request on the Swarming server and returns the json data.
(...skipping 627 matching lines...) Expand 10 before | Expand all | Expand 10 after
808 # URL is of the following form: 804 # URL is of the following form:
809 # url = host + ( 805 # url = host + (
810 # '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version']) 806 # '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
811 api_data = net.url_read_json(api['discoveryRestUrl']) 807 api_data = net.url_read_json(api['discoveryRestUrl'])
812 if api_data is None: 808 if api_data is None:
813 raise APIError('Failed to discover %s on %s' % (api['id'], host)) 809 raise APIError('Failed to discover %s on %s' % (api['id'], host))
814 out[api['id']] = api_data 810 out[api['id']] = api_data
815 return out 811 return out
816 812
817 813
def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items.

  Performs the first request eagerly so callers get immediate feedback on a
  bad URL; pagination via the server's opaque 'cursor' token is deferred to
  the returned generator function.
  """
  CHUNK_SIZE = 250

  first_url = base_url
  if limit:
    sep = '&' if '?' in first_url else '?'
    first_url += '%slimit=%d' % (sep, min(CHUNK_SIZE, limit))
  data = net.url_read_json(first_url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % first_url)
  first_cursor = data.pop('cursor', None)
  first_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, first_total)

  def yielder():
    # Some items support cursors. Follow them automatically until they run
    # out or until the caller-provided limit is reached.
    cursor = first_cursor
    total = first_total
    while cursor and (not limit or total < limit):
      sep = '&' if '?' in base_url else '?'
      page_url = base_url + '%scursor=%s' % (sep, urllib.quote(cursor))
      if limit:
        page_url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      page = net.url_read_json(page_url)
      if page is None:
        raise Failure('Failed to access %s' % page_url)
      cursor = page.get('cursor')
      items = page.get('items')
      nb_items = len(items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield items

  return data, yielder
Vadim Sh. 2017/06/05 21:33:13 instead of returning data, you can yield data['ite
M-A Ruel 2017/06/07 15:36:03 It's because some queries may not return 'items'.
850
851
818 ### Commands. 852 ### Commands.
819 853
820 854
def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.
825 859
def add_filter_options(parser):
  """Registers the bot selection option group on the parser.

  The group is also stashed on parser.filter_group so subcommands can append
  their own filtering flags to it.
  """
  group = optparse.OptionGroup(parser, 'Bot selection')
  group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group = group
  parser.add_option_group(group)
833 867
834 868
def process_filter_options(parser, options):
  """Validates the --dimension (key, value) pairs and sorts them in place.

  Calls parser.error() (which normally exits) on any malformed pair.
  """
  for key, value in options.dimensions:
    # Keys embed no separator and carry no surrounding whitespace.
    if ':' in key:
      parser.error('--dimension key cannot contain ":"')
    if key != key.strip():
      parser.error('--dimension key has whitespace')
    if not key:
      parser.error('--dimension key is empty')
    # Same hygiene rules for values.
    if value != value.strip():
      parser.error('--dimension value has whitespace')
    if not value:
      parser.error('--dimension value is empty')
  # Sort for deterministic ordering; repeated keys are permitted.
  options.dimensions.sort()
884
def add_sharding_options(parser):
  """Registers the sharding option group on the parser."""
  group = optparse.OptionGroup(parser, 'Sharding options')
  group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.sharding_group = group
  parser.add_option_group(group)
841 891
842 892
843 def add_trigger_options(parser): 893 def add_trigger_options(parser):
844 """Adds all options to trigger a task on Swarming.""" 894 """Adds all options to trigger a task on Swarming."""
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after
916 '--deadline', type='int', dest='expiration', 966 '--deadline', type='int', dest='expiration',
917 help=optparse.SUPPRESS_HELP) 967 help=optparse.SUPPRESS_HELP)
918 parser.add_option_group(group) 968 parser.add_option_group(group)
919 969
920 970
921 def process_trigger_options(parser, options, args): 971 def process_trigger_options(parser, options, args):
922 """Processes trigger options and does preparatory steps. 972 """Processes trigger options and does preparatory steps.
923 973
924 Generates service account tokens if necessary. 974 Generates service account tokens if necessary.
925 """ 975 """
926 options.dimensions = dict(options.dimensions) 976 process_filter_options(parser, options)
927 options.env = dict(options.env) 977 options.env = dict(options.env)
928 if args and args[0] == '--': 978 if args and args[0] == '--':
929 args = args[1:] 979 args = args[1:]
930 980
931 if not options.dimensions: 981 if not options.dimensions:
932 parser.error('Please at least specify one --dimension') 982 parser.error('Please at least specify one --dimension')
933 if not all(len(t.split(':', 1)) == 2 for t in options.tags): 983 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
934 parser.error('--tags must be in the format key:value') 984 parser.error('--tags must be in the format key:value')
935 if options.raw_cmd and not args: 985 if options.raw_cmd and not args:
936 parser.error( 986 parser.error(
(...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after
1082 print('Deleting %s failed. Probably already gone' % bot) 1132 print('Deleting %s failed. Probably already gone' % bot)
1083 result = 1 1133 result = 1
1084 return result 1134 return result
1085 1135
1086 1136
1087 def CMDbots(parser, args): 1137 def CMDbots(parser, args):
1088 """Returns information about the bots connected to the Swarming server.""" 1138 """Returns information about the bots connected to the Swarming server."""
1089 add_filter_options(parser) 1139 add_filter_options(parser)
1090 parser.filter_group.add_option( 1140 parser.filter_group.add_option(
1091 '--dead-only', action='store_true', 1141 '--dead-only', action='store_true',
1092 help='Only print dead bots, useful to reap them and reimage broken bots') 1142 help='Filter out bots alive, useful to reap them and reimage broken bots')
1093 parser.filter_group.add_option( 1143 parser.filter_group.add_option(
1094 '-k', '--keep-dead', action='store_true', 1144 '-k', '--keep-dead', action='store_true',
1095 help='Do not filter out dead bots') 1145 help='Keep both dead and alive bots')
1146 parser.filter_group.add_option(
1147 '--busy', action='store_true', help='Keep only busy bots')
1148 parser.filter_group.add_option(
1149 '--idle', action='store_true', help='Keep only idle bots')
1150 parser.filter_group.add_option(
1151 '--mp', action='store_true',
1152 help='Keep only Machine Provider managed bots')
1153 parser.filter_group.add_option(
1154 '--non-mp', action='store_true',
1155 help='Keep only non Machine Provider managed bots')
1096 parser.filter_group.add_option( 1156 parser.filter_group.add_option(
1097 '-b', '--bare', action='store_true', 1157 '-b', '--bare', action='store_true',
1098 help='Do not print out dimensions') 1158 help='Do not print out dimensions')
1099 options, args = parser.parse_args(args) 1159 options, args = parser.parse_args(args)
1160 process_filter_options(parser, options)
1100 1161
1101 if options.keep_dead and options.dead_only: 1162 if options.keep_dead and options.dead_only:
1102 parser.error('Use only one of --keep-dead and --dead-only') 1163 parser.error('Use only one of --keep-dead or --dead-only')
1164 if options.busy and options.idle:
1165 parser.error('Use only one of --busy or --idle')
1166 if options.mp and options.non_mp:
1167 parser.error('Use only one of --mp or --non-mp')
1103 1168
1104 bots = [] 1169 url = options.swarming + '/api/swarming/v1/bots/list'
1105 cursor = None 1170 if options.dead_only:
1106 limit = 250 1171 url += '?is_dead=TRUE'
1107 # Iterate via cursors. 1172 elif options.keep_dead:
1108 base_url = ( 1173 url += '?is_dead=NONE'
1109 options.swarming + '/api/swarming/v1/bots/list?limit=%d' % limit) 1174 else:
1110 while True: 1175 url += '?is_dead=FALSE'
1111 url = base_url
1112 if cursor:
1113 url += '&cursor=%s' % urllib.quote(cursor)
1114 data = net.url_read_json(url)
1115 if data is None:
1116 print >> sys.stderr, 'Failed to access %s' % options.swarming
1117 return 1
1118 bots.extend(data['items'])
1119 cursor = data.get('cursor')
1120 if not cursor:
1121 break
1122 1176
1177 if options.busy:
1178 url += '&is_busy=TRUE'
1179 elif options.idle:
1180 url += '&is_busy=FALSE'
1181 else:
1182 url += '&is_busy=NONE'
1183
1184 if options.mp:
1185 url += '&is_mp=TRUE'
1186 elif options.non_mp:
1187 url += '&is_mp=FALSE'
1188 else:
1189 url += '&is_mp=NONE'
1190
1191 for key, value in options.dimensions:
1192 url += '&dimensions=%s:%s' % (key, value)
Vadim Sh. 2017/06/05 21:33:13 this needs url encoding
M-A Ruel 2017/06/07 15:36:03 Argh, thanks. Changed to encode everything systema
1193 try:
1194 data, yielder = get_yielder(url, 0)
1195 bots = data.get('items') or []
1196 for items in yielder():
1197 bots.extend(items)
1198 except Failure as e:
1199 sys.stderr.write('\n%s\n' % e)
1200 return 1
1123 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']): 1201 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
1124 if options.dead_only: 1202 print bot['bot_id']
1125 if not bot.get('is_dead'): 1203 if not options.bare:
1126 continue 1204 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
1127 elif not options.keep_dead and bot.get('is_dead'): 1205 print ' %s' % json.dumps(dimensions, sort_keys=True)
1128 continue 1206 if bot.get('task_id'):
1129 1207 print ' task: %s' % bot['task_id']
1130 # If the user requested to filter on dimensions, ensure the bot has all the
1131 # dimensions requested.
1132 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
1133 for key, value in options.dimensions:
1134 if key not in dimensions:
1135 break
1136 # A bot can have multiple value for a key, for example,
1137 # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
1138 # be accepted.
1139 if isinstance(dimensions[key], list):
1140 if value not in dimensions[key]:
1141 break
1142 else:
1143 if value != dimensions[key]:
1144 break
1145 else:
1146 print bot['bot_id']
1147 if not options.bare:
1148 print ' %s' % json.dumps(dimensions, sort_keys=True)
1149 if bot.get('task_id'):
1150 print ' task: %s' % bot['task_id']
1151 return 0 1208 return 0
1152 1209
1153 1210
1154 @subcommand.usage('task_id') 1211 @subcommand.usage('task_id')
1155 def CMDcancel(parser, args): 1212 def CMDcancel(parser, args):
1156 """Cancels a task.""" 1213 """Cancels a task."""
1157 options, args = parser.parse_args(args) 1214 options, args = parser.parse_args(args)
1158 if not args: 1215 if not args:
1159 parser.error('Please specify the task to cancel') 1216 parser.error('Please specify the task to cancel')
1160 for task_id in args: 1217 for task_id in args:
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after
1259 swarming.py query -S server-url.com bots/list 1316 swarming.py query -S server-url.com bots/list
1260 1317
1261 Listing last 10 tasks on a specific bot named 'swarm1': 1318 Listing last 10 tasks on a specific bot named 'swarm1':
1262 swarming.py query -S server-url.com --limit 10 bot/swarm1/tasks 1319 swarming.py query -S server-url.com --limit 10 bot/swarm1/tasks
1263 1320
1264 Listing last 10 tasks with tags os:Ubuntu-12.04 and pool:Chrome. Note that 1321 Listing last 10 tasks with tags os:Ubuntu-12.04 and pool:Chrome. Note that
1265 quoting is important!: 1322 quoting is important!:
1266 swarming.py query -S server-url.com --limit 10 \\ 1323 swarming.py query -S server-url.com --limit 10 \\
1267 'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome' 1324 'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome'
1268 """ 1325 """
1269 CHUNK_SIZE = 250
1270
1271 parser.add_option( 1326 parser.add_option(
1272 '-L', '--limit', type='int', default=200, 1327 '-L', '--limit', type='int', default=200,
1273 help='Limit to enforce on limitless items (like number of tasks); ' 1328 help='Limit to enforce on limitless items (like number of tasks); '
1274 'default=%default') 1329 'default=%default')
1275 parser.add_option( 1330 parser.add_option(
1276 '--json', help='Path to JSON output file (otherwise prints to stdout)') 1331 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1277 parser.add_option( 1332 parser.add_option(
1278 '--progress', action='store_true', 1333 '--progress', action='store_true',
1279 help='Prints a dot at each request to show progress') 1334 help='Prints a dot at each request to show progress')
1280 options, args = parser.parse_args(args) 1335 options, args = parser.parse_args(args)
1281 if len(args) != 1: 1336 if len(args) != 1:
1282 parser.error( 1337 parser.error(
1283 'Must specify only method name and optionally query args properly ' 1338 'Must specify only method name and optionally query args properly '
1284 'escaped.') 1339 'escaped.')
1285 base_url = options.swarming + '/api/swarming/v1/' + args[0] 1340 base_url = options.swarming + '/api/swarming/v1/' + args[0]
1286 url = base_url 1341 try:
1287 if options.limit: 1342 data, yielder = get_yielder(base_url, options.limit)
1288 # Check check, change if not working out. 1343 for items in yielder():
1289 merge_char = '&' if '?' in url else '?' 1344 # Some items support cursors. Try to get automatically if cursors are
Vadim Sh. 2017/06/05 21:33:13 this comment doesn't seem relevant any more
M-A Ruel 2017/06/07 15:36:03 Done.
1290 url += '%slimit=%d' % (merge_char, min(CHUNK_SIZE, options.limit)) 1345 # needed by looking at the 'cursor' items.
1291 data = net.url_read_json(url) 1346 data['items'].extend(items)
1292 if data is None: 1347 if options.progress:
1293 # TODO(maruel): Do basic diagnostic. 1348 sys.stdout.write('.')
1294 print >> sys.stderr, 'Failed to access %s' % url 1349 sys.stdout.flush()
1350 except Failure as e:
1351 sys.stderr.write('\n%s\n' % e)
1295 return 1 1352 return 1
1296
1297 # Some items support cursors. Try to get automatically if cursors are needed
1298 # by looking at the 'cursor' items.
1299 while (
1300 data.get('cursor') and
1301 (not options.limit or len(data['items']) < options.limit)):
1302 merge_char = '&' if '?' in base_url else '?'
1303 url = base_url + '%scursor=%s' % (merge_char, urllib.quote(data['cursor']))
1304 if options.limit:
1305 url += '&limit=%d' % min(CHUNK_SIZE, options.limit - len(data['items']))
1306 if options.progress:
1307 sys.stdout.write('.')
1308 sys.stdout.flush()
1309 new = net.url_read_json(url)
1310 if new is None:
1311 if options.progress:
1312 print('')
1313 print >> sys.stderr, 'Failed to access %s' % options.swarming
1314 return 1
1315 data['items'].extend(new.get('items', []))
1316 data['cursor'] = new.get('cursor')
1317
1318 if options.progress: 1353 if options.progress:
1319 print('') 1354 sys.stdout.write('\n')
1320 if options.limit and len(data.get('items', [])) > options.limit: 1355 sys.stdout.flush()
1321 data['items'] = data['items'][:options.limit]
1322 data.pop('cursor', None)
1323
1324 if options.json: 1356 if options.json:
1325 options.json = unicode(os.path.abspath(options.json)) 1357 options.json = unicode(os.path.abspath(options.json))
1326 tools.write_json(options.json, data, True) 1358 tools.write_json(options.json, data, True)
1327 else: 1359 else:
1328 try: 1360 try:
1329 tools.write_json(sys.stdout, data, False) 1361 tools.write_json(sys.stdout, data, False)
Vadim Sh. 2017/06/05 21:33:13 this progress output will screw up json consider
M-A Ruel 2017/06/07 15:36:03 Done.
1330 sys.stdout.write('\n') 1362 sys.stdout.write('\n')
1331 except IOError: 1363 except IOError:
1332 pass 1364 pass
1333 return 0 1365 return 0
1334 1366
1335 1367
1336 def CMDquery_list(parser, args): 1368 def CMDquery_list(parser, args):
1337 """Returns list of all the Swarming APIs that can be used with command 1369 """Returns list of all the Swarming APIs that can be used with command
1338 'query'. 1370 'query'.
1339 """ 1371 """
(...skipping 312 matching lines...) Expand 10 before | Expand all | Expand 10 after
1652 dispatcher = subcommand.CommandDispatcher(__name__) 1684 dispatcher = subcommand.CommandDispatcher(__name__)
1653 return dispatcher.execute(OptionParserSwarming(version=__version__), args) 1685 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
1654 1686
1655 1687
if __name__ == '__main__':
  # NOTE(review): the helpers below are project modules; their exact effects
  # are not visible from this file — intents inferred from names, confirm.
  subprocess42.inhibit_os_error_reporting()
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main(sys.argv[1:]))
OLDNEW
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698