Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(88)

Side by Side Diff: client/swarming.py

Issue 2923633003: client: Support repeated keys in task request. (Closed)
Patch Set: fixes Created 3 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 """Client tool to trigger tasks or retrieve results from a Swarming server.""" 6 """Client tool to trigger tasks or retrieve results from a Swarming server."""
7 7
8 __version__ = '0.9.1' 8 __version__ = '0.9.1'
9 9
10 import collections 10 import collections
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
class Failure(Exception):
  """Generic failure raised by swarming client operations."""
51 51
52 52
def default_task_name(options):
  """Returns a default task name if not specified.

  The generated name is '<user>/<k=v dimensions joined by _>' with the
  isolated hash appended when present. An explicitly provided
  options.task_name is returned untouched.
  """
  if options.task_name:
    return options.task_name
  pairs = ['%s=%s' % pair for pair in options.dimensions]
  name = u'%s/%s' % (options.user, '_'.join(pairs))
  if options.isolated:
    name += u'/' + options.isolated
  return name
65 63
66 64
67 ### Triggering. 65 ### Triggering.
68 66
69 67
70 # See ../appengine/swarming/swarming_rpcs.py. 68 # See ../appengine/swarming/swarming_rpcs.py.
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
155 This is for the v1 client Swarming API. 153 This is for the v1 client Swarming API.
156 """ 154 """
157 out = namedtuple_to_dict(task_request) 155 out = namedtuple_to_dict(task_request)
158 if hide_token: 156 if hide_token:
159 if out['service_account_token'] not in (None, 'bot', 'none'): 157 if out['service_account_token'] not in (None, 'bot', 'none'):
160 out['service_account_token'] = '<hidden>' 158 out['service_account_token'] = '<hidden>'
161 # Don't send 'service_account_token' if it is None to avoid confusing older 159 # Don't send 'service_account_token' if it is None to avoid confusing older
162 # version of the server that doesn't know about 'service_account_token'. 160 # version of the server that doesn't know about 'service_account_token'.
163 if out['service_account_token'] in (None, 'none'): 161 if out['service_account_token'] in (None, 'none'):
164 out.pop('service_account_token') 162 out.pop('service_account_token')
165 # Maps are not supported until protobuf v3.
166 out['properties']['dimensions'] = [ 163 out['properties']['dimensions'] = [
167 {'key': k, 'value': v} 164 {'key': k, 'value': v}
168 for k, v in out['properties']['dimensions'].iteritems() 165 for k, v in out['properties']['dimensions']
169 ] 166 ]
170 out['properties']['dimensions'].sort(key=lambda x: x['key'])
171 out['properties']['env'] = [ 167 out['properties']['env'] = [
172 {'key': k, 'value': v} 168 {'key': k, 'value': v}
173 for k, v in out['properties']['env'].iteritems() 169 for k, v in out['properties']['env'].iteritems()
174 ] 170 ]
175 out['properties']['env'].sort(key=lambda x: x['key']) 171 out['properties']['env'].sort(key=lambda x: x['key'])
176 return out 172 return out
177 173
178 174
179 def swarming_trigger(swarming, raw_request): 175 def swarming_trigger(swarming, raw_request):
180 """Triggers a request on the Swarming server and returns the json data. 176 """Triggers a request on the Swarming server and returns the json data.
(...skipping 627 matching lines...) Expand 10 before | Expand all | Expand 10 after
808 # URL is of the following form: 804 # URL is of the following form:
809 # url = host + ( 805 # url = host + (
810 # '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version']) 806 # '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
811 api_data = net.url_read_json(api['discoveryRestUrl']) 807 api_data = net.url_read_json(api['discoveryRestUrl'])
812 if api_data is None: 808 if api_data is None:
813 raise APIError('Failed to discover %s on %s' % (api['id'], host)) 809 raise APIError('Failed to discover %s on %s' % (api['id'], host))
814 out[api['id']] = api_data 810 out[api['id']] = api_data
815 return out 811 return out
816 812
817 813
def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items.

  Issues the first request to |base_url| immediately. Returns the decoded
  JSON of that first reply, plus a generator function that lazily follows
  the server's pagination cursor to fetch the remaining pages.

  Arguments:
    base_url: API endpoint URL, which may already contain query arguments.
    limit: maximum total number of items to fetch across all pages; a falsy
        value means no limit.

  Raises:
    Failure: if any request returns no data.
  """
  # Page size requested from the server on each individual request.
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    # Never request more than the overall limit on the first page.
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    # Closure state seeded from the first reply fetched above.
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Try to get automatically if cursors are needed
    # by looking at the 'cursor' items.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      # Each yield is one page of items; may be None if the reply had none.
      yield new_items

  return data, yielder
853
854
818 ### Commands. 855 ### Commands.
819 856
820 857
def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.
825 862
def add_filter_options(parser):
  """Registers the bot selection option group on |parser|.

  Exposes the group as parser.filter_group so callers can append more
  filtering options to it.
  """
  group = optparse.OptionGroup(parser, 'Bot selection')
  group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group = group
  parser.add_option_group(group)
833 870
834 871
def process_filter_options(parser, options):
  """Validates --dimension key/value pairs and sorts them in place.

  Reports any malformed pair via parser.error().
  """
  for key, value in options.dimensions:
    # Each (condition, message) pair mirrors one validation rule; the checks
    # run in order so the first broken rule is the one reported.
    checks = (
        (':' in key, '--dimension key cannot contain ":"'),
        (key.strip() != key, '--dimension key has whitespace'),
        (not key, '--dimension key is empty'),
        (value.strip() != value, '--dimension value has whitespace'),
        (not value, '--dimension value is empty'),
    )
    for failed, message in checks:
      if failed:
        parser.error(message)
  options.dimensions.sort()
886
887
def add_sharding_options(parser):
  """Registers the sharding option group on |parser|.

  Exposes the group as parser.sharding_group for later extension.
  """
  group = optparse.OptionGroup(parser, 'Sharding options')
  group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.sharding_group = group
  parser.add_option_group(group)
841 894
842 895
843 def add_trigger_options(parser): 896 def add_trigger_options(parser):
844 """Adds all options to trigger a task on Swarming.""" 897 """Adds all options to trigger a task on Swarming."""
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after
916 '--deadline', type='int', dest='expiration', 969 '--deadline', type='int', dest='expiration',
917 help=optparse.SUPPRESS_HELP) 970 help=optparse.SUPPRESS_HELP)
918 parser.add_option_group(group) 971 parser.add_option_group(group)
919 972
920 973
921 def process_trigger_options(parser, options, args): 974 def process_trigger_options(parser, options, args):
922 """Processes trigger options and does preparatory steps. 975 """Processes trigger options and does preparatory steps.
923 976
924 Generates service account tokens if necessary. 977 Generates service account tokens if necessary.
925 """ 978 """
926 options.dimensions = dict(options.dimensions) 979 process_filter_options(parser, options)
927 options.env = dict(options.env) 980 options.env = dict(options.env)
928 if args and args[0] == '--': 981 if args and args[0] == '--':
929 args = args[1:] 982 args = args[1:]
930 983
931 if not options.dimensions: 984 if not options.dimensions:
932 parser.error('Please at least specify one --dimension') 985 parser.error('Please at least specify one --dimension')
933 if not all(len(t.split(':', 1)) == 2 for t in options.tags): 986 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
934 parser.error('--tags must be in the format key:value') 987 parser.error('--tags must be in the format key:value')
935 if options.raw_cmd and not args: 988 if options.raw_cmd and not args:
936 parser.error( 989 parser.error(
(...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after
1082 print('Deleting %s failed. Probably already gone' % bot) 1135 print('Deleting %s failed. Probably already gone' % bot)
1083 result = 1 1136 result = 1
1084 return result 1137 return result
1085 1138
1086 1139
def CMDbots(parser, args):
  """Returns information about the bots connected to the Swarming server."""
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Filter out bots alive, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Keep both dead and alive bots')
  parser.filter_group.add_option(
      '--busy', action='store_true', help='Keep only busy bots')
  parser.filter_group.add_option(
      '--idle', action='store_true', help='Keep only idle bots')
  parser.filter_group.add_option(
      '--mp', action='store_true',
      help='Keep only Machine Provider managed bots')
  parser.filter_group.add_option(
      '--non-mp', action='store_true',
      help='Keep only non Machine Provider managed bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)
  process_filter_options(parser, options)

  # Each pair of flags below is mutually exclusive.
  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead or --dead-only')
  if options.busy and options.idle:
    parser.error('Use only one of --busy or --idle')
  if options.mp and options.non_mp:
    parser.error('Use only one of --mp or --non-mp')

  # Filtering is done server-side: each flag maps to a tri-state query
  # argument (TRUE / FALSE / NONE, where NONE means "don't filter").
  url = options.swarming + '/api/swarming/v1/bots/list?'
  values = []
  if options.dead_only:
    values.append(('is_dead', 'TRUE'))
  elif options.keep_dead:
    values.append(('is_dead', 'NONE'))
  else:
    # Default behavior: hide dead bots.
    values.append(('is_dead', 'FALSE'))

  if options.busy:
    values.append(('is_busy', 'TRUE'))
  elif options.idle:
    values.append(('is_busy', 'FALSE'))
  else:
    values.append(('is_busy', 'NONE'))

  if options.mp:
    values.append(('is_mp', 'TRUE'))
  elif options.non_mp:
    values.append(('is_mp', 'FALSE'))
  else:
    values.append(('is_mp', 'NONE'))

  # Dimensions are also filtered server-side, passed as 'key:value' pairs.
  for key, value in options.dimensions:
    values.append(('dimensions', '%s:%s' % (key, value)))
  url += urllib.urlencode(values)
  try:
    # Fetch every page; limit=0 means no cap on the number of bots returned.
    data, yielder = get_yielder(url, 0)
    bots = data.get('items') or []
    for items in yielder():
      if items:
        bots.extend(items)
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  # Print bots sorted in natural order on their id.
  for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
    print bot['bot_id']
    if not options.bare:
      # 'dimensions' arrives as a list of {'key': ..., 'value': ...} dicts;
      # flatten it to a plain dict for display.
      dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
      print ' %s' % json.dumps(dimensions, sort_keys=True)
    if bot.get('task_id'):
      print ' task: %s' % bot['task_id']
  return 0
1152 1215
1153 1216
1154 @subcommand.usage('task_id') 1217 @subcommand.usage('task_id')
1155 def CMDcancel(parser, args): 1218 def CMDcancel(parser, args):
1156 """Cancels a task.""" 1219 """Cancels a task."""
1157 options, args = parser.parse_args(args) 1220 options, args = parser.parse_args(args)
1158 if not args: 1221 if not args:
1159 parser.error('Please specify the task to cancel') 1222 parser.error('Please specify the task to cancel')
1160 for task_id in args: 1223 for task_id in args:
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1248 print data 1311 print data
1249 return 0 1312 return 0
1250 1313
1251 1314
@subcommand.usage('[method name]')
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Raw task request and results:
      swarming.py query -S server-url.com task/123456/request
      swarming.py query -S server-url.com task/123456/result

    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'bot1':
      swarming.py query -S server-url.com --limit 10 bot/bot1/tasks

    Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
  """
  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
           'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')
  base_url = options.swarming + '/api/swarming/v1/' + args[0]
  try:
    # First page of results plus a generator that follows the pagination
    # cursor for the remaining pages.
    data, yielder = get_yielder(base_url, options.limit)
    for items in yielder():
      if items:
        data['items'].extend(items)
      if options.progress:
        # One dot per request, written to stderr so stdout stays valid JSON.
        sys.stderr.write('.')
        sys.stderr.flush()
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  if options.progress:
    sys.stderr.write('\n')
    sys.stderr.flush()
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    tools.write_json(options.json, data, True)
  else:
    try:
      tools.write_json(sys.stdout, data, False)
      sys.stdout.write('\n')
    except IOError:
      # Swallow broken-pipe style errors when output is piped, e.g. to head.
      pass
  return 0
(...skipping 318 matching lines...) Expand 10 before | Expand all | Expand 10 after
1652 dispatcher = subcommand.CommandDispatcher(__name__) 1693 dispatcher = subcommand.CommandDispatcher(__name__)
1653 return dispatcher.execute(OptionParserSwarming(version=__version__), args) 1694 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
1654 1695
1655 1696
if __name__ == '__main__':
  # Process-wide setup (error reporting, encoding, buffering, terminal
  # colors) before dispatching to the subcommand handlers via main().
  subprocess42.inhibit_os_error_reporting()
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main(sys.argv[1:]))
OLDNEW
« no previous file with comments | « no previous file | client/tests/swarming_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698