Chromium Code Reviews

Unified Diff: appengine/swarming/swarming_bot/bot_code/task_runner.py

Issue 2024313003: Send authorization headers when calling Swarming backend. (Closed) Base URL: https://chromium.googlesource.com/external/github.com/luci/luci-py@master
Patch Set: extract into separate function Created 4 years, 6 months ago
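For context, this change threads a new --auth-headers-file flag from bot_main down to task_runner so that every call back to the Swarming server carries HTTP authentication headers read from that file. Below is a minimal sketch of the assumed round trip; the file path and Authorization value are made-up placeholders, and only the JSON-object-of-headers file format and the headers= argument eventually passed to net.url_read_json() come from the diff itself.

# Illustrative sketch only; path and token value are hypothetical.
import json
import os
import tempfile

fd, auth_headers_file = tempfile.mkstemp(suffix='.json')
os.close(fd)

# bot_main is expected to keep a JSON object of header name -> value on disk.
with open(auth_headers_file, 'w') as f:
  json.dump({'Authorization': 'Bearer <access-token>'}, f)

# task_runner reads it back (via file_reader.FileReaderThread in this patch
# set) and forwards the dict as the headers= argument of
# net.url_read_json(..., headers=headers, follow_redirects=False).
with open(auth_headers_file, 'r') as f:
  headers = {str(k): str(v) for k, v in json.load(f).items()}
print(headers)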
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Runs a Swarming task.

Downloads all the necessary files to run the task, executes the command and
streams results back to the Swarming server.

The process exit code is 0 when the task was executed, even if the task itself
(...skipping 10 matching lines...)
import os
import signal
import sys
import time

from utils import net
from utils import on_error
from utils import subprocess42
from utils import zip_package

+import file_reader
+

# Path to this file or the zip containing this file.
THIS_FILE = os.path.abspath(zip_package.get_main_script_path())


# Sends a maximum of 100kb of stdout per task_update packet.
MAX_CHUNK_SIZE = 102400


# Maximum wait between task_update packet when there's no output.
(...skipping 109 matching lines...)


class MustExit(Exception):
  """Raised on signal that the process must exit immediately."""
  def __init__(self, sig):
    super(MustExit, self).__init__()
    self.signal = sig


def load_and_run(
-    in_file, swarming_server, cost_usd_hour, start, out_file, min_free_space):
+    in_file, swarming_server, auth_headers_file, cost_usd_hour, start,
+    out_file, min_free_space):
  """Loads the task's metadata and execute it.

  This may throw all sorts of exceptions in case of failure. It's up to the
  caller to trap them. These shall be considered 'internal_failure' instead of
  'failure' from a TaskRunResult standpoint.
  """
  # The work directory is guaranteed to exist since it was created by
  # bot_main.py and contains the manifest. Temporary files will be downloaded
  # there. It's bot_main.py that will delete the directory afterward. Tests are
  # not run from there.
  task_result = None
  def handler(sig, _):
    logging.info('Got signal %s', sig)
    raise MustExit(sig)
  work_dir = os.path.dirname(out_file)
  try:
    with subprocess42.set_signal_handler([SIG_BREAK_OR_TERM], handler):
      if not os.path.isdir(work_dir):
        raise ValueError('%s expected to exist' % work_dir)

      with open(in_file, 'rb') as f:
        task_details = TaskDetails(json.load(f))

      task_result = run_command(
-          swarming_server, task_details, work_dir, cost_usd_hour, start,
-          min_free_space)
+          swarming_server, task_details, work_dir, auth_headers_file,
+          cost_usd_hour, start, min_free_space)
  except MustExit as e:
    # This normally means run_command() didn't get the chance to run, as it
    # itself trap MustExit and will report accordingly. In this case, we want
    # the parent process to send the message instead.
    if not task_result:
      task_result = {
        u'exit_code': None,
        u'hard_timeout': False,
        u'io_timeout': False,
        u'must_signal_internal_failure':
            u'task_runner received signal %s' % e.signal,
        u'version': OUT_VERSION,
      }
  finally:
    # We've found tests to delete 'work' when quitting, causing an exception
    # here. Try to recreate the directory if necessary.
    if not os.path.isdir(work_dir):
      os.mkdir(work_dir)
    with open(out_file, 'wb') as f:
      json.dump(task_result, f)


-def post_update(swarming_server, params, exit_code, stdout, output_chunk_start):
+def post_update(
+    swarming_server, auth_headers, params, exit_code,
+    stdout, output_chunk_start):
  """Posts task update to task_update.

  Arguments:
    swarming_server: Base URL to Swarming server.
+    auth_headers: dict with HTTP authentication headers.
    params: Default JSON parameters for the POST.
    exit_code: Process exit code, only when a command completed.
    stdout: Incremental output since last call, if any.
    output_chunk_start: Total number of stdout previously sent, for coherency
        with the server.
  """
  params = params.copy()
  if exit_code is not None:
    params['exit_code'] = exit_code
  if stdout:
    # The output_chunk_start is used by the server to make sure that the stdout
    # chunks are processed and saved in the DB in order.
    params['output'] = base64.b64encode(stdout)
    params['output_chunk_start'] = output_chunk_start
  # TODO(maruel): Support early cancellation.
  # https://code.google.com/p/swarming/issues/detail?id=62
  resp = net.url_read_json(
      swarming_server+'/swarming/api/v1/bot/task_update/%s' % params['task_id'],
-      data=params)
+      data=params,
+      headers=auth_headers,
+      follow_redirects=False)
  logging.debug('post_update() = %s', resp)
  if not resp or resp.get('error'):
    # Abandon it. This will force a process exit.
    raise ValueError(resp.get('error') if resp else 'Failed to contact server')


def should_post_update(stdout, now, last_packet):
  """Returns True if it's time to send a task_update packet via post_update().

  Sends a packet when one of this condition is met:
  - more than MAX_CHUNK_SIZE of stdout is buffered.
  - last packet was sent more than MIN_PACKET_INTERNAL seconds ago and there was
    stdout.
  - last packet was sent more than MAX_PACKET_INTERVAL seconds ago.
  """
  packet_interval = MIN_PACKET_INTERNAL if stdout else MAX_PACKET_INTERVAL
  return len(stdout) >= MAX_CHUNK_SIZE or (now - last_packet) > packet_interval


+def read_auth_headers(path):
    M-A Ruel 2016/06/06 21:18:36  dead code?
    Vadim Sh. 2016/06/06 22:17:09  oops, removed
+  """Reads authentication headers from the given file.
+
+  The file is kept up-to-date by the main bot process (see AuthHeadersDumper in
+  bot_main.py).
+
+  Retries a bunch of times on errors to workaround Windows file locking issues.
+  If it fails to read the file even after a bunch of retries, raises ValueError
+  that eventually aborts the task (since we can't run it without
+  authentication).
+  """
+  attempts = 100
+  while True:
+    try:
+      with open(path, 'rb') as f:
+        headers = json.load(f)
+      if not isinstance(headers, dict):
+        raise ValueError('Expecting dict, got %r' % (headers,))
+      # The headers are ASCII for sure, so don't bother with picking the
+      # correct unicode encoding, default would work.
+      return {str(k): str(v) for k, v in headers.iteritems()}
+    except (OSError, IOError, ValueError) as e:
+      last_error = 'Failed to read auth headers from %s: %s' % (path, e)
+      attempts -= 1
+      if not attempts:
+        raise ValueError(last_error)
+      time.sleep(0.05)
+
+
def calc_yield_wait(task_details, start, last_io, timed_out, stdout):
  """Calculates the maximum number of seconds to wait in yield_any()."""
  now = monotonic_time()
  if timed_out:
    # Give a |grace_period| seconds delay.
    if task_details.grace_period:
      return max(now - timed_out - task_details.grace_period, 0.)
    return 0.

  out = MIN_PACKET_INTERNAL if stdout else MAX_PACKET_INTERVAL
(...skipping 12 matching lines...)
  try:
    proc.wait(grace_period)
  except subprocess42.TimeoutError:
    logging.warning('SIGKILL finally due to %s', reason)
    proc.kill()
  exit_code = proc.wait()
  logging.info('Waiting for proces exit in finally - done')
  return exit_code


+def start_reading_headers(auth_headers_file):
+  """Spawns a thread that reread headers file.
+
+  Returns:
+    Tuple (callback that returns the last known headers, stop callback).
+
+  Raises:
+    file_reader.FatalReadError if headers file can't be read.
+    ValueError if it can be read, but its body is invalid.
+  """
+  # Read headers more often than bot_main writes them, to reduce maximum
+  # possible latency between headers are updated and read.
+  headers_reader = file_reader.FileReaderThread(
+      auth_headers_file, interval_sec=30)
+
+  def read_and_validate_headers():
+    val = headers_reader.last_value or {}
+    if not isinstance(val, dict):
+      raise ValueError('Expecting dict with headers, got %r' % (val,))
+    # The headers must be ASCII for sure, so don't bother with picking the
+    # correct unicode encoding, default would work.
+    return {str(k): str(v) for k, v in val.iteritems()}
+
+  headers_reader.start()
+  read_and_validate_headers()  # initial validation, may raise ValueError
+  return read_and_validate_headers, headers_reader.stop
+
+
def run_command(
-    swarming_server, task_details, work_dir, cost_usd_hour, task_start,
-    min_free_space):
+    swarming_server, task_details, work_dir, auth_headers_file, cost_usd_hour,
+    task_start, min_free_space):
  """Runs a command and sends packets to the server to stream results back.

  Implements both I/O and hard timeouts. Sends the packets numbered, so the
  server can ensure they are processed in order.

  Returns:
    Metadata about the command.
  """
  # TODO(maruel): This function is incomprehensible, split and refactor.
+
+  # Grab initial auth headers and start rereading them in parallel thread. They
+  # MUST be there already. It's fatal internal error if they are not.
+  headers_cb = lambda: {}
+  stop_headers_reader = lambda: None
+  if auth_headers_file:
+    try:
+      headers_cb, stop_headers_reader = start_reading_headers(auth_headers_file)
+    except (ValueError, file_reader.FatalReadError) as e:
+      return {
+        u'exit_code': 1,
+        u'hard_timeout': False,
+        u'io_timeout': False,
+        u'must_signal_internal_failure': str(e),
+        u'version': OUT_VERSION,
+      }
+
  # Signal the command is about to be started.
  last_packet = start = now = monotonic_time()
  params = {
    'cost_usd': cost_usd_hour * (now - task_start) / 60. / 60.,
    'id': task_details.bot_id,
    'task_id': task_details.task_id,
  }
-  post_update(swarming_server, params, None, '', 0)
+  post_update(swarming_server, headers_cb(), params, None, '', 0)

  isolated_result = os.path.join(work_dir, 'isolated_result.json')
  cmd = get_isolated_cmd(
      work_dir, task_details, isolated_result, min_free_space)
  # Hard timeout enforcement is deferred to run_isolated. Grace is doubled to
  # give one 'grace_period' slot to the child process and one slot to upload
  # the results back.
  task_details.hard_timeout = 0
  if task_details.grace_period:
    task_details.grace_period *= 2
(...skipping 21 matching lines...)
        stdout=subprocess42.PIPE,
        stderr=subprocess42.STDOUT,
        stdin=subprocess42.PIPE)
  except OSError as e:
    stdout = 'Command "%s" failed to start.\nError: %s' % (' '.join(cmd), e)
    now = monotonic_time()
    params['cost_usd'] = cost_usd_hour * (now - task_start) / 60. / 60.
    params['duration'] = now - start
    params['io_timeout'] = False
    params['hard_timeout'] = False
-    post_update(swarming_server, params, 1, stdout, 0)
+    post_update(swarming_server, headers_cb(), params, 1, stdout, 0)
    return {
      u'exit_code': 1,
      u'hard_timeout': False,
      u'io_timeout': False,
      u'must_signal_internal_failure': None,
      u'version': OUT_VERSION,
    }

  output_chunk_start = 0
  stdout = ''
(...skipping 11 matching lines...)
      now = monotonic_time()
      if new_data:
        stdout += new_data
        last_io = now

      # Post update if necessary.
      if should_post_update(stdout, now, last_packet):
        last_packet = monotonic_time()
        params['cost_usd'] = (
            cost_usd_hour * (last_packet - task_start) / 60. / 60.)
-        post_update(swarming_server, params, None, stdout, output_chunk_start)
+        post_update(
+            swarming_server, headers_cb(), params, None,
+            stdout, output_chunk_start)
        output_chunk_start += len(stdout)
        stdout = ''

      # Send signal on timeout if necessary. Both are failures, not
      # internal_failures.
      # Eventually kill but return 0 so bot_main.py doesn't cancel the task.
      if not timed_out:
        if (task_details.io_timeout and
            now - last_io > task_details.io_timeout):
          had_io_timeout = True
(...skipping 85 matching lines...)
      params['isolated_stats'] = isolated_stats
    except (IOError, OSError, ValueError) as e:
      logging.error('Swallowing error: %s', e)
      if not must_signal_internal_failure:
        must_signal_internal_failure = str(e)
    # TODO(maruel): Send the internal failure here instead of sending it through
    # bot_main, this causes a race condition.
    if exit_code is None:
      exit_code = -1
    params['hard_timeout'] = had_hard_timeout
-    post_update(swarming_server, params, exit_code, stdout, output_chunk_start)
+    post_update(
+        swarming_server, headers_cb(), params, exit_code,
+        stdout, output_chunk_start)
    return {
      u'exit_code': exit_code,
      u'hard_timeout': had_hard_timeout,
      u'io_timeout': had_io_timeout,
      u'must_signal_internal_failure': must_signal_internal_failure,
      u'version': OUT_VERSION,
    }
  finally:
    try:
      os.remove(isolated_result)
    except OSError:
      pass
+    stop_headers_reader()


def main(args):
  subprocess42.inhibit_os_error_reporting()
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option('--in-file', help='Name of the request file')
  parser.add_option(
      '--out-file', help='Name of the JSON file to write a task summary to')
  parser.add_option(
      '--swarming-server', help='Swarming server to send data back')
  parser.add_option(
+      '--auth-headers-file',
+      help='Name of the file to read authentication headers from')
+  parser.add_option(
      '--cost-usd-hour', type='float', help='Cost of this VM in $/h')
  parser.add_option('--start', type='float', help='Time this task was started')
  parser.add_option(
      '--min-free-space', type='int',
      help='Value to send down to run_isolated')

  options, args = parser.parse_args(args)
  if not options.in_file or not options.out_file or args:
    parser.error('task_runner is meant to be used by swarming_bot.')

  on_error.report_on_exception_exit(options.swarming_server)

  logging.info('starting')
  now = monotonic_time()
  if options.start > now:
    options.start = now

  try:
    load_and_run(
-        options.in_file, options.swarming_server, options.cost_usd_hour,
-        options.start, options.out_file, options.min_free_space)
+        options.in_file, options.swarming_server, options.auth_headers_file,
+        options.cost_usd_hour, options.start, options.out_file,
+        options.min_free_space)
    return 0
  finally:
    logging.info('quitting')
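
file_reader.FileReaderThread itself is not part of this file's diff, so the minimal sketch below approximates the polling pattern start_reading_headers() relies on. The constructor arguments and the last_value/start()/stop() surface are inferred from the usage above and should be treated as assumptions, not the real file_reader module; only the headers_cb()/stop_headers_reader() usage at the end mirrors run_command().

# Minimal sketch, assuming a FileReaderThread-like helper with the surface
# used in start_reading_headers(): last_value, start(), stop().
import json
import threading


class _HeadersFileReader(object):
  """Polls a JSON file and keeps the last successfully parsed value.

  A stand-in for file_reader.FileReaderThread, which is not shown here.
  """

  def __init__(self, path, interval_sec):
    self._path = path
    self._interval = interval_sec
    self._stopped = threading.Event()
    self._thread = threading.Thread(target=self._loop)
    self._thread.daemon = True
    self.last_value = None

  def start(self):
    self._read_once()  # rough stand-in for the real helper's fail-fast start
    self._thread.start()

  def stop(self):
    self._stopped.set()
    self._thread.join()

  def _read_once(self):
    try:
      with open(self._path, 'rb') as f:
        self.last_value = json.load(f)
    except (IOError, OSError, ValueError):
      pass  # keep the previous value; the real helper retries and can raise

  def _loop(self):
    # Re-read periodically until stop() is called.
    while not self._stopped.wait(self._interval):
      self._read_once()


# Usage mirroring run_command(): keep a callback that returns the latest
# headers and attach its result to every task_update POST.
reader = _HeadersFileReader('auth_headers.json', interval_sec=30)
reader.start()
headers_cb = lambda: dict(reader.last_value or {})
# net.url_read_json(url, data=params, headers=headers_cb(),
#                   follow_redirects=False)
reader.stop()

The 30-second interval_sec mirrors the value in the patch; the real helper also distinguishes fatal read errors (file_reader.FatalReadError) from transient ones, which this sketch glosses over.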
