Chromium Code Reviews

Diff: cli/job.py

Issue 6539001: Merge remote branch 'cros/upstream' into master. (Closed) Base URL: ssh://git@gitrw.chromium.org:9222/autotest.git@master
Patch Set: patch Created 9 years, 10 months ago
@@ -1,10 +1,10 @@
 #
 # Copyright 2008 Google Inc. All Rights Reserved.

 """
 The job module contains the objects and methods used to
 manage jobs in Autotest.

 The valid actions are:
 list:    lists job(s)
 create:  create a job
(...skipping 281 matching lines...)
@@ -292,34 +292,34 @@
                     meta_hosts += int(num) * [host]
                 elif re.match('^[*](\w*)', host):
                     meta_hosts += [re.match('^[*](\w*)', host).group(1)]
                 elif host != '' and host not in hosts:
                     # Real hostname and not a duplicate
                     hosts.append(host)

         return (hosts, meta_hosts)


-    def parse(self):
+    def parse(self, parse_info=[]):
         host_info = topic_common.item_parse_info(attribute_name='hosts',
                                                  inline_option='machine',
                                                  filename_option='mlist')
         job_info = topic_common.item_parse_info(attribute_name='jobname',
                                                 use_leftover=True)
         oth_info = topic_common.item_parse_info(attribute_name='one_time_hosts',
                                                 inline_option='one_time_hosts')
         label_info = topic_common.item_parse_info(attribute_name='labels',
                                                   inline_option='labels')

-        options, leftover = super(job_create_or_clone,
-                                  self).parse([host_info, job_info, oth_info,
-                                               label_info], req_items='jobname')
+        options, leftover = super(job_create_or_clone, self).parse(
+                [host_info, job_info, oth_info, label_info] + parse_info,
+                req_items='jobname')
         self.data = {}
         if len(self.jobname) > 1:
             self.invalid_syntax('Too many arguments specified, only expected '
                                 'to receive job name: %s' % self.jobname)
         self.jobname = self.jobname[0]

         if options.priority:
             self.data['priority'] = options.priority.capitalize()

         if self.one_time_hosts:
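A side note on the new signature in the hunk above: parse() now accepts a parse_info list so that subclasses can feed extra topic_common.item_parse_info entries into the shared argument parsing (job_create does exactly that in a later hunk). The [] default is mutable, but it is only read here (it is concatenated into a new list), so it is harmless. The sketch below shows the more defensive None idiom purely for illustration; it is not part of this patch, and example_topic is an invented stand-in class.

    # Illustration only, not part of this patch: the usual defensive
    # alternative to a mutable default argument, call-compatible with the
    # signature above.
    class example_topic(object):
        def parse(self, parse_info=None):
            # Normalize inside the method instead of sharing one default list.
            parse_info = parse_info or []
            return ['host_info', 'job_info'] + parse_info

    print example_topic().parse()                         # ['host_info', 'job_info']
    print example_topic().parse(parse_info=['deps_info'])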
(...skipping 112 matching lines...)
@@ -438,21 +438,24 @@
                 continue
             kernel_info = {'version': version}
             if cmdline:
                 kernel_info['cmdline'] = cmdline
             kernels.append(kernel_info)

         return kernels


     def parse(self):
-        options, leftover = super(job_create, self).parse()
+        deps_info = topic_common.item_parse_info(attribute_name='dependencies',
+                                                 inline_option='dependencies')
+        options, leftover = super(job_create, self).parse(
+                parse_info=[deps_info])

         if (len(self.hosts) == 0 and not self.one_time_hosts
             and not options.labels and not options.atomic_group):
             self.invalid_syntax('Must specify at least one machine '
                                 'or an atomic group '
                                 '(-m, -M, -b, -G or --one-time-hosts).')
         if not options.control_file and not options.test:
             self.invalid_syntax('Must specify either --test or --control-file'
                                 ' to create a job.')
         if options.control_file and options.test:
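The hunk above is the first half of the dependencies cleanup: job_create.parse() now declares a deps_info item and passes it through the new parse_info hook, so the shared parser in cli/topic_common.py fills in self.dependencies. Below is a rough standalone sketch of what that collection amounts to; the helper name is invented for illustration, and the body mirrors the hand-rolled parsing removed in the next hunk.

    # Invented helper, for illustration only; the real logic lives in
    # topic_common.item_parse_info.
    def collect_inline_items(option_value):
        # Split a comma-separated option value, strip whitespace, drop empties:
        # the same normalization the removed code in the next hunk did by hand.
        if not option_value:
            return []
        return [item.strip() for item in option_value.split(',') if item.strip()]

    print collect_inline_items('labelA, labelB,,')        # ['labelA', 'labelB']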
(...skipping 40 matching lines...)
@@ -499,23 +502,21 @@
         if options.noverify:
             self.data['run_verify'] = False
         if options.timeout:
             self.data['timeout'] = options.timeout
         if options.max_runtime:
             self.data['max_runtime_hrs'] = options.max_runtime

         if options.atomic_group:
             self.data['atomic_group_name'] = options.atomic_group

-        deps = options.dependencies.split(',')
-        deps = [dep.strip() for dep in deps if dep.strip()]
-        self.data['dependencies'] = deps
+        self.data['dependencies'] = self.dependencies

         if options.synch_count:
             self.data['synch_count'] = options.synch_count
         if options.server:
             self.data['control_type'] = 'Server'
         else:
             self.data['control_type'] = 'Client'

         return options, leftover

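To make the second half of the dependencies change concrete: self.data now takes the list that the parser already normalized into self.dependencies instead of re-splitting options.dependencies. A hedged sketch of the resulting payload for a client-side job follows; the keys appear in this file, while the --dependencies spelling and all values are assumptions for illustration.

    # Hypothetical self.data after something like
    #   atest job create -m host1 --test sleeptest --dependencies labelA,labelB my_job
    # (the --dependencies spelling and every value here are assumed):
    data = {
        'priority': 'Medium',                  # options.priority.capitalize()
        'dependencies': ['labelA', 'labelB'],  # now copied from self.dependencies
        'control_type': 'Client',              # options.server not set
    }
    print data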
(...skipping 121 matching lines...)
@@ -643,10 +644,10 @@


     def execute(self):
         data = {'job__id__in': self.jobids}
         self.execute_rpc(op='abort_host_queue_entries', **data)
         print 'Aborting jobs: %s' % ', '.join(self.jobids)


     def get_items(self):
         return self.jobids