OLD | NEW |
1 # Copyright 2009 Google Inc. All Rights Reserved. | 1 # Copyright 2009 Google Inc. All Rights Reserved. |
2 # | 2 # |
3 # Licensed under the Apache License, Version 2.0 (the "License"); | 3 # Licensed under the Apache License, Version 2.0 (the "License"); |
4 # you may not use this file except in compliance with the License. | 4 # you may not use this file except in compliance with the License. |
5 # You may obtain a copy of the License at | 5 # You may obtain a copy of the License at |
6 # | 6 # |
7 # http://www.apache.org/licenses/LICENSE-2.0 | 7 # http://www.apache.org/licenses/LICENSE-2.0 |
8 # | 8 # |
9 # Unless required by applicable law or agreed to in writing, software | 9 # Unless required by applicable law or agreed to in writing, software |
10 # distributed under the License is distributed on an "AS IS" BASIS, | 10 # distributed under the License is distributed on an "AS IS" BASIS, |
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 # See the License for the specific language governing permissions and | 12 # See the License for the specific language governing permissions and |
13 # limitations under the License. | 13 # limitations under the License. |
14 | 14 |
15 """Generic utils.""" | 15 """Generic utils.""" |
16 | 16 |
17 import copy | |
18 import errno | 17 import errno |
19 import logging | 18 import logging |
20 import os | 19 import os |
21 import Queue | 20 import Queue |
22 import re | 21 import re |
23 import stat | 22 import stat |
24 import subprocess | 23 import subprocess |
25 import sys | 24 import sys |
26 import threading | 25 import threading |
27 import time | 26 import time |
(...skipping 265 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
293 kwargs['call_filter_on_first_line'] = True | 292 kwargs['call_filter_on_first_line'] = True |
294 # Obviously. | 293 # Obviously. |
295 kwargs['print_stdout'] = True | 294 kwargs['print_stdout'] = True |
296 return CheckCallAndFilter(args, **kwargs) | 295 return CheckCallAndFilter(args, **kwargs) |
297 | 296 |
298 | 297 |
def SoftClone(obj):
  """Clones an object. copy.copy() doesn't work on 'file' objects."""
  # An object that is already a soft clone is returned untouched; the
  # marker is the dynamically-created class name below.
  if obj.__class__.__name__ == 'SoftCloned':
    return obj
  class SoftCloned(object):
    pass
  clone = SoftCloned()
  # Copy every public attribute onto the fresh shell object.
  for name in dir(obj):
    if not name.startswith('_'):
      setattr(clone, name, getattr(obj, name))
  return clone
310 | 310 |
311 | 311 |
312 def MakeFileAutoFlush(fileobj, delay=10): | 312 def MakeFileAutoFlush(fileobj, delay=10): |
313 """Creates a file object clone to automatically flush after N seconds.""" | 313 """Creates a file object clone to automatically flush after N seconds.""" |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
382 new_fileobj.old_annotated_write('%d>%s\n' % (index, line)) | 382 new_fileobj.old_annotated_write('%d>%s\n' % (index, line)) |
383 obj[0] = remaining | 383 obj[0] = remaining |
384 | 384 |
385 def full_flush(): | 385 def full_flush(): |
386 """Flush buffered output.""" | 386 """Flush buffered output.""" |
387 orphans = [] | 387 orphans = [] |
388 new_fileobj.lock.acquire() | 388 new_fileobj.lock.acquire() |
389 try: | 389 try: |
390 # Detect threads no longer existing. | 390 # Detect threads no longer existing. |
391 indexes = (getattr(t, 'index', None) for t in threading.enumerate()) | 391 indexes = (getattr(t, 'index', None) for t in threading.enumerate()) |
392 indexed = filter(None, indexes) | 392 indexes = filter(None, indexes) |
393 for index in new_fileobj.output_buffers: | 393 for index in new_fileobj.output_buffers: |
394 if not index in indexes: | 394 if not index in indexes: |
395 orphans.append((index, new_fileobj.output_buffers[index][0])) | 395 orphans.append((index, new_fileobj.output_buffers[index][0])) |
396 for orphan in orphans: | 396 for orphan in orphans: |
397 del new_fileobj.output_buffers[orphan[0]] | 397 del new_fileobj.output_buffers[orphan[0]] |
398 finally: | 398 finally: |
399 new_fileobj.lock.release() | 399 new_fileobj.lock.release() |
400 | 400 |
401 # Don't keep the lock while writing. Will append \n when it shouldn't. | 401 # Don't keep the lock while writing. Will append \n when it shouldn't. |
402 for orphan in orphans: | 402 for orphan in orphans: |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
473 | 473 |
474 # If we did not find the file in the current directory, make sure we are in a | 474 # If we did not find the file in the current directory, make sure we are in a |
475 # sub directory that is controlled by this configuration. | 475 # sub directory that is controlled by this configuration. |
476 if path != real_from_dir: | 476 if path != real_from_dir: |
477 entries_filename = os.path.join(path, filename + '_entries') | 477 entries_filename = os.path.join(path, filename + '_entries') |
478 if not os.path.exists(entries_filename): | 478 if not os.path.exists(entries_filename): |
479 # If .gclient_entries does not exist, a previous call to gclient sync | 479 # If .gclient_entries does not exist, a previous call to gclient sync |
480 # might have failed. In that case, we cannot verify that the .gclient | 480 # might have failed. In that case, we cannot verify that the .gclient |
481 # is the one we want to use. In order not to cause too much trouble, | 481 # is the one we want to use. In order not to cause too much trouble, |
482 # just issue a warning and return the path anyway. | 482 # just issue a warning and return the path anyway. |
483 print >>sys.stderr, ("%s file in parent directory %s might not be the " | 483 print >> sys.stderr, ("%s file in parent directory %s might not be the " |
484 "file you want to use" % (filename, path)) | 484 "file you want to use" % (filename, path)) |
485 return path | 485 return path |
486 scope = {} | 486 scope = {} |
487 try: | 487 try: |
488 exec(FileRead(entries_filename), scope) | 488 exec(FileRead(entries_filename), scope) |
489 except SyntaxError, e: | 489 except SyntaxError, e: |
490 SyntaxErrorToError(filename, e) | 490 SyntaxErrorToError(filename, e) |
491 all_directories = scope['entries'].keys() | 491 all_directories = scope['entries'].keys() |
492 path_to_check = real_from_dir[len(path)+1:] | 492 path_to_check = real_from_dir[len(path)+1:] |
493 while path_to_check: | 493 while path_to_check: |
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
705 logging.info('Caught exception in thread %s' % self.item.name) | 705 logging.info('Caught exception in thread %s' % self.item.name) |
706 logging.info(str(sys.exc_info())) | 706 logging.info(str(sys.exc_info())) |
707 work_queue.exceptions.put(sys.exc_info()) | 707 work_queue.exceptions.put(sys.exc_info()) |
708 logging.info('Task %s done' % self.item.name) | 708 logging.info('Task %s done' % self.item.name) |
709 | 709 |
710 work_queue.ready_cond.acquire() | 710 work_queue.ready_cond.acquire() |
711 try: | 711 try: |
712 work_queue.ready_cond.notifyAll() | 712 work_queue.ready_cond.notifyAll() |
713 finally: | 713 finally: |
714 work_queue.ready_cond.release() | 714 work_queue.ready_cond.release() |
OLD | NEW |