OLD | NEW |
1 # Copyright (c) 2006-2009 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2006-2009 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 # TODO(gwilson): 1. Change text differs to use external utils. | 5 # TODO(gwilson): 1. Change text differs to use external utils. |
6 # 2. Change text_expectations parsing to existing | 6 # 2. Change text_expectations parsing to existing |
7 # logic in layout_package.test_expectations. | 7 # logic in layout_package.test_expectations. |
8 | 8 |
9 import difflib | 9 import difflib |
10 import errno | 10 import errno |
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
135 | 135 |
136 def IsMacPlatform(platform): | 136 def IsMacPlatform(platform): |
137 return (platform and platform.find("Mac") > -1) | 137 return (platform and platform.find("Mac") > -1) |
138 | 138 |
139 def CreateDirectory(dir): | 139 def CreateDirectory(dir): |
140 """ | 140 """ |
141 Method that creates the directory structure given. | 141 Method that creates the directory structure given. |
142 This will create directories recursively until the given dir exists. | 142 This will create directories recursively until the given dir exists. |
143 """ | 143 """ |
144 if not os.path.exists(dir): | 144 if not os.path.exists(dir): |
145 os.makedirs(dir) | 145 os.makedirs(dir, 0777) |
146 | 146 |
147 def ExtractFirstValue(string, regex): | 147 def ExtractFirstValue(string, regex): |
148 m = re.search(regex, string) | 148 m = re.search(regex, string) |
149 if m and m.group(1): | 149 if m and m.group(1): |
150 return m.group(1) | 150 return m.group(1) |
151 return None | 151 return None |
152 | 152 |
153 def ExtractSingleRegexAtURL(url, regex): | 153 def ExtractSingleRegexAtURL(url, regex): |
154 content = ScrapeURL(url) | 154 content = ScrapeURL(url) |
155 m = re.search(regex, content, re.DOTALL) | 155 m = re.search(regex, content, re.DOTALL) |
(...skipping 289 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
445 if self.verbose: | 445 if self.verbose: |
446 print "Downloaded file '%s' doesn't look like a zip file." % filename | 446 print "Downloaded file '%s' doesn't look like a zip file." % filename |
447 return False | 447 return False |
448 | 448 |
449 def _UnzipZipfile(self, zip, base_dir): | 449 def _UnzipZipfile(self, zip, base_dir): |
450 for i, name in enumerate(zip.namelist()): | 450 for i, name in enumerate(zip.namelist()): |
451 if not name.endswith('/'): | 451 if not name.endswith('/'): |
452 extracted_file_path = os.path.join(base_dir, name) | 452 extracted_file_path = os.path.join(base_dir, name) |
453 try: | 453 try: |
454 (path, filename) = os.path.split(extracted_file_path) | 454 (path, filename) = os.path.split(extracted_file_path) |
455 os.makedirs(path) | 455 os.makedirs(path, 0777) |
456 except: | 456 except: |
457 pass | 457 pass |
458 outfile = open(extracted_file_path, 'wb') | 458 outfile = open(extracted_file_path, 'wb') |
459 outfile.write(zip.read(name)) | 459 outfile.write(zip.read(name)) |
460 outfile.flush() | 460 outfile.flush() |
461 outfile.close() | 461 outfile.close() |
| 462 os.chmod(extracted_file_path, 0777) |
462 | 463 |
463 def _GetRevisionAndBuildFromArchiveStep(self): | 464 def _GetRevisionAndBuildFromArchiveStep(self): |
464 if self.archive_step_log_file: | 465 if self.archive_step_log_file: |
465 log = open(self.archive_step_log_file, 'r') | 466 log = open(self.archive_step_log_file, 'r') |
466 content = "".join(log.readlines()) | 467 content = "".join(log.readlines()) |
467 else: | 468 else: |
468 content = ScrapeURL(GetArchiveURL(self.build, | 469 content = ScrapeURL(GetArchiveURL(self.build, |
469 self.platform, | 470 self.platform, |
470 self.fyi_builder)) | 471 self.fyi_builder)) |
471 revision = ExtractFirstValue(content, ARCHIVE_URL_REGEX) | 472 revision = ExtractFirstValue(content, ARCHIVE_URL_REGEX) |
(...skipping 310 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
782 | 783 |
783 def _ExtractFileFromZip(self, zip, file_in_zip, file_to_create): | 784 def _ExtractFileFromZip(self, zip, file_in_zip, file_to_create): |
784 modifiers = "" | 785 modifiers = "" |
785 if file_to_create.endswith(".png"): | 786 if file_to_create.endswith(".png"): |
786 modifiers = "b" | 787 modifiers = "b" |
787 try: | 788 try: |
788 CreateDirectory(file_to_create[0:file_to_create.rfind("/")]) | 789 CreateDirectory(file_to_create[0:file_to_create.rfind("/")]) |
789 localFile = open(file_to_create, "w%s" % modifiers) | 790 localFile = open(file_to_create, "w%s" % modifiers) |
790 localFile.write(zip.read(file_in_zip)) | 791 localFile.write(zip.read(file_in_zip)) |
791 localFile.close() | 792 localFile.close() |
| 793 os.chmod(file_to_create, 0777) |
792 return True | 794 return True |
793 except KeyError: | 795 except KeyError: |
794 print "File %s does not exist in zip file." % (file_in_zip) | 796 print "File %s does not exist in zip file." % (file_in_zip) |
795 except AttributeError: | 797 except AttributeError: |
796 print "File %s does not exist in zip file." % (file_in_zip) | 798 print "File %s does not exist in zip file." % (file_in_zip) |
797 print "Is this zip file assembled correctly?" | 799 print "Is this zip file assembled correctly?" |
798 return False | 800 return False |
799 | 801 |
800 | 802 |
801 def _DownloadFile(self, url, local_filename = None, modifiers = "", | 803 def _DownloadFile(self, url, local_filename = None, modifiers = "", |
802 force = False): | 804 force = False): |
803 """ | 805 """ |
804 Copy the contents of a file from a given URL | 806 Copy the contents of a file from a given URL |
805 to a local file. | 807 to a local file. |
806 """ | 808 """ |
807 try: | 809 try: |
808 if local_filename == None: | 810 if local_filename == None: |
809 local_filename = url.split('/')[-1] | 811 local_filename = url.split('/')[-1] |
810 if os.path.isfile(local_filename) and not force: | 812 if os.path.isfile(local_filename) and not force: |
811 if self.verbose: | 813 if self.verbose: |
812 print "File at %s already exists." % local_filename | 814 print "File at %s already exists." % local_filename |
813 return local_filename | 815 return local_filename |
814 if self.dont_download: | 816 if self.dont_download: |
815 return local_filename | 817 return local_filename |
816 webFile = urllib2.urlopen(url) | 818 webFile = urllib2.urlopen(url) |
817 localFile = open(local_filename, ("w%s" % modifiers)) | 819 localFile = open(local_filename, ("w%s" % modifiers)) |
818 localFile.write(webFile.read()) | 820 localFile.write(webFile.read()) |
819 webFile.close() | 821 webFile.close() |
820 localFile.close() | 822 localFile.close() |
| 823 os.chmod(local_filename, 0777) |
821 except urllib2.HTTPError: | 824 except urllib2.HTTPError: |
822 return None | 825 return None |
823 except urllib2.URLError: | 826 except urllib2.URLError: |
824 print "The url %s is malformed." % url | 827 print "The url %s is malformed." % url |
825 return None | 828 return None |
826 return localFile.name | 829 return localFile.name |
OLD | NEW |