OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Downloads a Firefox Nightly build for the current platform.""" | 6 """Downloads a Firefox Nightly build for the current platform.""" |
7 | 7 |
8 import datetime | 8 import datetime |
9 import glob | 9 import glob |
10 import os | 10 import os |
(...skipping 13 matching lines...) Expand all Loading... | |
24 sys.path.append(os.path.join(THIRD_PARTY_DIR, 'mozinfo')) | 24 sys.path.append(os.path.join(THIRD_PARTY_DIR, 'mozinfo')) |
25 | 25 |
26 from mozdownload import scraper | 26 from mozdownload import scraper |
27 | 27 |
28 | 28 |
29 def _Touch(a_file): | 29 def _Touch(a_file): |
30 with open(a_file, 'a'): | 30 with open(a_file, 'a'): |
31 os.utime(a_file, None) | 31 os.utime(a_file, None) |
32 | 32 |
33 | 33 |
34 def _FindFallbackFirefoxBuild(target_dir): | 34 def _GetFirefoxArchivesSortedOnModifiedDate(target_dir): |
35 firefox_archives = glob.glob(os.path.join(target_dir, '*tar.bz2')) | 35 firefox_archives = glob.glob(os.path.join(target_dir, '*tar.bz2')) |
36 if not firefox_archives: | 36 if not firefox_archives: |
37 return None | 37 return None |
38 | 38 |
39 firefox_archives.sort(key=os.path.getmtime, reverse=True) | 39 firefox_archives.sort(key=os.path.getmtime, reverse=True) |
40 return firefox_archives | |
41 | |
42 | |
43 def _CleanOldFirefoxArchives(target_dir): | |
44 firefox_archives = _GetFirefoxArchivesSortedOnModifiedDate(target_dir) | |
45 if not firefox_archives: | |
kjellander_chromium
2015/02/02 09:45:00
Add:
or len(firefox_archives) < 2
condition here?
phoglund_chromium
2015/02/02 10:04:11
Done.
| |
46 return | |
47 if len(firefox_archives) < 2: | |
48 return | |
49 | |
50 # Keep the newest archive around as a fallback build and delete the rest. | |
51 rest = firefox_archives[1:] | |
52 print 'About to delete old Firefox archives %s.' % rest | |
53 for old_archive in rest: | |
54 try: | |
55 os.remove(old_archive) | |
56 except OSError: | |
57 pass | |
kjellander_chromium
2015/02/02 09:45:00
I think it's unlikely we will fail to delete these
phoglund_chromium
2015/02/02 10:04:11
I don't think so. If we fail here it's not enough
kjellander_chromium
2015/02/02 10:13:46
Fair enough. Worst case we'll just run out of disk
| |
58 | |
59 | |
def _FindFallbackFirefoxBuild(target_dir):
  """Locate the newest already-downloaded archive to fall back on.

  Args:
    target_dir: directory containing downloaded Firefox archives.

  Returns:
    A (path, age_in_days) tuple for the most recently modified archive,
    or None if target_dir contains no archives.
  """
  archives = _GetFirefoxArchivesSortedOnModifiedDate(target_dir)
  if not archives:
    return None

  newest_build = archives[0]
  # Age of the newest archive, in whole days, based on its mtime.
  age = datetime.timedelta(seconds=time.time() - os.path.getmtime(newest_build))
  return newest_build, age.days
45 | 70 |
46 | 71 |
47 def _MaybeDownload(target_dir, force): | 72 def _MaybeDownload(target_dir, force): |
48 try: | 73 try: |
49 downloader = scraper.DailyScraper(directory=target_dir, version=None) | 74 downloader = scraper.DailyScraper(directory=target_dir, version=None) |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
109 | 134 |
def main():
  """Parse options, then download and extract the latest Firefox Nightly.

  Returns:
    The result of _ExtractArchive when a new archive was downloaded;
    otherwise None (which sys.exit treats as success).
  """
  usage = 'usage: %prog -t <target_dir>'
  parser = OptionParser(usage)
  parser.add_option('-t', '--target-dir',
                    help=('Target directory to put the downloaded and extracted'
                          ' folder with the Firefox Nightly build in.'))
  parser.add_option('-f', '--force', action='store_true',
                    help=('Force download even if the current nightly is '
                          'already downloaded.'))
  parser.add_option('-c', '--clean-old-archives', action='store_true',
                    help=('Clean old firefox archives; one will always be '
                          'kept as a fallback.'))
  options, _args = parser.parse_args()
  if not options.target_dir:
    parser.error('You must specify the target directory.')

  target_dir = options.target_dir
  # Create the target directory on first run.
  if not os.path.isdir(target_dir):
    os.mkdir(target_dir)

  firefox_archive = _MaybeDownload(target_dir, options.force)
  if firefox_archive:
    return _ExtractArchive(firefox_archive, target_dir)

  # NOTE(review): because of the early return above, --clean-old-archives
  # only takes effect when no new archive was downloaded this run —
  # confirm that skipping cleanup after a successful download is intended.
  if options.clean_old_archives:
    _CleanOldFirefoxArchives(target_dir)
161 | |
if __name__ == '__main__':
  # Exit with main()'s return value: None (success) or the extraction result.
  sys.exit(main())
OLD | NEW |