| OLD | NEW |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import os | 5 import os |
| 6 | 6 |
| 7 from slave import recipe_api | 7 from slave import recipe_api |
| 8 | 8 |
| 9 class AndroidApi(recipe_api.RecipeApi): | 9 class AndroidApi(recipe_api.RecipeApi): |
| 10 def __init__(self, **kwargs): | 10 def __init__(self, **kwargs): |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 64 Args: | 64 Args: |
| 65 step_name: Name of the step. | 65 step_name: Name of the step. |
| 66 archive_name: Name of the archive file. | 66 archive_name: Name of the archive file. |
| 67 files: List of files. Files can be globs or file paths. If no files | 67 files: List of files. Files can be globs or file paths. If no files |
| 68 are provided, everything in the target directory will be included. | 68 are provided, everything in the target directory will be included. |
| 69 preserve_paths: If True, files will be stored using the subfolders | 69 preserve_paths: If True, files will be stored using the subfolders |
| 70 in the archive. | 70 in the archive. |
| 71 """ | 71 """ |
| 72 archive_args = ['--target', self.m.chromium.c.BUILD_CONFIG, | 72 archive_args = ['--target', self.m.chromium.c.BUILD_CONFIG, |
| 73 '--name', archive_name] | 73 '--name', archive_name] |
| 74 | 74 if files: |
| 75 # These are covered by build_internal. Bleh. -luqui | |
| 76 if files: # pragma: no cover | |
| 77 archive_args.extend(['--files', ','.join(files)]) | 75 archive_args.extend(['--files', ','.join(files)]) |
| 78 if not preserve_paths: # pragma: no cover | 76 if not preserve_paths: |
| 79 archive_args.append('--ignore-subfolder-names') | 77 archive_args.append('--ignore-subfolder-names') |
| 80 | 78 |
| 81 yield self.m.python( | 79 yield self.m.python( |
| 82 step_name, | 80 step_name, |
| 83 str(self.m.path['build'].join( | 81 str(self.m.path['build'].join( |
| 84 'scripts', 'slave', 'android', 'archive_build.py')), | 82 'scripts', 'slave', 'android', 'archive_build.py')), |
| 85 archive_args, | 83 archive_args, |
| 86 always_run=True, | 84 always_run=True, |
| 87 **kwargs | 85 **kwargs |
| 88 ) | 86 ) |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 174 repos = ['src', 'src-internal'] | 172 repos = ['src', 'src-internal'] |
| 175 if self.c.REPO_NAME not in repos: | 173 if self.c.REPO_NAME not in repos: |
| 176 repos.append(self.c.REPO_NAME) | 174 repos.append(self.c.REPO_NAME) |
| 177 # TODO(sivachandra): Disable subannotations after cleaning up | 175 # TODO(sivachandra): Disable subannotations after cleaning up |
| 178 # tree_truth.sh. | 176 # tree_truth.sh. |
| 179 yield self.m.step('tree truth steps', | 177 yield self.m.step('tree truth steps', |
| 180 [self.m.path['checkout'].join('build', 'tree_truth.sh'), | 178 [self.m.path['checkout'].join('build', 'tree_truth.sh'), |
| 181 self.m.path['checkout']] + repos, | 179 self.m.path['checkout']] + repos, |
| 182 allow_subannotations=False) | 180 allow_subannotations=False) |
| 183 | 181 |
| 184 def runhooks(self, extra_env={}): | 182 def runhooks(self, extra_env=None): |
| 185 return self.m.chromium.runhooks(env=dict(self.get_env().items() + | 183 run_hooks_env = self.get_env() |
| 186 extra_env.items())) | 184 if self.c.INTERNAL: |
| 185 run_hooks_env['EXTRA_LANDMINES_SCRIPT'] = self.internal_dir.join( |
| 186 'build', 'get_internal_landmines.py') |
| 187 if extra_env: |
| 188 run_hooks_env.update(extra_env) |
| 189 return self.m.chromium.runhooks(env=run_hooks_env) |
| 187 | 190 |
| 188 def apply_svn_patch(self): | 191 def apply_svn_patch(self): |
| 189 # TODO(sivachandra): We should probably pull this into its own module | 192 # TODO(sivachandra): We should probably pull this into its own module |
| 190 # (maybe a 'tryserver' module) at some point. | 193 # (maybe a 'tryserver' module) at some point. |
| 191 return self.m.step( | 194 return self.m.step( |
| 192 'apply_patch', | 195 'apply_patch', |
| 193 [self.m.path['build'].join('scripts', 'slave', 'apply_svn_patch.py'), | 196 [self.m.path['build'].join('scripts', 'slave', 'apply_svn_patch.py'), |
| 194 '-p', self.m.properties['patch_url'], | 197 '-p', self.m.properties['patch_url'], |
| 195 '-r', self.internal_dir]) | 198 '-r', self.internal_dir]) |
| 196 | 199 |
| 197 def compile(self, **kwargs): | 200 def compile(self, **kwargs): |
| 198 assert 'env' not in kwargs, ( | 201 assert 'env' not in kwargs, ( |
| 199 "chromium_andoid compile clobbers env in keyword arguments") | 202 "chromium_andoid compile clobbers env in keyword arguments") |
| 200 kwargs['env'] = self.get_env() | 203 kwargs['env'] = self.get_env() |
| 201 return self.m.chromium.compile(**kwargs) | 204 return self.m.chromium.compile(**kwargs) |
| 202 | 205 |
| 206 def findbugs(self): |
| 207 assert self.c.INTERNAL, 'findbugs is only available on internal builds' |
| 208 cmd = [ |
| 209 self.m.path['checkout'].join('build', 'android', 'findbugs_diff.py'), |
| 210 '-b', self.internal_dir.join('bin', 'findbugs_filter'), |
| 211 '-o', 'com.google.android.apps.chrome.-,org.chromium.-', |
| 212 ] |
| 213 yield self.m.step('findbugs internal', cmd, env=self.get_env()) |
| 214 |
| 215 # If findbugs fails, there could be stale class files. Delete them, and |
| 216 # next run maybe we'll do better. |
| 217 if self.m.step_history.last_step().retcode != 0: |
| 218 yield self.m.path.rmwildcard( |
| 219 '*.class', |
| 220 self.m.path['checkout'].join('out'), |
| 221 always_run=True) |
| 222 |
| 223 def checkdeps(self): |
| 224 assert self.c.INTERNAL, 'checkdeps is only available on internal builds' |
| 225 yield self.m.step( |
| 226 'checkdeps', |
| 227 [self.m.path['checkout'].join('tools', 'checkdeps', 'checkdeps.py'), |
| 228 '--root=%s' % self.internal_dir], |
| 229 env=self.get_env()) |
| 230 |
| 231 def lint(self): |
| 232 assert self.c.INTERNAL, 'lint is only available on internal builds' |
| 233 yield self.m.step( |
| 234 'lint', |
| 235 [self.internal_dir.join('bin', 'lint.py')], |
| 236 env=self.get_env()) |
| 237 |
| 203 def git_number(self): | 238 def git_number(self): |
| 204 yield self.m.step( | 239 yield self.m.step( |
| 205 'git_number', | 240 'git_number', |
| 206 [self.m.path['depot_tools'].join('git_number.py')], | 241 [self.m.path['depot_tools'].join('git_number.py')], |
| 207 stdout = self.m.raw_io.output(), | 242 stdout = self.m.raw_io.output(), |
| 208 step_test_data=( | 243 step_test_data=( |
| 209 lambda: | 244 lambda: |
| 210 self.m.raw_io.test_api.stream_output('3000\n') | 245 self.m.raw_io.test_api.stream_output('3000\n') |
| 211 ), | 246 ), |
| 212 cwd=self.m.path['checkout']) | 247 cwd=self.m.path['checkout']) |
| 213 | 248 |
| 214 def upload_build(self, bucket, path): | 249 def _upload_build(self, bucket, path): |
| 215 archive_name = 'build_product.zip' | 250 archive_name = 'build_product.zip' |
| 216 | 251 |
| 217 zipfile = self.m.path['checkout'].join('out', archive_name) | 252 zipfile = self.m.path['checkout'].join('out', archive_name) |
| 218 self._cleanup_list.append(zipfile) | 253 self._cleanup_list.append(zipfile) |
| 219 | 254 |
| 220 yield self.make_zip_archive( | 255 yield self.make_zip_archive( |
| 221 'zip_build_product', | 256 'zip_build_product', |
| 222 archive_name, | 257 archive_name, |
| 223 preserve_paths=True, | 258 preserve_paths=True, |
| 224 cwd=self.m.path['checkout'] | 259 cwd=self.m.path['checkout'] |
| 225 ) | 260 ) |
| 226 | 261 |
| 227 yield self.m.gsutil.upload( | 262 yield self.m.gsutil.upload( |
| 228 name='upload_build_product', | 263 name='upload_build_product', |
| 229 source=zipfile, | 264 source=zipfile, |
| 230 bucket=bucket, | 265 bucket=bucket, |
| 231 dest=path | 266 dest=path |
| 232 ) | 267 ) |
| 233 | 268 |
| 269 def upload_clusterfuzz(self): |
| 270 revision = self.m.properties['revision'] |
| 271 # When unpacking, ".." will be stripped from the path and the library will |
| 272 # end up in ./third_party/llvm-build/... |
| 273 files = ['apks/*', 'lib/*.so', |
| 274 '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/' + |
| 275 'linux/libclang_rt.asan-arm-android.so'] |
| 276 |
| 277 archive_name = 'clusterfuzz.zip' |
| 278 zipfile = self.m.path['checkout'].join('out', archive_name) |
| 279 self._cleanup_list.append(zipfile) |
| 280 |
| 281 yield self.git_number() |
| 282 git_number = str.strip(self.m.step_history['git_number'].stdout) |
| 283 |
| 284 yield self.make_zip_archive( |
| 285 'zip_clusterfuzz', |
| 286 archive_name, |
| 287 files=files, |
| 288 preserve_paths=False, |
| 289 cwd=self.m.path['checkout'] |
| 290 ) |
| 291 yield self.m.python( |
| 292 'git_revisions', |
| 293 self.m.path['checkout'].join(self.c.internal_dir_name, 'build', |
| 294 'clusterfuzz_generate_revision.py'), |
| 295 ['--file', git_number], |
| 296 always_run=True, |
| 297 ) |
| 298 yield self.m.gsutil.upload( |
| 299 name='upload_revision_data', |
| 300 source=self.m.path['checkout'].join('out', git_number), |
| 301 bucket='%s/revisions' % self.c.storage_bucket, |
| 302 dest=git_number |
| 303 ) |
| 304 yield self.m.gsutil.upload( |
| 305 name='upload_clusterfuzz', |
| 306 source=zipfile, |
| 307 bucket=self.c.storage_bucket, |
| 308 dest='%s%s.zip' % (self.c.upload_dest_prefix, git_number) |
| 309 ) |
| 310 |
| 234 def upload_build_for_tester(self): | 311 def upload_build_for_tester(self): |
| 235 return self.upload_build( | 312 return self._upload_build( |
| 236 bucket=self._internal_names['BUILD_BUCKET'], | 313 bucket=self._internal_names['BUILD_BUCKET'], |
| 237 path='%s/build_product_%s.zip' % ( | 314 path='%s/build_product_%s.zip' % ( |
| 238 self.m.properties['buildername'], self.m.properties['revision'])) | 315 self.m.properties['buildername'], self.m.properties['revision'])) |
| 239 | 316 |
| 240 def _download_build(self, bucket, path): | 317 def _download_build(self, bucket, path): |
| 241 base_path = path.split('/')[-1] | 318 base_path = path.split('/')[-1] |
| 242 zipfile = self.m.path['checkout'].join('out', base_path) | 319 zipfile = self.m.path['checkout'].join('out', base_path) |
| 243 self._cleanup_list.append(zipfile) | 320 self._cleanup_list.append(zipfile) |
| 244 yield self.m.gsutil.download( | 321 yield self.m.gsutil.download( |
| 245 name='download_build_product', | 322 name='download_build_product', |
| (...skipping 291 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 537 | 614 |
| 538 yield self.m.gsutil.upload( | 615 yield self.m.gsutil.upload( |
| 539 source=self.coverage_dir.join('coverage_html'), | 616 source=self.coverage_dir.join('coverage_html'), |
| 540 bucket='chrome-code-coverage', | 617 bucket='chrome-code-coverage', |
| 541 dest=gs_dest, | 618 dest=gs_dest, |
| 542 args=['-R'], | 619 args=['-R'], |
| 543 name='upload coverage report', | 620 name='upload coverage report', |
| 544 link_name='Coverage report', | 621 link_name='Coverage report', |
| 545 always_run=True, | 622 always_run=True, |
| 546 **kwargs) | 623 **kwargs) |
| OLD | NEW |