| OLD | NEW |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import os | 5 import os |
| 6 | 6 |
| 7 from slave import recipe_api | 7 from slave import recipe_api |
| 8 | 8 |
| 9 class AndroidApi(recipe_api.RecipeApi): | 9 class AndroidApi(recipe_api.RecipeApi): |
| 10 def __init__(self, **kwargs): | 10 def __init__(self, **kwargs): |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 64 Args: | 64 Args: |
| 65 step_name: Name of the step. | 65 step_name: Name of the step. |
| 66 archive_name: Name of the archive file. | 66 archive_name: Name of the archive file. |
| 67 files: List of files. Files can be glob's or file paths. If no files | 67 files: List of files. Files can be glob's or file paths. If no files |
| 68 are provided, everything in the target directory will be included. | 68 are provided, everything in the target directory will be included. |
| 69 preserve_paths: If True, files will be stored using the subfolders | 69 preserve_paths: If True, files will be stored using the subfolders |
| 70 in the archive. | 70 in the archive. |
| 71 """ | 71 """ |
| 72 archive_args = ['--target', self.m.chromium.c.BUILD_CONFIG, | 72 archive_args = ['--target', self.m.chromium.c.BUILD_CONFIG, |
| 73 '--name', archive_name] | 73 '--name', archive_name] |
| 74 if files: | 74 |
| 75 # These are covered by build_internal. Bleh. -luqui |
| 76 if files: # pragma: no cover |
| 75 archive_args.extend(['--files', ','.join(files)]) | 77 archive_args.extend(['--files', ','.join(files)]) |
| 76 if not preserve_paths: | 78 if not preserve_paths: # pragma: no cover |
| 77 archive_args.append('--ignore-subfolder-names') | 79 archive_args.append('--ignore-subfolder-names') |
| 78 | 80 |
| 79 yield self.m.python( | 81 yield self.m.python( |
| 80 step_name, | 82 step_name, |
| 81 str(self.m.path['build'].join( | 83 str(self.m.path['build'].join( |
| 82 'scripts', 'slave', 'android', 'archive_build.py')), | 84 'scripts', 'slave', 'android', 'archive_build.py')), |
| 83 archive_args, | 85 archive_args, |
| 84 always_run=True, | 86 always_run=True, |
| 85 **kwargs | 87 **kwargs |
| 86 ) | 88 ) |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 172 repos = ['src', 'src-internal'] | 174 repos = ['src', 'src-internal'] |
| 173 if self.c.REPO_NAME not in repos: | 175 if self.c.REPO_NAME not in repos: |
| 174 repos.append(self.c.REPO_NAME) | 176 repos.append(self.c.REPO_NAME) |
| 175 # TODO(sivachandra): Disable subannotations after cleaning up | 177 # TODO(sivachandra): Disable subannotations after cleaning up |
| 176 # tree_truth.sh. | 178 # tree_truth.sh. |
| 177 yield self.m.step('tree truth steps', | 179 yield self.m.step('tree truth steps', |
| 178 [self.m.path['checkout'].join('build', 'tree_truth.sh'), | 180 [self.m.path['checkout'].join('build', 'tree_truth.sh'), |
| 179 self.m.path['checkout']] + repos, | 181 self.m.path['checkout']] + repos, |
| 180 allow_subannotations=False) | 182 allow_subannotations=False) |
| 181 | 183 |
| 182 def runhooks(self, extra_env=None): | 184 def runhooks(self, extra_env=None): |
| 183 run_hooks_env = self.get_env() | 185 return self.m.chromium.runhooks(env=dict(self.get_env().items() + |
| 184 if self.c.INTERNAL: | 186 (extra_env or {}).items())) |
| 185 run_hooks_env['EXTRA_LANDMINES_SCRIPT'] = self.internal_dir.join( | |
| 186 'build', 'get_internal_landmines.py') | |
| 187 if extra_env: | |
| 188 run_hooks_env.update(extra_env) | |
| 189 return self.m.chromium.runhooks(env=run_hooks_env) | |
| 190 | 187 |
| 191 def apply_svn_patch(self): | 188 def apply_svn_patch(self): |
| 192 # TODO(sivachandra): We should probably pull this into its own module | 189 # TODO(sivachandra): We should probably pull this into its own module |
| 193 # (maybe a 'tryserver' module) at some point. | 190 # (maybe a 'tryserver' module) at some point. |
| 194 return self.m.step( | 191 return self.m.step( |
| 195 'apply_patch', | 192 'apply_patch', |
| 196 [self.m.path['build'].join('scripts', 'slave', 'apply_svn_patch.py'), | 193 [self.m.path['build'].join('scripts', 'slave', 'apply_svn_patch.py'), |
| 197 '-p', self.m.properties['patch_url'], | 194 '-p', self.m.properties['patch_url'], |
| 198 '-r', self.internal_dir]) | 195 '-r', self.internal_dir]) |
| 199 | 196 |
| 200 def compile(self, **kwargs): | 197 def compile(self, **kwargs): |
| 201 assert 'env' not in kwargs, ( | 198 assert 'env' not in kwargs, ( |
| 202 "chromium_android compile clobbers env in keyword arguments") | 199 "chromium_android compile clobbers env in keyword arguments") |
| 203 kwargs['env'] = self.get_env() | 200 kwargs['env'] = self.get_env() |
| 204 return self.m.chromium.compile(**kwargs) | 201 return self.m.chromium.compile(**kwargs) |
| 205 | 202 |
| 206 def findbugs(self): | |
| 207 assert self.c.INTERNAL, 'findbugs is only available on internal builds' | |
| 208 cmd = [ | |
| 209 self.m.path['checkout'].join('build', 'android', 'findbugs_diff.py'), | |
| 210 '-b', self.internal_dir.join('bin', 'findbugs_filter'), | |
| 211 '-o', 'com.google.android.apps.chrome.-,org.chromium.-', | |
| 212 ] | |
| 213 yield self.m.step('findbugs internal', cmd, env=self.get_env()) | |
| 214 | |
| 215 # If findbugs fails, there could be stale class files. Delete them, and | |
| 216 # next run maybe we'll do better. | |
| 217 if self.m.step_history.last_step().retcode != 0: | |
| 218 yield self.m.path.rmwildcard( | |
| 219 '*.class', | |
| 220 self.m.path['checkout'].join('out'), | |
| 221 always_run=True) | |
| 222 | |
| 223 def checkdeps(self): | |
| 224 assert self.c.INTERNAL, 'checkdeps is only available on internal builds' | |
| 225 yield self.m.step( | |
| 226 'checkdeps', | |
| 227 [self.m.path['checkout'].join('tools', 'checkdeps', 'checkdeps.py'), | |
| 228 '--root=%s' % self.internal_dir], | |
| 229 env=self.get_env()) | |
| 230 | |
| 231 def lint(self): | |
| 232 assert self.c.INTERNAL, 'lint is only available on internal builds' | |
| 233 yield self.m.step( | |
| 234 'lint', | |
| 235 [self.internal_dir.join('bin', 'lint.py')], | |
| 236 env=self.get_env()) | |
| 237 | |
| 238 def git_number(self): | 203 def git_number(self): |
| 239 yield self.m.step( | 204 yield self.m.step( |
| 240 'git_number', | 205 'git_number', |
| 241 [self.m.path['depot_tools'].join('git_number.py')], | 206 [self.m.path['depot_tools'].join('git_number.py')], |
| 242 stdout = self.m.raw_io.output(), | 207 stdout = self.m.raw_io.output(), |
| 243 step_test_data=( | 208 step_test_data=( |
| 244 lambda: | 209 lambda: |
| 245 self.m.raw_io.test_api.stream_output('3000\n') | 210 self.m.raw_io.test_api.stream_output('3000\n') |
| 246 ), | 211 ), |
| 247 cwd=self.m.path['checkout']) | 212 cwd=self.m.path['checkout']) |
| 248 | 213 |
| 249 def _upload_build(self, bucket, path): | 214 def upload_build(self, bucket, path): |
| 250 archive_name = 'build_product.zip' | 215 archive_name = 'build_product.zip' |
| 251 | 216 |
| 252 zipfile = self.m.path['checkout'].join('out', archive_name) | 217 zipfile = self.m.path['checkout'].join('out', archive_name) |
| 253 self._cleanup_list.append(zipfile) | 218 self._cleanup_list.append(zipfile) |
| 254 | 219 |
| 255 yield self.make_zip_archive( | 220 yield self.make_zip_archive( |
| 256 'zip_build_product', | 221 'zip_build_product', |
| 257 archive_name, | 222 archive_name, |
| 258 preserve_paths=True, | 223 preserve_paths=True, |
| 259 cwd=self.m.path['checkout'] | 224 cwd=self.m.path['checkout'] |
| 260 ) | 225 ) |
| 261 | 226 |
| 262 yield self.m.gsutil.upload( | 227 yield self.m.gsutil.upload( |
| 263 name='upload_build_product', | 228 name='upload_build_product', |
| 264 source=zipfile, | 229 source=zipfile, |
| 265 bucket=bucket, | 230 bucket=bucket, |
| 266 dest=path | 231 dest=path |
| 267 ) | 232 ) |
| 268 | 233 |
| 269 def upload_clusterfuzz(self): | |
| 270 revision = self.m.properties['revision'] | |
| 271 # When unpacking, ".." will be stripped from the path and the library will | |
| 272 # end up in ./third_party/llvm-build/... | |
| 273 files = ['apks/*', 'lib/*.so', | |
| 274 '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/' + | |
| 275 'linux/libclang_rt.asan-arm-android.so'] | |
| 276 | |
| 277 archive_name = 'clusterfuzz.zip' | |
| 278 zipfile = self.m.path['checkout'].join('out', archive_name) | |
| 279 self._cleanup_list.append(zipfile) | |
| 280 | |
| 281 yield self.git_number() | |
| 282 git_number = str.strip(self.m.step_history['git_number'].stdout) | |
| 283 | |
| 284 yield self.make_zip_archive( | |
| 285 'zip_clusterfuzz', | |
| 286 archive_name, | |
| 287 files=files, | |
| 288 preserve_paths=False, | |
| 289 cwd=self.m.path['checkout'] | |
| 290 ) | |
| 291 yield self.m.python( | |
| 292 'git_revisions', | |
| 293 self.m.path['checkout'].join(self.c.internal_dir_name, 'build', | |
| 294 'clusterfuzz_generate_revision.py'), | |
| 295 ['--file', git_number], | |
| 296 always_run=True, | |
| 297 ) | |
| 298 yield self.m.gsutil.upload( | |
| 299 name='upload_revision_data', | |
| 300 source=self.m.path['checkout'].join('out', git_number), | |
| 301 bucket='%s/revisions' % self.c.storage_bucket, | |
| 302 dest=git_number | |
| 303 ) | |
| 304 yield self.m.gsutil.upload( | |
| 305 name='upload_clusterfuzz', | |
| 306 source=zipfile, | |
| 307 bucket=self.c.storage_bucket, | |
| 308 dest='%s%s.zip' % (self.c.upload_dest_prefix, git_number) | |
| 309 ) | |
| 310 | |
| 311 def upload_build_for_tester(self): | 234 def upload_build_for_tester(self): |
| 312 return self._upload_build( | 235 return self.upload_build( |
| 313 bucket=self._internal_names['BUILD_BUCKET'], | 236 bucket=self._internal_names['BUILD_BUCKET'], |
| 314 path='%s/build_product_%s.zip' % ( | 237 path='%s/build_product_%s.zip' % ( |
| 315 self.m.properties['buildername'], self.m.properties['revision'])) | 238 self.m.properties['buildername'], self.m.properties['revision'])) |
| 316 | 239 |
| 317 def _download_build(self, bucket, path): | 240 def _download_build(self, bucket, path): |
| 318 base_path = path.split('/')[-1] | 241 base_path = path.split('/')[-1] |
| 319 zipfile = self.m.path['checkout'].join('out', base_path) | 242 zipfile = self.m.path['checkout'].join('out', base_path) |
| 320 self._cleanup_list.append(zipfile) | 243 self._cleanup_list.append(zipfile) |
| 321 yield self.m.gsutil.download( | 244 yield self.m.gsutil.download( |
| 322 name='download_build_product', | 245 name='download_build_product', |
| (...skipping 291 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 614 | 537 |
| 615 yield self.m.gsutil.upload( | 538 yield self.m.gsutil.upload( |
| 616 source=self.coverage_dir.join('coverage_html'), | 539 source=self.coverage_dir.join('coverage_html'), |
| 617 bucket='chrome-code-coverage', | 540 bucket='chrome-code-coverage', |
| 618 dest=gs_dest, | 541 dest=gs_dest, |
| 619 args=['-R'], | 542 args=['-R'], |
| 620 name='upload coverage report', | 543 name='upload coverage report', |
| 621 link_name='Coverage report', | 544 link_name='Coverage report', |
| 622 always_run=True, | 545 always_run=True, |
| 623 **kwargs) | 546 **kwargs) |
| OLD | NEW |