Index: tools/auto_bisect/bisect_perf_regression.py
diff --git a/tools/auto_bisect/bisect_perf_regression.py b/tools/auto_bisect/bisect_perf_regression.py
index 62f9cb27d9e85c02020b9b5315f69e6c85b1232d..30aacac88d247036d5a31004e5a15e1107e42197 100755
--- a/tools/auto_bisect/bisect_perf_regression.py
+++ b/tools/auto_bisect/bisect_perf_regression.py
@@ -1178,7 +1178,12 @@ class BisectPerformanceMetrics(object):
build_success = self._DownloadAndUnzipBuild(
revision, depot, build_type='Release', create_patch=create_patch)
else:
- # Build locally.
+ # Print the current environment set on the machine.
+ print 'Full Environment:'
+ for key, value in sorted(os.environ.items()):
+ print '%s: %s' % (key, value)
[Review comment thread]
qyearsley — 2015/02/27 03:37:57:
  A possible alternative to using print would be using the logging module.
prasadv — 2015/02/27 18:08:16:
  I tried using logging, but output was delayed till the end of the run.
+ # Print the environment before proceeding with compile.
+ sys.stdout.flush()
build_success = self.builder.Build(depot, self.opts)
os.chdir(cwd)
return build_success
@@ -2731,6 +2736,8 @@ class BisectOptions(object):
raise RuntimeError('Invalid metric specified: [%s]' % opts.metric)
opts.metric = metric_values
+ if opts.target_arch == 'x64' and opts.target_build_type == 'Release':
+ opts.target_build_type = 'Release_x64'
opts.repeat_test_count = min(max(opts.repeat_test_count, 1), 100)
opts.max_time_minutes = min(max(opts.max_time_minutes, 1), 60)
opts.truncate_percent = min(max(opts.truncate_percent, 0), 25)