OLD | NEW |
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """Defines various log processors used by buildbot steps. | 5 """Defines various log processors used by buildbot steps. |
6 | 6 |
7 Current approach is to set an instance of log processor in | 7 Current approach is to set an instance of log processor in |
8 the ProcessLogTestStep implementation and it will call process() | 8 the ProcessLogTestStep implementation and it will call process() |
9 method upon completion with full data from process stdio. | 9 method upon completion with full data from process stdio. |
10 """ | 10 """ |
(...skipping 933 matching lines...) |
944 chromium_utils.GetParentClass(self)._ProcessLine(self, line) | 944 chromium_utils.GetParentClass(self)._ProcessLine(self, line) |
945 | 945 |
946 def _CalculateStatistics(self, value_list, trace_name): | 946 def _CalculateStatistics(self, value_list, trace_name): |
947 """Handles statistics generation and recording for page-cycler data. | 947 """Handles statistics generation and recording for page-cycler data. |
948 | 948 |
949 Sums the timings over all pages for each iteration and returns a tuple | 949 Sums the timings over all pages for each iteration and returns a tuple |
950 (mean, standard deviation) of those sums. Also saves a data file | 950 (mean, standard deviation) of those sums. Also saves a data file |
951 <revision>_<tracename>.dat holding a line of times for each URL loaded, | 951 <revision>_<tracename>.dat holding a line of times for each URL loaded, |
952 for use by humans when debugging a regression. | 952 for use by humans when debugging a regression. |
953 """ | 953 """ |
| 954 |
| 955 # If the name of the trace is one of the pages in the page list then we are |
| 956 # dealing with the results for that page only, not the overall results. So |
| 957 # calculate the statistics like a normal GraphingLogProcessor, not the |
| 958 # GraphingPageCyclerLogProcessor. |
| 959 if trace_name in self._page_list: |
| 960 return super(GraphingPageCyclerLogProcessor, self)._CalculateStatistics( |
| 961 value_list, trace_name) |
| 962 |
954 sums = [] | 963 sums = [] |
955 page_times = {} | 964 page_times = {} |
956 page_count = len(self._page_list) | 965 page_count = len(self._page_list) |
957 | 966 |
958 iteration_count = len(value_list) / page_count | 967 iteration_count = len(value_list) / page_count |
959 for iteration in range(iteration_count): | 968 for iteration in range(iteration_count): |
960 start = page_count * iteration | 969 start = page_count * iteration |
961 end = start + page_count | 970 end = start + page_count |
962 iteration_times = value_list[start:end] | 971 iteration_times = value_list[start:end] |
963 sums += [sum(iteration_times)] | 972 sums += [sum(iteration_times)] |
(...skipping 21 matching lines...) |
985 FormatFloat(mean), | 994 FormatFloat(mean), |
986 FormatFloat(stddev), | 995 FormatFloat(stddev), |
987 JoinWithSpacesAndNewLine(times))) | 996 JoinWithSpacesAndNewLine(times))) |
988 | 997 |
989 filename = os.path.join(self._output_dir, | 998 filename = os.path.join(self._output_dir, |
990 '%s_%s.dat' % (self._revision, trace_name)) | 999 '%s_%s.dat' % (self._revision, trace_name)) |
991 fileobj = open(filename, 'w') | 1000 fileobj = open(filename, 'w') |
992 fileobj.write(''.join(file_data)) | 1001 fileobj.write(''.join(file_data)) |
993 fileobj.close() | 1002 fileobj.close() |
994 os.chmod(filename, READABLE_FILE_PERMISSIONS) | 1003 os.chmod(filename, READABLE_FILE_PERMISSIONS) |
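
The change above makes _CalculateStatistics delegate to the plain GraphingLogProcessor path when the trace is a single page from self._page_list, and keeps the per-iteration summing only for the overall page-cycler result. The snippet below is a minimal, self-contained sketch of that summing step as the docstring describes it; the function name, the assumed layout of value_list (page_count consecutive timings per iteration), and the plain mean/stddev math are illustrative assumptions, not the processor's actual helpers.

# A minimal sketch (assumptions labeled), not the processor's real code:
# it only illustrates summing the page timings for each iteration and
# reporting the mean and standard deviation of those sums.
import math

def page_cycler_statistics(value_list, page_count):
  # Assumed layout: value_list holds page_count consecutive timings per
  # iteration, i.e. [iter0_page0, ..., iter0_pageN, iter1_page0, ...].
  iteration_count = len(value_list) // page_count
  sums = []
  for iteration in range(iteration_count):
    start = page_count * iteration
    iteration_times = value_list[start:start + page_count]
    sums.append(sum(iteration_times))
  mean = float(sum(sums)) / len(sums)
  variance = sum((s - mean) ** 2 for s in sums) / len(sums)
  return mean, math.sqrt(variance)

# Example: 3 pages, 2 iterations; the iteration sums are 60 and 66,
# so this prints (63.0, 3.0).
print(page_cycler_statistics([10, 20, 30, 12, 22, 32], page_count=3))

In the CL itself, per-page traces (trace_name in self._page_list) skip this summing entirely and fall back to the superclass statistics, while the overall trace additionally writes the <revision>_<tracename>.dat file shown at the end of the method.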