| OLD | NEW |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """ | 5 """ |
| 6 Implements a task builder for benchmarking effects of NoState Prefetch. | 6 Implements a task builder for benchmarking effects of NoState Prefetch. |
| 7 Noticeable steps of the task pipeline: | 7 Noticeable steps of the task pipeline: |
| 8 * Save a WPR archive | 8 * Save a WPR archive |
| 9 * Process the WPR archive to make all resources cacheable | 9 * Process the WPR archive to make all resources cacheable |
| 10 * Process cache archive to patch response headers back to their original | 10 * Process cache archive to patch response headers back to their original |
| (...skipping 336 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 347 effective_encoded_data_lengths = {} | 347 effective_encoded_data_lengths = {} |
| 348 for request in sandwich_utils.FilterOutDataAndIncompleteRequests( | 348 for request in sandwich_utils.FilterOutDataAndIncompleteRequests( |
| 349 trace.request_track.GetEvents()): | 349 trace.request_track.GetEvents()): |
| 350 if request.from_disk_cache or request.served_from_cache: | 350 if request.from_disk_cache or request.served_from_cache: |
| 351 # At cache archive creation time, a request might be loaded several times, | 351 # At cache archive creation time, a request might be loaded several times, |
| 352 # but we skip it because request.encoded_data_length == 0 when loaded | 352 # but we skip it because request.encoded_data_length == 0 when loaded |
| 353 continue | 353 continue |
| 354 if request.url in effective_encoded_data_lengths: | 354 if request.url in effective_encoded_data_lengths: |
| 355 effective_encoded_data_lengths[request.url] = max( | 355 effective_encoded_data_lengths[request.url] = max( |
| 356 effective_encoded_data_lengths[request.url], | 356 effective_encoded_data_lengths[request.url], |
| 357 request.GetEncodedDataLength()) | 357 request.GetResponseTransportLength()) |
| 358 else: | 358 else: |
| 359 effective_encoded_data_lengths[request.url] = ( | 359 effective_encoded_data_lengths[request.url] = ( |
| 360 request.GetEncodedDataLength()) | 360 request.GetResponseTransportLength()) |
| 361 | 361 |
| 362 upload_data_stream_cache_entry_keys = set() | 362 upload_data_stream_cache_entry_keys = set() |
| 363 upload_data_stream_requests = set() | 363 upload_data_stream_requests = set() |
| 364 for cache_entry_key in cache_keys: | 364 for cache_entry_key in cache_keys: |
| 365 match = _UPLOAD_DATA_STREAM_REQUESTS_REGEX.match(cache_entry_key) | 365 match = _UPLOAD_DATA_STREAM_REQUESTS_REGEX.match(cache_entry_key) |
| 366 if not match: | 366 if not match: |
| 367 continue | 367 continue |
| 368 upload_data_stream_cache_entry_keys.add(cache_entry_key) | 368 upload_data_stream_cache_entry_keys.add(cache_entry_key) |
| 369 upload_data_stream_requests.add(match.group('url')) | 369 upload_data_stream_requests.add(match.group('url')) |
| 370 | 370 |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 429 response_size = cached_encoded_data_lengths[request.url] | 429 response_size = cached_encoded_data_lengths[request.url] |
| 430 else: | 430 else: |
| 431 # Some fat webpages may overflow the Memory cache, and so some | 431 # Some fat webpages may overflow the Memory cache, and so some |
| 432 # requests might be served from disk cache a couple of times per page | 432 # requests might be served from disk cache a couple of times per page |
| 433 # load. | 433 # load. |
| 434 logging.warning('Looks like could be served from memory cache: %s', | 434 logging.warning('Looks like could be served from memory cache: %s', |
| 435 request.url) | 435 request.url) |
| 436 response_size = response_sizes[request.url] | 436 response_size = response_sizes[request.url] |
| 437 served_from_cache_bytes += response_size | 437 served_from_cache_bytes += response_size |
| 438 else: | 438 else: |
| 439 response_size = request.GetEncodedDataLength() | 439 response_size = request.GetResponseTransportLength() |
| 440 served_from_network_bytes += response_size | 440 served_from_network_bytes += response_size |
| 441 response_sizes[request.url] = response_size | 441 response_sizes[request.url] = response_size |
| 442 | 442 |
| 443 # Make sure the served from blink's cache requests have at least one | 443 # Make sure the served from blink's cache requests have at least one |
| 444 # corresponding request that was not served from the blink's cache. | 444 # corresponding request that was not served from the blink's cache. |
| 445 for request in sandwich_utils.FilterOutDataAndIncompleteRequests( | 445 for request in sandwich_utils.FilterOutDataAndIncompleteRequests( |
| 446 trace.request_track.GetEvents()): | 446 trace.request_track.GetEvents()): |
| 447 assert (request.url in urls_hitting_network or | 447 assert (request.url in urls_hitting_network or |
| 448 not request.served_from_cache) | 448 not request.served_from_cache) |
| 449 | 449 |
| (...skipping 207 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 657 run_metrics_list = _ProcessRunOutputDir( | 657 run_metrics_list = _ProcessRunOutputDir( |
| 658 cache_validation_result, benchmark_setup, RunBenchmark.path) | 658 cache_validation_result, benchmark_setup, RunBenchmark.path) |
| 659 with open(ProcessRunOutputDir.path, 'w') as csv_file: | 659 with open(ProcessRunOutputDir.path, 'w') as csv_file: |
| 660 writer = csv.DictWriter(csv_file, fieldnames=(additional_column_names + | 660 writer = csv.DictWriter(csv_file, fieldnames=(additional_column_names + |
| 661 sandwich_metrics.COMMON_CSV_COLUMN_NAMES)) | 661 sandwich_metrics.COMMON_CSV_COLUMN_NAMES)) |
| 662 writer.writeheader() | 662 writer.writeheader() |
| 663 for trace_metrics in run_metrics_list: | 663 for trace_metrics in run_metrics_list: |
| 664 writer.writerow(trace_metrics) | 664 writer.writerow(trace_metrics) |
| 665 | 665 |
| 666 self._common_builder.default_final_tasks.append(ProcessRunOutputDir) | 666 self._common_builder.default_final_tasks.append(ProcessRunOutputDir) |
| OLD | NEW |