Chromium Code Reviews

Unified Diff: tools/android/loading/loading_model_unittest.py

Issue 1619713002: Upgrade analyze.py and related scripts to new world order. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: comments (created 4 years, 11 months ago)
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import os
 import sys
 import unittest
 
 import dag
 import loading_model
 import loading_trace
 import request_track
 import request_dependencies_lens
 
 
 class SimpleLens(object):
   def __init__(self, trace):
     self._trace = trace
 
   def GetRequestDependencies(self):
     url_to_rq = {}
     deps = []
     for rq in self._trace.request_track.GetEvents():
       assert rq.url not in url_to_rq
       url_to_rq[rq.url] = rq
     for rq in self._trace.request_track.GetEvents():
       if rq.initiator in url_to_rq:
-        deps.append((rq, url_to_rq[rq.initiator], ''))
+        deps.append((url_to_rq[rq.initiator], rq, ''))
     return deps
 
 
 class MockRequestTrack(object):
   def __init__(self, requests):
     self._requests = requests
 
   def GetEvents(self):
     return self._requests
 
 
 class LoadingModelTestCase(unittest.TestCase):
 
   def setUp(self):
     request_dependencies_lens.RequestDependencyLens = SimpleLens
     self._next_request_id = 0
 
   def MakeParserRequest(self, url, source_url, start_time, end_time,
                         magic_content_type=False):
+    timing = request_track.TimingAsList(request_track.TimingFromDict({
+        # connectEnd should be ignored.
+        'connectEnd': (end_time - start_time) / 2,
+        'receiveHeadersEnd': end_time - start_time,
+        'requestTime': start_time / 1000.0}))
     rq = request_track.Request.FromJsonDict({
         'request_id': self._next_request_id,
         'url': 'http://' + str(url),
         'initiator': 'http://' + str(source_url),
         'response_headers': {'Content-Type':
                              'null' if not magic_content_type
                              else 'magic-debug-content' },
-        'timing': request_track.TimingFromDict({
-            # connectEnd should be ignored.
-            'connectEnd': (end_time - start_time) / 2,
-            'receiveHeadersEnd': end_time - start_time,
-            'requestTime': start_time / 1000.0})
+        'timing': timing
         })
     self._next_request_id += 1
     return rq
 
   def MakeGraph(self, requests):
     return loading_model.ResourceGraph(loading_trace.LoadingTrace(
         None, None, None, MockRequestTrack(requests), None))
 
   def SortedIndicies(self, graph):
     return [n.Index() for n in dag.TopologicalSort(graph._nodes)]
 
   def SuccessorIndicies(self, node):
     return [c.Index() for c in node.SortedSuccessors()]
 
+  def test_DictConstruction(self):
+    graph = loading_model.ResourceGraph(
+        {'request_track': {
+            'events': [self.MakeParserRequest(0, 'null', 100, 101).ToJsonDict(),
+                       self.MakeParserRequest(1, 0, 102, 103).ToJsonDict(),
+                       self.MakeParserRequest(2, 0, 102, 103).ToJsonDict(),
+                       self.MakeParserRequest(3, 2, 104, 105).ToJsonDict()]},
+         'url': 'foo.com',
+         'tracing_track': {'events': []},
+         'page_track': {'events': []},
+         'metadata': {}})
+    self.assertEqual(self.SuccessorIndicies(graph._nodes[0]), [1, 2])
+    self.assertEqual(self.SuccessorIndicies(graph._nodes[1]), [])
+    self.assertEqual(self.SuccessorIndicies(graph._nodes[2]), [3])
+    self.assertEqual(self.SuccessorIndicies(graph._nodes[3]), [])
+
   def test_Costing(self):
     requests = [self.MakeParserRequest(0, 'null', 100, 110),
                 self.MakeParserRequest(1, 0, 115, 120),
                 self.MakeParserRequest(2, 0, 112, 120),
                 self.MakeParserRequest(3, 1, 122, 126),
                 self.MakeParserRequest(4, 3, 127, 128),
                 self.MakeParserRequest(5, 'null', 100, 105),
                 self.MakeParserRequest(6, 5, 105, 110)]
     graph = self.MakeGraph(requests)
     self.assertEqual(self.SuccessorIndicies(graph._nodes[0]), [1, 2])
(... skipping 103 matching lines ...)
 
     self.assertTrue(loading_model.ResourceGraph._IsAdUrl(
         'http://ums.adtechus.com/mapuser?providerid=1003;'
         'userid=RUmecco4z3o===='))
     self.assertTrue(loading_model.ResourceGraph._IsAdUrl(
         'http://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js'))
 
 
 if __name__ == '__main__':
   unittest.main()
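
One behavior change under review here is the reordered dependency tuple in SimpleLens.GetRequestDependencies: each entry now reads (parent request, child request, reason) rather than (child, parent, reason), which is what the new test_DictConstruction asserts through the successor indices. Below is a minimal sketch of that expectation, assuming it runs alongside the SimpleLens and MockRequestTrack classes above; FakeRequest and FakeTrace are hypothetical stand-ins for illustration only, not part of the patch.

import collections

# Hypothetical stand-in exposing only the attributes SimpleLens reads.
FakeRequest = collections.namedtuple('FakeRequest', ['url', 'initiator'])

class FakeTrace(object):
  # Hypothetical trace exposing a request_track with GetEvents(), reusing the
  # MockRequestTrack defined in this test file.
  def __init__(self, requests):
    self.request_track = MockRequestTrack(requests)

# Same topology as test_DictConstruction: 1 and 2 depend on 0, 3 depends on 2.
rq0 = FakeRequest('http://0', 'http://null')
rq1 = FakeRequest('http://1', 'http://0')
rq2 = FakeRequest('http://2', 'http://0')
rq3 = FakeRequest('http://3', 'http://2')

deps = SimpleLens(FakeTrace([rq0, rq1, rq2, rq3])).GetRequestDependencies()
# With the reordered tuple, each entry reads (parent, child, reason).
assert deps == [(rq0, rq1, ''), (rq0, rq2, ''), (rq2, rq3, '')]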