Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(240)

Side by Side Diff: tools/accessibility/rebase_dump_accessibility_tree_test.py

Issue 2439203003: Reland of Accessibility: Ignore all anonymous blocks (Closed)
Patch Set: Rebased. Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « third_party/WebKit/Source/modules/accessibility/AXLayoutObject.cpp ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved. 2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Rebase DumpAccessibilityTree Tests. 6 """Rebase DumpAccessibilityTree Tests.
7 7
8 This script is intended to be run when you make a change that could affect the 8 This script is intended to be run when you make a change that could affect the
9 expected results of tests in: 9 expected results of tests in:
10 10
11 content/test/data/accessibility 11 content/test/data/accessibility
12 12
13 It assumes that you've already uploaded a change and the try jobs have finished. 13 It assumes that you've already uploaded a change and the try jobs have finished.
14 It collects all of the results from try jobs on all platforms and updates the 14 It collects all of the results from try jobs on all platforms and updates the
15 expectation files locally. From there you can run 'git diff' to make sure all 15 expectation files locally. From there you can run 'git diff' to make sure all
16 of the changes look reasonable, then upload the change for code review. 16 of the changes look reasonable, then upload the change for code review.
17 """ 17 """
18 18
19 import json
19 import os 20 import os
20 import re 21 import re
21 import sys 22 import sys
22 import time 23 import time
23 import urllib 24 import urllib
24 25
# Load BeautifulSoup. It's checked into two places in the Chromium tree.
sys.path.append(
    'third_party/trace-viewer/third_party/tvcm/third_party/beautifulsoup')
from BeautifulSoup import BeautifulSoup

# The location of the DumpAccessibilityTree html test files and expectations.
# NOTE: built from os.getcwd() with a relative path, so this script must be
# run from the Chromium src root for the rebased files to land correctly.
TEST_DATA_PATH = os.path.join(os.getcwd(), 'content/test/data/accessibility')

# A global that keeps track of files we've already updated, so we don't
# bother to update the same file twice.
completed_files = set()
36 37
def GitClIssue():
  '''Retrieve the current issue number as a string.

  Shells out to "git cl issue", whose output looks like
  'Issue number: 12345 (https://...)', and returns the third token
  (the issue number itself).
  '''
  output = os.popen('git cl issue').read()
  tokens = output.split()
  return tokens[2]
42 43
43 def ParseFailure(name, url): 44 def ParseFailure(name, url):
44 '''Parse given the name of a failing trybot and the url of its build log.''' 45 '''Parse given the name of a failing trybot and the url of its build log.'''
46 print
47 print "Checking trybot: %s" % name
48 url = url.replace('/builders/', '/json/builders/')
49 response = urllib.urlopen(url)
50 if response.getcode() == 200:
51 jsondata = response.read()
45 52
46 # Figure out the platform. 53 if not jsondata:
47 if name.find('android') >= 0: 54 print "Failed to fetch from: " + url
48 platform_suffix = '-expected-android.txt'
49 elif name.find('mac') >= 0:
50 platform_suffix = '-expected-mac.txt'
51 elif name.find('win') >= 0:
52 platform_suffix = '-expected-win.txt'
53 else:
54 return 55 return
55 56
56 # Read the content_browsertests log file. 57 try:
57 data = None 58 data = json.loads(jsondata)
58 lines = None 59 except:
59 urls = [] 60 print "Failed to parse JSON from: " + url
60 for url_suffix in [
61 '/steps/content_browsertests%20(with%20patch)/logs/stdio/text',
62 '/steps/content_browsertests/logs/stdio/text']:
63 urls.append(url + url_suffix)
64 for url in urls:
65 response = urllib.urlopen(url)
66 if response.getcode() == 200:
67 data = response.read()
68 lines = data.splitlines()
69 break
70
71 if not data:
72 return 61 return
73 62
74 # Parse the log file for failing tests and overwrite the expected 63 for step in data["steps"]:
75 # result file locally with the actual results from the log. 64 name = step["name"]
76 test_name = None 65 if name[:len("content_browsertests")] == "content_browsertests":
66 if name.find("without") >= 0:
67 continue
68 if name.find("retry") >= 0:
69 continue
70 print "Found content_browsertests logs"
71 for log in step["logs"]:
72 (log_name, log_url) = log
73 if log_name == "stdio":
74 continue
75 log_url += '/text'
76 log_response = urllib.urlopen(log_url)
77 if log_response.getcode() == 200:
78 logdata = log_response.read()
79 ParseLog(logdata)
80 else:
81 print "Failed to fetch test log data from: " + url
82
def Fix(line):
  '''Strip a buildbot annotation wrapper from a log line.

  Trybot log lines may arrive wrapped as "@@@...@inner text@@@"; return the
  inner text when the line matches that shape, otherwise return the line
  unchanged.
  '''
  if line[:3] == '@@@':
    match = re.search('[^@]@([^@]*)@@@', line)
    # BUG FIX: only a failed search (match is None) should be ignored; the
    # old bare "except:" also hid unrelated errors.
    if match:
      line = match.group(1)
  return line
90
91 def ParseLog(logdata):
92 '''Parse the log file for failing tests and overwrite the expected
93 result file locally with the actual results from the log.'''
94 lines = logdata.splitlines()
95 test_file = None
96 expected_file = None
77 start = None 97 start = None
78 filename = None
79 for i in range(len(lines)): 98 for i in range(len(lines)):
80 line = lines[i] 99 line = Fix(lines[i])
81 if line[:12] == '[ RUN ]': 100 if line.find('Testing:') >= 0:
82 test_name = line[13:] 101 test_file = re.search(
83 if test_name and line[:8] == 'Testing:': 102 'content.test.*accessibility.([^@]*)', line).group(1)
84 filename = re.search('content.test.*accessibility.(.*)', line).group(1) 103 expected_file = None
85 if test_name and line == 'Actual': 104 start = None
105 if line.find('Expected output:') >= 0:
106 expected_file = re.search(
107 'content.test.*accessibility.([^@]*)', line).group(1)
108 if line == 'Actual':
86 start = i + 2 109 start = i + 2
87 if start and test_name and filename and line[:12] == '[ FAILED ]': 110 if start and test_file and expected_file and line.find('End-of-file') >= 0:
88 # Get the path to the html file. 111 dst_fullpath = os.path.join(TEST_DATA_PATH, expected_file)
89 dst_fullpath = os.path.join(TEST_DATA_PATH, filename)
90 # Strip off .html and replace it with the platform expected suffix.
91 dst_fullpath = dst_fullpath[:-5] + platform_suffix
92 if dst_fullpath in completed_files: 112 if dst_fullpath in completed_files:
93 continue 113 continue
94 114
95 actual = [line for line in lines[start : i - 1] if line] 115 actual = [Fix(line) for line in lines[start : i] if line]
96 fp = open(dst_fullpath, 'w') 116 fp = open(dst_fullpath, 'w')
97 fp.write('\n'.join(actual)) 117 fp.write('\n'.join(actual))
98 fp.close() 118 fp.close()
99 print dst_fullpath 119 print "* %s" % os.path.relpath(dst_fullpath)
100 completed_files.add(dst_fullpath) 120 completed_files.add(dst_fullpath)
101 start = None 121 start = None
102 test_name = None 122 test_file = None
103 filename = None 123 expected_file = None
104 124
105 def ParseTrybots(data): 125 def ParseTrybots(data):
106 '''Parse the code review page to find links to try bots.''' 126 '''Parse the code review page to find links to try bots.'''
107 soup = BeautifulSoup(data) 127 soup = BeautifulSoup(data)
108 failures = soup.findAll( 128 failures = soup.findAll(
109 'a', 129 'a',
110 { "class" : "build-result build-status-color-failure" }) 130 { "class" : "build-result build-status-color-failure" })
111 print 'Found %d trybots that failed' % len(failures) 131 print 'Found %d trybots that failed' % len(failures)
112 for f in failures: 132 for f in failures:
113 name = f.text.replace(' ', '') 133 name = f.text.replace(' ', '')
114 url = f['href'] 134 url = f['href']
115 ParseFailure(name, url) 135 ParseFailure(name, url)
116 136
117 def Run(): 137 def Run():
118 '''Main. Get the issue number and parse the code review page.''' 138 '''Main. Get the issue number and parse the code review page.'''
119 if len(sys.argv) == 2: 139 if len(sys.argv) == 2:
120 issue = sys.argv[1] 140 issue = sys.argv[1]
121 else: 141 else:
122 issue = GitClIssue() 142 issue = GitClIssue()
123 143
124 url = 'https://codereview.chromium.org/%s' % issue 144 url = 'https://codereview.chromium.org/%s' % issue
125 print 'Fetching issue from %s' % url 145 print 'Fetching issue from %s' % url
126 response = urllib.urlopen(url) 146 response = urllib.urlopen(url)
127 if response.getcode() != 200: 147 if response.getcode() != 200:
128 print 'Error code %d accessing url: %s' % (response.getcode(), url) 148 print 'Error code %d accessing url: %s' % (response.getcode(), url)
149 return
129 data = response.read() 150 data = response.read()
130 ParseTrybots(data) 151 ParseTrybots(data)
131 152
153 print
154 if len(completed_files) == 0:
155 print "No output from DumpAccessibilityTree test results found."
156 return
157 else:
158 print "Summary: modified the following files:"
159 all_files = list(completed_files)
160 all_files.sort()
161 for f in all_files:
162 print "* %s" % os.path.relpath(f)
163
if __name__ == '__main__':
  # Run() returns None, so this exits with status 0 on completion.
  sys.exit(Run())
OLDNEW
« no previous file with comments | « third_party/WebKit/Source/modules/accessibility/AXLayoutObject.cpp ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698