| OLD | NEW |
| 1 from __future__ import print_function | 1 from __future__ import print_function |
| 2 | 2 |
| 3 import argparse | 3 import argparse |
| 4 import json | 4 import json |
| 5 import logging | 5 import logging |
| 6 import os | 6 import os |
| 7 import re | 7 import re |
| 8 import stat | 8 import stat |
| 9 import subprocess | 9 import subprocess |
| 10 import sys | 10 import sys |
| (...skipping 18 matching lines...) Expand all Loading... |
| 29 reader = None | 29 reader = None |
| 30 wptcommandline = None | 30 wptcommandline = None |
| 31 wptrunner = None | 31 wptrunner = None |
| 32 wpt_root = None | 32 wpt_root = None |
| 33 wptrunner_root = None | 33 wptrunner_root = None |
| 34 | 34 |
| 35 logger = logging.getLogger(os.path.splitext(__file__)[0]) | 35 logger = logging.getLogger(os.path.splitext(__file__)[0]) |
| 36 | 36 |
| 37 | 37 |
| 38 def do_delayed_imports(): | 38 def do_delayed_imports(): |
| 39 """Import and set up modules only needed if execution gets to this point.""" |
| 39 global BaseHandler | 40 global BaseHandler |
| 40 global LogLevelFilter | 41 global LogLevelFilter |
| 41 global StreamHandler | 42 global StreamHandler |
| 42 global TbplFormatter | 43 global TbplFormatter |
| 43 global reader | 44 global reader |
| 44 global wptcommandline | 45 global wptcommandline |
| 45 global wptrunner | 46 global wptrunner |
| 46 from mozlog import reader | 47 from mozlog import reader |
| 47 from mozlog.formatters import TbplFormatter | 48 from mozlog.formatters import TbplFormatter |
| 48 from mozlog.handlers import BaseHandler, LogLevelFilter, StreamHandler | 49 from mozlog.handlers import BaseHandler, LogLevelFilter, StreamHandler |
| 49 from wptrunner import wptcommandline, wptrunner | 50 from wptrunner import wptcommandline, wptrunner |
| 50 setup_log_handler() | 51 setup_log_handler() |
| 51 setup_action_filter() | 52 setup_action_filter() |
| 52 | 53 |
| 53 | 54 |
| 54 def setup_logging(): | 55 def setup_logging(): |
| 56 """Set up basic debug logger.""" |
| 55 handler = logging.StreamHandler(sys.stdout) | 57 handler = logging.StreamHandler(sys.stdout) |
| 56 formatter = logging.Formatter(logging.BASIC_FORMAT, None) | 58 formatter = logging.Formatter(logging.BASIC_FORMAT, None) |
| 57 handler.setFormatter(formatter) | 59 handler.setFormatter(formatter) |
| 58 logger.addHandler(handler) | 60 logger.addHandler(handler) |
| 59 logger.setLevel(logging.DEBUG) | 61 logger.setLevel(logging.DEBUG) |
| 60 | 62 |
| 61 setup_logging() | 63 setup_logging() |
| 62 | 64 |
| 63 | 65 |
| 64 def setup_action_filter(): | 66 def setup_action_filter(): |
| 67 """Create global LogActionFilter class as part of deferred module load.""" |
| 65 global LogActionFilter | 68 global LogActionFilter |
| 66 | 69 |
| 67 class LogActionFilter(BaseHandler): | 70 class LogActionFilter(BaseHandler): |
| 68 """Handler that filters out messages with action of log and a level | 71 |
| 69 lower than some specified level. | 72 """Handler that filters out messages not of a given set of actions. |
| 73 |
| 74 Subclasses BaseHandler. |
| 70 | 75 |
| 71 :param inner: Handler to use for messages that pass this filter | 76 :param inner: Handler to use for messages that pass this filter |
| 72 :param level: Minimum log level to process | 77 :param actions: List of actions for which to fire the handler |
| 73 """ | 78 """ |
| 79 |
| 74 def __init__(self, inner, actions): | 80 def __init__(self, inner, actions): |
| 81 """Extend BaseHandler and set inner and actions props on self.""" |
| 75 BaseHandler.__init__(self, inner) | 82 BaseHandler.__init__(self, inner) |
| 76 self.inner = inner | 83 self.inner = inner |
| 77 self.actions = actions | 84 self.actions = actions |
| 78 | 85 |
| 79 def __call__(self, item): | 86 def __call__(self, item): |
| 87 """Invoke handler if action is in list passed as constructor param."
"" |
| 80 if item["action"] in self.actions: | 88 if item["action"] in self.actions: |
| 81 return self.inner(item) | 89 return self.inner(item) |
| 82 | 90 |
| 83 | 91 |
| 84 class TravisFold(object): | 92 class TravisFold(object): |
| 93 |
| 94 """Context for TravisCI folding mechanism. Subclasses object. |
| 95 |
| 96 See: https://blog.travis-ci.com/2013-05-22-improving-build-visibility-log-fo
lds/ |
| 97 """ |
| 98 |
| 85 def __init__(self, name): | 99 def __init__(self, name): |
| 100 """Register TravisCI folding section name.""" |
| 86 self.name = name | 101 self.name = name |
| 87 | 102 |
| 88 def __enter__(self): | 103 def __enter__(self): |
| 104 """Emit fold start syntax.""" |
| 89 print("travis_fold:start:%s" % self.name, file=sys.stderr) | 105 print("travis_fold:start:%s" % self.name, file=sys.stderr) |
| 90 | 106 |
| 91 def __exit__(self, type, value, traceback): | 107 def __exit__(self, type, value, traceback): |
| 108 """Emit fold end syntax.""" |
| 92 print("travis_fold:end:%s" % self.name, file=sys.stderr) | 109 print("travis_fold:end:%s" % self.name, file=sys.stderr) |
| 93 | 110 |
| 94 | 111 |
| 95 class GitHub(object): | 112 class GitHub(object): |
| 113 |
| 114 """Interface for the GitHub API.""" |
| 115 |
| 96 def __init__(self, org, repo, token, product): | 116 def __init__(self, org, repo, token, product): |
| 117 """Set properties required for communicating with GH API on self.""" |
| 97 self.token = token | 118 self.token = token |
| 98 self.headers = {"Accept": "application/vnd.github.v3+json"} | 119 self.headers = {"Accept": "application/vnd.github.v3+json"} |
| 99 self.auth = (self.token, "x-oauth-basic") | 120 self.auth = (self.token, "x-oauth-basic") |
| 100 self.org = org | 121 self.org = org |
| 101 self.repo = repo | 122 self.repo = repo |
| 102 self.base_url = "https://api.github.com/repos/%s/%s/" % (org, repo) | 123 self.base_url = "https://api.github.com/repos/%s/%s/" % (org, repo) |
| 103 self.product = product | 124 self.product = product |
| 104 | 125 |
| 105 def _headers(self, headers): | 126 def _headers(self, headers): |
| 127 """Extend existing HTTP headers and return new value.""" |
| 106 if headers is None: | 128 if headers is None: |
| 107 headers = {} | 129 headers = {} |
| 108 rv = self.headers.copy() | 130 rv = self.headers.copy() |
| 109 rv.update(headers) | 131 rv.update(headers) |
| 110 return rv | 132 return rv |
| 111 | 133 |
| 112 def post(self, url, data, headers=None): | 134 def post(self, url, data, headers=None): |
| 135 """Serialize and POST data to given URL.""" |
| 113 logger.debug("POST %s" % url) | 136 logger.debug("POST %s" % url) |
| 114 if data is not None: | 137 if data is not None: |
| 115 data = json.dumps(data) | 138 data = json.dumps(data) |
| 116 resp = requests.post( | 139 resp = requests.post( |
| 117 url, | 140 url, |
| 118 data=data, | 141 data=data, |
| 119 headers=self._headers(headers), | 142 headers=self._headers(headers), |
| 120 auth=self.auth | 143 auth=self.auth |
| 121 ) | 144 ) |
| 122 resp.raise_for_status() | 145 resp.raise_for_status() |
| 123 return resp | 146 return resp |
| 124 | 147 |
| 125 def patch(self, url, data, headers=None): | 148 def patch(self, url, data, headers=None): |
| 149 """Serialize and PATCH data to given URL.""" |
| 126 logger.debug("PATCH %s" % url) | 150 logger.debug("PATCH %s" % url) |
| 127 if data is not None: | 151 if data is not None: |
| 128 data = json.dumps(data) | 152 data = json.dumps(data) |
| 129 resp = requests.patch( | 153 resp = requests.patch( |
| 130 url, | 154 url, |
| 131 data=data, | 155 data=data, |
| 132 headers=self._headers(headers), | 156 headers=self._headers(headers), |
| 133 auth=self.auth | 157 auth=self.auth |
| 134 ) | 158 ) |
| 135 resp.raise_for_status() | 159 resp.raise_for_status() |
| 136 return resp | 160 return resp |
| 137 | 161 |
| 138 def get(self, url, headers=None): | 162 def get(self, url, headers=None): |
| 163 """Execute GET request for given URL.""" |
| 139 logger.debug("GET %s" % url) | 164 logger.debug("GET %s" % url) |
| 140 resp = requests.get( | 165 resp = requests.get( |
| 141 url, | 166 url, |
| 142 headers=self._headers(headers), | 167 headers=self._headers(headers), |
| 143 auth=self.auth | 168 auth=self.auth |
| 144 ) | 169 ) |
| 145 resp.raise_for_status() | 170 resp.raise_for_status() |
| 146 return resp | 171 return resp |
| 147 | 172 |
| 148 def post_comment(self, issue_number, body): | 173 def post_comment(self, issue_number, body): |
| 174 """Create or update comment in appropriate GitHub pull request comments.
""" |
| 149 user = self.get(urljoin(self.base_url, "/user")).json() | 175 user = self.get(urljoin(self.base_url, "/user")).json() |
| 150 issue_comments_url = urljoin(self.base_url, "issues/%s/comments" % issue
_number) | 176 issue_comments_url = urljoin(self.base_url, "issues/%s/comments" % issue
_number) |
| 151 comments = self.get(issue_comments_url).json() | 177 comments = self.get(issue_comments_url).json() |
| 152 title_line = format_comment_title(self.product) | 178 title_line = format_comment_title(self.product) |
| 153 data = {"body": body} | 179 data = {"body": body} |
| 154 for comment in comments: | 180 for comment in comments: |
| 155 if (comment["user"]["login"] == user["login"] and | 181 if (comment["user"]["login"] == user["login"] and |
| 156 comment["body"].startswith(title_line)): | 182 comment["body"].startswith(title_line)): |
| 157 comment_url = urljoin(self.base_url, "issues/comments/%s" % comm
ent["id"]) | 183 comment_url = urljoin(self.base_url, "issues/comments/%s" % comm
ent["id"]) |
| 158 self.patch(comment_url, data) | 184 self.patch(comment_url, data) |
| 159 break | 185 break |
| 160 else: | 186 else: |
| 161 self.post(issue_comments_url, data) | 187 self.post(issue_comments_url, data) |
| 162 | 188 |
| 163 | 189 |
| 164 class GitHubCommentHandler(logging.Handler): | 190 class GitHubCommentHandler(logging.Handler): |
| 191 |
| 192 """GitHub pull request comment handler. |
| 193 |
| 194 Subclasses logging.Handler to add ability to post comments to GitHub. |
| 195 """ |
| 196 |
| 165 def __init__(self, github, pull_number): | 197 def __init__(self, github, pull_number): |
| 198 """Extend logging.Handler and set required properties on self.""" |
| 166 logging.Handler.__init__(self) | 199 logging.Handler.__init__(self) |
| 167 self.github = github | 200 self.github = github |
| 168 self.pull_number = pull_number | 201 self.pull_number = pull_number |
| 169 self.log_data = [] | 202 self.log_data = [] |
| 170 | 203 |
| 171 def emit(self, record): | 204 def emit(self, record): |
| 205 """Format record and add to log.""" |
| 172 try: | 206 try: |
| 173 msg = self.format(record) | 207 msg = self.format(record) |
| 174 self.log_data.append(msg) | 208 self.log_data.append(msg) |
| 175 except Exception: | 209 except Exception: |
| 176 self.handleError(record) | 210 self.handleError(record) |
| 177 | 211 |
| 178 def send(self): | 212 def send(self): |
| 213 """Post log to GitHub and flush log.""" |
| 179 self.github.post_comment(self.pull_number, "\n".join(self.log_data)) | 214 self.github.post_comment(self.pull_number, "\n".join(self.log_data)) |
| 180 self.log_data = [] | 215 self.log_data = [] |
| 181 | 216 |
| 182 | 217 |
| 183 class Browser(object): | 218 class Browser(object): |
| 219 |
| 220 """Base browser class that sets a reference to a GitHub token.""" |
| 221 |
| 184 product = None | 222 product = None |
| 185 binary = None | 223 binary = None |
| 186 | 224 |
| 187 def __init__(self, github_token): | 225 def __init__(self, github_token): |
| 226 """Set GitHub token property on self.""" |
| 188 self.github_token = github_token | 227 self.github_token = github_token |
| 189 | 228 |
| 190 | 229 |
| 191 class Firefox(Browser): | 230 class Firefox(Browser): |
| 231 |
| 232 """Firefox-specific interface. |
| 233 |
| 234 Includes installation, webdriver installation, and wptrunner setup methods. |
| 235 """ |
| 236 |
| 192 product = "firefox" | 237 product = "firefox" |
| 193 binary = "%s/firefox/firefox" | 238 binary = "%s/firefox/firefox" |
| 194 platform_ini = "%s/firefox/platform.ini" | 239 platform_ini = "%s/firefox/platform.ini" |
| 195 | 240 |
| 196 def install(self): | 241 def install(self): |
| 242 """Install Firefox.""" |
| 197 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
firefox.txt")) | 243 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
firefox.txt")) |
| 198 resp = get("https://archive.mozilla.org/pub/firefox/nightly/latest-mozil
la-central/firefox-53.0a1.en-US.linux-x86_64.tar.bz2") | 244 resp = get("https://archive.mozilla.org/pub/firefox/nightly/latest-mozil
la-central/firefox-53.0a1.en-US.linux-x86_64.tar.bz2") |
| 199 untar(resp.raw) | 245 untar(resp.raw) |
| 200 | 246 |
| 201 if not os.path.exists("profiles"): | 247 if not os.path.exists("profiles"): |
| 202 os.mkdir("profiles") | 248 os.mkdir("profiles") |
| 203 with open(os.path.join("profiles", "prefs_general.js"), "wb") as f: | 249 with open(os.path.join("profiles", "prefs_general.js"), "wb") as f: |
| 204 resp = get("https://hg.mozilla.org/mozilla-central/raw-file/tip/test
ing/profiles/prefs_general.js") | 250 resp = get("https://hg.mozilla.org/mozilla-central/raw-file/tip/test
ing/profiles/prefs_general.js") |
| 205 f.write(resp.content) | 251 f.write(resp.content) |
| 206 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
firefox.txt")) | 252 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
firefox.txt")) |
| 207 | 253 |
| 208 def _latest_geckodriver_version(self): | 254 def _latest_geckodriver_version(self): |
| 255 """Get and return latest version number for geckodriver.""" |
| 209 # This is used rather than an API call to avoid rate limits | 256 # This is used rather than an API call to avoid rate limits |
| 210 tags = call("git", "ls-remote", "--tags", "--refs", | 257 tags = call("git", "ls-remote", "--tags", "--refs", |
| 211 "https://github.com/mozilla/geckodriver.git") | 258 "https://github.com/mozilla/geckodriver.git") |
| 212 release_re = re.compile(".*refs/tags/v(\d+)\.(\d+)\.(\d+)") | 259 release_re = re.compile(".*refs/tags/v(\d+)\.(\d+)\.(\d+)") |
| 213 latest_release = 0 | 260 latest_release = 0 |
| 214 for item in tags.split("\n"): | 261 for item in tags.split("\n"): |
| 215 m = release_re.match(item) | 262 m = release_re.match(item) |
| 216 if m: | 263 if m: |
| 217 version = [int(item) for item in m.groups()] | 264 version = [int(item) for item in m.groups()] |
| 218 if version > latest_release: | 265 if version > latest_release: |
| 219 latest_release = version | 266 latest_release = version |
| 220 assert latest_release != 0 | 267 assert latest_release != 0 |
| 221 return "v%s.%s.%s" % tuple(str(item) for item in latest_release) | 268 return "v%s.%s.%s" % tuple(str(item) for item in latest_release) |
| 222 | 269 |
| 223 def install_webdriver(self): | 270 def install_webdriver(self): |
| 271 """Install latest Geckodriver.""" |
| 224 version = self._latest_geckodriver_version() | 272 version = self._latest_geckodriver_version() |
| 225 logger.debug("Latest geckodriver release %s" % version) | 273 logger.debug("Latest geckodriver release %s" % version) |
| 226 url = "https://github.com/mozilla/geckodriver/releases/download/%s/gecko
driver-%s-linux64.tar.gz" % (version, version) | 274 url = "https://github.com/mozilla/geckodriver/releases/download/%s/gecko
driver-%s-linux64.tar.gz" % (version, version) |
| 227 untar(get(url).raw) | 275 untar(get(url).raw) |
| 228 | 276 |
| 229 def version(self, root): | 277 def version(self, root): |
| 230 """Retrieve the release version of the installed browser.""" | 278 """Retrieve the release version of the installed browser.""" |
| 231 platform_info = RawConfigParser() | 279 platform_info = RawConfigParser() |
| 232 | 280 |
| 233 with open(self.platform_ini % root, "r") as fp: | 281 with open(self.platform_ini % root, "r") as fp: |
| 234 platform_info.readfp(BytesIO(fp.read())) | 282 platform_info.readfp(BytesIO(fp.read())) |
| 235 return "BuildID %s; SourceStamp %s" % ( | 283 return "BuildID %s; SourceStamp %s" % ( |
| 236 platform_info.get("Build", "BuildID"), | 284 platform_info.get("Build", "BuildID"), |
| 237 platform_info.get("Build", "SourceStamp")) | 285 platform_info.get("Build", "SourceStamp")) |
| 238 | 286 |
| 239 def wptrunner_args(self, root): | 287 def wptrunner_args(self, root): |
| 288 """Return Firefox-specific wpt-runner arguments.""" |
| 240 return { | 289 return { |
| 241 "product": "firefox", | 290 "product": "firefox", |
| 242 "binary": self.binary % root, | 291 "binary": self.binary % root, |
| 243 "certutil_binary": "certutil", | 292 "certutil_binary": "certutil", |
| 244 "webdriver_binary": "%s/geckodriver" % root, | 293 "webdriver_binary": "%s/geckodriver" % root, |
| 245 "prefs_root": "%s/profiles" % root, | 294 "prefs_root": "%s/profiles" % root, |
| 246 } | 295 } |
| 247 | 296 |
| 248 | 297 |
| 249 class Chrome(Browser): | 298 class Chrome(Browser): |
| 299 """Chrome-specific interface. |
| 300 |
| 301 Includes installation, webdriver installation, and wptrunner setup methods. |
| 302 """ |
| 303 |
| 250 product = "chrome" | 304 product = "chrome" |
| 251 binary = "/usr/bin/google-chrome" | 305 binary = "/usr/bin/google-chrome" |
| 252 | 306 |
| 253 def install(self): | 307 def install(self): |
| 308 """Install Chrome.""" |
| 309 |
| 254 # Installing the Google Chrome browser requires administrative | 310 # Installing the Google Chrome browser requires administrative |
| 255 # privileges, so that installation is handled by the invoking script. | 311 # privileges, so that installation is handled by the invoking script. |
| 256 | 312 |
| 257 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
chrome.txt")) | 313 call("pip", "install", "-r", os.path.join(wptrunner_root, "requirements_
chrome.txt")) |
| 258 | 314 |
| 259 def install_webdriver(self): | 315 def install_webdriver(self): |
| 316 """Install latest ChromeDriver.""" |
| 260 latest = get("http://chromedriver.storage.googleapis.com/LATEST_RELEASE"
).text.strip() | 317 latest = get("http://chromedriver.storage.googleapis.com/LATEST_RELEASE"
).text.strip() |
| 261 url = "http://chromedriver.storage.googleapis.com/%s/chromedriver_linux6
4.zip" % latest | 318 url = "http://chromedriver.storage.googleapis.com/%s/chromedriver_linux6
4.zip" % latest |
| 262 unzip(get(url).raw) | 319 unzip(get(url).raw) |
| 263 st = os.stat('chromedriver') | 320 st = os.stat('chromedriver') |
| 264 os.chmod('chromedriver', st.st_mode | stat.S_IEXEC) | 321 os.chmod('chromedriver', st.st_mode | stat.S_IEXEC) |
| 265 | 322 |
| 266 def version(self, root): | 323 def version(self, root): |
| 267 """Retrieve the release version of the installed browser.""" | 324 """Retrieve the release version of the installed browser.""" |
| 268 output = call(self.binary, "--version") | 325 output = call(self.binary, "--version") |
| 269 return re.search(r"[0-9a-z\.]+$", output.strip()).group(0) | 326 return re.search(r"[0-9\.]+( [a-z]+)?$", output.strip()).group(0) |
| 270 | 327 |
| 271 def wptrunner_args(self, root): | 328 def wptrunner_args(self, root): |
| 329 """Return Chrome-specific wpt-runner arguments.""" |
| 272 return { | 330 return { |
| 273 "product": "chrome", | 331 "product": "chrome", |
| 274 "binary": self.binary, | 332 "binary": self.binary, |
| 275 # Chrome's "sandbox" security feature must be disabled in order to | |
| 276 # run the browser in OpenVZ environments such as the one provided | |
| 277 # by TravisCI. | |
| 278 # | |
| 279 # Reference: https://github.com/travis-ci/travis-ci/issues/938 | |
| 280 "binary_arg": "--no-sandbox", | |
| 281 "webdriver_binary": "%s/chromedriver" % root, | 333 "webdriver_binary": "%s/chromedriver" % root, |
| 282 "test_types": ["testharness", "reftest"] | 334 "test_types": ["testharness", "reftest"] |
| 283 } | 335 } |
| 284 | 336 |
| 285 | 337 |
| 286 def get(url): | 338 def get(url): |
| 339 """Issue GET request to a given URL and return the response.""" |
| 287 logger.debug("GET %s" % url) | 340 logger.debug("GET %s" % url) |
| 288 resp = requests.get(url, stream=True) | 341 resp = requests.get(url, stream=True) |
| 289 resp.raise_for_status() | 342 resp.raise_for_status() |
| 290 return resp | 343 return resp |
| 291 | 344 |
| 292 | 345 |
| 293 def call(*args): | 346 def call(*args): |
| 347 """Log terminal command, invoke it as a subprocess. |
| 348 |
| 349 Returns a bytestring of the subprocess output if no error. |
| 350 """ |
| 294 logger.debug("%s" % " ".join(args)) | 351 logger.debug("%s" % " ".join(args)) |
| 295 try: | 352 try: |
| 296 return subprocess.check_output(args) | 353 return subprocess.check_output(args) |
| 297 except subprocess.CalledProcessError as e: | 354 except subprocess.CalledProcessError as e: |
| 298 logger.critical("%s exited with return code %i" % | 355 logger.critical("%s exited with return code %i" % |
| 299 (e.cmd, e.returncode)) | 356 (e.cmd, e.returncode)) |
| 300 logger.critical(e.output) | 357 logger.critical(e.output) |
| 301 raise | 358 raise |
| 302 | 359 |
| 303 | 360 |
| 304 def get_git_cmd(repo_path): | 361 def get_git_cmd(repo_path): |
| 362 """Create a function for invoking git commands as a subprocess.""" |
| 305 def git(cmd, *args): | 363 def git(cmd, *args): |
| 306 full_cmd = ["git", cmd] + list(args) | 364 full_cmd = ["git", cmd] + list(args) |
| 307 try: | 365 try: |
| 308 return subprocess.check_output(full_cmd, cwd=repo_path, stderr=subpr
ocess.STDOUT) | 366 return subprocess.check_output(full_cmd, cwd=repo_path, stderr=subpr
ocess.STDOUT) |
| 309 except subprocess.CalledProcessError as e: | 367 except subprocess.CalledProcessError as e: |
| 310 logger.error("Git command exited with status %i" % e.returncode) | 368 logger.error("Git command exited with status %i" % e.returncode) |
| 311 logger.error(e.output) | 369 logger.error(e.output) |
| 312 sys.exit(1) | 370 sys.exit(1) |
| 313 return git | 371 return git |
| 314 | 372 |
| 315 | 373 |
| 316 def seekable(fileobj): | 374 def seekable(fileobj): |
| 375 """Attempt to use file.seek on given file, with fallbacks.""" |
| 317 try: | 376 try: |
| 318 fileobj.seek(fileobj.tell()) | 377 fileobj.seek(fileobj.tell()) |
| 319 except Exception: | 378 except Exception: |
| 320 return StringIO(fileobj.read()) | 379 return StringIO(fileobj.read()) |
| 321 else: | 380 else: |
| 322 return fileobj | 381 return fileobj |
| 323 | 382 |
| 324 | 383 |
| 325 def untar(fileobj): | 384 def untar(fileobj): |
| 385 """Extract tar archive.""" |
| 326 logger.debug("untar") | 386 logger.debug("untar") |
| 327 fileobj = seekable(fileobj) | 387 fileobj = seekable(fileobj) |
| 328 with tarfile.open(fileobj=fileobj) as tar_data: | 388 with tarfile.open(fileobj=fileobj) as tar_data: |
| 329 tar_data.extractall() | 389 tar_data.extractall() |
| 330 | 390 |
| 331 | 391 |
| 332 def unzip(fileobj): | 392 def unzip(fileobj): |
| 393 """Extract zip archive.""" |
| 333 logger.debug("unzip") | 394 logger.debug("unzip") |
| 334 fileobj = seekable(fileobj) | 395 fileobj = seekable(fileobj) |
| 335 with zipfile.ZipFile(fileobj) as zip_data: | 396 with zipfile.ZipFile(fileobj) as zip_data: |
| 336 for info in zip_data.infolist(): | 397 for info in zip_data.infolist(): |
| 337 zip_data.extract(info) | 398 zip_data.extract(info) |
| 338 perm = info.external_attr >> 16 & 0x1FF | 399 perm = info.external_attr >> 16 & 0x1FF |
| 339 os.chmod(info.filename, perm) | 400 os.chmod(info.filename, perm) |
| 340 | 401 |
| 341 | 402 |
| 342 def setup_github_logging(args): | 403 def setup_github_logging(args): |
| 404 """Set up and return GitHub comment handler. |
| 405 |
| 406 :param args: the parsed arguments passed to the script |
| 407 """ |
| 343 gh_handler = None | 408 gh_handler = None |
| 344 if args.comment_pr: | 409 if args.comment_pr: |
| 345 github = GitHub(args.user, "web-platform-tests", args.gh_token, args.pro
duct) | 410 github = GitHub(args.user, "web-platform-tests", args.gh_token, args.pro
duct) |
| 346 try: | 411 try: |
| 347 pr_number = int(args.comment_pr) | 412 pr_number = int(args.comment_pr) |
| 348 except ValueError: | 413 except ValueError: |
| 349 pass | 414 pass |
| 350 else: | 415 else: |
| 351 gh_handler = GitHubCommentHandler(github, pr_number) | 416 gh_handler = GitHubCommentHandler(github, pr_number) |
| 352 gh_handler.setLevel(logging.INFO) | 417 gh_handler.setLevel(logging.INFO) |
| 353 logger.debug("Setting up GitHub logging") | 418 logger.debug("Setting up GitHub logging") |
| 354 logger.addHandler(gh_handler) | 419 logger.addHandler(gh_handler) |
| 355 else: | 420 else: |
| 356 logger.warning("No PR number found; not posting to GitHub") | 421 logger.warning("No PR number found; not posting to GitHub") |
| 357 return gh_handler | 422 return gh_handler |
| 358 | 423 |
| 359 | 424 |
| 360 class pwd(object): | 425 class pwd(object): |
| 426 """Create context for temporarily changing present working directory.""" |
| 361 def __init__(self, dir): | 427 def __init__(self, dir): |
| 362 self.dir = dir | 428 self.dir = dir |
| 363 self.old_dir = None | 429 self.old_dir = None |
| 364 | 430 |
| 365 def __enter__(self): | 431 def __enter__(self): |
| 366 self.old_dir = os.path.abspath(os.curdir) | 432 self.old_dir = os.path.abspath(os.curdir) |
| 367 os.chdir(self.dir) | 433 os.chdir(self.dir) |
| 368 | 434 |
| 369 def __exit__(self, *args, **kwargs): | 435 def __exit__(self, *args, **kwargs): |
| 370 os.chdir(self.old_dir) | 436 os.chdir(self.old_dir) |
| 371 self.old_dir = None | 437 self.old_dir = None |
| 372 | 438 |
| 373 | 439 |
| 374 def fetch_wpt_master(user): | 440 def fetch_wpt_master(user): |
| 441 """Fetch the master branch via git.""" |
| 375 git = get_git_cmd(wpt_root) | 442 git = get_git_cmd(wpt_root) |
| 376 git("fetch", "https://github.com/%s/web-platform-tests.git" % user, "master:
master") | 443 git("fetch", "https://github.com/%s/web-platform-tests.git" % user, "master:
master") |
| 377 | 444 |
| 378 | 445 |
| 379 def get_sha1(): | 446 def get_sha1(): |
| 447 """ Get and return sha1 of current git branch HEAD commit.""" |
| 380 git = get_git_cmd(wpt_root) | 448 git = get_git_cmd(wpt_root) |
| 381 return git("rev-parse", "HEAD").strip() | 449 return git("rev-parse", "HEAD").strip() |
| 382 | 450 |
| 383 | 451 |
| 384 def build_manifest(): | 452 def build_manifest(): |
| 453 """Build manifest of all files in web-platform-tests.""" |
| 385 with pwd(wpt_root): | 454 with pwd(wpt_root): |
| 386 # TODO: Call the manifest code directly | 455 # TODO: Call the manifest code directly |
| 387 call("python", "manifest") | 456 call("python", "manifest") |
| 388 | 457 |
| 389 | 458 |
| 390 def install_wptrunner(): | 459 def install_wptrunner(): |
| 460 """Clone and install wptrunner.""" |
| 391 call("git", "clone", "--depth=1", "https://github.com/w3c/wptrunner.git", wp
trunner_root) | 461 call("git", "clone", "--depth=1", "https://github.com/w3c/wptrunner.git", wp
trunner_root) |
| 392 git = get_git_cmd(wptrunner_root) | 462 git = get_git_cmd(wptrunner_root) |
| 393 git("submodule", "update", "--init", "--recursive") | 463 git("submodule", "update", "--init", "--recursive") |
| 394 call("pip", "install", wptrunner_root) | 464 call("pip", "install", wptrunner_root) |
| 395 | 465 |
| 396 | 466 |
| 397 def get_files_changed(): | 467 def get_files_changed(): |
| 468 """Get and return files changed since current branch diverged from master.""
" |
| 398 root = os.path.abspath(os.curdir) | 469 root = os.path.abspath(os.curdir) |
| 399 git = get_git_cmd(wpt_root) | 470 git = get_git_cmd(wpt_root) |
| 400 branch_point = git("merge-base", "HEAD", "master").strip() | 471 branch_point = git("merge-base", "HEAD", "master").strip() |
| 401 logger.debug("Branch point from master: %s" % branch_point) | 472 logger.debug("Branch point from master: %s" % branch_point) |
| 402 logger.debug(git("log", "--oneline", "%s.." % branch_point)) | |
| 403 files = git("diff", "--name-only", "-z", "%s.." % branch_point) | 473 files = git("diff", "--name-only", "-z", "%s.." % branch_point) |
| 404 if not files: | 474 if not files: |
| 405 return [] | 475 return [] |
| 406 assert files[-1] == "\0" | 476 assert files[-1] == "\0" |
| 407 return [os.path.join(wpt_root, item) | 477 return [os.path.join(wpt_root, item) |
| 408 for item in files[:-1].split("\0")] | 478 for item in files[:-1].split("\0")] |
| 409 | 479 |
| 410 | 480 |
| 411 def get_affected_testfiles(files_changed): | 481 def get_affected_testfiles(files_changed): |
| 482 """Determine and return list of test files that reference changed files.""" |
| 412 affected_testfiles = set() | 483 affected_testfiles = set() |
| 413 nontests_changed = set(files_changed) | 484 nontests_changed = set(files_changed) |
| 414 manifest_file = os.path.join(wpt_root, "MANIFEST.json") | 485 manifest_file = os.path.join(wpt_root, "MANIFEST.json") |
| 415 skip_dirs = ["conformance-checkers", "docs", "tools"] | 486 skip_dirs = ["conformance-checkers", "docs", "tools"] |
| 416 test_types = ["testharness", "reftest", "wdspec"] | 487 test_types = ["testharness", "reftest", "wdspec"] |
| 417 | 488 |
| 418 wpt_manifest = manifest.load(wpt_root, manifest_file) | 489 wpt_manifest = manifest.load(wpt_root, manifest_file) |
| 419 | 490 |
| 420 support_files = {os.path.join(wpt_root, path) | 491 support_files = {os.path.join(wpt_root, path) |
| 421 for _, path, _ in wpt_manifest.itertypes("support")} | 492 for _, path, _ in wpt_manifest.itertypes("support")} |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 457 file_contents = file_contents.decode("utf-16le") | 528 file_contents = file_contents.decode("utf-16le") |
| 458 for full_path, repo_path in nontest_changed_paths: | 529 for full_path, repo_path in nontest_changed_paths: |
| 459 rel_path = os.path.relpath(full_path, root).replace(os.path.
sep, "/") | 530 rel_path = os.path.relpath(full_path, root).replace(os.path.
sep, "/") |
| 460 if rel_path in file_contents or repo_path in file_contents: | 531 if rel_path in file_contents or repo_path in file_contents: |
| 461 affected_testfiles.add(test_full_path) | 532 affected_testfiles.add(test_full_path) |
| 462 continue | 533 continue |
| 463 return affected_testfiles | 534 return affected_testfiles |
| 464 | 535 |
| 465 | 536 |
| 466 def wptrunner_args(root, files_changed, iterations, browser): | 537 def wptrunner_args(root, files_changed, iterations, browser): |
| 538 """Derive and return arguments for wpt-runner.""" |
| 467 parser = wptcommandline.create_parser([browser.product]) | 539 parser = wptcommandline.create_parser([browser.product]) |
| 468 args = vars(parser.parse_args([])) | 540 args = vars(parser.parse_args([])) |
| 469 args.update(browser.wptrunner_args(root)) | 541 args.update(browser.wptrunner_args(root)) |
| 470 args.update({ | 542 args.update({ |
| 471 "tests_root": wpt_root, | 543 "tests_root": wpt_root, |
| 472 "metadata_root": wpt_root, | 544 "metadata_root": wpt_root, |
| 473 "repeat": iterations, | 545 "repeat": iterations, |
| 474 "config": "%s//wptrunner.default.ini" % (wptrunner_root), | 546 "config": "%s//wptrunner.default.ini" % (wptrunner_root), |
| 475 "test_list": files_changed, | 547 "test_list": files_changed, |
| 476 "restart_on_unexpected": False, | 548 "restart_on_unexpected": False, |
| 477 "pause_after_test": False | 549 "pause_after_test": False |
| 478 }) | 550 }) |
| 479 wptcommandline.check_args(args) | 551 wptcommandline.check_args(args) |
| 480 return args | 552 return args |
| 481 | 553 |
| 482 | 554 |
def setup_log_handler():
    """Define the global LogHandler class once mozlog has been imported."""
    global LogHandler

    class LogHandler(reader.LogHandler):
        """Accumulate per-(test, subtest) status counts from a structured log."""

        def __init__(self):
            # results[test][subtest][status] -> occurrence count.
            # The None subtest key records the overall test-level status.
            self.results = defaultdict(
                lambda: defaultdict(lambda: defaultdict(int)))

        def test_status(self, data):
            subtest_counts = self.results[data["test"]][data.get("subtest")]
            subtest_counts[data["status"]] += 1

        def test_end(self, data):
            test_counts = self.results[data["test"]][None]
            test_counts[data["status"]] += 1
| 495 | 573 |
| 496 | 574 |
def is_inconsistent(results_dict, iterations):
    """Return True when a (sub)test's statuses vary or some runs are missing."""
    seen = sum(results_dict.values())
    multiple_statuses = len(results_dict) > 1
    return multiple_statuses or seen != iterations
| 499 | 578 |
| 500 | 579 |
def err_string(results_dict, iterations):
    """Create and return a string summarising the statuses of one (sub)test.

    Each status renders as "STATUS: seen/iterations" (the ratio is omitted
    when the status occurred on every run).  A "MISSING" entry is added when
    some runs produced no result, and the whole string is bolded when the
    results are inconsistent.
    """
    rv = []
    total_results = sum(results_dict.values())
    for key, value in sorted(results_dict.items()):
        rv.append("%s%s" %
                  (key, ": %s/%s" % (value, iterations) if value != iterations else ""))
    # Append MISSING while rv is still a list: the old code joined first and
    # then called .append() on the resulting *string*, raising AttributeError
    # whenever results were missing.
    if total_results < iterations:
        rv.append("MISSING: %s/%s" % (iterations - total_results, iterations))
    rv = ", ".join(rv)
    if len(results_dict) > 1 or total_results != iterations:
        rv = "**%s**" % rv
    return rv
| 513 | 593 |
| 514 | 594 |
def process_results(log, iterations):
    """Parse a structured test log; return (results, inconsistent_tests)."""
    handler = LogHandler()
    reader.handle_log(reader.read(log), handler)
    results = handler.results
    inconsistent = [(test, subtest, result)
                    for test, test_results in results.iteritems()
                    for subtest, result in test_results.iteritems()
                    if is_inconsistent(result, iterations)]
    return results, inconsistent
| 525 | 606 |
| 526 | 607 |
def format_comment_title(product):
    """Produce a Markdown heading for a given "product" string.

    ``product`` is a browser identifier optionally followed by a colon and
    a release channel (e.g. "firefox" or "chrome:dev").  The generated
    title is used both to create new comments and to locate (and then
    update) previously-submitted ones.
    """
    pieces = product.split(":")
    title = pieces[0].title()
    if len(pieces) > 1:
        title = "%s (%s channel)" % (title, pieces[1])
    return "# %s #" % title
| 540 | 621 |
| 541 | 622 |
def markdown_adjust(s):
    """Escape characters that would break Markdown table output."""
    # Same replacement order as a chained .replace() sequence.
    for raw, escaped in (('\t', u'\\t'), ('\n', u'\\n'),
                         ('\r', u'\\r'), ('`', u'\\`')):
        s = s.replace(raw, escaped)
    return s
| 548 | 630 |
| 549 | 631 |
| 550 def table(headings, data, log): | 632 def table(headings, data, log): |
| 633 """Create and log data to specified logger in tabular format.""" |
| 551 cols = range(len(headings)) | 634 cols = range(len(headings)) |
| 552 assert all(len(item) == len(cols) for item in data) | 635 assert all(len(item) == len(cols) for item in data) |
| 553 max_widths = reduce(lambda prev, cur: [(len(cur[i]) + 2) | 636 max_widths = reduce(lambda prev, cur: [(len(cur[i]) + 2) |
| 554 if (len(cur[i]) + 2) > prev[i] | 637 if (len(cur[i]) + 2) > prev[i] |
| 555 else prev[i] | 638 else prev[i] |
| 556 for i in cols], | 639 for i in cols], |
| 557 data, | 640 data, |
| 558 [len(item) + 2 for item in headings]) | 641 [len(item) + 2 for item in headings]) |
| 559 log("|%s|" % "|".join(item.center(max_widths[i]) for i, item in enumerate(he
adings))) | 642 log("|%s|" % "|".join(item.center(max_widths[i]) for i, item in enumerate(he
adings))) |
| 560 log("|%s|" % "|".join("-" * max_widths[i] for i in cols)) | 643 log("|%s|" % "|".join("-" * max_widths[i] for i in cols)) |
| 561 for row in data: | 644 for row in data: |
| 562 log("|%s|" % "|".join(" %s" % row[i].ljust(max_widths[i] - 1) for i in c
ols)) | 645 log("|%s|" % "|".join(" %s" % row[i].ljust(max_widths[i] - 1) for i in c
ols)) |
| 563 log("") | 646 log("") |
| 564 | 647 |
| 565 | 648 |
def write_inconsistent(inconsistent, iterations):
    """Log a Markdown table of unstable tests via logger.error."""
    logger.error("## Unstable results ##\n")
    rows = []
    for test, subtest, results in inconsistent:
        subtest_cell = ("`%s`" % markdown_adjust(subtest)) if subtest else ""
        rows.append(("`%s`" % markdown_adjust(test),
                     subtest_cell,
                     err_string(results, iterations)))
    table(["Test", "Subtest", "Results"], rows, logger.error)
| 571 | 655 |
| 572 | 656 |
def write_results(results, iterations, comment_pr):
    """Log a Markdown report of every test's results via logger.info.

    When ``comment_pr`` parses as an integer PR number, each test is wrapped
    in a collapsible <details> element linking to its w3c-test.org
    submission; otherwise a plain "###" heading is used.
    """
    logger.info("## All results ##\n")
    for test, test_results in results.iteritems():
        # Use https when the test file name carries an "https" flag
        # (e.g. foo.https.html).
        name_flags = os.path.splitext(test)[0].split(".")[1:]
        if "https" in name_flags:
            baseurl = "https://w3c-test.org/submissions"
        else:
            baseurl = "http://w3c-test.org/submissions"
        pr_number = None
        if comment_pr:
            try:
                pr_number = int(comment_pr)
            except ValueError:
                pass
        if pr_number:
            logger.info("<details>\n")
            logger.info('<summary><a href="%s/%s%s">%s</a></summary>\n\n' %
                        (baseurl, pr_number, test, test))
        else:
            logger.info("### %s ###" % test)
        # The None subtest entry holds the test-level status.
        parent = test_results.pop(None)
        rows = [("", err_string(parent, iterations))]
        for subtest, subtest_results in test_results.iteritems():
            subtest_cell = ("`%s`" % markdown_adjust(subtest)) if subtest else ""
            rows.append((subtest_cell, err_string(subtest_results, iterations)))
        table(["Subtest", "Results"], rows, logger.info)
        if pr_number:
            logger.info("</details>\n")
| 599 | 684 |
| 600 | 685 |
| 601 def get_parser(): | 686 def get_parser(): |
| 687 """Create and return script-specific argument parser.""" |
| 602 parser = argparse.ArgumentParser() | 688 parser = argparse.ArgumentParser() |
| 603 parser.add_argument("--root", | 689 parser.add_argument("--root", |
| 604 action="store", | 690 action="store", |
| 605 default=os.path.join(os.path.expanduser("~"), "build"), | 691 default=os.path.join(os.path.expanduser("~"), "build"), |
| 606 help="Root path") | 692 help="Root path") |
| 607 parser.add_argument("--iterations", | 693 parser.add_argument("--iterations", |
| 608 action="store", | 694 action="store", |
| 609 default=10, | 695 default=10, |
| 610 type=int, | 696 type=int, |
| 611 help="Number of times to run tests") | 697 help="Number of times to run tests") |
| (...skipping 11 matching lines...) Expand all Loading... |
| 623 # This is a workaround to get what should be the same va
lue | 709 # This is a workaround to get what should be the same va
lue |
| 624 default=os.environ.get("TRAVIS_REPO_SLUG").split('/')[0]
, | 710 default=os.environ.get("TRAVIS_REPO_SLUG").split('/')[0]
, |
| 625 help="Travis user name") | 711 help="Travis user name") |
| 626 parser.add_argument("product", | 712 parser.add_argument("product", |
| 627 action="store", | 713 action="store", |
| 628 help="Product to run against (`browser-name` or 'browser
-name:channel')") | 714 help="Product to run against (`browser-name` or 'browser
-name:channel')") |
| 629 return parser | 715 return parser |
| 630 | 716 |
| 631 | 717 |
| 632 def main(): | 718 def main(): |
| 719 """Perform check_stability functionality and return exit code.""" |
| 633 global wpt_root | 720 global wpt_root |
| 634 global wptrunner_root | 721 global wptrunner_root |
| 635 | 722 |
| 636 retcode = 0 | 723 retcode = 0 |
| 637 parser = get_parser() | 724 parser = get_parser() |
| 638 args = parser.parse_args() | 725 args = parser.parse_args() |
| 639 | 726 |
| 640 wpt_root = os.path.abspath(os.curdir) | 727 wpt_root = os.path.abspath(os.curdir) |
| 641 wptrunner_root = os.path.normpath(os.path.join(wpt_root, "..", "wptrunner")) | 728 wptrunner_root = os.path.normpath(os.path.join(wpt_root, "..", "wptrunner")) |
| 642 | 729 |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 744 return retcode | 831 return retcode |
| 745 | 832 |
| 746 | 833 |
if __name__ == "__main__":
    # The old ``try: ... except: raise`` wrapper was a no-op (a bare re-raise
    # behaves exactly like no handler), so call main() and exit with its
    # return code directly.
    sys.exit(main())
| OLD | NEW |