Chromium Code Reviews
| Index: recipe_modules/url/api.py |
| diff --git a/recipe_modules/url/api.py b/recipe_modules/url/api.py |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..22c736f1ac3192e462926b6a8d0658e5494a8735 |
| --- /dev/null |
| +++ b/recipe_modules/url/api.py |
| @@ -0,0 +1,178 @@ |
| +# Copyright 2017 The LUCI Authors. All rights reserved. |
| +# Use of this source code is governed under the Apache License, Version 2.0 |
| +# that can be found in the LICENSE file. |
| + |
| +from recipe_engine import recipe_api |
| + |
| +import urllib |
| + |
| +class UrlApi(recipe_api.RecipeApi): |
| + quote = staticmethod(urllib.quote) |
|
iannucci
2017/05/12 00:53:52
buh? staticmethod shouldn't be needed
dnj
2017/05/12 02:15:04
Done.
|
| + urlencode = staticmethod(urllib.urlencode) |
| + |
| + # JSON prefix used with Gerrit and Gitiles. |
| + GERRIT_JSON_PREFIX = ")]}'\n" |
| + |
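For context on the prefix above: Gerrit and Gitiles prepend an anti-XSSI prefix to their JSON responses, and the strip_prefix option below removes it inside the step. A standalone sketch of the same transformation, purely for illustration and not part of this patch:

    import json

    GERRIT_JSON_PREFIX = ")]}'\n"

    def strip_gerrit_prefix(raw):
      # The prefix must be present; mirroring the strip_prefix contract below,
      # a missing prefix is treated as an error rather than silently ignored.
      if not raw.startswith(GERRIT_JSON_PREFIX):
        raise ValueError('missing Gerrit JSON prefix')
      return json.loads(raw[len(GERRIT_JSON_PREFIX):])

    assert strip_gerrit_prefix(")]}'\n{\"open\": 12}") == {'open': 12}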
| + |
| + class Response(object): |
|
iannucci
2017/05/12 00:53:52
note: capture error body and stuff in different fi
dnj
2017/05/12 02:15:04
Done.
|
| + """Response is an HTTP response object.""" |
| + |
| + def __init__(self, method, output, status): |
| + self._method = method |
| + self._status = status |
| + self._result = output |
| + |
| + @property |
| + def method(self): |
|
iannucci
2017/05/12 00:53:52
DOOOOGGGGG STRIIING
dnj
2017/05/12 02:15:04
Done.
|
| + return self._method |
| + |
| + @property |
| + def status_code(self): |
| + return self._status['status_code'] |
| + |
| + def raise_on_error(self): |
| + if not self._status['success']: |
| + raise ValueError('HTTP status (%d)' % (self.status_code,)) |
|
iannucci
2017/05/12 00:53:52
real exception?
dnj
2017/05/12 02:15:04
Done.
|
| + |
| + @property |
| + def output(self): |
| + return self._result |
| + |
| + |
| + @recipe_api.non_step |
| + def join(self, *parts): |
| + """Constructs a URL path from composite parts. |
| + |
| + Args: |
| + parts (str...): Strings to concatenate. Any leading or trailing slashes |
| + will be stripped from intermediate strings to ensure that they join |
| + together. Trailing slashes will not be stripped from the last part. |
| + """ |
| + if parts: |
| + parts = list(parts) |
| + if len(parts) > 1: |
| + for i, p in enumerate(parts[:-1]): |
| + parts[i] = p.strip('/') |
| + parts[-1] = parts[-1].lstrip('/') |
| + return '/'.join(parts) |
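To make the slash handling concrete, here is a standalone mirror of the logic above with a couple of expected results (illustration only; the real method is called as api.url.join inside a recipe):

    def join(*parts):
      # Mirror of the method above: strip slashes from all but the last part,
      # strip only the leading slash from the last part, keep its trailing one.
      if parts:
        parts = list(parts)
        if len(parts) > 1:
          for i, p in enumerate(parts[:-1]):
            parts[i] = p.strip('/')
          parts[-1] = parts[-1].lstrip('/')
        return '/'.join(parts)

    assert join('https://example.com/', '/a/', 'b/') == 'https://example.com/a/b/'
    assert join('only-one/') == 'only-one/'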
| + |
| + def get_file(self, url, path, step_name=None, headers=None, |
| + transient_retry=True, strip_prefix=None, **kwargs): |
| + """GET data at given URL and writes it to file. |
| + |
| + Args: |
| + url: URL to request. |
| + path (Path): the Path where the content will be written. |
| + step_name: optional step name, 'GET <url>' by default. |
| + headers: a {header_name: value} dictionary for HTTP headers. |
| + transient_retry (bool): If True (default), transient HTTP errors (>500) |
| + will automatically be retried with exponential backoff. If False, |
| + exactly one attempt will be made. |
| + strip_prefix (str or None): If not None, this prefix must be present at |
| + the beginning of the response, and will be stripped from the resulting |
| + content (e.g., GERRIT_JSON_PREFIX). |
| + |
| + Returns: |
| + Response with "path" as its "output" value. |
| + """ |
| + return self._get_step(url, path, step_name, headers, transient_retry, |
| + strip_prefix, False, **kwargs) |
| + |
| + def must_get_file(self, *args, **kwargs): |
| + """Like "get_file", but always raises an exception on error.""" |
| + resp = self.get_file(*args, **kwargs) |
| + resp.raise_on_error() |
| + return resp |
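As a usage illustration for get_file/must_get_file, a hypothetical recipe might look like the following; the DEPS entries, URL, and filename are assumptions for the example, not part of this patch:

    DEPS = [
      'recipe_engine/path',
      'url',
    ]

    def RunSteps(api):
      # must_get_file raises on a non-successful HTTP status instead of
      # returning a failed Response for the caller to inspect.
      dest = api.path['start_dir'].join('archive.zip')
      resp = api.url.must_get_file('https://example.com/archive.zip', dest,
                                   step_name='fetch archive')
      # resp.output is the Path the content was written to (dest here).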
| + |
| + def get_text(self, url, step_name=None, headers=None, transient_retry=True, |
| + **kwargs): |
| + """GET data at given URL and writes it to file. |
| + |
| + Args: |
| + url: URL to request. |
| + step_name: optional step name, 'GET <url>' by default. |
| + headers: a {header_name: value} dictionary for HTTP headers. |
| + transient_retry (bool): If True (default), transient HTTP errors (>500) |
| + will automatically be retried with exponential backoff. If False, |
| + exactly one attempt will be made. |
| + |
| + Returns: |
| + Response with a string "output" value. |
| + """ |
| + return self._get_step(url, None, step_name, headers, transient_retry, |
| + None, False, **kwargs) |
| + |
| + def must_get_text(self, *args, **kwargs): |
| + """Like "get_text", but always raises an exception on error.""" |
| + resp = self.get_text(*args, **kwargs) |
| + resp.raise_on_error() |
| + return resp |
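Similarly for get_text, a hypothetical call (again just a sketch; the URL and step name are made up for illustration):

    def RunSteps(api):
      # With transient_retry=False exactly one attempt is made; the caller is
      # responsible for checking the status, unlike with must_get_text.
      resp = api.url.get_text('https://example.com/robots.txt',
                              step_name='fetch robots.txt',
                              transient_retry=False)
      resp.raise_on_error()
      lines = resp.output.splitlines()  # resp.output is the response body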
| + |
| + def get_json(self, url, step_name=None, headers=None, transient_retry=True, |
| + strip_prefix=None, log=False, **kwargs): |
|
iannucci
2017/05/12 00:53:52
no kwargs just passthrough timeout
dnj
2017/05/12 02:15:04
Done.
|
| + """GET data at given URL and writes it to file. |
| + |
| + Args: |
| + url: URL to request. |
| + step_name: optional step name, 'GET <url>' by default. |
| + headers: a {header_name: value} dictionary for HTTP headers. |
| + transient_retry (bool): If True (default), transient HTTP errors (>500) |
| + will automatically be retried with exponential backoff. If False, |
| + exactly one attempt will be made. |
| + strip_prefix (str or None): If not None, this prefix must be present at |
| + the beginning of the response, and will be stripped from the resulting |
| + content (e.g., GERRIT_JSON_PREFIX). |
| + log (bool): If True, emit the JSON content as a log. |
| + |
| + Returns: |
| + Response with JSON "output" value. |
| + """ |
| + return self._get_step(url, None, step_name, headers, transient_retry, |
| + strip_prefix, 'log' if log else True, **kwargs) |
| + |
| + def must_get_json(self, *args, **kwargs): |
| + """Like "get_json", but always raises an exception on error.""" |
| + resp = self.get_json(*args, **kwargs) |
| + resp.raise_on_error() |
| + return resp |
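And for get_json/must_get_json against a Gerrit host, where strip_prefix pairs with GERRIT_JSON_PREFIX; the host and query here are illustrative assumptions:

    def RunSteps(api):
      # Gerrit REST responses carry the anti-XSSI prefix, so it is stripped
      # before parsing; log=True also attaches the parsed JSON as a step log.
      resp = api.url.must_get_json(
          'https://chromium-review.googlesource.com/changes/?q=status:open',
          strip_prefix=api.url.GERRIT_JSON_PREFIX,
          log=True)
      open_changes = resp.output  # parsed JSON; a list for this endpoint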
| + |
| + def _get_step(self, url, path, step_name, headers, transient_retry, |
| + strip_prefix, as_json, **kwargs): |
| + step_name = step_name or 'GET %s' % url |
| + |
| + args = [ |
| + url, |
| + '--status-json', self.m.json.output(add_json_log=False, |
| + name='status_json'), |
| + ] |
| + |
| + if as_json: |
| + log = as_json == 'log' |
| + args += ['--outfile', self.m.json.output(add_json_log=log, |
| + name='output')] |
| + else: |
| + args += ['--outfile', self.m.raw_io.output_text(leak_to=path, |
| + name='output')] |
| + |
| + if headers: |
| + args += ['--headers-json', self.m.json.input(headers)] |
| + if strip_prefix: |
| + args += ['--strip-prefix', strip_prefix] |
| + if not transient_retry: |
| + args.append('--no-transient-retry') |
| + |
| + result = self.m.python( |
| + step_name, |
| + self.resource('pycurl.py'), |
| + args=args, |
| + venv=True, |
| + **kwargs) |
| + status = result.json.outputs['status_json'] |
| + |
| + output = path |
| + if not output: |
| + if as_json: |
| + output = result.json.outputs['output'] |
| + else: |
| + output = result.raw_io.output_texts['output'] |
| + return self.Response('GET', output, status) |
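Finally, a minimal sketch of how the status JSON read above feeds the Response accessors, assuming (as the code reads) that pycurl.py emits 'status_code' and 'success' keys; illustration only:

    status = {'status_code': 403, 'success': False}
    resp = UrlApi.Response('GET', '<html>forbidden</html>', status)
    assert resp.status_code == 403
    try:
      resp.raise_on_error()
    except ValueError as e:
      # With the patch as written this is a ValueError; the review above asks
      # whether a more specific exception type should be used instead.
      print('request failed: %s' % e)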