Index: third_party/google-endpoints/future/backports/urllib/request.py |
diff --git a/third_party/google-endpoints/future/backports/urllib/request.py b/third_party/google-endpoints/future/backports/urllib/request.py |
new file mode 100644 |
index 0000000000000000000000000000000000000000..b1545ca0f5d55c1689a71b4dda1389080bb9bbe0 |
--- /dev/null |
+++ b/third_party/google-endpoints/future/backports/urllib/request.py |
@@ -0,0 +1,2641 @@ |
+""" |
+Ported using Python-Future from the Python 3.3 standard library. |
+ |
+An extensible library for opening URLs using a variety of protocols |
+ |
+The simplest way to use this module is to call the urlopen function, |
+which accepts a string containing a URL or a Request object (described |
+below). It opens the URL and returns the results as file-like |
+object; the returned object has some extra methods described below. |
+ |
+The OpenerDirector manages a collection of Handler objects that do |
+all the actual work. Each Handler implements a particular protocol or |
+option. The OpenerDirector is a composite object that invokes the |
+Handlers needed to open the requested URL. For example, the |
+HTTPHandler performs HTTP GET and POST requests and deals with |
+non-error returns. The HTTPRedirectHandler automatically deals with |
+HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler |
+deals with digest authentication. |
+ |
+urlopen(url, data=None) -- Basic usage is the same as original |
+urllib. pass the url and optionally data to post to an HTTP URL, and |
+get a file-like object back. One difference is that you can also pass |
+a Request instance instead of URL. Raises a URLError (subclass of |
+IOError); for HTTP errors, raises an HTTPError, which can also be |
+treated as a valid response. |
+ |
+build_opener -- Function that creates a new OpenerDirector instance. |
+Will install the default handlers. Accepts one or more Handlers as |
+arguments, either instances or Handler classes that it will |
+instantiate. If one of the argument is a subclass of the default |
+handler, the argument will be installed instead of the default. |
+ |
+install_opener -- Installs a new opener as the default opener. |
+ |
+objects of interest: |
+ |
+OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages |
+the Handler classes, while dealing with requests and responses. |
+ |
+Request -- An object that encapsulates the state of a request. The |
+state can be as simple as the URL. It can also include extra HTTP |
+headers, e.g. a User-Agent. |
+ |
+BaseHandler -- |
+ |
+internals: |
+BaseHandler and parent |
+_call_chain conventions |
+ |
+Example usage: |
+ |
+import urllib.request |
+ |
+# set up authentication info |
+authinfo = urllib.request.HTTPBasicAuthHandler() |
+authinfo.add_password(realm='PDQ Application', |
+ uri='https://mahler:8092/site-updates.py', |
+ user='klem', |
+ passwd='geheim$parole') |
+ |
+proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) |
+ |
+# build a new opener that adds authentication and caching FTP handlers |
+opener = urllib.request.build_opener(proxy_support, authinfo, |
+ urllib.request.CacheFTPHandler) |
+ |
+# install it |
+urllib.request.install_opener(opener) |
+ |
+f = urllib.request.urlopen('http://www.python.org/') |
+""" |
+ |
+# XXX issues: |
+# If an authentication error handler that tries to perform |
+# authentication for some reason but fails, how should the error be |
+# signalled? The client needs to know the HTTP error code. But if |
+# the handler knows that the problem was, e.g., that it didn't know |
+# that hash algo that requested in the challenge, it would be good to |
+# pass that information along to the client, too. |
+# ftp errors aren't handled cleanly |
+# check digest against correct (i.e. non-apache) implementation |
+ |
+# Possible extensions: |
+# complex proxies XXX not sure what exactly was meant by this |
+# abstract factory for opener |
+ |
+from __future__ import absolute_import, division, print_function, unicode_literals |
+from future.builtins import bytes, dict, filter, input, int, map, open, str |
+from future.utils import PY2, PY3, raise_with_traceback |
+ |
+import base64 |
+import bisect |
+import hashlib |
+import array |
+ |
+from future.backports import email |
+from future.backports.http import client as http_client |
+from .error import URLError, HTTPError, ContentTooShortError |
+from .parse import ( |
+ urlparse, urlsplit, urljoin, unwrap, quote, unquote, |
+ splittype, splithost, splitport, splituser, splitpasswd, |
+ splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) |
+from .response import addinfourl, addclosehook |
+ |
+import io |
+import os |
+import posixpath |
+import re |
+import socket |
+import sys |
+import time |
+import collections |
+import tempfile |
+import contextlib |
+import warnings |
+ |
+# check for SSL |
+try: |
+ import ssl |
+ # Not available in the SSL module in Py2: |
+ from ssl import SSLContext |
+except ImportError: |
+ _have_ssl = False |
+else: |
+ _have_ssl = True |
+ |
+__all__ = [ |
+ # Classes |
+ 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', |
+ 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', |
+ 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', |
+ 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', |
+ 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', |
+ 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', |
+ 'UnknownHandler', 'HTTPErrorProcessor', |
+ # Functions |
+ 'urlopen', 'install_opener', 'build_opener', |
+ 'pathname2url', 'url2pathname', 'getproxies', |
+ # Legacy interface |
+ 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', |
+] |
+ |
+# used in User-Agent header sent |
+__version__ = sys.version[:3] |
+ |
+_opener = None |
+def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): |
+ if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] |
+ else: cadefault = False |
+ if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath'] |
+ else: capath = None |
+ if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] |
+ else: cafile = None |
+ global _opener |
+ if cafile or capath or cadefault: |
+ if not _have_ssl: |
+ raise ValueError('SSL support not available') |
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) |
+ context.options |= ssl.OP_NO_SSLv2 |
+ context.verify_mode = ssl.CERT_REQUIRED |
+ if cafile or capath: |
+ context.load_verify_locations(cafile, capath) |
+ else: |
+ context.set_default_verify_paths() |
+ https_handler = HTTPSHandler(context=context, check_hostname=True) |
+ opener = build_opener(https_handler) |
+ elif _opener is None: |
+ _opener = opener = build_opener() |
+ else: |
+ opener = _opener |
+ return opener.open(url, data, timeout) |
+ |
+def install_opener(opener): |
+ global _opener |
+ _opener = opener |
+ |
+_url_tempfiles = [] |
+def urlretrieve(url, filename=None, reporthook=None, data=None): |
+ """ |
+ Retrieve a URL into a temporary location on disk. |
+ |
+ Requires a URL argument. If a filename is passed, it is used as |
+ the temporary file location. The reporthook argument should be |
+ a callable that accepts a block number, a read size, and the |
+ total file size of the URL target. The data argument should be |
+ valid URL encoded data. |
+ |
+ If a filename is passed and the URL points to a local resource, |
+ the result is a copy from local file to new file. |
+ |
+ Returns a tuple containing the path to the newly created |
+ data file as well as the resulting HTTPMessage object. |
+ """ |
+ url_type, path = splittype(url) |
+ |
+ with contextlib.closing(urlopen(url, data)) as fp: |
+ headers = fp.info() |
+ |
+ # Just return the local path and the "headers" for file:// |
+ # URLs. No sense in performing a copy unless requested. |
+ if url_type == "file" and not filename: |
+ return os.path.normpath(path), headers |
+ |
+ # Handle temporary file setup. |
+ if filename: |
+ tfp = open(filename, 'wb') |
+ else: |
+ tfp = tempfile.NamedTemporaryFile(delete=False) |
+ filename = tfp.name |
+ _url_tempfiles.append(filename) |
+ |
+ with tfp: |
+ result = filename, headers |
+ bs = 1024*8 |
+ size = -1 |
+ read = 0 |
+ blocknum = 0 |
+ if "content-length" in headers: |
+ size = int(headers["Content-Length"]) |
+ |
+ if reporthook: |
+ reporthook(blocknum, bs, size) |
+ |
+ while True: |
+ block = fp.read(bs) |
+ if not block: |
+ break |
+ read += len(block) |
+ tfp.write(block) |
+ blocknum += 1 |
+ if reporthook: |
+ reporthook(blocknum, bs, size) |
+ |
+ if size >= 0 and read < size: |
+ raise ContentTooShortError( |
+ "retrieval incomplete: got only %i out of %i bytes" |
+ % (read, size), result) |
+ |
+ return result |
+ |
+def urlcleanup(): |
+ for temp_file in _url_tempfiles: |
+ try: |
+ os.unlink(temp_file) |
+ except EnvironmentError: |
+ pass |
+ |
+ del _url_tempfiles[:] |
+ global _opener |
+ if _opener: |
+ _opener = None |
+ |
+if PY3: |
+ _cut_port_re = re.compile(r":\d+$", re.ASCII) |
+else: |
+ _cut_port_re = re.compile(r":\d+$") |
+ |
+def request_host(request): |
+ |
+ """Return request-host, as defined by RFC 2965. |
+ |
+ Variation from RFC: returned value is lowercased, for convenient |
+ comparison. |
+ |
+ """ |
+ url = request.full_url |
+ host = urlparse(url)[1] |
+ if host == "": |
+ host = request.get_header("Host", "") |
+ |
+ # remove port, if present |
+ host = _cut_port_re.sub("", host, 1) |
+ return host.lower() |
+ |
+class Request(object): |
+ |
+ def __init__(self, url, data=None, headers={}, |
+ origin_req_host=None, unverifiable=False, |
+ method=None): |
+ # unwrap('<URL:type://host/path>') --> 'type://host/path' |
+ self.full_url = unwrap(url) |
+ self.full_url, self.fragment = splittag(self.full_url) |
+ self.data = data |
+ self.headers = {} |
+ self._tunnel_host = None |
+ for key, value in headers.items(): |
+ self.add_header(key, value) |
+ self.unredirected_hdrs = {} |
+ if origin_req_host is None: |
+ origin_req_host = request_host(self) |
+ self.origin_req_host = origin_req_host |
+ self.unverifiable = unverifiable |
+ self.method = method |
+ self._parse() |
+ |
+ def _parse(self): |
+ self.type, rest = splittype(self.full_url) |
+ if self.type is None: |
+ raise ValueError("unknown url type: %r" % self.full_url) |
+ self.host, self.selector = splithost(rest) |
+ if self.host: |
+ self.host = unquote(self.host) |
+ |
+ def get_method(self): |
+ """Return a string indicating the HTTP request method.""" |
+ if self.method is not None: |
+ return self.method |
+ elif self.data is not None: |
+ return "POST" |
+ else: |
+ return "GET" |
+ |
+ def get_full_url(self): |
+ if self.fragment: |
+ return '%s#%s' % (self.full_url, self.fragment) |
+ else: |
+ return self.full_url |
+ |
+ # Begin deprecated methods |
+ |
+ def add_data(self, data): |
+ msg = "Request.add_data method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ self.data = data |
+ |
+ def has_data(self): |
+ msg = "Request.has_data method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.data is not None |
+ |
+ def get_data(self): |
+ msg = "Request.get_data method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.data |
+ |
+ def get_type(self): |
+ msg = "Request.get_type method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.type |
+ |
+ def get_host(self): |
+ msg = "Request.get_host method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.host |
+ |
+ def get_selector(self): |
+ msg = "Request.get_selector method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.selector |
+ |
+ def is_unverifiable(self): |
+ msg = "Request.is_unverifiable method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.unverifiable |
+ |
+ def get_origin_req_host(self): |
+ msg = "Request.get_origin_req_host method is deprecated." |
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) |
+ return self.origin_req_host |
+ |
+ # End deprecated methods |
+ |
+ def set_proxy(self, host, type): |
+ if self.type == 'https' and not self._tunnel_host: |
+ self._tunnel_host = self.host |
+ else: |
+ self.type= type |
+ self.selector = self.full_url |
+ self.host = host |
+ |
+ def has_proxy(self): |
+ return self.selector == self.full_url |
+ |
+ def add_header(self, key, val): |
+ # useful for something like authentication |
+ self.headers[key.capitalize()] = val |
+ |
+ def add_unredirected_header(self, key, val): |
+ # will not be added to a redirected request |
+ self.unredirected_hdrs[key.capitalize()] = val |
+ |
+ def has_header(self, header_name): |
+ return (header_name in self.headers or |
+ header_name in self.unredirected_hdrs) |
+ |
+ def get_header(self, header_name, default=None): |
+ return self.headers.get( |
+ header_name, |
+ self.unredirected_hdrs.get(header_name, default)) |
+ |
+ def header_items(self): |
+ hdrs = self.unredirected_hdrs.copy() |
+ hdrs.update(self.headers) |
+ return list(hdrs.items()) |
+ |
+class OpenerDirector(object): |
+ def __init__(self): |
+ client_version = "Python-urllib/%s" % __version__ |
+ self.addheaders = [('User-agent', client_version)] |
+ # self.handlers is retained only for backward compatibility |
+ self.handlers = [] |
+ # manage the individual handlers |
+ self.handle_open = {} |
+ self.handle_error = {} |
+ self.process_response = {} |
+ self.process_request = {} |
+ |
+ def add_handler(self, handler): |
+ if not hasattr(handler, "add_parent"): |
+ raise TypeError("expected BaseHandler instance, got %r" % |
+ type(handler)) |
+ |
+ added = False |
+ for meth in dir(handler): |
+ if meth in ["redirect_request", "do_open", "proxy_open"]: |
+ # oops, coincidental match |
+ continue |
+ |
+ i = meth.find("_") |
+ protocol = meth[:i] |
+ condition = meth[i+1:] |
+ |
+ if condition.startswith("error"): |
+ j = condition.find("_") + i + 1 |
+ kind = meth[j+1:] |
+ try: |
+ kind = int(kind) |
+ except ValueError: |
+ pass |
+ lookup = self.handle_error.get(protocol, {}) |
+ self.handle_error[protocol] = lookup |
+ elif condition == "open": |
+ kind = protocol |
+ lookup = self.handle_open |
+ elif condition == "response": |
+ kind = protocol |
+ lookup = self.process_response |
+ elif condition == "request": |
+ kind = protocol |
+ lookup = self.process_request |
+ else: |
+ continue |
+ |
+ handlers = lookup.setdefault(kind, []) |
+ if handlers: |
+ bisect.insort(handlers, handler) |
+ else: |
+ handlers.append(handler) |
+ added = True |
+ |
+ if added: |
+ bisect.insort(self.handlers, handler) |
+ handler.add_parent(self) |
+ |
+ def close(self): |
+ # Only exists for backwards compatibility. |
+ pass |
+ |
+ def _call_chain(self, chain, kind, meth_name, *args): |
+ # Handlers raise an exception if no one else should try to handle |
+ # the request, or return None if they can't but another handler |
+ # could. Otherwise, they return the response. |
+ handlers = chain.get(kind, ()) |
+ for handler in handlers: |
+ func = getattr(handler, meth_name) |
+ result = func(*args) |
+ if result is not None: |
+ return result |
+ |
+ def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): |
+ """ |
+ Accept a URL or a Request object |
+ |
+ Python-Future: if the URL is passed as a byte-string, decode it first. |
+ """ |
+ if isinstance(fullurl, bytes): |
+ fullurl = fullurl.decode() |
+ if isinstance(fullurl, str): |
+ req = Request(fullurl, data) |
+ else: |
+ req = fullurl |
+ if data is not None: |
+ req.data = data |
+ |
+ req.timeout = timeout |
+ protocol = req.type |
+ |
+ # pre-process request |
+ meth_name = protocol+"_request" |
+ for processor in self.process_request.get(protocol, []): |
+ meth = getattr(processor, meth_name) |
+ req = meth(req) |
+ |
+ response = self._open(req, data) |
+ |
+ # post-process response |
+ meth_name = protocol+"_response" |
+ for processor in self.process_response.get(protocol, []): |
+ meth = getattr(processor, meth_name) |
+ response = meth(req, response) |
+ |
+ return response |
+ |
+ def _open(self, req, data=None): |
+ result = self._call_chain(self.handle_open, 'default', |
+ 'default_open', req) |
+ if result: |
+ return result |
+ |
+ protocol = req.type |
+ result = self._call_chain(self.handle_open, protocol, protocol + |
+ '_open', req) |
+ if result: |
+ return result |
+ |
+ return self._call_chain(self.handle_open, 'unknown', |
+ 'unknown_open', req) |
+ |
+ def error(self, proto, *args): |
+ if proto in ('http', 'https'): |
+ # XXX http[s] protocols are special-cased |
+ dict = self.handle_error['http'] # https is not different than http |
+ proto = args[2] # YUCK! |
+ meth_name = 'http_error_%s' % proto |
+ http_err = 1 |
+ orig_args = args |
+ else: |
+ dict = self.handle_error |
+ meth_name = proto + '_error' |
+ http_err = 0 |
+ args = (dict, proto, meth_name) + args |
+ result = self._call_chain(*args) |
+ if result: |
+ return result |
+ |
+ if http_err: |
+ args = (dict, 'default', 'http_error_default') + orig_args |
+ return self._call_chain(*args) |
+ |
+# XXX probably also want an abstract factory that knows when it makes |
+# sense to skip a superclass in favor of a subclass and when it might |
+# make sense to include both |
+ |
+def build_opener(*handlers): |
+ """Create an opener object from a list of handlers. |
+ |
+ The opener will use several default handlers, including support |
+ for HTTP, FTP and when applicable HTTPS. |
+ |
+ If any of the handlers passed as arguments are subclasses of the |
+ default handlers, the default handlers will not be used. |
+ """ |
+ def isclass(obj): |
+ return isinstance(obj, type) or hasattr(obj, "__bases__") |
+ |
+ opener = OpenerDirector() |
+ default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, |
+ HTTPDefaultErrorHandler, HTTPRedirectHandler, |
+ FTPHandler, FileHandler, HTTPErrorProcessor] |
+ if hasattr(http_client, "HTTPSConnection"): |
+ default_classes.append(HTTPSHandler) |
+ skip = set() |
+ for klass in default_classes: |
+ for check in handlers: |
+ if isclass(check): |
+ if issubclass(check, klass): |
+ skip.add(klass) |
+ elif isinstance(check, klass): |
+ skip.add(klass) |
+ for klass in skip: |
+ default_classes.remove(klass) |
+ |
+ for klass in default_classes: |
+ opener.add_handler(klass()) |
+ |
+ for h in handlers: |
+ if isclass(h): |
+ h = h() |
+ opener.add_handler(h) |
+ return opener |
+ |
+class BaseHandler(object): |
+ handler_order = 500 |
+ |
+ def add_parent(self, parent): |
+ self.parent = parent |
+ |
+ def close(self): |
+ # Only exists for backwards compatibility |
+ pass |
+ |
+ def __lt__(self, other): |
+ if not hasattr(other, "handler_order"): |
+ # Try to preserve the old behavior of having custom classes |
+ # inserted after default ones (works only for custom user |
+ # classes which are not aware of handler_order). |
+ return True |
+ return self.handler_order < other.handler_order |
+ |
+ |
+class HTTPErrorProcessor(BaseHandler): |
+ """Process HTTP error responses.""" |
+ handler_order = 1000 # after all other processing |
+ |
+ def http_response(self, request, response): |
+ code, msg, hdrs = response.code, response.msg, response.info() |
+ |
+ # According to RFC 2616, "2xx" code indicates that the client's |
+ # request was successfully received, understood, and accepted. |
+ if not (200 <= code < 300): |
+ response = self.parent.error( |
+ 'http', request, response, code, msg, hdrs) |
+ |
+ return response |
+ |
+ https_response = http_response |
+ |
+class HTTPDefaultErrorHandler(BaseHandler): |
+ def http_error_default(self, req, fp, code, msg, hdrs): |
+ raise HTTPError(req.full_url, code, msg, hdrs, fp) |
+ |
+class HTTPRedirectHandler(BaseHandler): |
+ # maximum number of redirections to any single URL |
+ # this is needed because of the state that cookies introduce |
+ max_repeats = 4 |
+ # maximum total number of redirections (regardless of URL) before |
+ # assuming we're in a loop |
+ max_redirections = 10 |
+ |
+ def redirect_request(self, req, fp, code, msg, headers, newurl): |
+ """Return a Request or None in response to a redirect. |
+ |
+ This is called by the http_error_30x methods when a |
+ redirection response is received. If a redirection should |
+ take place, return a new Request to allow http_error_30x to |
+ perform the redirect. Otherwise, raise HTTPError if no-one |
+ else should try to handle this url. Return None if you can't |
+ but another Handler might. |
+ """ |
+ m = req.get_method() |
+ if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD") |
+ or code in (301, 302, 303) and m == "POST")): |
+ raise HTTPError(req.full_url, code, msg, headers, fp) |
+ |
+ # Strictly (according to RFC 2616), 301 or 302 in response to |
+ # a POST MUST NOT cause a redirection without confirmation |
+ # from the user (of urllib.request, in this case). In practice, |
+ # essentially all clients do redirect in this case, so we do |
+ # the same. |
+ # be conciliant with URIs containing a space |
+ newurl = newurl.replace(' ', '%20') |
+ CONTENT_HEADERS = ("content-length", "content-type") |
+ newheaders = dict((k, v) for k, v in req.headers.items() |
+ if k.lower() not in CONTENT_HEADERS) |
+ return Request(newurl, |
+ headers=newheaders, |
+ origin_req_host=req.origin_req_host, |
+ unverifiable=True) |
+ |
+ # Implementation note: To avoid the server sending us into an |
+ # infinite loop, the request object needs to track what URLs we |
+ # have already seen. Do this by adding a handler-specific |
+ # attribute to the Request object. |
+ def http_error_302(self, req, fp, code, msg, headers): |
+ # Some servers (incorrectly) return multiple Location headers |
+ # (so probably same goes for URI). Use first header. |
+ if "location" in headers: |
+ newurl = headers["location"] |
+ elif "uri" in headers: |
+ newurl = headers["uri"] |
+ else: |
+ return |
+ |
+ # fix a possible malformed URL |
+ urlparts = urlparse(newurl) |
+ |
+ # For security reasons we don't allow redirection to anything other |
+ # than http, https or ftp. |
+ |
+ if urlparts.scheme not in ('http', 'https', 'ftp', ''): |
+ raise HTTPError( |
+ newurl, code, |
+ "%s - Redirection to url '%s' is not allowed" % (msg, newurl), |
+ headers, fp) |
+ |
+ if not urlparts.path: |
+ urlparts = list(urlparts) |
+ urlparts[2] = "/" |
+ newurl = urlunparse(urlparts) |
+ |
+ newurl = urljoin(req.full_url, newurl) |
+ |
+ # XXX Probably want to forget about the state of the current |
+ # request, although that might interact poorly with other |
+ # handlers that also use handler-specific request attributes |
+ new = self.redirect_request(req, fp, code, msg, headers, newurl) |
+ if new is None: |
+ return |
+ |
+ # loop detection |
+ # .redirect_dict has a key url if url was previously visited. |
+ if hasattr(req, 'redirect_dict'): |
+ visited = new.redirect_dict = req.redirect_dict |
+ if (visited.get(newurl, 0) >= self.max_repeats or |
+ len(visited) >= self.max_redirections): |
+ raise HTTPError(req.full_url, code, |
+ self.inf_msg + msg, headers, fp) |
+ else: |
+ visited = new.redirect_dict = req.redirect_dict = {} |
+ visited[newurl] = visited.get(newurl, 0) + 1 |
+ |
+ # Don't close the fp until we are sure that we won't use it |
+ # with HTTPError. |
+ fp.read() |
+ fp.close() |
+ |
+ return self.parent.open(new, timeout=req.timeout) |
+ |
+ http_error_301 = http_error_303 = http_error_307 = http_error_302 |
+ |
+ inf_msg = "The HTTP server returned a redirect error that would " \ |
+ "lead to an infinite loop.\n" \ |
+ "The last 30x error message was:\n" |
+ |
+ |
+def _parse_proxy(proxy): |
+ """Return (scheme, user, password, host/port) given a URL or an authority. |
+ |
+ If a URL is supplied, it must have an authority (host:port) component. |
+ According to RFC 3986, having an authority component means the URL must |
+ have two slashes after the scheme: |
+ |
+ >>> _parse_proxy('file:/ftp.example.com/') |
+ Traceback (most recent call last): |
+ ValueError: proxy URL with no authority: 'file:/ftp.example.com/' |
+ |
+ The first three items of the returned tuple may be None. |
+ |
+ Examples of authority parsing: |
+ |
+ >>> _parse_proxy('proxy.example.com') |
+ (None, None, None, 'proxy.example.com') |
+ >>> _parse_proxy('proxy.example.com:3128') |
+ (None, None, None, 'proxy.example.com:3128') |
+ |
+ The authority component may optionally include userinfo (assumed to be |
+ username:password): |
+ |
+ >>> _parse_proxy('joe:password@proxy.example.com') |
+ (None, 'joe', 'password', 'proxy.example.com') |
+ >>> _parse_proxy('joe:password@proxy.example.com:3128') |
+ (None, 'joe', 'password', 'proxy.example.com:3128') |
+ |
+ Same examples, but with URLs instead: |
+ |
+ >>> _parse_proxy('http://proxy.example.com/') |
+ ('http', None, None, 'proxy.example.com') |
+ >>> _parse_proxy('http://proxy.example.com:3128/') |
+ ('http', None, None, 'proxy.example.com:3128') |
+ >>> _parse_proxy('http://joe:password@proxy.example.com/') |
+ ('http', 'joe', 'password', 'proxy.example.com') |
+ >>> _parse_proxy('http://joe:password@proxy.example.com:3128') |
+ ('http', 'joe', 'password', 'proxy.example.com:3128') |
+ |
+ Everything after the authority is ignored: |
+ |
+ >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128') |
+ ('ftp', 'joe', 'password', 'proxy.example.com') |
+ |
+ Test for no trailing '/' case: |
+ |
+ >>> _parse_proxy('http://joe:password@proxy.example.com') |
+ ('http', 'joe', 'password', 'proxy.example.com') |
+ |
+ """ |
+ scheme, r_scheme = splittype(proxy) |
+ if not r_scheme.startswith("/"): |
+ # authority |
+ scheme = None |
+ authority = proxy |
+ else: |
+ # URL |
+ if not r_scheme.startswith("//"): |
+ raise ValueError("proxy URL with no authority: %r" % proxy) |
+ # We have an authority, so for RFC 3986-compliant URLs (by ss 3. |
+ # and 3.3.), path is empty or starts with '/' |
+ end = r_scheme.find("/", 2) |
+ if end == -1: |
+ end = None |
+ authority = r_scheme[2:end] |
+ userinfo, hostport = splituser(authority) |
+ if userinfo is not None: |
+ user, password = splitpasswd(userinfo) |
+ else: |
+ user = password = None |
+ return scheme, user, password, hostport |
+ |
+class ProxyHandler(BaseHandler): |
+ # Proxies must be in front |
+ handler_order = 100 |
+ |
+ def __init__(self, proxies=None): |
+ if proxies is None: |
+ proxies = getproxies() |
+ assert hasattr(proxies, 'keys'), "proxies must be a mapping" |
+ self.proxies = proxies |
+ for type, url in proxies.items(): |
+ setattr(self, '%s_open' % type, |
+ lambda r, proxy=url, type=type, meth=self.proxy_open: |
+ meth(r, proxy, type)) |
+ |
+ def proxy_open(self, req, proxy, type): |
+ orig_type = req.type |
+ proxy_type, user, password, hostport = _parse_proxy(proxy) |
+ if proxy_type is None: |
+ proxy_type = orig_type |
+ |
+ if req.host and proxy_bypass(req.host): |
+ return None |
+ |
+ if user and password: |
+ user_pass = '%s:%s' % (unquote(user), |
+ unquote(password)) |
+ creds = base64.b64encode(user_pass.encode()).decode("ascii") |
+ req.add_header('Proxy-authorization', 'Basic ' + creds) |
+ hostport = unquote(hostport) |
+ req.set_proxy(hostport, proxy_type) |
+ if orig_type == proxy_type or orig_type == 'https': |
+ # let other handlers take care of it |
+ return None |
+ else: |
+ # need to start over, because the other handlers don't |
+ # grok the proxy's URL type |
+ # e.g. if we have a constructor arg proxies like so: |
+ # {'http': 'ftp://proxy.example.com'}, we may end up turning |
+ # a request for http://acme.example.com/a into one for |
+ # ftp://proxy.example.com/a |
+ return self.parent.open(req, timeout=req.timeout) |
+ |
+class HTTPPasswordMgr(object): |
+ |
+ def __init__(self): |
+ self.passwd = {} |
+ |
+ def add_password(self, realm, uri, user, passwd): |
+ # uri could be a single URI or a sequence |
+ if isinstance(uri, str): |
+ uri = [uri] |
+ if realm not in self.passwd: |
+ self.passwd[realm] = {} |
+ for default_port in True, False: |
+ reduced_uri = tuple( |
+ [self.reduce_uri(u, default_port) for u in uri]) |
+ self.passwd[realm][reduced_uri] = (user, passwd) |
+ |
+ def find_user_password(self, realm, authuri): |
+ domains = self.passwd.get(realm, {}) |
+ for default_port in True, False: |
+ reduced_authuri = self.reduce_uri(authuri, default_port) |
+ for uris, authinfo in domains.items(): |
+ for uri in uris: |
+ if self.is_suburi(uri, reduced_authuri): |
+ return authinfo |
+ return None, None |
+ |
+ def reduce_uri(self, uri, default_port=True): |
+ """Accept authority or URI and extract only the authority and path.""" |
+ # note HTTP URLs do not have a userinfo component |
+ parts = urlsplit(uri) |
+ if parts[1]: |
+ # URI |
+ scheme = parts[0] |
+ authority = parts[1] |
+ path = parts[2] or '/' |
+ else: |
+ # host or host:port |
+ scheme = None |
+ authority = uri |
+ path = '/' |
+ host, port = splitport(authority) |
+ if default_port and port is None and scheme is not None: |
+ dport = {"http": 80, |
+ "https": 443, |
+ }.get(scheme) |
+ if dport is not None: |
+ authority = "%s:%d" % (host, dport) |
+ return authority, path |
+ |
+ def is_suburi(self, base, test): |
+ """Check if test is below base in a URI tree |
+ |
+ Both args must be URIs in reduced form. |
+ """ |
+ if base == test: |
+ return True |
+ if base[0] != test[0]: |
+ return False |
+ common = posixpath.commonprefix((base[1], test[1])) |
+ if len(common) == len(base[1]): |
+ return True |
+ return False |
+ |
+ |
+class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): |
+ |
+ def find_user_password(self, realm, authuri): |
+ user, password = HTTPPasswordMgr.find_user_password(self, realm, |
+ authuri) |
+ if user is not None: |
+ return user, password |
+ return HTTPPasswordMgr.find_user_password(self, None, authuri) |
+ |
+ |
class AbstractBasicAuthHandler(object):
    """Shared machinery for HTTP Basic authentication.

    Subclasses provide ``auth_header`` and route 401/407 errors into
    ``http_error_auth_reqed``.
    """

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # Captures (scheme, quote-char, realm) from a challenge header.
    # Both double- and single-quoted realm values are accepted (single
    # quotes violate the RFC, but appear in the wild).
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\']?)([^"\']*)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        """Forget how many times the current request has been retried."""
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        challenge = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        self.retried += 1

        if challenge:
            scheme = challenge.split()[0]
            if scheme.lower() != 'basic':
                raise ValueError("AbstractBasicAuthHandler does not"
                                 " support the following scheme: '%s'" %
                                 scheme)
            match = AbstractBasicAuthHandler.rx.search(challenge)
            if match:
                # Note: the regex may pick out a different scheme than the
                # first token above, hence the second 'basic' check below.
                scheme, quote, realm = match.groups()
                if quote not in ['"', "'"]:
                    warnings.warn("Basic Auth Realm was unquoted",
                                  UserWarning, 2)
                if scheme.lower() == 'basic':
                    response = self.retry_http_basic_auth(host, req, realm)
                    if response and response.code != 401:
                        self.retried = 0
                    return response

    def retry_http_basic_auth(self, host, req, realm):
        """Re-issue *req* with credentials for *realm*, or return None."""
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is None:
            return None
        raw = "%s:%s" % (user, pw)
        auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
        if req.headers.get(self.auth_header, None) == auth:
            # These exact credentials were already sent and rejected.
            return None
        req.add_unredirected_header(self.auth_header, auth)
        return self.parent.open(req, timeout=req.timeout)
+ |
+ |
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 401 responses from the origin server with Basic auth."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        response = self.http_error_auth_reqed('www-authenticate',
                                              req.full_url, req, headers)
        self.reset_retry_count()
        return response
+ |
+ |
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 407 responses from a proxy with Basic auth."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              req.host, req, headers)
        self.reset_retry_count()
        return response
+ |
+ |
# _randombytes(n) -> n random bytes from the OS entropy source
# (os.urandom); used below for the digest-auth client nonce.
_randombytes = os.urandom
+ |
+ |
class AbstractDigestAuthHandler(object):
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" supports is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        """Forget how many times the current request has been retried."""
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """React to a Digest challenge found in *headers*.

        Returns the retried response, or None when there is no challenge
        (or nothing worth retrying).  Raises HTTPError after too many
        retries and ValueError for unsupported schemes (Basic challenges
        are tolerated so the Basic handler can take them).
        """
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            elif scheme.lower() != 'basic':
                raise ValueError("AbstractDigestAuthHandler does not support"
                                 " the following scheme: '%s'" % scheme)

    def retry_http_digest_auth(self, req, auth):
        """Re-issue *req* with an Authorization header answering *auth*."""
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # These exact credentials were already sent and rejected.
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + _randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Authorization header value (minus 'Digest ') from the
        challenge dict *chal*, or return None if it cannot be answered."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            # Unsupported algorithm -- decline the challenge.
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            # Per RFC 2617, nc must count requests made with the same nonce.
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        """Return (H, KD) hash helpers for *algorithm*.

        Returns (None, None) for unsupported algorithms.  (Previously an
        unsupported algorithm, e.g. 'MD5-sess', crashed with
        UnboundLocalError at the return; get_authorization() already
        guards with ``if H is None``, so honour that contract instead.)
        """
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        else:
            # XXX MD5-sess
            return None, None
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
+ |
+ |
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069.

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490    # run before the Basic-auth handler

    def http_error_401(self, req, fp, code, msg, headers):
        authority = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           authority, req, headers)
        self.reset_retry_count()
        return retry
+ |
+ |
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Answer 407 responses from a proxy with Digest auth."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490    # run before the Basic-auth handler

    def http_error_407(self, req, fp, code, msg, headers):
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           req.host, req, headers)
        self.reset_retry_count()
        return retry
+ |
class AbstractHTTPHandler(BaseHandler):
    """Shared request-preparation (do_request_) and connection logic
    (do_open) for HTTPHandler and HTTPSHandler."""

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        # NOTE(review): _debuglevel is stored but not consulted in this
        # excerpt; presumably meant for http.client debug output -- confirm.
        self._debuglevel = level

    def do_request_(self, request):
        """Fill in default headers on *request* and return it.

        Adds Content-type/Content-length for POST bodies, the Host
        header, and the opener's global addheaders.  Raises URLError if
        no host is given and TypeError if POST data is str, not bytes.
        """
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None: # POST
            data = request.data
            if isinstance(data, str):
                msg = "POST data should be bytes or an iterable of bytes. " \
                      "It cannot be of type str."
                raise TypeError(msg)
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                size = None
                try:
                    ### For Python-Future:
                    if PY2 and isinstance(data, array.array):
                        # memoryviews of arrays aren't supported
                        # in Py2.7. (e.g. memoryview(array.array('I',
                        # [1, 2, 3, 4])) raises a TypeError.)
                        # So we calculate the size manually instead:
                        size = len(data) * data.itemsize
                    ###
                    else:
                        mv = memoryview(data)
                        size = len(mv) * mv.itemsize
                except TypeError:
                    # NOTE(review): collections.Iterable was removed in
                    # Python 3.10 (moved to collections.abc); kept as-is
                    # because this backport also targets Python 2.
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                "for iterable data of type %r %r" % (type(data),
                                data))
                else:
                    request.add_unredirected_header(
                            'Content-length', '%d' % size)

        sel_host = host
        if request.has_proxy():
            # Through a proxy the selector is a full URL; the Host header
            # must still name the origin server.
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        # Unredirected headers take precedence over the regular ones.
        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            # HTTPS through a proxy: CONNECT tunnel.
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
        except socket.error as err: # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()
            # If the server does not send us a 'Connection: close' header,
            # HTTPConnection assumes the socket should be left open. Manually
            # mark the socket to be closed when this response object goes away.
            if h.sock:
                h.sock.close()
                h.sock = None


        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute as deprecated and get them to use info() or
        # .headers.
        r.msg = r.reason
        return r
+ |
+ |
class HTTPHandler(AbstractHTTPHandler):
    """Open http:// URLs via AbstractHTTPHandler.do_open()."""

    def http_open(self, req):
        return self.do_open(http_client.HTTPConnection, req)

    # Request preparation is shared with HTTPSHandler.
    http_request = AbstractHTTPHandler.do_request_
+ |
# HTTPSHandler only exists when the http.client backport was built with
# SSL support.
if hasattr(http_client, 'HTTPSConnection'):

    class HTTPSHandler(AbstractHTTPHandler):
        """Open https:// URLs via AbstractHTTPHandler.do_open()."""

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            # context/check_hostname are forwarded to HTTPSConnection.
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http_client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

    __all__.append('HTTPSHandler')
+ |
class HTTPCookieProcessor(BaseHandler):
    """Attach cookies from a CookieJar to outgoing requests and harvest
    cookies from responses."""

    def __init__(self, cookiejar=None):
        import future.backports.http.cookiejar as http_cookiejar
        self.cookiejar = (http_cookiejar.CookieJar()
                          if cookiejar is None else cookiejar)

    def http_request(self, request):
        """Add a Cookie header for cookies matching *request*."""
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        """Store any cookies set by *response* in the jar."""
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
+ |
class UnknownHandler(BaseHandler):
    """Catch-all handler: any scheme nobody else claims is an error."""

    def unknown_open(self, req):
        raise URLError('unknown url type: %s' % req.type)
+ |
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    def _strip_quotes(value):
        # Remove one matching pair of surrounding double quotes, if any.
        if value[0] == '"' and value[-1] == '"':
            return value[1:-1]
        return value

    pairs = (item.split('=', 1) for item in l)
    return dict((key, _strip_quotes(value)) for key, value in pairs)
+ |
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    chunks = []
    current = ''
    in_quotes = False
    escaped = False

    for ch in s:
        if escaped:
            # Previous char was a backslash inside quotes: take ch verbatim
            # (the backslash itself is dropped).
            current += ch
            escaped = False
        elif in_quotes:
            if ch == '\\':
                escaped = True
            else:
                if ch == '"':
                    in_quotes = False
                current += ch
        elif ch == ',':
            # Unquoted comma: element boundary.
            chunks.append(current)
            current = ''
        else:
            if ch == '"':
                in_quotes = True
            current += ch

    # append last part
    if current:
        chunks.append(current)

    return [chunk.strip() for chunk in chunks]
+ |
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        """Open a file:// URL that refers to the local host.

        Requests with an explicit host that names this machine are
        declined (returns None) so another handler may try them; any
        other host raises URLError.
        """
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            # Bug fix: this check used `is` (identity), which is never true
            # for a str versus the tuple returned by get_names(), so every
            # explicit host -- even one naming this machine -- raised.
            # Membership is what was intended (as in CPython).
            if req.host not in self.get_names():
                raise URLError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None
    def get_names(self):
        """Return (and cache on the class) the tuple of IP addresses that
        count as 'this host'."""
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                # Name resolution unavailable: fall back to localhost only.
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        """Open the local file named by *req* and return an addinfourl
        carrying Content-type/Content-length/Last-modified headers."""
        import future.backports.email.utils as email_utils
        import mimetypes
        host = req.host
        filename = req.selector
        localfile = url2pathname(filename)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(filename)[0]
            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified))
            if host:
                host, port = splitport(host)
            # Serve the file only when no host was given, or the host
            # (without a port) resolves to one of this machine's addresses.
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                if host:
                    origurl = 'file://' + host + filename
                else:
                    origurl = 'file://' + filename
                return addinfourl(open(localfile, 'rb'), headers, origurl)
        except OSError as exp:
            # users shouldn't expect OSErrors coming from urlopen()
            raise URLError(exp)
        raise URLError('file not on local host')
+ |
+def _safe_gethostbyname(host): |
+ try: |
+ return socket.gethostbyname(host) |
+ except socket.gaierror: |
+ return None |
+ |
class FTPHandler(BaseHandler):
    def ftp_open(self, req):
        """Open an ftp:// request and return an addinfourl for the
        retrieved file or directory listing."""
        import ftplib
        import mimetypes
        host = req.host
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = user or ''
        passwd = passwd or ''

        try:
            host = socket.gethostbyname(host)
        except socket.error as msg:
            raise URLError(msg)
        path, attrs = splitattr(req.selector)
        dirs = path.split('/')
        dirs = list(map(unquote, dirs))
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # 'I' (binary) transfer when a file name is present, 'D'
            # (directory listing) otherwise; a type=... URL attribute
            # below may override this.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.full_url)[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, req.full_url)
        except ftplib.all_errors as exp:
            # Re-raise any ftplib failure as URLError, keeping the traceback.
            exc = URLError('ftp error: %r' % exp)
            raise_with_traceback(exc)

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        # One-shot (non-persistent) connection; CacheFTPHandler overrides
        # this to reuse connections.
        return ftpwrapper(user, passwd, host, port, dirs, timeout,
                          persistent=False)
+ |
class CacheFTPHandler(FTPHandler):
    """FTPHandler variant that keeps a pool of live FTP connections,
    evicting them by idle time and pool size."""
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe

    def __init__(self):
        self.cache = {}
        self.timeout = {}
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16

    def setTimeout(self, t):
        """Set the idle lifetime (seconds) of cached connections."""
        self.delay = t

    def setMaxConns(self, m):
        """Set the maximum number of simultaneously cached connections."""
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key not in self.cache:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
        # Fresh use: push the expiry time out, then prune.
        self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        """Evict expired connections, then enforce the size limit."""
        # first check for old ones
        now = time.time()
        if self.soonest <= now:
            for key, expiry in list(self.timeout.items()):
                if expiry < now:
                    self.cache[key].close()
                    del self.cache[key]
                    del self.timeout[key]
            self.soonest = min(list(self.timeout.values()))

        # then check the size
        if len(self.cache) == self.max_conns:
            for key, expiry in list(self.timeout.items()):
                if expiry == self.soonest:
                    del self.cache[key]
                    del self.timeout[key]
                    break
            self.soonest = min(list(self.timeout.values()))

    def clear_cache(self):
        """Close and drop every cached connection."""
        for connection in list(self.cache.values()):
            connection.close()
        self.cache.clear()
        self.timeout.clear()
+ |
+ |
# Code moved from the old urllib module

MAXFTPCACHE = 10 # Trim the ftp cache beyond this size

# Helper for non-unix systems
if os.name == 'nt':
    from nturl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        # On posix the path IS the URL path, minus percent-escapes.
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        return quote(pathname)
+ |
# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
#     (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?


# Module-level FTP connection cache shared by URLopener instances
# (see URLopener.__init__ and open_ftp); not thread-safe.
ftpcache = {}
+class URLopener(object): |
+ """Class to open URLs. |
+ This is a class rather than just a subroutine because we may need |
+ more than one set of global protocol-specific options. |
+ Note -- this is a base class for those who don't want the |
+ automatic handling of errors type 302 (relocated) and 401 |
+ (authorization needed).""" |
+ |
+ __tempfiles = None |
+ |
+ version = "Python-urllib/%s" % __version__ |
+ |
    # Constructor
    def __init__(self, proxies=None, **x509):
        """Create an opener.

        *proxies* maps scheme names to proxy URLs and defaults to the
        environment's settings via getproxies().  The only recognized
        x509 keyword arguments are 'key_file' and 'cert_file', used for
        HTTPS client authentication.
        """
        msg = "%(class)s style of invoking requests is deprecated. " \
              "Use newer urlopen functions/methods" % {'class': self.__class__.__name__}
        warnings.warn(msg, DeprecationWarning, stacklevel=3)
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        self.key_file = x509.get('key_file')
        self.cert_file = x509.get('cert_file')
        self.addheaders = [('User-Agent', self.version)]
        self.__tempfiles = []
        self.__unlink = os.unlink # See cleanup()
        self.tempcache = None
        # Undocumented feature: if you assign {} to tempcache,
        # it is used to cache files retrieved with
        # self.retrieve(). This is not enabled by default
        # since it does not work for changing documents (and I
        # haven't got the logic to check expiration headers
        # yet).
        self.ftpcache = ftpcache
        # Undocumented feature: you can use a different
        # ftp cache by assigning to the .ftpcache member;
        # in case you want logically independent URL openers
        # XXX This is not threadsafe. Bah.
+ |
    def __del__(self):
        # Best-effort removal of temp files when the opener is collected.
        self.close()
+ |
    def close(self):
        """Release resources held by this opener (deletes temp files)."""
        self.cleanup()
+ |
    def cleanup(self):
        """Delete temporary files created by retrieve() and clear caches."""
        # This code sometimes runs when the rest of this module
        # has already been deleted, so it can't use any globals
        # or import anything.
        if self.__tempfiles:
            for file in self.__tempfiles:
                try:
                    self.__unlink(file)
                except OSError:
                    # Already gone (or not removable) -- best effort only.
                    pass
            del self.__tempfiles[:]
        if self.tempcache:
            self.tempcache.clear()
+ |
    def addheader(self, *args):
        """Add a header to be used by the HTTP interface only
        e.g. u.addheader('Accept', 'sound/basic')"""
        # The positional args form one (name, value, ...) tuple, stored as-is.
        self.addheaders.append(args)
+ |
    # External interface
    def open(self, fullurl, data=None):
        """Use URLopener().open(file) instead of open(file, 'r')."""
        # Dispatches to the open_<scheme>() method for the URL's scheme,
        # routing through the configured proxy for that scheme if any.
        fullurl = unwrap(to_bytes(fullurl))
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl) # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            if data is None:
                return getattr(self, name)(url)
            else:
                return getattr(self, name)(url, data)
        except HTTPError:
            raise
        except socket.error as msg:
            raise_with_traceback(IOError('socket error', msg))
+ |
+ def open_unknown(self, fullurl, data=None): |
+ """Overridable interface to open unknown URL type.""" |
+ type, url = splittype(fullurl) |
+ raise IOError('url error', 'unknown url type', type) |
+ |
+ def open_unknown_proxy(self, proxy, fullurl, data=None): |
+ """Overridable interface to open unknown URL type.""" |
+ type, url = splittype(fullurl) |
+ raise IOError('url error', 'invalid proxy for %s' % type, proxy) |
+ |
    # External interface
    def retrieve(self, url, filename=None, reporthook=None, data=None):
        """retrieve(url) returns (filename, headers) for a local object
        or (tempfilename, headers) for a remote object."""
        url = unwrap(to_bytes(url))
        if self.tempcache and url in self.tempcache:
            return self.tempcache[url]
        type, url1 = splittype(url)
        # Local file with no explicit target: just return the path on
        # disk, nothing needs copying.
        if filename is None and (not type or type == 'file'):
            try:
                fp = self.open_local_file(url1)
                hdrs = fp.info()
                fp.close()
                return url2pathname(splithost(url1)[1]), hdrs
            except IOError as msg:
                # Fall through and fetch via self.open() instead.
                pass
        fp = self.open(url, data)
        try:
            headers = fp.info()
            if filename:
                tfp = open(filename, 'wb')
            else:
                # No target given: download into a temp file whose suffix
                # mirrors the URL path's extension.
                import tempfile
                garbage, path = splittype(url)
                garbage, path = splithost(path or "")
                path, garbage = splitquery(path or "")
                path, garbage = splitattr(path or "")
                suffix = os.path.splitext(path)[1]
                (fd, filename) = tempfile.mkstemp(suffix)
                self.__tempfiles.append(filename)
                tfp = os.fdopen(fd, 'wb')
            try:
                result = filename, headers
                if self.tempcache is not None:
                    self.tempcache[url] = result
                bs = 1024*8
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, bs, size)
                # Copy in 8K blocks, invoking the reporthook per block.
                while 1:
                    block = fp.read(bs)
                    if not block:
                        break
                    read += len(block)
                    tfp.write(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, bs, size)
            finally:
                tfp.close()
        finally:
            fp.close()

        # raise exception if actual size does not match content-length header
        if size >= 0 and read < size:
            raise ContentTooShortError(
                "retrieval incomplete: got only %i out of %i bytes"
                % (read, size), result)

        return result
+ |
    # Each method named open_<type> knows how to open that type of URL

    def _open_generic_http(self, connection_factory, url, data):
        """Make an HTTP connection using connection_class.

        This is an internal method that should be called from
        open_http() or open_https().

        Arguments:
        - connection_factory should take a host name and return an
          HTTPConnection instance.
        - url is the url to retrieval or a host, relative-path pair.
        - data is payload for a POST request or None.
        """

        user_passwd = None
        proxy_passwd= None
        if isinstance(url, str):
            # Direct (non-proxy) case: url is a plain URL string.
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            # Proxy case: open() passed a (proxyhost, full-url) pair.
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost

        if not host: raise IOError('http error', 'no host given')

        # Userinfo from the URL (or the proxy URL) becomes Basic auth.
        if proxy_passwd:
            proxy_passwd = unquote(proxy_passwd)
            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
        else:
            proxy_auth = None

        if user_passwd:
            user_passwd = unquote(user_passwd)
            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
        else:
            auth = None
        http_conn = connection_factory(host)
        headers = {}
        if proxy_auth:
            headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
        if auth:
            headers["Authorization"] = "Basic %s" % auth
        if realhost:
            headers["Host"] = realhost

        # Add Connection:close as we don't support persistent connections yet.
        # This helps in closing the socket and avoiding ResourceWarning

        headers["Connection"] = "close"

        for header, value in self.addheaders:
            headers[header] = value

        if data is not None:
            headers["Content-Type"] = "application/x-www-form-urlencoded"
            http_conn.request("POST", selector, data, headers)
        else:
            http_conn.request("GET", selector, headers=headers)

        try:
            response = http_conn.getresponse()
        except http_client.BadStatusLine:
            # something went wrong with the HTTP status line
            raise URLError("http protocol error: bad status line")

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if 200 <= response.status < 300:
            return addinfourl(response, response.msg, "http:" + url,
                              response.status)
        else:
            return self.http_error(
                url, response.fp,
                response.status, response.reason, response.msg, data)
+ |
    def open_http(self, url, data=None):
        """Use HTTP protocol.

        *url* is a URL string or the (host, full-url) pair produced by
        open() when a proxy is configured; *data*, if given, is an
        urlencoded POST body.
        """
        return self._open_generic_http(http_client.HTTPConnection, url, data)
+ |
+ def http_error(self, url, fp, errcode, errmsg, headers, data=None): |
+ """Handle http errors. |
+ |
+ Derived class can override this, or provide specific handlers |
+ named http_error_DDD where DDD is the 3-digit error code.""" |
+ # First check if there's a specific handler for this error |
+ name = 'http_error_%d' % errcode |
+ if hasattr(self, name): |
+ method = getattr(self, name) |
+ if data is None: |
+ result = method(url, fp, errcode, errmsg, headers) |
+ else: |
+ result = method(url, fp, errcode, errmsg, headers, data) |
+ if result: return result |
+ return self.http_error_default(url, fp, errcode, errmsg, headers) |
+ |
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handler: close the connection and raise IOError."""
        fp.close()
        # HTTPError is an IOError subclass, so this matches the docstring.
        raise HTTPError(url, errcode, errmsg, headers, None)
+ |
    # HTTPS support is only available when the ssl module could be imported.
    if _have_ssl:
        def _https_connection(self, host):
            # Connection factory for _open_generic_http(); carries the
            # opener's client-certificate settings.
            return http_client.HTTPSConnection(host,
                                           key_file=self.key_file,
                                           cert_file=self.cert_file)

        def open_https(self, url, data=None):
            """Use HTTPS protocol."""
            return self._open_generic_http(self._https_connection, url, data)
+ |
+ def open_file(self, url): |
+ """Use local file or FTP depending on form of URL.""" |
+ if not isinstance(url, str): |
+ raise URLError('file error: proxy support for file protocol currently not implemented') |
+ if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': |
+ raise ValueError("file:// scheme is supported only on localhost") |
+ else: |
+ return self.open_local_file(url) |
+ |
    def open_local_file(self, url):
        """Use local file.

        Returns an addinfourl for the file with synthesized
        Content-Type/Content-Length/Last-modified headers; raises
        URLError when the file is missing or the URL's host is not
        this machine.
        """
        import future.backports.email.utils as email_utils
        import mimetypes
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError as e:
            raise URLError(e.strerror, e.filename)
        size = stats.st_size
        modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        headers = email.message_from_string(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        host, port = splitport(host)
        # A host was given: honour it only if it has no port and resolves
        # to one of this machine's addresses.
        if (not port
           and socket.gethostbyname(host) in ((localhost(),) + thishost())):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            elif file[:2] == './':
                raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        raise URLError('local file error: not on local host')
+ |
+ def open_ftp(self, url): |
+ """Use FTP protocol.""" |
+ if not isinstance(url, str): |
+ raise URLError('ftp error: proxy support for ftp protocol currently not implemented') |
+ import mimetypes |
+ host, path = splithost(url) |
+ if not host: raise URLError('ftp error: no host given') |
+ host, port = splitport(host) |
+ user, host = splituser(host) |
+ if user: user, passwd = splitpasswd(user) |
+ else: passwd = None |
+ host = unquote(host) |
+ user = unquote(user or '') |
+ passwd = unquote(passwd or '') |
+ host = socket.gethostbyname(host) |
+ if not port: |
+ import ftplib |
+ port = ftplib.FTP_PORT |
+ else: |
+ port = int(port) |
+ path, attrs = splitattr(path) |
+ path = unquote(path) |
+ dirs = path.split('/') |
+ dirs, file = dirs[:-1], dirs[-1] |
+ if dirs and not dirs[0]: dirs = dirs[1:] |
+ if dirs and not dirs[0]: dirs[0] = '/' |
+ key = user, host, port, '/'.join(dirs) |
+ # XXX thread unsafe! |
+ if len(self.ftpcache) > MAXFTPCACHE: |
+ # Prune the cache, rather arbitrarily |
+ for k in self.ftpcache.keys(): |
+ if k != key: |
+ v = self.ftpcache[k] |
+ del self.ftpcache[k] |
+ v.close() |
+ try: |
+ if key not in self.ftpcache: |
+ self.ftpcache[key] = \ |
+ ftpwrapper(user, passwd, host, port, dirs) |
+ if not file: type = 'D' |
+ else: type = 'I' |
+ for attr in attrs: |
+ attr, value = splitvalue(attr) |
+ if attr.lower() == 'type' and \ |
+ value in ('a', 'A', 'i', 'I', 'd', 'D'): |
+ type = value.upper() |
+ (fp, retrlen) = self.ftpcache[key].retrfile(file, type) |
+ mtype = mimetypes.guess_type("ftp:" + url)[0] |
+ headers = "" |
+ if mtype: |
+ headers += "Content-Type: %s\n" % mtype |
+ if retrlen is not None and retrlen >= 0: |
+ headers += "Content-Length: %d\n" % retrlen |
+ headers = email.message_from_string(headers) |
+ return addinfourl(fp, headers, "ftp:" + url) |
+ except ftperrors() as exp: |
+ raise_with_traceback(URLError('ftp error %r' % exp)) |
+ |
+ def open_data(self, url, data=None): |
+ """Use "data" URL.""" |
+ if not isinstance(url, str): |
+ raise URLError('data error: proxy support for data protocol currently not implemented') |
+ # ignore POSTed data |
+ # |
+ # syntax of data URLs: |
+ # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data |
+ # mediatype := [ type "/" subtype ] *( ";" parameter ) |
+ # data := *urlchar |
+ # parameter := attribute "=" value |
+ try: |
+ [type, data] = url.split(',', 1) |
+ except ValueError: |
+ raise IOError('data error', 'bad data URL') |
+ if not type: |
+ type = 'text/plain;charset=US-ASCII' |
+ semi = type.rfind(';') |
+ if semi >= 0 and '=' not in type[semi:]: |
+ encoding = type[semi+1:] |
+ type = type[:semi] |
+ else: |
+ encoding = '' |
+ msg = [] |
+ msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', |
+ time.gmtime(time.time()))) |
+ msg.append('Content-type: %s' % type) |
+ if encoding == 'base64': |
+ # XXX is this encoding/decoding ok? |
+ data = base64.decodebytes(data.encode('ascii')).decode('latin-1') |
+ else: |
+ data = unquote(data) |
+ msg.append('Content-Length: %d' % len(data)) |
+ msg.append('') |
+ msg.append(data) |
+ msg = '\n'.join(msg) |
+ headers = email.message_from_string(msg) |
+ f = io.StringIO(msg) |
+ #f.fileno = None # needed for addinfourl |
+ return addinfourl(f, headers, url) |
+ |
+ |
class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps).

    Follows HTTP redirects (up to ``maxtries``) and retries 401/407
    basic-authentication challenges with credentials obtained from
    ``prompt_user_passwd``.
    """

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        # Maps "realm@host" -> (user, passwd); filled by get_user_passwd().
        self.auth_cache = {}
        self.tries = 0       # redirects followed for the current request
        self.maxtries = 10   # redirect limit before reporting a 500

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        # Hand the error page back to the caller as an ordinary response
        # object carrying the HTTP status code.
        return addinfourl(fp, headers, "http:" + url, errcode)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        # Guard against redirect loops: after maxtries redirects, report
        # a synthetic 500 error instead of recursing further.
        if self.maxtries and self.tries >= self.maxtries:
            if hasattr(self, "http_error_500"):
                meth = self.http_error_500
            else:
                meth = self.http_error_default
            self.tries = 0
            return meth(url, fp, 500,
                        "Internal Server Error: Redirect Recursion", headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
                                        data)
        self.tries = 0
        return result

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        """Follow the redirect named in the response headers.

        Returns the new response, or None when the response carries
        neither a Location nor a URI header.
        """
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        fp.close()

        # In case the server sent a relative URL, join with original:
        newurl = urljoin(self.type + ":" + url, newurl)

        urlparts = urlparse(newurl)

        # For security reasons, we don't allow redirection to anything other
        # than http, https and ftp.

        # We are using newer HTTPError with older redirect_internal method
        # This older method will get deprecated in 3.3

        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
            raise HTTPError(newurl, errcode,
                            errmsg +
                            " Redirection to url '%s' is not allowed." % newurl,
                            headers, fp)

        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        # Only redirect GETs (data is None); a 307 must not replay a POST.
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
        else:
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        # NOTE(review): the URLopener.http_error_default calls below have
        # no 'return' -- presumably the base-class handler raises, ending
        # this method; confirm against URLopener.http_error_default.
        if 'www-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        # Dispatch to retry_http_basic_auth / retry_https_basic_auth.
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if 'proxy-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        # Dispatch to retry_proxy_http_basic_auth or the https variant.
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        """Re-issue the request with basic auth added to the http proxy URL."""
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        # Strip any credentials already embedded in the proxy host.
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        """Re-issue the request with basic auth added to the https proxy URL."""
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        # Strip any credentials already embedded in the proxy host.
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        """Re-issue the request with user:password embedded in the URL."""
        host, selector = splithost(url)
        # Drop credentials already present in the URL before prompting.
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        """Re-issue the request with user:password embedded in the URL."""
        host, selector = splithost(url)
        # Drop credentials already present in the URL before prompting.
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        """Return (user, passwd) for *realm* at *host*, consulting the cache.

        A truthy *clear_cache* drops any cached entry first, forcing a
        fresh prompt.
        """
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        # Interactive console fallback; returns (None, None) on Ctrl-C.
        import getpass
        try:
            user = input("Enter username for %s at %s: " % (realm, host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            print()
            return None, None
+ |
+ |
+# Utility functions |
+ |
_localhost = None
def localhost():
    """Return the IP address of the magic hostname 'localhost'.

    The address is resolved once and cached at module level.
    """
    global _localhost
    if _localhost is not None:
        return _localhost
    _localhost = socket.gethostbyname('localhost')
    return _localhost
+ |
_thishost = None
def thishost():
    """Return the IP addresses of the current host (cached tuple)."""
    global _thishost
    if _thishost is not None:
        return _thishost
    try:
        _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
    except socket.gaierror:
        # Our own hostname does not resolve; fall back to 'localhost'.
        _thishost = tuple(socket.gethostbyname_ex('localhost')[2])
    return _thishost
+ |
_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class (cached)."""
    global _ftperrors
    if _ftperrors is not None:
        return _ftperrors
    import ftplib
    _ftperrors = ftplib.all_errors
    return _ftperrors
+ |
_noheaders = None
def noheaders():
    """Return an empty email Message object (one shared, cached instance)."""
    global _noheaders
    if _noheaders is not None:
        return _noheaders
    _noheaders = email.message_from_string("")
    return _noheaders
+ |
+ |
+# Utility classes |
+ |
class ftpwrapper(object):
    """Class used by open_ftp() for cache of open FTP connections."""

    def __init__(self, user, passwd, host, port, dirs, timeout=None,
                 persistent=True):
        # Connection parameters are remembered so init() can reconnect
        # after the server drops an idle control connection.
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        self.dirs = dirs
        self.timeout = timeout
        # Number of file objects handed out and not yet closed; the
        # control connection is only torn down once this drops to zero.
        self.refcount = 0
        self.keepalive = persistent
        self.init()

    def init(self):
        """Open (or reopen) the FTP control connection and log in."""
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port, self.timeout)
        self.ftp.login(self.user, self.passwd)
        _target = '/'.join(self.dirs)
        self.ftp.cwd(_target)

    def retrfile(self, file, type):
        """Retrieve *file* ('I'/'A' transfer) or a directory listing ('D').

        Returns a (file-like object, length-or-None) tuple.
        """
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # Control connection probably died; reconnect and retry once.
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn, retrlen = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm as reason:
                # 550 means "not a plain file": fall through to a LIST.
                if str(reason)[:3] != '550':
                    raise_with_traceback(URLError('ftp error: %r' % reason))
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm as reason:
                        ### Was:
                        # raise URLError('ftp error: %r' % reason) from reason
                        exc = URLError('ftp error: %r' % reason)
                        exc.__cause__ = reason
                        raise exc
                finally:
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn, retrlen = self.ftp.ntransfercmd(cmd)
        self.busy = 1

        # The returned file wraps the data connection; the close hook
        # decrements refcount when the caller is done with it.
        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
        self.refcount += 1
        conn.close()
        # Pass back both a suitably decorated object and a retrieval length
        return (ftpobj, retrlen)

    def endtransfer(self):
        # Mark the control connection as free again (no ABOR is sent).
        self.busy = 0

    def close(self):
        """Mark for closing; actually closes once no files are outstanding."""
        self.keepalive = False
        if self.refcount <= 0:
            self.real_close()

    def file_close(self):
        # Called (via addclosehook) when a handed-out file is closed.
        self.endtransfer()
        self.refcount -= 1
        if self.refcount <= 0 and not self.keepalive:
            self.real_close()

    def real_close(self):
        """Unconditionally shut down the FTP control connection."""
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass
+ |
+# Proxy handling |
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.
    """
    suffix = '_proxy'
    return dict(
        (name.lower()[:-len(suffix)], value)
        for name, value in os.environ.items()
        if value and name.lower().endswith(suffix))
+ |
def proxy_bypass_environment(host):
    """Test if proxies should not be used for a particular host.

    Checks the environment for a variable named no_proxy, which should
    be a list of DNS suffixes separated by commas, or '*' for all hosts.
    """
    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
    if no_proxy == '*':
        # '*' is a special case meaning "always bypass".
        return 1
    # Strip the port off before suffix matching, but also try the
    # host string as given.
    hostonly, port = splitport(host)
    for name in (entry.strip() for entry in no_proxy.split(',')):
        if name and (hostonly.endswith(name) or host.endswith(name)):
            return 1
    # No suffix matched: don't bypass.
    return 0
+ |
+ |
+# This code tests an OSX specific data structure but is testable on all |
+# platforms |
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
    """
    Return True iff this host shouldn't be accessed using a proxy

    This function uses the MacOSX framework SystemConfiguration
    to fetch the proxy information.

    proxy_settings come from _scproxy._get_proxy_settings or get mocked ie:
    { 'exclude_simple': bool,
      'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
    }
    """
    from fnmatch import fnmatch

    hostonly, port = splitport(host)

    def ip2num(ipAddr):
        # Pack a (possibly partial) dotted-quad string into a 32-bit
        # integer, zero-padding any missing trailing octets.
        parts = ipAddr.split('.')
        parts = list(map(int, parts))
        if len(parts) != 4:
            parts = (parts + [0, 0, 0, 0])[:4]
        return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]

    # Check for simple host names:
    if '.' not in host:
        if proxy_settings['exclude_simple']:
            return True

    hostIP = None  # lazily resolved, packed address of *hostonly*

    for value in proxy_settings.get('exceptions', ()):
        # Items in the list are strings like these: *.local, 169.254/16
        if not value: continue

        m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
        if m is not None:
            # Numeric entry (optionally with /prefix): compare the
            # network portions of the two addresses.
            if hostIP is None:
                try:
                    hostIP = socket.gethostbyname(hostonly)
                    hostIP = ip2num(hostIP)
                except socket.error:
                    continue

            base = ip2num(m.group(1))
            mask = m.group(2)
            if mask is None:
                # No explicit prefix length: assume 8 bits per dotted part.
                mask = 8 * (m.group(1).count('.') + 1)
            else:
                mask = int(mask[1:])
            # Convert prefix length into a right shift that discards the
            # host bits, keeping only the network part for comparison.
            mask = 32 - mask

            if (hostIP >> mask) == (base >> mask):
                return True

        elif fnmatch(host, value):
            # Hostname entry: shell-style wildcard match.
            return True

    return False
+ |
+ |
if sys.platform == 'darwin':
    # On OS X, the system proxy configuration lives in the
    # SystemConfiguration framework; _scproxy is CPython's C helper
    # module exposing it.
    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        # Return True iff *host* should bypass the proxy per system config.
        proxy_settings = _get_proxy_settings()
        return _proxy_bypass_macosx_sysconf(host, proxy_settings)

    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        return _get_proxies()



    def proxy_bypass(host):
        # Environment variables, when set, take precedence over the
        # system configuration.
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_macosx_sysconf(host)

    def getproxies():
        return getproxies_environment() or getproxies_macosx_sysconf()


elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(winreg.QueryValueEx(internetSettings,
                                                      'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.

        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass_registry(host):
        # Consult the Internet Settings 'ProxyOverride' registry value,
        # a ';'-separated list of glob patterns (plus '<local>').
        try:
            import winreg
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = str(winreg.QueryValueEx(internetSettings,
                                                    'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except socket.error:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                # '<local>' bypasses all single-label (dot-free) names.
                if '.' not in rawHost:
                    return 1
            test = test.replace(".", r"\.") # mask dots
            test = test.replace("*", r".*") # change glob sequence
            test = test.replace("?", r".") # change glob char
            for val in host:
                if re.match(test, val, re.I):
                    return 1
        return 0

    def proxy_bypass(host):
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.

        """
        # NOTE(review): the docstring above looks copied from
        # getproxies(); this actually returns a truthy value when *host*
        # should bypass the proxy.
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)

else:
    # By default use environment variables
    getproxies = getproxies_environment
    proxy_bypass = proxy_bypass_environment