upgrade requests to v2.5.3

Alan Hamlett 2015-03-03 13:19:04 -08:00
parent ef6342f16f
commit c8e8e705be
80 changed files with 810 additions and 713 deletions

4  wakatime/packages/requests/__init__.py  Executable file → Normal file

@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
 """

 __title__ = 'requests'
-__version__ = '2.5.1'
-__build__ = 0x020501
+__version__ = '2.5.3'
+__build__ = 0x020503
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
 __copyright__ = 'Copyright 2015 Kenneth Reitz'
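
The hex __build__ value packs one version component per byte, so it has to move in lockstep with __version__. A quick sanity check (illustrative only, not part of the commit):

    # 0x020503 -> major 2, minor 5, patch 3
    build = 0x020503
    major, minor, patch = (build >> 16) & 0xFF, (build >> 8) & 0xFF, build & 0xFF
    assert (major, minor, patch) == (2, 5, 3)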

0  wakatime/packages/requests/adapters.py  Executable file → Normal file

0  wakatime/packages/requests/api.py  Executable file → Normal file

0  wakatime/packages/requests/auth.py  Executable file → Normal file

1185  wakatime/packages/requests/cacert.pem  Executable file → Normal file

File diff suppressed because it is too large.

0  wakatime/packages/requests/certs.py  Executable file → Normal file

0  wakatime/packages/requests/compat.py  Executable file → Normal file

0  wakatime/packages/requests/cookies.py  Executable file → Normal file

0  wakatime/packages/requests/exceptions.py  Executable file → Normal file

0  wakatime/packages/requests/hooks.py  Executable file → Normal file

2  wakatime/packages/requests/models.py  Executable file → Normal file

@@ -145,7 +145,7 @@ class RequestEncodingMixin(object):
                 fp = v
             if isinstance(fp, str):
                 fp = StringIO(fp)
-            if isinstance(fp, bytes):
+            if isinstance(fp, (bytes, bytearray)):
                 fp = BytesIO(fp)

             rf = RequestField(name=k, data=fp.read(),
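
The widened isinstance check means in-memory bytearray buffers are wrapped in BytesIO just like bytes when building multipart bodies. A hedged sketch of what this enables (the URL is only a placeholder):

    import requests

    payload = bytearray(b'\x89PNG\r\n\x1a\n')  # any mutable in-memory buffer
    # Before this upgrade a bytearray was left unwrapped here, so the later
    # fp.read() call failed; it is now read through BytesIO exactly like bytes.
    response = requests.post('https://example.invalid/upload',
                             files={'file': ('capture.png', payload)})
    print(response.status_code)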

0  wakatime/packages/requests/packages/README.rst  Executable file → Normal file

0  wakatime/packages/requests/packages/__init__.py  Executable file → Normal file

0  wakatime/packages/requests/packages/chardet/compat.py  Executable file → Normal file

0  wakatime/packages/requests/packages/chardet/escsm.py  Executable file → Normal file

0  wakatime/packages/requests/packages/chardet/jisfreq.py  Executable file → Normal file

0  wakatime/packages/requests/packages/chardet/jpcntx.py  Executable file → Normal file

0  wakatime/packages/requests/packages/chardet/mbcssm.py  Executable file → Normal file

wakatime/packages/requests/packages/urllib3/_collections.py

@@ -1,7 +1,7 @@
 from collections import Mapping, MutableMapping
 try:
     from threading import RLock
 except ImportError:  # Platform-specific: No threads available
     class RLock:
         def __enter__(self):
             pass
@@ -10,16 +10,18 @@ except ImportError:  # Platform-specific: No threads available
             pass

 try:  # Python 2.7+
     from collections import OrderedDict
 except ImportError:
     from .packages.ordered_dict import OrderedDict
-from .packages.six import iterkeys, itervalues
+from .packages.six import iterkeys, itervalues, PY3

 __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']

+MULTIPLE_HEADERS_ALLOWED = frozenset(['cookie', 'set-cookie', 'set-cookie2'])

 _Null = object()
@@ -97,7 +99,14 @@ class RecentlyUsedContainer(MutableMapping):
         return list(iterkeys(self._container))

-class HTTPHeaderDict(MutableMapping):
+_dict_setitem = dict.__setitem__
+_dict_getitem = dict.__getitem__
+_dict_delitem = dict.__delitem__
+_dict_contains = dict.__contains__
+_dict_setdefault = dict.setdefault
+
+
+class HTTPHeaderDict(dict):
     """
     :param headers:
         An iterable of field-value pairs. Must not contain multiple field names
@@ -129,25 +138,72 @@ class HTTPHeaderDict(MutableMapping):
     'foo=bar, baz=quxx'
     >>> headers['Content-Length']
     '7'
-
-    If you want to access the raw headers with their original casing
-    for debugging purposes you can access the private ``._data`` attribute
-    which is a normal python ``dict`` that maps the case-insensitive key to a
-    list of tuples stored as (case-sensitive-original-name, value). Using the
-    structure from above as our example:
-
-    >>> headers._data
-    {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
-    'content-length': [('content-length', '7')]}
     """

     def __init__(self, headers=None, **kwargs):
-        self._data = {}
-        if headers is None:
-            headers = {}
-        self.update(headers, **kwargs)
+        dict.__init__(self)
+        if headers is not None:
+            self.extend(headers)
+        if kwargs:
+            self.extend(kwargs)

-    def add(self, key, value):
+    def __setitem__(self, key, val):
+        return _dict_setitem(self, key.lower(), (key, val))
+
+    def __getitem__(self, key):
+        val = _dict_getitem(self, key.lower())
+        return ', '.join(val[1:])
+
+    def __delitem__(self, key):
+        return _dict_delitem(self, key.lower())
+
+    def __contains__(self, key):
+        return _dict_contains(self, key.lower())
+
+    def __eq__(self, other):
+        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
+            return False
+        if not isinstance(other, type(self)):
+            other = type(self)(other)
+        return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    values = MutableMapping.values
+    get = MutableMapping.get
+    update = MutableMapping.update
+
+    if not PY3:  # Python 2
+        iterkeys = MutableMapping.iterkeys
+        itervalues = MutableMapping.itervalues
+
+    __marker = object()
+
+    def pop(self, key, default=__marker):
+        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+        If key is not found, d is returned if given, otherwise KeyError is raised.
+        '''
+        # Using the MutableMapping function directly fails due to the private marker.
+        # Using ordinary dict.pop would expose the internal structures.
+        # So let's reinvent the wheel.
+        try:
+            value = self[key]
+        except KeyError:
+            if default is self.__marker:
+                raise
+            return default
+        else:
+            del self[key]
+            return value
+
+    def discard(self, key):
+        try:
+            del self[key]
+        except KeyError:
+            pass
+
+    def add(self, key, val):
         """Adds a (name, value) pair, doesn't overwrite the value if it already
         exists.
@@ -156,43 +212,108 @@ class HTTPHeaderDict(MutableMapping):
         >>> headers['foo']
         'bar, baz'
         """
-        self._data.setdefault(key.lower(), []).append((key, value))
+        key_lower = key.lower()
+        new_vals = key, val
+        # Keep the common case aka no item present as fast as possible
+        vals = _dict_setdefault(self, key_lower, new_vals)
+        if new_vals is not vals:
+            # new_vals was not inserted, as there was a previous one
+            if isinstance(vals, list):
+                # If already several items got inserted, we have a list
+                vals.append(val)
+            else:
+                # vals should be a tuple then, i.e. only one item so far
+                if key_lower in MULTIPLE_HEADERS_ALLOWED:
+                    # Need to convert the tuple to list for further extension
+                    _dict_setitem(self, key_lower, [vals[0], vals[1], val])
+                else:
+                    _dict_setitem(self, key_lower, new_vals)
+
+    def extend(*args, **kwargs):
+        """Generic import function for any type of header-like object.
+        Adapted version of MutableMapping.update in order to insert items
+        with self.add instead of self.__setitem__
+        """
+        if len(args) > 2:
+            raise TypeError("update() takes at most 2 positional "
+                            "arguments ({} given)".format(len(args)))
+        elif not args:
+            raise TypeError("update() takes at least 1 argument (0 given)")
+        self = args[0]
+        other = args[1] if len(args) >= 2 else ()
+
+        if isinstance(other, Mapping):
+            for key in other:
+                self.add(key, other[key])
+        elif hasattr(other, "keys"):
+            for key in other.keys():
+                self.add(key, other[key])
+        else:
+            for key, value in other:
+                self.add(key, value)
+
+        for key, value in kwargs.items():
+            self.add(key, value)

     def getlist(self, key):
         """Returns a list of all the values for the named field. Returns an
         empty list if the key doesn't exist."""
-        return self[key].split(', ') if key in self else []
+        try:
+            vals = _dict_getitem(self, key.lower())
+        except KeyError:
+            return []
+        else:
+            if isinstance(vals, tuple):
+                return [vals[1]]
+            else:
+                return vals[1:]

-    def copy(self):
-        h = HTTPHeaderDict()
-        for key in self._data:
-            for rawkey, value in self._data[key]:
-                h.add(rawkey, value)
-        return h
-
-    def __eq__(self, other):
-        if not isinstance(other, Mapping):
-            return False
-        other = HTTPHeaderDict(other)
-        return dict((k1, self[k1]) for k1 in self._data) == \
-            dict((k2, other[k2]) for k2 in other._data)
-
-    def __getitem__(self, key):
-        values = self._data[key.lower()]
-        return ', '.join(value[1] for value in values)
-
-    def __setitem__(self, key, value):
-        self._data[key.lower()] = [(key, value)]
-
-    def __delitem__(self, key):
-        del self._data[key.lower()]
-
-    def __len__(self):
-        return len(self._data)
-
-    def __iter__(self):
-        for headers in itervalues(self._data):
-            yield headers[0][0]
+    # Backwards compatibility for httplib
+    getheaders = getlist
+    getallmatchingheaders = getlist
+    iget = getlist

     def __repr__(self):
-        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
+        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+    def copy(self):
+        clone = type(self)()
+        for key in self:
+            val = _dict_getitem(self, key)
+            if isinstance(val, list):
+                # Don't need to convert tuples
+                val = list(val)
+            _dict_setitem(clone, key, val)
+        return clone
+
+    def iteritems(self):
+        """Iterate over all header lines, including duplicate ones."""
+        for key in self:
+            vals = _dict_getitem(self, key)
+            for val in vals[1:]:
+                yield vals[0], val
+
+    def itermerged(self):
+        """Iterate over all headers, merging duplicate ones together."""
+        for key in self:
+            val = _dict_getitem(self, key)
+            yield val[0], ', '.join(val[1:])
+
+    def items(self):
+        return list(self.iteritems())
+
+    @classmethod
+    def from_httplib(cls, message, duplicates=('set-cookie',)):  # Python 2
+        """Read headers from a Python 2 httplib message object."""
+        ret = cls(message.items())
+        # ret now contains only the last header line for each duplicate.
+        # Importing with all duplicates would be nice, but this would
+        # mean to repeat most of the raw parsing already done, when the
+        # message object was created. Extracting only the headers of interest
+        # separately, the cookies, should be faster and requires less
+        # extra code.
+        for key in duplicates:
+            ret.discard(key)
+            for val in message.getheaders(key):
+                ret.add(key, val)
+        return ret
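
A short usage sketch of the rewritten container (assuming the vendored module is importable as requests.packages.urllib3._collections): lookups stay case-insensitive, and add() keeps multiple cookie values instead of overwriting them.

    from requests.packages.urllib3._collections import HTTPHeaderDict

    headers = HTTPHeaderDict()
    headers.add('Set-Cookie', 'foo=bar')
    headers.add('set-cookie', 'baz=quxx')   # set-cookie is in MULTIPLE_HEADERS_ALLOWED
    headers['Content-Length'] = '7'

    print(headers['SET-COOKIE'])            # 'foo=bar, baz=quxx', joined on lookup
    print(headers.getlist('Set-Cookie'))    # ['foo=bar', 'baz=quxx']
    print(headers['content-length'])        # '7'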

wakatime/packages/requests/packages/urllib3/connectionpool.py

@@ -72,6 +72,21 @@ class ConnectionPool(object):
         return '%s(host=%r, port=%r)' % (type(self).__name__,
                                          self.host, self.port)

+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+        # Return False to re-raise any potential exceptions
+        return False
+
+    def close():
+        """
+        Close all pooled connections and disable the pool.
+        """
+        pass
+

 # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
 _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
@@ -353,7 +368,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         # Receive the response from the server
         try:
-            try:  # Python 2.7+, use buffering of HTTP responses
+            try:  # Python 2.7, use buffering of HTTP responses
                 httplib_response = conn.getresponse(buffering=True)
             except TypeError:  # Python 2.6 and older
                 httplib_response = conn.getresponse()
@@ -558,6 +573,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                 conn = None
             raise SSLError(e)

+        except SSLError:
+            # Treat SSLError separately from BaseSSLError to preserve
+            # traceback.
+            if conn:
+                conn.close()
+                conn = None
+            raise
+
         except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
             if conn:
                 # Discard the connection for these exceptions. It will be
@@ -565,14 +588,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                 conn.close()
                 conn = None
-            stacktrace = sys.exc_info()[2]

             if isinstance(e, SocketError) and self.proxy:
                 e = ProxyError('Cannot connect to proxy.', e)
             elif isinstance(e, (SocketError, HTTPException)):
                 e = ProtocolError('Connection aborted.', e)

-            retries = retries.increment(method, url, error=e,
-                                        _pool=self, _stacktrace=stacktrace)
+            retries = retries.increment(method, url, error=e, _pool=self,
+                                        _stacktrace=sys.exc_info()[2])
             retries.sleep()

             # Keep track of the error for the retry warning.
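
With __enter__/__exit__ on the ConnectionPool base class, a pool can now be used as a context manager that closes its idle connections when the block exits. A minimal sketch (hypothetical host, vendored import path assumed):

    from requests.packages.urllib3 import HTTPConnectionPool

    # close() runs automatically when the with-block exits, even on error.
    with HTTPConnectionPool('example.com', port=80, maxsize=1) as pool:
        response = pool.request('GET', '/')
        print(response.status)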

0  wakatime/packages/requests/packages/urllib3/fields.py  Executable file → Normal file

wakatime/packages/requests/packages/urllib3/poolmanager.py

@@ -8,7 +8,7 @@ except ImportError:
 from ._collections import RecentlyUsedContainer
 from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
 from .connectionpool import port_by_scheme
-from .exceptions import LocationValueError
+from .exceptions import LocationValueError, MaxRetryError
 from .request import RequestMethods
 from .util.url import parse_url
 from .util.retry import Retry
@@ -64,6 +64,14 @@ class PoolManager(RequestMethods):
         self.pools = RecentlyUsedContainer(num_pools,
                                            dispose_func=lambda p: p.close())

+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.clear()
+        # Return False to re-raise any potential exceptions
+        return False
+
     def _new_pool(self, scheme, host, port):
         """
         Create a new :class:`ConnectionPool` based on host, port and scheme.
@@ -167,7 +175,14 @@ class PoolManager(RequestMethods):
         if not isinstance(retries, Retry):
             retries = Retry.from_int(retries, redirect=redirect)

-        kw['retries'] = retries.increment(method, redirect_location)
+        try:
+            retries = retries.increment(method, url, response=response, _pool=conn)
+        except MaxRetryError:
+            if retries.raise_on_redirect:
+                raise
+            return response
+
+        kw['retries'] = retries
         kw['redirect'] = redirect

         log.info("Redirecting %s -> %s" % (url, redirect_location))
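
Redirect handling now goes through Retry.increment(), so an over-long redirect chain surfaces as MaxRetryError once the redirect budget is spent (with raise_on_redirect set) instead of being followed indefinitely. A hedged sketch of the resulting behaviour (placeholder URL, vendored import paths assumed):

    from requests.packages.urllib3 import PoolManager
    from requests.packages.urllib3.util.retry import Retry
    from requests.packages.urllib3.exceptions import MaxRetryError

    # PoolManager also gained __enter__/__exit__, mirroring ConnectionPool.
    with PoolManager() as http:
        try:
            response = http.request(
                'GET', 'http://example.invalid/redirect-loop',
                retries=Retry(redirect=2, raise_on_redirect=True))
        except MaxRetryError as exc:
            print('stopped after the redirect budget was spent:', exc.reason)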

0  wakatime/packages/requests/packages/urllib3/request.py  Executable file → Normal file

44  wakatime/packages/requests/packages/urllib3/response.py  Executable file → Normal file

@@ -4,12 +4,11 @@ from socket import timeout as SocketTimeout

 from ._collections import HTTPHeaderDict
 from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
-from .packages.six import string_types as basestring, binary_type
+from .packages.six import string_types as basestring, binary_type, PY3
 from .connection import HTTPException, BaseSSLError
 from .util.response import is_fp_closed


 class DeflateDecoder(object):

     def __init__(self):
@@ -21,6 +20,9 @@ class DeflateDecoder(object):
         return getattr(self._obj, name)

     def decompress(self, data):
+        if not data:
+            return data
+
         if not self._first_try:
             return self._obj.decompress(data)
@@ -36,9 +38,23 @@ class DeflateDecoder(object):
             self._data = None


+class GzipDecoder(object):
+
+    def __init__(self):
+        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+    def __getattr__(self, name):
+        return getattr(self._obj, name)
+
+    def decompress(self, data):
+        if not data:
+            return data
+        return self._obj.decompress(data)
+
+
 def _get_decoder(mode):
     if mode == 'gzip':
-        return zlib.decompressobj(16 + zlib.MAX_WBITS)
+        return GzipDecoder()

     return DeflateDecoder()
@@ -76,9 +92,10 @@ class HTTPResponse(io.IOBase):
                  strict=0, preload_content=True, decode_content=True,
                  original_response=None, pool=None, connection=None):

-        self.headers = HTTPHeaderDict()
-        if headers:
-            self.headers.update(headers)
+        if isinstance(headers, HTTPHeaderDict):
+            self.headers = headers
+        else:
+            self.headers = HTTPHeaderDict(headers)
         self.status = status
         self.version = version
         self.reason = reason
@@ -202,7 +219,7 @@ class HTTPResponse(io.IOBase):
             except BaseSSLError as e:
                 # FIXME: Is there a better way to differentiate between SSLErrors?
-                if not 'read operation timed out' in str(e):  # Defensive:
+                if 'read operation timed out' not in str(e):  # Defensive:
                     # This shouldn't happen but just in case we're missing an edge
                     # case, let's avoid swallowing SSL errors.
                     raise
@@ -267,14 +284,16 @@ class HTTPResponse(io.IOBase):
         Remaining parameters are passed to the HTTPResponse constructor, along
         with ``original_response=r``.
         """
-        headers = HTTPHeaderDict()
-        for k, v in r.getheaders():
-            headers.add(k, v)
+        headers = r.msg
+
+        if not isinstance(headers, HTTPHeaderDict):
+            if PY3:  # Python 3
+                headers = HTTPHeaderDict(headers.items())
+            else:  # Python 2
+                headers = HTTPHeaderDict.from_httplib(headers)

         # HTTPResponse objects in Python 3 don't have a .strict attribute
         strict = getattr(r, 'strict', 0)
-        return ResponseCls(body=r,
+        resp = ResponseCls(body=r,
                            headers=headers,
                            status=r.status,
                            version=r.version,
@@ -282,6 +301,7 @@ class HTTPResponse(io.IOBase):
                            strict=strict,
                            original_response=r,
                            **response_kw)
+        return resp

     # Backwards-compatibility methods for httplib.HTTPResponse
     def getheaders(self):
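
The gzip path now mirrors deflate with its own decoder object wrapping zlib.decompressobj(16 + zlib.MAX_WBITS), where the extra 16 tells zlib to expect a gzip header. A small illustrative round trip (not part of the diff):

    import zlib

    payload = b'hello world' * 100

    gzip_compressor = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    gzipped = gzip_compressor.compress(payload) + gzip_compressor.flush()

    gzip_decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)  # what GzipDecoder wraps
    assert gzip_decoder.decompress(gzipped) == payload
    assert gzip_decoder.decompress(b'') == b''  # empty chunks pass straight through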

wakatime/packages/requests/packages/urllib3/util/connection.py

@@ -82,6 +82,7 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
             err = _
             if sock is not None:
                 sock.close()
+                sock = None

     if err is not None:
         raise err

wakatime/packages/requests/packages/urllib3/util/ssl_.py

@@ -1,5 +1,5 @@
 from binascii import hexlify, unhexlify
-from hashlib import md5, sha1
+from hashlib import md5, sha1, sha256

 from ..exceptions import SSLError
@@ -29,8 +29,8 @@ try:
 except ImportError:
     _DEFAULT_CIPHERS = (
         'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-        'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
-        'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
+        'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+        '!eNULL:!MD5'
     )

 try:
@@ -96,7 +96,8 @@ def assert_fingerprint(cert, fingerprint):
     # this digest.
     hashfunc_map = {
         16: md5,
-        20: sha1
+        20: sha1,
+        32: sha256,
     }

     fingerprint = fingerprint.replace(':', '').lower()
@@ -211,7 +212,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
     context.verify_mode = cert_reqs
     if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
-        context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED)
+        # We do our own verification, including fingerprints and alternative
+        # hostnames. So disable it here
+        context.check_hostname = False
     return context
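
With sha256 in the map, assert_fingerprint can match 64-hex-digit pins, chosen purely by digest length. An illustrative sketch of that selection logic (not urllib3's code; the certificate bytes are a stand-in):

    import hashlib
    from binascii import unhexlify

    hashfunc_map = {16: hashlib.md5, 20: hashlib.sha1, 32: hashlib.sha256}

    cert_der = b'dummy certificate bytes'       # stand-in for the peer cert's DER encoding
    pin = hashlib.sha256(cert_der).hexdigest()  # 64 hex chars -> 32-byte digest

    digest = unhexlify(pin.replace(':', '').lower())
    hashfunc = hashfunc_map[len(digest)]        # picks sha256 here
    assert hashfunc(cert_der).digest() == digest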

0  wakatime/packages/requests/sessions.py  Executable file → Normal file

0  wakatime/packages/requests/status_codes.py  Executable file → Normal file

0  wakatime/packages/requests/structures.py  Executable file → Normal file

0  wakatime/packages/requests/utils.py  Executable file → Normal file