Revert "upgrade requests package to v2.7.0"

This reverts commit f5357a9e7b.

parent 85ab611632
commit 052f255a0c

18 changed files with 254 additions and 290 deletions

@@ -6,7 +6,7 @@
 # /
 
 """
-Requests HTTP library
+requests HTTP library
 ~~~~~~~~~~~~~~~~~~~~~
 
 Requests is an HTTP library, written in Python, for human beings. Basic GET

@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
 """
 
 __title__ = 'requests'
-__version__ = '2.7.0'
-__build__ = 0x020700
+__version__ = '2.6.0'
+__build__ = 0x020503
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
 __copyright__ = 'Copyright 2015 Kenneth Reitz'

@@ -35,7 +35,6 @@ from .auth import _basic_auth_str
 DEFAULT_POOLBLOCK = False
 DEFAULT_POOLSIZE = 10
 DEFAULT_RETRIES = 0
-DEFAULT_POOL_TIMEOUT = None
 
 
 class BaseAdapter(object):

@@ -376,7 +375,7 @@ class HTTPAdapter(BaseAdapter):
             if hasattr(conn, 'proxy_pool'):
                 conn = conn.proxy_pool
 
-            low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
+            low_conn = conn._get_conn(timeout=timeout)
 
             try:
                 low_conn.putrequest(request.method,

@@ -408,6 +407,9 @@ class HTTPAdapter(BaseAdapter):
                 # Then, reraise so that we can handle the actual exception.
                 low_conn.close()
                 raise
+            else:
+                # All is well, return the connection to the pool.
+                conn._put_conn(low_conn)
 
         except (ProtocolError, socket.error) as err:
             raise ConnectionError(err, request=request)

@@ -55,18 +55,17 @@ def request(method, url, **kwargs):
     return response
 
 
-def get(url, params=None, **kwargs):
+def get(url, **kwargs):
     """Sends a GET request.
 
     :param url: URL for the new :class:`Request` object.
-    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
     :param \*\*kwargs: Optional arguments that ``request`` takes.
     :return: :class:`Response <Response>` object
     :rtype: requests.Response
     """
 
     kwargs.setdefault('allow_redirects', True)
-    return request('get', url, params=params, **kwargs)
+    return request('get', url, **kwargs)
 
 
 def options(url, **kwargs):

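For illustration: callers are unaffected by this signature change, because request() itself accepts params and the restored def get(url, **kwargs) simply forwards it. A minimal usage sketch (the URL is only an example):

import requests

# Works against both signatures: with def get(url, **kwargs), the params
# keyword is forwarded through **kwargs to requests.request().
resp = requests.get('https://httpbin.org/get', params={'q': 'requests'})
print(resp.url)  # the query string is appended, e.g. ...?q=requests
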
@@ -103,8 +103,7 @@ class HTTPDigestAuth(AuthBase):
         # XXX not implemented yet
         entdig = None
         p_parsed = urlparse(url)
-        #: path is request-uri defined in RFC 2616 which should not be empty
-        path = p_parsed.path or "/"
+        path = p_parsed.path
         if p_parsed.query:
             path += '?' + p_parsed.query
 

@@ -179,7 +178,7 @@ class HTTPDigestAuth(AuthBase):
             # Consume content and release the original connection
             # to allow our new request to reuse the same one.
             r.content
-            r.close()
+            r.raw.release_conn()
             prep = r.request.copy()
             extract_cookies_to_jar(prep._cookies, r.request, r.raw)
             prep.prepare_cookies(prep._cookies)

@@ -6,7 +6,6 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
 requests.utils imports from here, so be careful with imports.
 """
 
-import copy
 import time
 import collections
 from .compat import cookielib, urlparse, urlunparse, Morsel

@@ -303,7 +302,7 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
         """Updates this jar with cookies from another CookieJar or dict-like"""
         if isinstance(other, cookielib.CookieJar):
             for cookie in other:
-                self.set_cookie(copy.copy(cookie))
+                self.set_cookie(cookie)
         else:
             super(RequestsCookieJar, self).update(other)
 

@@ -360,21 +359,6 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
         return new_cj
 
 
-def _copy_cookie_jar(jar):
-    if jar is None:
-        return None
-
-    if hasattr(jar, 'copy'):
-        # We're dealing with an instane of RequestsCookieJar
-        return jar.copy()
-    # We're dealing with a generic CookieJar instance
-    new_jar = copy.copy(jar)
-    new_jar.clear()
-    for cookie in jar:
-        new_jar.set_cookie(copy.copy(cookie))
-    return new_jar
-
-
 def create_cookie(name, value, **kwargs):
     """Make a cookie from underspecified parameters.
 

@@ -415,14 +399,11 @@ def morsel_to_cookie(morsel):
 
     expires = None
     if morsel['max-age']:
-        try:
-            expires = int(time.time() + int(morsel['max-age']))
-        except ValueError:
-            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
+        expires = time.time() + morsel['max-age']
     elif morsel['expires']:
         time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
-        expires = int(time.mktime(
-            time.strptime(morsel['expires'], time_template)) - time.timezone)
+        expires = time.mktime(
+            time.strptime(morsel['expires'], time_template)) - time.timezone
     return create_cookie(
         comment=morsel['comment'],
         comment_url=bool(morsel['comment']),

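For illustration: the 2.7.0 code being removed validated max-age and returned an integer expiry, while the restored 2.6.0 code does a plain addition on the raw value. A standalone sketch of the stricter behaviour (the function name is hypothetical, not part of the diff):

import time


def max_age_to_expires(max_age):
    # 2.7.0-style handling: coerce max-age to int and fail loudly on bad
    # input, instead of adding an unvalidated value as in 2.6.0.
    try:
        return int(time.time() + int(max_age))
    except ValueError:
        raise TypeError('max-age: %s must be integer' % max_age)


print(max_age_to_expires('3600'))  # integer timestamp one hour from now
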
@@ -15,7 +15,7 @@ from .hooks import default_hooks
 from .structures import CaseInsensitiveDict
 
 from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
+from .cookies import cookiejar_from_dict, get_cookie_header
 from .packages.urllib3.fields import RequestField
 from .packages.urllib3.filepost import encode_multipart_formdata
 from .packages.urllib3.util import parse_url

@@ -30,8 +30,7 @@ from .utils import (
     iter_slices, guess_json_utf, super_len, to_native_string)
 from .compat import (
     cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
-    is_py2, chardet, builtin_str, basestring)
-from .compat import json as complexjson
+    is_py2, chardet, json, builtin_str, basestring)
 from .status_codes import codes
 
 #: The set of HTTP status codes that indicate an automatically

@@ -43,11 +42,12 @@ REDIRECT_STATI = (
     codes.temporary_redirect, # 307
     codes.permanent_redirect, # 308
 )
 
 DEFAULT_REDIRECT_LIMIT = 30
 CONTENT_CHUNK_SIZE = 10 * 1024
 ITER_CHUNK_SIZE = 512
 
+json_dumps = json.dumps
 
 
 class RequestEncodingMixin(object):
     @property

@@ -149,7 +149,8 @@ class RequestEncodingMixin(object):
             else:
                 fdata = fp.read()
 
-            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
+            rf = RequestField(name=k, data=fdata,
+                              filename=fn, headers=fh)
             rf.make_multipart(content_type=ft)
             new_fields.append(rf)
 

@@ -206,8 +207,17 @@ class Request(RequestHooksMixin):
       <PreparedRequest [GET]>
 
     """
-    def __init__(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+    def __init__(self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None):
 
         # Default empty dicts for dict params.
         data = [] if data is None else data

@@ -286,7 +296,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.hooks = default_hooks()
 
     def prepare(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+                data=None, params=None, auth=None, cookies=None, hooks=None,
+                json=None):
         """Prepares the entire request with the given parameters."""
 
         self.prepare_method(method)

@@ -295,7 +306,6 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.prepare_cookies(cookies)
         self.prepare_body(data, files, json)
         self.prepare_auth(auth, url)
-
         # Note that prepare_auth must be last to enable authentication schemes
         # such as OAuth to work on a fully prepared request.
 

@@ -310,7 +320,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         p.method = self.method
         p.url = self.url
         p.headers = self.headers.copy() if self.headers is not None else None
-        p._cookies = _copy_cookie_jar(self._cookies)
+        p._cookies = self._cookies.copy() if self._cookies is not None else None
         p.body = self.body
         p.hooks = self.hooks
         return p

@@ -347,10 +357,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             raise InvalidURL(*e.args)
 
         if not scheme:
-            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
-            error = error.format(to_native_string(url, 'utf8'))
-
-            raise MissingSchema(error)
+            raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
+                                "Perhaps you meant http://{0}?".format(url))
 
         if not host:
             raise InvalidURL("Invalid URL %r: No host supplied" % url)

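For illustration: both versions raise the same exception type for a URL without a scheme; only how the message is built changes. A quick usage sketch:

import requests
from requests.exceptions import MissingSchema

try:
    requests.get('example.com/path')  # no http:// or https:// prefix
except MissingSchema as exc:
    print(exc)  # suggests: Perhaps you meant http://example.com/path?
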
@@ -416,7 +424,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
 
         if json is not None:
             content_type = 'application/json'
-            body = complexjson.dumps(json)
+            body = json_dumps(json)
 
         is_stream = all([
             hasattr(data, '__iter__'),

@@ -493,15 +501,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             self.prepare_content_length(self.body)
 
     def prepare_cookies(self, cookies):
-        """Prepares the given HTTP cookie data.
-
-        This function eventually generates a ``Cookie`` header from the
-        given cookies using cookielib. Due to cookielib's design, the header
-        will not be regenerated if it already exists, meaning this function
-        can only be called once for the life of the
-        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
-        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
-        header is removed beforehand."""
+        """Prepares the given HTTP cookie data."""
 
         if isinstance(cookies, cookielib.CookieJar):
             self._cookies = cookies

@@ -514,10 +514,6 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
 
     def prepare_hooks(self, hooks):
         """Prepares the given hooks."""
-        # hooks can be passed as None to the prepare method and to this
-        # method. To prevent iterating over None, simply use an empty list
-        # if hooks is False-y
-        hooks = hooks or []
         for event in hooks:
             self.register_hook(event, hooks[event])
 

@@ -528,8 +524,16 @@ class Response(object):
     """
 
     __attrs__ = [
-        '_content', 'status_code', 'headers', 'url', 'history',
-        'encoding', 'reason', 'cookies', 'elapsed', 'request'
+        '_content',
+        'status_code',
+        'headers',
+        'url',
+        'history',
+        'encoding',
+        'reason',
+        'cookies',
+        'elapsed',
+        'request',
     ]
 
     def __init__(self):

@@ -649,10 +653,9 @@ class Response(object):
         If decode_unicode is True, content will be decoded using the best
         available encoding based on the response.
         """
-
         def generate():
-            # Special case for urllib3.
-            if hasattr(self.raw, 'stream'):
+            try:
+                # Special case for urllib3.
                 try:
                     for chunk in self.raw.stream(chunk_size, decode_content=True):
                         yield chunk

@@ -662,7 +665,7 @@ class Response(object):
                     raise ContentDecodingError(e)
                 except ReadTimeoutError as e:
                     raise ConnectionError(e)
-            else:
+            except AttributeError:
                 # Standard file-like object.
                 while True:
                     chunk = self.raw.read(chunk_size)

@@ -793,16 +796,14 @@ class Response(object):
             encoding = guess_json_utf(self.content)
             if encoding is not None:
                 try:
-                    return complexjson.loads(
-                        self.content.decode(encoding), **kwargs
-                    )
+                    return json.loads(self.content.decode(encoding), **kwargs)
                 except UnicodeDecodeError:
                     # Wrong UTF codec detected; usually because it's not UTF-8
                     # but some other 8-bit codec. This is an RFC violation,
                     # and the server didn't bother to tell us what codec *was*
                     # used.
                     pass
-        return complexjson.loads(self.text, **kwargs)
+        return json.loads(self.text, **kwargs)
 
     @property
     def links(self):

@@ -828,10 +829,10 @@ class Response(object):
         http_error_msg = ''
 
         if 400 <= self.status_code < 500:
-            http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+            http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
 
         elif 500 <= self.status_code < 600:
-            http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+            http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
 
         if http_error_msg:
             raise HTTPError(http_error_msg, response=self)

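For illustration: the reverted 2.7.0 change only enriched the error message with the URL; the calling pattern is identical in both versions (the httpbin URL is just an example):

import requests
from requests.exceptions import HTTPError

resp = requests.get('https://httpbin.org/status/404')
try:
    resp.raise_for_status()
except HTTPError as exc:
    # 2.6.0 prints e.g. '404 Client Error: Not Found';
    # 2.7.0 would additionally append 'for url: ...'.
    print(exc)
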
@@ -842,7 +843,4 @@ class Response(object):
 
         *Note: Should not normally need to be called explicitly.*
         """
-        if not self._content_consumed:
-            return self.raw.close()
-
         return self.raw.release_conn()

@@ -1,3 +1,107 @@
+"""
+Copyright (c) Donald Stufft, pip, and individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
 from __future__ import absolute_import
 
-from . import urllib3
+import sys
+
+
+class VendorAlias(object):
+
+    def __init__(self, package_names):
+        self._package_names = package_names
+        self._vendor_name = __name__
+        self._vendor_pkg = self._vendor_name + "."
+        self._vendor_pkgs = [
+            self._vendor_pkg + name for name in self._package_names
+        ]
+
+    def find_module(self, fullname, path=None):
+        if fullname.startswith(self._vendor_pkg):
+            return self
+
+    def load_module(self, name):
+        # Ensure that this only works for the vendored name
+        if not name.startswith(self._vendor_pkg):
+            raise ImportError(
+                "Cannot import %s, must be a subpackage of '%s'." % (
+                    name, self._vendor_name,
+                )
+            )
+
+        if not (name == self._vendor_name or
+                any(name.startswith(pkg) for pkg in self._vendor_pkgs)):
+            raise ImportError(
+                "Cannot import %s, must be one of %s." % (
+                    name, self._vendor_pkgs
+                )
+            )
+
+        # Check to see if we already have this item in sys.modules, if we do
+        # then simply return that.
+        if name in sys.modules:
+            return sys.modules[name]
+
+        # Check to see if we can import the vendor name
+        try:
+            # We do this dance here because we want to try and import this
+            # module without hitting a recursion error because of a bunch of
+            # VendorAlias instances on sys.meta_path
+            real_meta_path = sys.meta_path[:]
+            try:
+                sys.meta_path = [
+                    m for m in sys.meta_path
+                    if not isinstance(m, VendorAlias)
+                ]
+                __import__(name)
+                module = sys.modules[name]
+            finally:
+                # Re-add any additions to sys.meta_path that were made while
+                # during the import we just did, otherwise things like
+                # requests.packages.urllib3.poolmanager will fail.
+                for m in sys.meta_path:
+                    if m not in real_meta_path:
+                        real_meta_path.append(m)
+
+                # Restore sys.meta_path with any new items.
+                sys.meta_path = real_meta_path
+        except ImportError:
+            # We can't import the vendor name, so we'll try to import the
+            # "real" name.
+            real_name = name[len(self._vendor_pkg):]
+            try:
+                __import__(real_name)
+                module = sys.modules[real_name]
+            except ImportError:
+                raise ImportError("No module named '%s'" % (name,))
+
+        # If we've gotten here we've found the module we're looking for, either
+        # as part of our vendored package, or as the real name, so we'll add
+        # it to sys.modules as the vendored name so that we don't have to do
+        # the lookup again.
+        sys.modules[name] = module
+
+        # Finally, return the loaded module
+        return module
+
+
+sys.meta_path.append(VendorAlias(["urllib3", "chardet"]))

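For illustration: the restored VendorAlias hooks sys.meta_path so that imports under requests.packages resolve to the vendored (or system) copies of urllib3 and chardet. A much smaller sketch of the same aliasing idea, using only sys.modules (the alias name is hypothetical, not part of the diff):

import importlib
import sys


def alias_package(alias, real_name):
    # Import the real package and register it under the alias as well,
    # so both import paths yield the same module object.
    module = importlib.import_module(real_name)
    sys.modules[alias] = module
    return module


alias_package('vendored_json', 'json')
import vendored_json
assert vendored_json is sys.modules['json']
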
@@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
 
 __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
 __license__ = 'MIT'
-__version__ = '1.10.4'
+__version__ = '1.10.2'
 
 
 from .connectionpool import (

@@ -55,12 +55,9 @@ def add_stderr_logger(level=logging.DEBUG):
 del NullHandler
 
 
-# Set security warning to always go off by default.
 import warnings
-warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
-# InsecurePlatformWarning's don't vary between requests, so we keep it default.
-warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
-                      append=True)
+# SecurityWarning's always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning)
 
 def disable_warnings(category=exceptions.HTTPWarning):
     """

@@ -227,20 +227,20 @@ class HTTPHeaderDict(dict):
                 # Need to convert the tuple to list for further extension
                 _dict_setitem(self, key_lower, [vals[0], vals[1], val])
 
-    def extend(self, *args, **kwargs):
+    def extend(*args, **kwargs):
         """Generic import function for any type of header-like object.
         Adapted version of MutableMapping.update in order to insert items
         with self.add instead of self.__setitem__
         """
-        if len(args) > 1:
-            raise TypeError("extend() takes at most 1 positional "
+        if len(args) > 2:
+            raise TypeError("update() takes at most 2 positional "
                             "arguments ({} given)".format(len(args)))
-        other = args[0] if len(args) >= 1 else ()
+        elif not args:
+            raise TypeError("update() takes at least 1 argument (0 given)")
+        self = args[0]
+        other = args[1] if len(args) >= 2 else ()
 
-        if isinstance(other, HTTPHeaderDict):
-            for key, val in other.iteritems():
-                self.add(key, val)
-        elif isinstance(other, Mapping):
+        if isinstance(other, Mapping):
             for key in other:
                 self.add(key, other[key])
         elif hasattr(other, "keys"):

@@ -304,20 +304,17 @@ class HTTPHeaderDict(dict):
         return list(self.iteritems())
 
     @classmethod
-    def from_httplib(cls, message):  # Python 2
+    def from_httplib(cls, message, duplicates=('set-cookie',)):  # Python 2
         """Read headers from a Python 2 httplib message object."""
-        # python2.7 does not expose a proper API for exporting multiheaders
-        # efficiently. This function re-reads raw lines from the message
-        # object and extracts the multiheaders properly.
-        headers = []
-
-        for line in message.headers:
-            if line.startswith((' ', '\t')):
-                key, value = headers[-1]
-                headers[-1] = (key, value + '\r\n' + line.rstrip())
-                continue
-
-            key, value = line.split(':', 1)
-            headers.append((key, value.strip()))
-
-        return cls(headers)
+        ret = cls(message.items())
+        # ret now contains only the last header line for each duplicate.
+        # Importing with all duplicates would be nice, but this would
+        # mean to repeat most of the raw parsing already done, when the
+        # message object was created. Extracting only the headers of interest
+        # separately, the cookies, should be faster and requires less
+        # extra code.
+        for key in duplicates:
+            ret.discard(key)
+            for val in message.getheaders(key):
+                ret.add(key, val)
+        return ret

@@ -260,5 +260,3 @@ if ssl:
     # Make a copy for testing.
     UnverifiedHTTPSConnection = HTTPSConnection
     HTTPSConnection = VerifiedHTTPSConnection
-else:
-    HTTPSConnection = DummyConnection

@@ -735,6 +735,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
                  % (self.num_connections, self.host))
 
         if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+            # Platform-specific: Python without ssl
             raise SSLError("Can't connect to HTTPS URL because the SSL "
                            "module is not available.")
 

@@ -38,6 +38,8 @@ Module Variables
 ----------------
 
 :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
+    Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
+    ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
 
 .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
 .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)

@@ -83,7 +85,22 @@ _openssl_verify = {
                        + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
 }
 
-DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
+        "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
+        "!aNULL:!MD5:!DSS"
 
 
 orig_util_HAS_SNI = util.HAS_SNI

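For illustration: the restored module pins its own cipher string rather than reusing util.ssl_.DEFAULT_CIPHERS. A stdlib-only sketch of applying such a cipher string to a context (this setup is illustrative, not the vendored code path):

import ssl

CIPHERS = ('ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:'
           'ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:'
           '!aNULL:!MD5:!DSS')

ctx = ssl.create_default_context()
ctx.set_ciphers(CIPHERS)  # restrict the context to the pinned suite list
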
@@ -282,9 +299,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
         try:
             cnx.do_handshake()
         except OpenSSL.SSL.WantReadError:
-            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
-            if not rd:
-                raise timeout('select timed out')
+            select.select([sock], [], [])
             continue
         except OpenSSL.SSL.Error as e:
             raise ssl.SSLError('bad handshake', e)

@@ -162,8 +162,3 @@ class SystemTimeWarning(SecurityWarning):
 class InsecurePlatformWarning(SecurityWarning):
     "Warned when certain SSL configuration is not available on a platform."
     pass
-
-
-class ResponseNotChunked(ProtocolError, ValueError):
-    "Response needs to be chunked in order to read it as chunks."
-    pass

@@ -1,15 +1,9 @@
-try:
-    import http.client as httplib
-except ImportError:
-    import httplib
 import zlib
 import io
 from socket import timeout as SocketTimeout
 
 from ._collections import HTTPHeaderDict
-from .exceptions import (
-    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
-)
+from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
 from .packages.six import string_types as basestring, binary_type, PY3
 from .connection import HTTPException, BaseSSLError
 from .util.response import is_fp_closed

@@ -123,17 +117,7 @@ class HTTPResponse(io.IOBase):
         if hasattr(body, 'read'):
             self._fp = body
 
-        # Are we using the chunked-style of transfer encoding?
-        self.chunked = False
-        self.chunk_left = None
-        tr_enc = self.headers.get('transfer-encoding', '').lower()
-        # Don't incur the penalty of creating a list and then discarding it
-        encodings = (enc.strip() for enc in tr_enc.split(","))
-        if "chunked" in encodings:
-            self.chunked = True
-
-        # We certainly don't want to preload content when the response is chunked.
-        if not self.chunked and preload_content and not self._body:
+        if preload_content and not self._body:
             self._body = self.read(decode_content=decode_content)
 
     def get_redirect_location(self):

@@ -173,35 +157,6 @@ class HTTPResponse(io.IOBase):
         """
         return self._fp_bytes_read
 
-    def _init_decoder(self):
-        """
-        Set-up the _decoder attribute if necessar.
-        """
-        # Note: content-encoding value should be case-insensitive, per RFC 7230
-        # Section 3.2
-        content_encoding = self.headers.get('content-encoding', '').lower()
-        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
-            self._decoder = _get_decoder(content_encoding)
-
-    def _decode(self, data, decode_content, flush_decoder):
-        """
-        Decode the data passed in and potentially flush the decoder.
-        """
-        try:
-            if decode_content and self._decoder:
-                data = self._decoder.decompress(data)
-        except (IOError, zlib.error) as e:
-            content_encoding = self.headers.get('content-encoding', '').lower()
-            raise DecodeError(
-                "Received response with content-encoding: %s, but "
-                "failed to decode it." % content_encoding, e)
-
-        if flush_decoder and decode_content and self._decoder:
-            buf = self._decoder.decompress(binary_type())
-            data += buf + self._decoder.flush()
-
-        return data
-
     def read(self, amt=None, decode_content=None, cache_content=False):
         """
         Similar to :meth:`httplib.HTTPResponse.read`, but with two additional

@@ -223,7 +178,12 @@ class HTTPResponse(io.IOBase):
         after having ``.read()`` the file object. (Overridden if ``amt`` is
         set.)
         """
-        self._init_decoder()
+        # Note: content-encoding value should be case-insensitive, per RFC 7230
+        # Section 3.2
+        content_encoding = self.headers.get('content-encoding', '').lower()
+        if self._decoder is None:
+            if content_encoding in self.CONTENT_DECODERS:
+                self._decoder = _get_decoder(content_encoding)
         if decode_content is None:
             decode_content = self.decode_content
 

@@ -272,7 +232,17 @@ class HTTPResponse(io.IOBase):
 
             self._fp_bytes_read += len(data)
 
-            data = self._decode(data, decode_content, flush_decoder)
+            try:
+                if decode_content and self._decoder:
+                    data = self._decoder.decompress(data)
+            except (IOError, zlib.error) as e:
+                raise DecodeError(
+                    "Received response with content-encoding: %s, but "
+                    "failed to decode it." % content_encoding, e)
+
+            if flush_decoder and decode_content and self._decoder:
+                buf = self._decoder.decompress(binary_type())
+                data += buf + self._decoder.flush()
 
             if cache_content:
                 self._body = data

@@ -299,15 +269,11 @@ class HTTPResponse(io.IOBase):
             If True, will attempt to decode the body based on the
             'content-encoding' header.
         """
-        if self.chunked:
-            for line in self.read_chunked(amt, decode_content=decode_content):
-                yield line
-        else:
-            while not is_fp_closed(self._fp):
-                data = self.read(amt=amt, decode_content=decode_content)
+        while not is_fp_closed(self._fp):
+            data = self.read(amt=amt, decode_content=decode_content)
 
-                if data:
-                    yield data
+            if data:
+                yield data
 
     @classmethod
     def from_httplib(ResponseCls, r, **response_kw):

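For illustration: at the requests level this urllib3 revert only matters when streaming; the consuming code is unchanged either way (the URL is just an example):

import requests

resp = requests.get('https://httpbin.org/stream-bytes/1024', stream=True)
for chunk in resp.iter_content(chunk_size=256):
    # With the restored 1.10.2 code, chunked bodies go through the generic
    # read() path instead of the chunk-aware read_chunked() helper below.
    print(len(chunk))
resp.close()
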
@@ -385,82 +351,3 @@ class HTTPResponse(io.IOBase):
         else:
             b[:len(temp)] = temp
             return len(temp)
-
-    def _update_chunk_length(self):
-        # First, we'll figure out length of a chunk and then
-        # we'll try to read it from socket.
-        if self.chunk_left is not None:
-            return
-        line = self._fp.fp.readline()
-        line = line.split(b';', 1)[0]
-        try:
-            self.chunk_left = int(line, 16)
-        except ValueError:
-            # Invalid chunked protocol response, abort.
-            self.close()
-            raise httplib.IncompleteRead(line)
-
-    def _handle_chunk(self, amt):
-        returned_chunk = None
-        if amt is None:
-            chunk = self._fp._safe_read(self.chunk_left)
-            returned_chunk = chunk
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        elif amt < self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self.chunk_left = self.chunk_left - amt
-            returned_chunk = value
-        elif amt == self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-            returned_chunk = value
-        else:  # amt > self.chunk_left
-            returned_chunk = self._fp._safe_read(self.chunk_left)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        return returned_chunk
-
-    def read_chunked(self, amt=None, decode_content=None):
-        """
-        Similar to :meth:`HTTPResponse.read`, but with an additional
-        parameter: ``decode_content``.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        self._init_decoder()
-        # FIXME: Rewrite this method and make it a class with a better structured logic.
-        if not self.chunked:
-            raise ResponseNotChunked("Response is not chunked. "
-                "Header 'transfer-encoding: chunked' is missing.")
-
-        if self._original_response and self._original_response._method.upper() == 'HEAD':
-            # Don't bother reading the body of a HEAD request.
-            # FIXME: Can we do this somehow without accessing private httplib _method?
-            self._original_response.close()
-            return
-
-        while True:
-            self._update_chunk_length()
-            if self.chunk_left == 0:
-                break
-            chunk = self._handle_chunk(amt)
-            yield self._decode(chunk, decode_content=decode_content,
-                               flush_decoder=True)
-
-        # Chunk content ends with \r\n: discard it.
-        while True:
-            line = self._fp.fp.readline()
-            if not line:
-                # Some sites may not end with '\r\n'.
-                break
-            if line == b'\r\n':
-                break
-
-        # We read everything; close the "file".
-        if self._original_response:
-            self._original_response.close()
-        self.release_conn()

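For illustration: the removed _update_chunk_length/_handle_chunk/read_chunked helpers parse standard chunked transfer coding (each chunk is a hex length line, the data, and a CRLF, ended by a zero-length chunk). A self-contained sketch of that framing:

import io


def read_chunks(fp):
    # Minimal chunked-transfer-coding reader over a file-like object.
    while True:
        size_line = fp.readline().split(b';', 1)[0].strip()
        chunk_len = int(size_line, 16)
        if chunk_len == 0:
            break
        yield fp.read(chunk_len)
        fp.read(2)  # discard the CRLF that terminates each chunk


body = io.BytesIO(b'4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n')
print(b''.join(read_chunks(body)))  # b'Wikipedia'
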
@@ -9,10 +9,10 @@ HAS_SNI = False
 create_default_context = None
 
 import errno
 import ssl
-import warnings
 
 try:  # Test for SSL features
+    import ssl
     from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
     from ssl import HAS_SNI  # Has SNI?
 except ImportError:

@@ -25,24 +25,14 @@ except ImportError:
     OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
     OP_NO_COMPRESSION = 0x20000
 
-# A secure default.
-# Sources for more information on TLS ciphers:
-#
-# - https://wiki.mozilla.org/Security/Server_Side_TLS
-# - https://www.ssllabs.com/projects/best-practices/index.html
-# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
-#
-# The general intent is:
-# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
-# - prefer ECDHE over DHE for better performance,
-# - prefer any AES-GCM over any AES-CBC for better performance and security,
-# - use 3DES as fallback which is secure but slow,
-# - disable NULL authentication, MD5 MACs and DSS for security reasons.
-DEFAULT_CIPHERS = (
-    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
-    '!eNULL:!MD5'
-)
+try:
+    from ssl import _DEFAULT_CIPHERS
+except ImportError:
+    _DEFAULT_CIPHERS = (
+        'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+        'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+        '!eNULL:!MD5'
+    )
 
 try:
     from ssl import SSLContext  # Modern SSL?

@@ -50,8 +40,7 @@ except ImportError:
     import sys
 
     class SSLContext(object):  # Platform-specific: Python 2 & 3.1
-        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
-                                (3, 2) <= sys.version_info)
+        supports_set_ciphers = sys.version_info >= (2, 7)
 
         def __init__(self, protocol_version):
             self.protocol = protocol_version

@@ -178,7 +167,7 @@ def resolve_ssl_version(candidate):
     return candidate
 
 
-def create_urllib3_context(ssl_version=None, cert_reqs=None,
+def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
                            options=None, ciphers=None):
     """All arguments have the same meaning as ``ssl_wrap_socket``.
 

@@ -215,9 +204,6 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None,
     """
     context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
 
-    # Setting the default here, as we may have no ssl module on import
-    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
-
     if options is None:
         options = 0
         # SSLv2 is easily broken and is considered harmful and dangerous

@@ -231,7 +217,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None,
     context.options |= options
 
     if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
-        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+        context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
 
     context.verify_mode = cert_reqs
     if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2

@@ -15,8 +15,6 @@ class Url(namedtuple('Url', url_attrs)):
 
     def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                 query=None, fragment=None):
-        if path and not path.startswith('/'):
-            path = '/' + path
         return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                        query, fragment)

@@ -90,7 +90,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
 
 class SessionRedirectMixin(object):
     def resolve_redirects(self, resp, req, stream=False, timeout=None,
-                          verify=True, cert=None, proxies=None, **adapter_kwargs):
+                          verify=True, cert=None, proxies=None):
         """Receives a Response. Returns a generator of Responses."""
 
         i = 0

@@ -193,7 +193,6 @@ class SessionRedirectMixin(object):
                 cert=cert,
                 proxies=proxies,
                 allow_redirects=False,
-                **adapter_kwargs
             )
 
             extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

@@ -561,6 +560,10 @@ class Session(SessionRedirectMixin):
         # Set up variables needed for resolve_redirects and dispatching of hooks
         allow_redirects = kwargs.pop('allow_redirects', True)
         stream = kwargs.get('stream')
+        timeout = kwargs.get('timeout')
+        verify = kwargs.get('verify')
+        cert = kwargs.get('cert')
+        proxies = kwargs.get('proxies')
         hooks = request.hooks
 
         # Get the appropriate adapter to use

@@ -588,7 +591,12 @@ class Session(SessionRedirectMixin):
         extract_cookies_to_jar(self.cookies, request, r.raw)
 
         # Redirect resolving generator.
-        gen = self.resolve_redirects(r, request, **kwargs)
+        gen = self.resolve_redirects(r, request,
+                                     stream=stream,
+                                     timeout=timeout,
+                                     verify=verify,
+                                     cert=cert,
+                                     proxies=proxies)
 
         # Resolve redirects if allowed.
         history = [resp for resp in gen] if allow_redirects else []

@@ -67,7 +67,7 @@ def super_len(o):
         return len(o.getvalue())
 
 
-def get_netrc_auth(url, raise_errors=False):
+def get_netrc_auth(url):
     """Returns the Requests tuple auth for a given url from netrc."""
 
     try:

@@ -105,9 +105,8 @@ def get_netrc_auth(url, raise_errors=False):
                 return (_netrc[login_i], _netrc[2])
         except (NetrcParseError, IOError):
             # If there was a parsing error or a permissions issue reading the file,
-            # we'll just skip netrc auth unless explicitly asked to raise errors.
-            if raise_errors:
-                raise
+            # we'll just skip netrc auth
+            pass
 
     # AppEngine hackiness.
     except (ImportError, AttributeError):