upgrade wakatime-cli to master version to fix unhandled retry exception

This commit is contained in:
Alan Hamlett 2016-06-08 20:43:24 +02:00
parent 514a8762eb
commit 19d54f3310
11 changed files with 743 additions and 103 deletions


@ -1,7 +1,6 @@
""" """
urllib3 - Thread-safe connection pooling and re-using. urllib3 - Thread-safe connection pooling and re-using.
""" """
from __future__ import absolute_import from __future__ import absolute_import
import warnings import warnings
@ -32,7 +31,7 @@ except ImportError:
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = 'dev'
__all__ = (
'HTTPConnectionPool',


@ -90,7 +90,7 @@ class ConnectionPool(object):
# Return False to re-raise any potential exceptions
return False
def close(self):
"""
Close all pooled connections and disable the pool.
"""
@ -163,6 +163,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
scheme = 'http'
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
def __init__(self, host, port=None, strict=False,
timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
@ -383,8 +384,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
try:
try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older, Python 3
try:
httplib_response = conn.getresponse()
except Exception as e:
# Remove the TypeError from the exception chain in Python 3;
# otherwise it looks like a programming error was the cause.
six.raise_from(e, None)
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
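The six.raise_from(e, None) call added above keeps Python 3 tracebacks clean: without it, the TypeError raised by the buffering=True probe would be reported as the context of whatever getresponse() raises. A minimal sketch of the same pattern, assuming a stand-alone six install (urllib3 imports its vendored copy relatively):

import six

def read_response(conn):
    try:
        # Only Python 2.7 accepts the buffering keyword.
        return conn.getresponse(buffering=True)
    except TypeError:  # Python 2.6 and older, Python 3
        try:
            return conn.getresponse()
        except Exception as e:
            # Equivalent to `raise e from None` on Python 3, so the probing
            # TypeError above is not shown as the cause of the real failure.
            six.raise_from(e, None)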
@ -545,6 +551,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/shazow/urllib3/issues/651>
release_this_conn = release_conn
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
@ -584,10 +601,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
response_conn = conn if not release_conn else None
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(httplib_response,
pool=self,
connection=response_conn,
**response_kw)
# Everything went great!
clean_exit = True
@ -633,9 +650,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
conn = conn and conn.close()
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
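The release_this_conn comment above describes a simple pattern: flip only a local copy of the flag so that, when urlopen recurses for a retry, the caller's original release_conn value is passed down unchanged. An illustrative sketch, not the vendored code (the pool and connection APIs here are hypothetical):

def fetch(pool, request, release_conn=False, retries_left=1):
    release_this_conn = release_conn   # local copy; may be flipped below
    conn = pool.get()                  # hypothetical pool helper
    err, response = None, None
    try:
        response = conn.send(request)  # hypothetical connection API
    except IOError as e:
        conn = conn and conn.close()   # drop the broken connection
        release_this_conn = True       # but always hand its slot back
        err = e

    if release_this_conn:
        pool.put(conn)                 # may be None; the pool makes a fresh one

    if err is not None:
        if not retries_left:
            raise err
        # The recursion sees the caller's original release_conn, not the
        # locally flipped value.
        return fetch(pool, request, release_conn=release_conn,
                     retries_left=retries_left - 1)
    return response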
@ -817,7 +834,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
warnings.warn((
'Unverified HTTPS request is being made. '
'Adding certificate verification is strongly advised. See: '
'https://urllib3.readthedocs.io/en/latest/security.html'),
InsecureRequestWarning)


@ -70,7 +70,7 @@ class AppEngineManager(RequestMethods):
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/latest/contrib.html.",
AppEnginePlatformWarning)
RequestMethods.__init__(self, headers)


@ -26,7 +26,7 @@ except ImportError:
warnings.warn((
'SOCKS support in urllib3 requires the installation of optional '
'dependencies: specifically, PySocks. For more information, see '
'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies'
),
DependencyWarning
)


@ -1,34 +1,41 @@
"""Utilities for writing code that runs on Python 2 and 3""" """Utilities for writing code that runs on Python 2 and 3"""
#Copyright (c) 2010-2011 Benjamin Peterson # Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
@ -51,6 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
@ -61,7 +69,7 @@ else:
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
@ -82,9 +90,13 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
@ -102,6 +114,27 @@ class MovedModule(_LazyDescr):
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
@ -128,30 +161,111 @@ class MovedAttribute(_LazyDescr):
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
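For orientation, this importer is what makes the six.moves aliases importable as real modules. A hedged usage example, assuming a stand-alone six install (inside wakatime-cli the module lives under the vendored packages path):

from six.moves import http_client               # httplib on Py2, http.client on Py3
from six.moves.urllib.parse import urlparse     # resolved through _SixMetaPathImporter

parts = urlparse('https://wakatime.com/api/v1/heartbeats')
conn = http_client.HTTPSConnection(parts.netloc)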
class _MovedItems(_LazyModule):
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects""" """Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"), MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"), MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"), MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"), MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
@ -159,12 +273,14 @@ _moved_attributes = [
MovedModule("queue", "Queue"), MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"), MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"), MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"), MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser", MovedModule("tkinter_colorchooser", "tkColorChooser",
@ -176,14 +292,195 @@ _moved_attributes = [
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"), "tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
@ -206,22 +503,18 @@ if PY3:
_meth_func = "__func__" _meth_func = "__func__"
_meth_self = "__self__" _meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__" _func_code = "__code__"
_func_defaults = "__defaults__" _func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code" _func_code = "func_code"
_func_defaults = "func_defaults" _func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
try:
@ -232,18 +525,33 @@ except NameError:
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
@ -256,90 +564,179 @@ _add_doc(get_unbound_function,
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
return iter(getattr(d, _itervalues)())
def iteritems(d, **kw):
return iter(d.items(**kw))
return iter(getattr(d, _iteritems)())
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""") _add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""") _add_doc(u, """Text literal""")
if PY3: def assertCountEqual(self, *args, **kwargs):
import builtins return getattr(self, _assertCountEqual)(*args, **kwargs)
exec_ = getattr(builtins, "exec")
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None): exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb raise tp, value, tb
""") """)
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
@ -376,10 +773,96 @@ else:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, base=object):
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass.""" """Create a base class with a metaclass."""
return meta("NewBase", (base,), {}) # This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
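A hedged usage example of with_metaclass() as defined above (stand-alone six assumed; the class names are illustrative): the temporary class exists only while the class statement runs, so the real metaclass is invoked exactly once, for the final class.

import six

class Registry(type):
    instances = []

    def __new__(mcs, name, bases, attrs):
        cls = super(Registry, mcs).__new__(mcs, name, bases, attrs)
        Registry.instances.append(cls)
        return cls

class Base(object):
    pass

class Plugin(six.with_metaclass(Registry, Base)):
    pass

# The dummy metaclass replaced itself: Plugin is built by Registry and
# inherits directly from Base, with no leftover temporary base class.
assert isinstance(Plugin, Registry) and Plugin.__bases__ == (Base,)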
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
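A hedged usage example of the decorator above (stand-alone six assumed; the class is illustrative): define __str__ to return text, and on Python 2 the decorator installs a matching __unicode__ plus a UTF-8-encoding __str__.

import six

@six.python_2_unicode_compatible
class Heartbeat(object):
    def __init__(self, entity):
        self.entity = entity

    def __str__(self):
        return u'Heartbeat({0})'.format(self.entity)

print(str(Heartbeat('main.py')))   # encoded bytes on Python 2, text on Python 3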
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)


@ -1,4 +1,6 @@
from __future__ import absolute_import
import collections
import functools
import logging
try: # Python 3
@ -23,6 +25,59 @@ log = logging.getLogger(__name__)
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
'ssl_version', 'ca_cert_dir')
# The base fields to use when determining what pool to get a connection from;
# these do not rely on the ``connection_pool_kw`` and can be determined by the
# URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary.
#
# All custom key schemes should include the fields in this key at a minimum.
BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port'))
# The fields to use when determining what pool to get a HTTP and HTTPS
# connection from. All additional fields must be present in the PoolManager's
# ``connection_pool_kw`` instance variable.
HTTPPoolKey = collections.namedtuple(
'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict',
'block', 'source_address')
)
HTTPSPoolKey = collections.namedtuple(
'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS
)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key of type ``key_class`` for a request.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:param request_context:
A dictionary-like object that contain the context for a request.
It should contain a key for each field in the :class:`HTTPPoolKey`
"""
context = {}
for key in key_class._fields:
context[key] = request_context.get(key)
context['scheme'] = context['scheme'].lower()
context['host'] = context['host'].lower()
return key_class(**context)
# A dictionary that maps a scheme to a callable that creates a pool key.
# This can be used to alter the way pool keys are constructed, if desired.
# Each PoolManager makes a copy of this dictionary so they can be configured
# globally here, or individually on the instance.
key_fn_by_scheme = {
'http': functools.partial(_default_key_normalizer, HTTPPoolKey),
'https': functools.partial(_default_key_normalizer, HTTPSPoolKey),
}
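A small sketch of how the pool-key machinery above behaves (the context values are illustrative): _default_key_normalizer lowercases the scheme and host before building the namedtuple, and any HTTPPoolKey field missing from the context becomes None.

request_context = {'scheme': 'HTTP', 'host': 'WakaTime.com', 'port': 80}
key = _default_key_normalizer(HTTPPoolKey, request_context)
assert key.scheme == 'http' and key.host == 'wakatime.com'
assert key.timeout is None and key.retries is None   # unset fields default to None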
pool_classes_by_scheme = {
'http': HTTPConnectionPool,
'https': HTTPSConnectionPool,
@ -65,8 +120,10 @@ class PoolManager(RequestMethods):
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self):
return self
@ -113,10 +170,36 @@ class PoolManager(RequestMethods):
if not host:
raise LocationValueError("No host specified.")
request_context = self.connection_pool_kw.copy()
request_context['scheme'] = scheme or 'http'
if not port:
port = port_by_scheme.get(request_context['scheme'].lower(), 80)
request_context['port'] = port
request_context['host'] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
Get a :class:`ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context['scheme'].lower()
pool_key_constructor = self.key_fn_by_scheme[scheme]
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key)
def connection_from_pool_key(self, pool_key):
"""
Get a :class:`ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
@ -125,7 +208,7 @@ class PoolManager(RequestMethods):
return pool
# Make a fresh ConnectionPool of the desired type
pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port)
self.pools[pool_key] = pool
return pool
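A hedged usage sketch of the routing above (assuming a stock urllib3 install): requests that normalize to the same pool key share a single ConnectionPool.

import urllib3

manager = urllib3.PoolManager(num_pools=4, timeout=3.0)
pool_a = manager.connection_from_host('wakatime.com', 443, scheme='https')
pool_b = manager.connection_from_host('WAKATIME.COM', 443, scheme='https')
assert pool_a is pool_b   # scheme/host normalize into the same HTTPSPoolKey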


@ -165,6 +165,10 @@ class HTTPResponse(io.IOBase):
if self._fp:
return self.read(cache_content=True)
@property
def connection(self):
return self._connection
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from


@ -46,6 +46,8 @@ def is_connection_dropped(conn): # Platform-specific
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, socket_options=None):
"""Connect to *address* and return the socket object.
@ -64,14 +66,19 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
if host.startswith('['):
host = host.strip('[]')
err = None
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
# us select whether to work with IPv4 DNS records, IPv6 records, or both.
# The original create_connection function always returns all records.
family = allowed_gai_family()
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
# This is the only addition urllib3 makes to this function.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
@ -99,3 +106,39 @@ def _set_socket_options(sock, options):
for opt in options:
sock.setsockopt(*opt)
def allowed_gai_family():
"""This function is designed to work in the context of
getaddrinfo, where family=socket.AF_UNSPEC is the default and
will perform a DNS search for both IPv6 and IPv4 records."""
family = socket.AF_INET
if HAS_IPV6:
family = socket.AF_UNSPEC
return family
def _has_ipv6(host):
""" Returns True if the system can bind an IPv6 address. """
sock = None
has_ipv6 = False
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
# https://github.com/shazow/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
except Exception:
pass
if sock:
sock.close()
return has_ipv6
HAS_IPV6 = _has_ipv6('::1')
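An illustrative check of the helpers above: allowed_gai_family() narrows the getaddrinfo lookup to IPv4 only when the interpreter cannot actually bind an IPv6 address.

import socket

family = allowed_gai_family()   # socket.AF_UNSPEC if HAS_IPV6 else socket.AF_INET
for res in socket.getaddrinfo('localhost', 80, family, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    print(af, sa)               # only AF_INET entries when IPv6 is unavailable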


@ -80,21 +80,27 @@ class Retry(object):
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
Set to a ``False`` value to retry on any verb.
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``method_whitelist``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
:param float backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ^ ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
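A worked example of the backoff formula quoted above (values illustrative): with backoff_factor=0.1 there is no sleep before the second attempt, then 0.2s, 0.4s, 0.8s, and so on, capped at Retry.BACKOFF_MAX (120 seconds).

backoff_factor = 0.1
BACKOFF_MAX = 120   # Retry.BACKOFF_MAX in this urllib3

for consecutive_errors in range(1, 7):
    if consecutive_errors <= 1:
        sleep = 0.0
    else:
        sleep = backoff_factor * (2 ** (consecutive_errors - 1))
    print(consecutive_errors, min(sleep, BACKOFF_MAX))
    # -> 0.0, 0.2, 0.4, 0.8, 1.6, 3.2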


@ -117,7 +117,7 @@ except ImportError:
'urllib3 from configuring SSL appropriately and may cause '
'certain SSL connections to fail. You can upgrade to a newer '
'version of Python to solve this. For more information, see '
'https://urllib3.readthedocs.io/en/latest/security.html'
'#insecureplatformwarning.',
InsecurePlatformWarning
)
@ -313,7 +313,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
'This may cause the server to present an incorrect TLS '
'certificate, which can cause validation failures. You can upgrade to '
'a newer version of Python to solve this. For more information, see '
'https://urllib3.readthedocs.io/en/latest/security.html'
'#snimissingwarning.',
SNIMissingWarning
)


@ -38,11 +38,11 @@ class Git(BaseProject):
head = os.path.join(self._project_base(), '.git', 'HEAD')
try:
with open(head, 'r', encoding='utf-8') as fh:
return self._get_branch_from_head_file(fh.readline())
except UnicodeDecodeError: # pragma: nocover
try:
with open(head, 'r', encoding=sys.getfilesystemencoding()) as fh:
return self._get_branch_from_head_file(fh.readline())
except:
log.traceback('warn')
except IOError: # pragma: nocover
@ -64,3 +64,8 @@ class Git(BaseProject):
if split_path[1] == '':
return None
return self._find_git_config_file(split_path[0])
def _get_branch_from_head_file(self, line):
if u(line.strip()).startswith('ref: '):
return u(line.strip().rsplit('/', 1)[-1])
return None
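A standalone sketch of what the new helper does (the real method lives on the Git class above and wraps the result in u()): only a "ref: ..." line yields a branch name, while a detached HEAD, which stores a bare commit hash, yields None so no bogus branch is reported.

def branch_from_head_line(line):
    if line.strip().startswith('ref: '):
        return line.strip().rsplit('/', 1)[-1]
    return None   # detached HEAD: the file holds a commit hash, not a ref

assert branch_from_head_line('ref: refs/heads/master\n') == 'master'
assert branch_from_head_line('a94a8fe5ccb19ba61c4c0873d391e987982fbbd3\n') is None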