upgrade wakatime-cli to v6.0.7
parent b340637331
commit 52e6107c6e
15 changed files with 276 additions and 69 deletions
@@ -1,7 +1,7 @@
__title__ = 'wakatime'
__description__ = 'Common interface to the WakaTime api.'
__url__ = 'https://github.com/wakatime/wakatime'
__version_info__ = ('6', '0', '6')
__version_info__ = ('6', '0', '7')
__version__ = '.'.join(__version_info__)
__author__ = 'Alan Hamlett'
__author_email__ = 'alan@wakatime.com'

@@ -73,10 +73,9 @@ class JsonFormatter(logging.Formatter):


def traceback_formatter(*args, **kwargs):
    try:
        level = kwargs.get('level', args[0]).lower()
    except:
        level = None
    level = kwargs.get('level', args[0] if len(args) else None)
    if level:
        level = level.lower()
    if level == 'warn' or level == 'warning':
        logging.getLogger('WakaTime').warning(traceback.format_exc())
    elif level == 'info':

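For reference, a standalone sketch of the new level lookup above (the helper name is invented for illustration):

def _resolve_level(*args, **kwargs):
    # kwargs['level'] wins, then the first positional argument, else None;
    # nothing is raised when no level was passed at all.
    level = kwargs.get('level', args[0] if len(args) else None)
    return level.lower() if level else None

assert _resolve_level() is None
assert _resolve_level('WARN') == 'warn'
assert _resolve_level(level='Info') == 'info'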
@@ -185,6 +185,9 @@ def parseArguments():
        return args, configs

    # update args from configs
    if not args.hostname:
        if configs.has_option('settings', 'hostname'):
            args.hostname = configs.get('settings', 'hostname')
    if not args.key:
        default_key = None
        if configs.has_option('settings', 'api_key'):

@@ -385,6 +388,18 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
            log.warn(exception_data)
        else:
            log.error(exception_data)

    except:  # delete cached session when requests raises unknown exception
        exception_data = {
            sys.exc_info()[0].__name__: u(sys.exc_info()[1]),
            'traceback': traceback.format_exc(),
        }
        if offline:
            queue = Queue()
            queue.push(data, json.dumps(stats), plugin)
            log.warn(exception_data)
        session_cache.delete()

    else:
        code = response.status_code if response is not None else None
        content = response.text if response is not None else None

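For context, a rough sketch of the kind of [settings] fallback the hostname hunk adds, using only the standard library; the file name and values are placeholders, not the plugin's actual config handling:

try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2

configs = configparser.RawConfigParser()
configs.read('wakatime.cfg')                 # placeholder config path
hostname = None                              # e.g. no --hostname argument given
if not hostname and configs.has_option('settings', 'hostname'):
    hostname = configs.get('settings', 'hostname')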
@@ -44,8 +44,9 @@ from .packages.ssl_match_hostname import match_hostname, CertificateError
from .util.ssl_ import (
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
    assert_fingerprint,
    create_urllib3_context,
    ssl_wrap_socket
)

@@ -203,14 +204,18 @@ class HTTPConnection(_HTTPConnection, object):
class HTTPSConnection(HTTPConnection):
    default_port = port_by_scheme['https']

    ssl_version = None

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 ssl_context=None, **kw):

        HTTPConnection.__init__(self, host, port, strict=strict,
                                timeout=timeout, **kw)

        self.key_file = key_file
        self.cert_file = cert_file
        self.ssl_context = ssl_context

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)

@@ -219,7 +224,19 @@ class HTTPSConnection(HTTPConnection):
    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)
        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)

        if self.ssl_context is None:
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(None),
                cert_reqs=resolve_cert_reqs(None),
            )

        self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            ssl_context=self.ssl_context,
        )


class VerifiedHTTPSConnection(HTTPSConnection):

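The hunks above replace the direct ssl.wrap_socket() call with a context that is built once and then reused for every wrap. A standard-library-only sketch of that pattern (illustrative, not the wrapped urllib3 code itself):

import socket
import ssl

ctx = ssl.create_default_context()        # roughly the role create_urllib3_context() plays
raw = socket.create_connection(('example.com', 443))
tls = ctx.wrap_socket(raw, server_hostname='example.com')   # SNI plus hostname checks
tls.close()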
@@ -237,9 +254,18 @@ class VerifiedHTTPSConnection(HTTPSConnection):
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None):

        if (ca_certs or ca_cert_dir) and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'
        """
        This method should only be called once, before the connection is used.
        """
        # If cert_reqs is not provided, we can try to guess. If the user gave
        # us a cert database, we assume they want to use it: otherwise, if
        # they gave us an SSL Context object we should use whatever is set for
        # it.
        if cert_reqs is None:
            if ca_certs or ca_cert_dir:
                cert_reqs = 'CERT_REQUIRED'
            elif self.ssl_context is not None:
                cert_reqs = self.ssl_context.verify_mode

        self.key_file = key_file
        self.cert_file = cert_file

@@ -253,9 +279,6 @@ class VerifiedHTTPSConnection(HTTPSConnection):
        # Add certificate verification
        conn = self._new_conn()

        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)

        hostname = self.host
        if getattr(self, '_tunnel_host', None):
            # _tunnel_host was added in Python 2.6.3

@@ -281,17 +304,27 @@ class VerifiedHTTPSConnection(HTTPSConnection):

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    ca_cert_dir=self.ca_cert_dir,
                                    server_hostname=hostname,
                                    ssl_version=resolved_ssl_version)
        if self.ssl_context is None:
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )

        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
        self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            server_hostname=hostname,
            ssl_context=context)

        if self.assert_fingerprint:
            assert_fingerprint(self.sock.getpeercert(binary_form=True),
                               self.assert_fingerprint)
        elif resolved_cert_reqs != ssl.CERT_NONE \
        elif context.verify_mode != ssl.CERT_NONE \
                and self.assert_hostname is not False:
            cert = self.sock.getpeercert()
            if not cert.get('subjectAltName', ()):

@@ -304,8 +337,10 @@ class VerifiedHTTPSConnection(HTTPSConnection):
            )
        _match_hostname(cert, self.assert_hostname or hostname)

        self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
                            self.assert_fingerprint is not None)
        self.is_verified = (
            context.verify_mode == ssl.CERT_REQUIRED or
            self.assert_fingerprint is not None
        )


def _match_hostname(cert, asserted_hostname):

@@ -397,8 +397,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s", method, url, http_version,
                  httplib_response.status, httplib_response.length)
        log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port,
                  method, url, http_version, httplib_response.status,
                  httplib_response.length)

        try:
            assert_header_parsing(httplib_response.msg)

@@ -604,6 +605,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
        response = self.ResponseCls.from_httplib(httplib_response,
                                                 pool=self,
                                                 connection=response_conn,
                                                 retries=retries,
                                                 **response_kw)

        # Everything went great!

@@ -775,7 +777,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version

        return conn

    def _prepare_proxy(self, conn):

@@ -1,3 +1,43 @@
"""
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

Example usage::

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    if is_appengine_sandbox():
        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
        http = AppEngineManager()
    else:
        # PoolManager uses a socket-level API behind the scenes
        http = PoolManager()

    r = http.request('GET', 'https://google.com/')

There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:

1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
   cost-effective in many circumstances as long as your usage is within the
   limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
   Sockets also have `limitations and restrictions
   <https://cloud.google.com/appengine/docs/python/sockets/\
   #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
   To use sockets, be sure to specify the following in your ``app.yaml``::

        env_variables:
            GAE_USE_SOCKETS_HTTPLIB : 'true'

3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""

from __future__ import absolute_import
import logging
import os

@@ -41,13 +81,12 @@ class AppEngineManager(RequestMethods):

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation here:
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

        https://cloud.google.com/appengine/docs/python/urlfetch

    Notably it will raise an AppEnginePlatformError if:
    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on GAEv2 (Managed VMs), as full socket
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabtyes.

@@ -133,7 +172,7 @@ class AppEngineManager(RequestMethods):
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, **response_kw)
            response, retries=retries, **response_kw)

        # Check for redirect response
        if (http_response.get_redirect_location() and

@@ -183,12 +222,13 @@ class AppEngineManager(RequestMethods):

    def _get_absolute_timeout(self, timeout):
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return 5  # 5s is the default timeout for URLFetch.
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not timeout._connect:
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total timeout.", AppEnginePlatformWarning)
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning)
            return timeout.total
        return timeout

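As a quick illustration of the timeout normalization above: urllib3's public Timeout object can carry separate connect and read budgets, while URLFetch only takes one number (sketch, values arbitrary):

from urllib3.util.timeout import Timeout

Timeout(connect=2.0, read=7.0)   # granular settings trigger the warning above
Timeout(total=10.0)              # collapses cleanly to a single URLFetch deadline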
@@ -12,7 +12,7 @@ except ImportError:
from logging import getLogger
from ntlm import ntlm

from urllib3 import HTTPSConnectionPool
from .. import HTTPSConnectionPool


log = getLogger(__name__)

@@ -1,4 +1,5 @@
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
"""
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

@@ -34,15 +35,9 @@ compression in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

Module Variables
----------------

:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)

'''
"""
from __future__ import absolute_import

try:

@@ -55,18 +50,18 @@ import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import timeout, error as SocketError
from io import BytesIO

try:  # Platform-specific: Python 2
    from socket import _fileobject
except ImportError:  # Platform-specific: Python 3
    _fileobject = None
    from urllib3.packages.backports.makefile import backport_makefile
    from ..packages.backports.makefile import backport_makefile

import ssl
import select
import six

from .. import connection
from .. import util

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

@@ -98,29 +93,34 @@ _openssl_verify = {
        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

#: The list of supported SSL/TLS cipher suites.
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS.encode('ascii')

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
orig_util_SSLContext = util.ssl_.SSLContext


def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'

    connection.ssl_wrap_socket = ssl_wrap_socket
    util.ssl_.SSLContext = PyOpenSSLContext
    util.HAS_SNI = HAS_SNI
    util.ssl_.HAS_SNI = HAS_SNI
    util.IS_PYOPENSSL = True
    util.ssl_.IS_PYOPENSSL = True


def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'

    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
    util.ssl_.SSLContext = orig_util_SSLContext
    util.HAS_SNI = orig_util_HAS_SNI
    util.ssl_.HAS_SNI = orig_util_HAS_SNI
    util.IS_PYOPENSSL = False
    util.ssl_.IS_PYOPENSSL = False


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.

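A short usage sketch for the monkey-patch pair above (assumes pyOpenSSL and the other optional dependencies are installed):

import urllib3
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()    # use the PyOpenSSL backend
http = urllib3.PoolManager()
# ... issue requests; SNI works on Python 2 ...
urllib3.contrib.pyopenssl.extract_from_urllib3()   # restore the stock backend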
@@ -308,6 +308,86 @@ else:  # Platform-specific: Python 3
    WrappedSocket.makefile = makefile


class PyOpenSSLContext(object):
    """
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    """
    def __init__(self, protocol):
        self.protocol = _openssl_versions[protocol]
        self._ctx = OpenSSL.SSL.Context(self.protocol)
        self._options = 0
        self.check_hostname = False

    @property
    def options(self):
        return self._options

    @options.setter
    def options(self, value):
        self._options = value
        self._ctx.set_options(value)

    @property
    def verify_mode(self):
        return self._ctx.get_verify_mode()

    @verify_mode.setter
    def verify_mode(self, value):
        self._ctx.set_verify(value, _verify_callback)

    def set_default_verify_paths(self):
        self._ctx.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        if isinstance(ciphers, six.text_type):
            ciphers = ciphers.encode('utf-8')
        self._ctx.set_cipher_list(ciphers)

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        if cafile is not None:
            cafile = cafile.encode('utf-8')
        if capath is not None:
            capath = capath.encode('utf-8')
        self._ctx.load_verify_locations(cafile, capath)
        if cadata is not None:
            self._ctx.load_verify_locations(BytesIO(cadata))

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        self._ctx.use_certificate_file(certfile)
        if password is not None:
            self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password)
        self._ctx.use_privatekey_file(keyfile or certfile)

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True, suppress_ragged_eofs=True,
                    server_hostname=None):
        cnx = OpenSSL.SSL.Connection(self._ctx, sock)

        if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
            server_hostname = server_hostname.encode('utf-8')

        if server_hostname is not None:
            cnx.set_tlsext_host_name(server_hostname)

        cnx.set_connect_state()

        while True:
            try:
                cnx.do_handshake()
            except OpenSSL.SSL.WantReadError:
                rd, _, _ = select.select([sock], [], [], sock.gettimeout())
                if not rd:
                    raise timeout('select timed out')
                continue
            except OpenSSL.SSL.Error as e:
                raise ssl.SSLError('bad handshake: %r' % e)
            break

        return WrappedSocket(cnx, sock)


def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    return err_no == 0


@@ -315,6 +395,8 @@ def _verify_callback(cnx, x509, err_no, err_depth, return_code):
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ca_cert_dir=None):
    # This function is no longer used by urllib3. We should strongly consider
    # removing it.
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library

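A hedged usage sketch of the new context wrapper; the cipher string and CA path are placeholders, not defaults taken from the diff:

import ssl
from urllib3.contrib.pyopenssl import PyOpenSSLContext

ctx = PyOpenSSLContext(ssl.PROTOCOL_SSLv23)      # protocol mapped via _openssl_versions
ctx.set_ciphers('ECDHE+AESGCM:!aNULL')           # example cipher string
ctx.load_verify_locations(cafile='/path/to/ca-bundle.crt')   # placeholder path
# ctx.wrap_socket(sock, server_hostname='example.com') then performs the
# PyOpenSSL handshake shown above and returns a WrappedSocket.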
@@ -1,17 +1,23 @@
# -*- coding: utf-8 -*-
"""
SOCKS support for urllib3
~~~~~~~~~~~~~~~~~~~~~~~~~

This contrib module contains provisional support for SOCKS proxies from within
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.

The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:

- SOCKS4
- SOCKS4a
- SOCKS5
- Usernames and passwords for the SOCKS proxy

Known Limitations:

- Currently PySocks does not support contacting remote websites via literal
  IPv6 addresses. Any such connection attempt will fail.
  IPv6 addresses. Any such connection attempt will fail. You must use a domain
  name.
- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
  such connection attempt will fail.
"""

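For reference, a minimal usage sketch of this contrib module (requires PySocks; the proxy address is a placeholder):

from urllib3.contrib.socks import SOCKSProxyManager

proxy = SOCKSProxyManager('socks5://localhost:1080/')   # placeholder proxy address
r = proxy.request('GET', 'http://example.com/')
print(r.status)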
@@ -23,7 +23,7 @@ __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
log = logging.getLogger(__name__)

SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir')
                'ssl_version', 'ca_cert_dir', 'ssl_context')

# The base fields to use when determining what pool to get a connection from;
# these do not rely on the ``connection_pool_kw`` and can be determined by the

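With 'ssl_context' treated as an SSL keyword above, a preconfigured context can be handed down to the HTTPS pools; a hedged sketch, assuming the keyword is forwarded to the connection as the tuple suggests:

import ssl
import urllib3

ctx = ssl.create_default_context()        # or a PyOpenSSLContext, per the contrib module
http = urllib3.PoolManager(ssl_context=ctx)
r = http.request('GET', 'https://example.com/')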
@@ -89,6 +89,10 @@ class HTTPResponse(io.IOBase):
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The retries contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.
    """

    CONTENT_DECODERS = ['gzip', 'deflate']

@@ -96,7 +100,7 @@ class HTTPResponse(io.IOBase):

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):
                 original_response=None, pool=None, connection=None, retries=None):

        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers

@@ -107,6 +111,7 @@ class HTTPResponse(io.IOBase):
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content
        self.retries = retries

        self._decoder = None
        self._body = None

@@ -1,6 +1,8 @@
from __future__ import absolute_import
import time
import logging
from collections import namedtuple
from itertools import takewhile

from ..exceptions import (
    ConnectTimeoutError,

@@ -14,6 +16,10 @@ from ..packages import six

log = logging.getLogger(__name__)

# Data structure for representing the metadata of requests that result in a retry.
RequestHistory = namedtuple('RequestHistory', ["method", "url", "error",
                                               "status", "redirect_location"])


class Retry(object):
    """ Retry configuration.

@@ -113,6 +119,10 @@ class Retry(object):
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.

    :param tuple history: The history of the request encountered during
        each call to :meth:`~Retry.increment`. The list is in the order
        the requests occurred. Each list item is of class :class:`RequestHistory`.
    """

    DEFAULT_METHOD_WHITELIST = frozenset([

@@ -124,7 +134,7 @@ class Retry(object):
    def __init__(self, total=10, connect=None, read=None, redirect=None,
                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
                 backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
                 _observed_errors=0):
                 history=None):

        self.total = total
        self.connect = connect

@@ -140,7 +150,7 @@ class Retry(object):
        self.backoff_factor = backoff_factor
        self.raise_on_redirect = raise_on_redirect
        self.raise_on_status = raise_on_status
        self._observed_errors = _observed_errors  # TODO: use .history instead?
        self.history = history or tuple()

    def new(self, **kw):
        params = dict(

@@ -151,7 +161,7 @@ class Retry(object):
            backoff_factor=self.backoff_factor,
            raise_on_redirect=self.raise_on_redirect,
            raise_on_status=self.raise_on_status,
            _observed_errors=self._observed_errors,
            history=self.history,
        )
        params.update(kw)
        return type(self)(**params)

@@ -175,10 +185,13 @@ class Retry(object):

        :rtype: float
        """
        if self._observed_errors <= 1:
        # We want to consider only the last consecutive errors sequence (Ignore redirects).
        consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None,
                                                    reversed(self.history))))
        if consecutive_errors_len <= 1:
            return 0

        backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
        return min(self.BACKOFF_MAX, backoff_value)

    def sleep(self):

@@ -241,11 +254,12 @@ class Retry(object):
        if total is not None:
            total -= 1

        _observed_errors = self._observed_errors
        connect = self.connect
        read = self.read
        redirect = self.redirect
        cause = 'unknown'
        status = None
        redirect_location = None

        if error and self._is_connection_error(error):
            # Connect retry?

@@ -253,7 +267,6 @@ class Retry(object):
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
                _observed_errors += 1

        elif error and self._is_read_error(error):
            # Read retry?

@@ -261,27 +274,30 @@ class Retry(object):
                raise six.reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1
                _observed_errors += 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = 'too many redirects'
            redirect_location = response.get_redirect_location()
            status = response.status

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and a the given method is in the whitelist
            _observed_errors += 1
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                cause = ResponseError.SPECIFIC_ERROR.format(
                    status_code=response.status)
                status = response.status

        history = self.history + (RequestHistory(method, url, error, status, redirect_location),)

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect,
            _observed_errors=_observed_errors)
            history=history)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error or ResponseError(cause))

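To make the new history bookkeeping concrete, a hedged sketch using only public Retry arguments (values arbitrary):

from urllib3.util.retry import Retry

retry = Retry(total=5, backoff_factor=0.2, status_forcelist=[500, 502, 503])
retry = retry.increment(method='GET', url='/flaky', error=None, response=None)
print(len(retry.history))        # 1 RequestHistory entry after one increment
print(retry.get_backoff_time())  # 0, since backoff only starts at the second
                                 # consecutive error (then 0.2, 0.4, 0.8, ...)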
@@ -287,6 +287,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
    """
    context = ssl_context
    if context is None:
        # Note: This branch of code and all the variables in it are no longer
        # used by urllib3 itself. We should consider deprecating and removing
        # this code.
        context = create_urllib3_context(ssl_version, cert_reqs,
                                         ciphers=ciphers)

@@ -10,7 +10,8 @@ url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
class Url(namedtuple('Url', url_attrs)):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    :func:`parse_url`. Both the scheme and host are normalized as they are
    both case-insensitive according to RFC 3986.
    """
    slots = ()

@@ -18,6 +19,10 @@ class Url(namedtuple('Url', url_attrs)):
                query=None, fragment=None):
        if path and not path.startswith('/'):
            path = '/' + path
        if scheme:
            scheme = scheme.lower()
        if host:
            host = host.lower()
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

@@ -211,7 +216,7 @@ def parse_url(url):

def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.
    Deprecated. Use :func:`parse_url` instead.
    """
    p = parse_url(url)
    return p.scheme or 'http', p.hostname, p.port

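A small sketch of the effect of the scheme/host normalization added above (assumes urllib3's public parse_url helper):

from urllib3.util.url import parse_url

u = parse_url('HTTP://Example.COM:8080/Path?q=1')
print(u.scheme, u.host, u.port, u.path)   # 'http', 'example.com', 8080, '/Path'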
@@ -56,7 +56,7 @@ class ProjectMap(BaseProject):
                    msg=u(ex),
                    pattern=u(pattern),
                ))
            except TypeError:  # pragma: nocover
            except TypeError:
                pass

        return None