upgrade wakatime-cli to v4.0.1. fixes #20.

Alan Hamlett 2015-03-03 13:28:03 -08:00
parent d639c5ea7b
commit 8b0bdf56df
83 changed files with 867 additions and 728 deletions

View File

@@ -3,6 +3,13 @@ History
-------
4.0.1 (2015-03-03)
++++++++++++++++++
- upgrade requests library to v2.5.3 to fix SSL problem on CentOS
- new options for excluding and including directories
4.0.0 (2015-02-12)
++++++++++++++++++
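Editor's note: the headline change is that --exclude/--include replace the old --ignore flag. A hypothetical invocation for illustration; the wakatime-cli.py script name and the --key/--file flags are assumptions drawn from the existing interface, and only --exclude/--include are new in this release:

    import subprocess
    import sys

    # Hypothetical: drive the bundled CLI with the new pattern flags.
    subprocess.call([
        sys.executable, 'wakatime-cli.py',        # assumed script name
        '--key', 'your-api-key',
        '--file', '/home/you/code/app.py',        # assumed existing flag
        '--exclude', '^/tmp/', '--exclude', '^/var/',
        '--include', r'.*\.py$',
    ])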

View File

@@ -29,11 +29,13 @@ format. An example config file looks like::
debug = false
api_key = your-api-key
hidefilenames = false
ignore =
exclude =
^COMMIT_EDITMSG$
^TAG_EDITMSG$
^/var/
^/etc/
include =
.*
offline = true
proxy = https://user:pass@localhost:8080
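Editor's note: each exclude/include value is one POSIX-style regular expression per line, matched case-insensitively against the full file path. A minimal sketch of the matching rule, mirroring the re.search call in should_exclude further down:

    import re

    exclude = ['^COMMIT_EDITMSG$', '^TAG_EDITMSG$', '^/var/', '^/etc/']

    def matches_any(path, patterns):
        # case-insensitive search, the same rule the CLI applies per pattern
        return any(re.compile(p, re.IGNORECASE).search(path) for p in patterns)

    print(matches_any('/var/log/syslog', exclude))    # True  -> not logged
    print(matches_any('/home/you/app.py', exclude))   # False -> logged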

View File

@@ -13,7 +13,7 @@
from __future__ import print_function
__title__ = 'wakatime'
__version__ = '4.0.0'
__version__ = '4.0.1'
__author__ = 'Alan Hamlett'
__license__ = 'BSD'
__copyright__ = 'Copyright 2014 Alan Hamlett'
@@ -168,9 +168,13 @@ def parseArguments(argv):
parser.add_argument('--hidefilenames', dest='hidefilenames',
action='store_true',
help='obfuscate file names; will not send file names to api')
parser.add_argument('--ignore', dest='ignore', action='append',
help='filename patterns to ignore; POSIX regex syntax; can be used'+
' more than once')
parser.add_argument('--exclude', dest='exclude', action='append',
help='filename patterns to exclude from logging; POSIX regex '+
'syntax; can be used more than once')
parser.add_argument('--include', dest='include', action='append',
help='filename patterns to log; when used in combination with '+
'--exclude, files matching include will still be logged; '+
'POSIX regex syntax; can be used more than once')
parser.add_argument('--logfile', dest='logfile',
help='defaults to ~/.wakatime.log')
parser.add_argument('--config', dest='config',
@@ -202,13 +206,29 @@ def parseArguments(argv):
args.key = default_key
else:
parser.error('Missing api key')
if not args.ignore:
args.ignore = []
if not args.exclude:
args.exclude = []
if configs.has_option('settings', 'ignore'):
try:
for pattern in configs.get('settings', 'ignore').split("\n"):
if pattern.strip() != '':
args.ignore.append(pattern)
args.exclude.append(pattern)
except TypeError:
pass
if configs.has_option('settings', 'exclude'):
try:
for pattern in configs.get('settings', 'exclude').split("\n"):
if pattern.strip() != '':
args.exclude.append(pattern)
except TypeError:
pass
if not args.include:
args.include = []
if configs.has_option('settings', 'include'):
try:
for pattern in configs.get('settings', 'include').split("\n"):
if pattern.strip() != '':
args.include.append(pattern)
except TypeError:
pass
if args.offline and configs.has_option('settings', 'offline'):
@@ -227,16 +247,29 @@ def parseArguments(argv):
return args, configs
def should_ignore(fileName, patterns):
def should_exclude(fileName, include, exclude):
if fileName is not None and fileName.strip() != '':
try:
for pattern in patterns:
for pattern in include:
try:
compiled = re.compile(pattern, re.IGNORECASE)
if compiled.search(fileName):
return False
except re.error as ex:
log.warning(u('Regex error ({msg}) for include pattern: {pattern}').format(
msg=u(ex),
pattern=u(pattern),
))
except TypeError:
pass
try:
for pattern in exclude:
try:
compiled = re.compile(pattern, re.IGNORECASE)
if compiled.search(fileName):
return pattern
except re.error as ex:
log.warning(u('Regex error ({msg}) for ignore pattern: {pattern}').format(
log.warning(u('Regex error ({msg}) for exclude pattern: {pattern}').format(
msg=u(ex),
pattern=u(pattern),
))
@@ -379,10 +412,10 @@ def main(argv=None):
setup_logging(args, __version__)
ignore = should_ignore(args.targetFile, args.ignore)
if ignore is not False:
log.debug(u('File ignored because matches pattern: {pattern}').format(
pattern=u(ignore),
exclude = should_exclude(args.targetFile, args.include, args.exclude)
if exclude is not False:
log.debug(u('File not logged because matches exclude pattern: {pattern}').format(
pattern=u(exclude),
))
return 0
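Editor's note on the contract here: should_exclude() returns False when the file should be logged and the matching pattern string otherwise, which is why main() treats any value other than False as a skip. Include patterns are checked first and short-circuit, so a file matching both lists is still logged. A sketch, assuming the function is importable from the package as defined above:

    from wakatime import should_exclude  # assumed import path (vendored copy)

    print(should_exclude('/var/log/syslog', [], ['^/var/']))      # '^/var/' -> skipped
    print(should_exclude('/var/log/syslog', ['.*'], ['^/var/']))  # False -> logged (include wins)
    print(should_exclude('/home/you/app.py', [], ['^/var/']))     # False -> logged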

View File

@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
"""
__title__ = 'requests'
__version__ = '2.5.1'
__build__ = 0x020501
__version__ = '2.5.3'
__build__ = 0x020503
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Kenneth Reitz'

View File

File diff suppressed because it is too large
View File

@@ -145,7 +145,7 @@ class RequestEncodingMixin(object):
fp = v
if isinstance(fp, str):
fp = StringIO(fp)
if isinstance(fp, bytes):
if isinstance(fp, (bytes, bytearray)):
fp = BytesIO(fp)
rf = RequestField(name=k, data=fp.read(),
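Editor's note: the widened isinstance check simply lets bytearray payloads take the same BytesIO path that bytes already took; nothing else changes. Illustration:

    from io import BytesIO

    for payload in (b'raw bytes', bytearray(b'raw bytes')):
        fp = BytesIO(payload)   # BytesIO accepts both types
        assert fp.read() == b'raw bytes'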

View File

@@ -1,7 +1,7 @@
from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
except ImportError: # Platform-specific: No threads available
class RLock:
def __enter__(self):
pass
@@ -10,16 +10,18 @@ except ImportError: # Platform-specific: No threads available
pass
try: # Python 2.7+
try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
from .packages.six import iterkeys, itervalues
from .packages.six import iterkeys, itervalues, PY3
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
MULTIPLE_HEADERS_ALLOWED = frozenset(['cookie', 'set-cookie', 'set-cookie2'])
_Null = object()
@@ -97,7 +99,14 @@ class RecentlyUsedContainer(MutableMapping):
return list(iterkeys(self._container))
class HTTPHeaderDict(MutableMapping):
_dict_setitem = dict.__setitem__
_dict_getitem = dict.__getitem__
_dict_delitem = dict.__delitem__
_dict_contains = dict.__contains__
_dict_setdefault = dict.setdefault
class HTTPHeaderDict(dict):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
@@ -129,25 +138,72 @@ class HTTPHeaderDict(MutableMapping):
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
If you want to access the raw headers with their original casing
for debugging purposes you can access the private ``._data`` attribute
which is a normal python ``dict`` that maps the case-insensitive key to a
list of tuples stored as (case-sensitive-original-name, value). Using the
structure from above as our example:
>>> headers._data
{'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
'content-length': [('content-length', '7')]}
"""
def __init__(self, headers=None, **kwargs):
self._data = {}
if headers is None:
headers = {}
self.update(headers, **kwargs)
dict.__init__(self)
if headers is not None:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def add(self, key, value):
def __setitem__(self, key, val):
return _dict_setitem(self, key.lower(), (key, val))
def __getitem__(self, key):
val = _dict_getitem(self, key.lower())
return ', '.join(val[1:])
def __delitem__(self, key):
return _dict_delitem(self, key.lower())
def __contains__(self, key):
return _dict_contains(self, key.lower())
def __eq__(self, other):
if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other)
def __ne__(self, other):
return not self.__eq__(other)
values = MutableMapping.values
get = MutableMapping.get
update = MutableMapping.update
if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
__marker = object()
def pop(self, key, default=__marker):
'''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def discard(self, key):
try:
del self[key]
except KeyError:
pass
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
@@ -156,43 +212,108 @@ class HTTPHeaderDict(MutableMapping):
>>> headers['foo']
'bar, baz'
"""
self._data.setdefault(key.lower(), []).append((key, value))
key_lower = key.lower()
new_vals = key, val
# Keep the common case aka no item present as fast as possible
vals = _dict_setdefault(self, key_lower, new_vals)
if new_vals is not vals:
# new_vals was not inserted, as there was a previous one
if isinstance(vals, list):
# If already several items got inserted, we have a list
vals.append(val)
else:
# vals should be a tuple then, i.e. only one item so far
if key_lower in MULTIPLE_HEADERS_ALLOWED:
# Need to convert the tuple to list for further extension
_dict_setitem(self, key_lower, [vals[0], vals[1], val])
else:
_dict_setitem(self, key_lower, new_vals)
def extend(*args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
for key in other.keys():
self.add(key, other[key])
else:
for key, value in other:
self.add(key, value)
for key, value in kwargs.items():
self.add(key, value)
def getlist(self, key):
"""Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist."""
return self[key].split(', ') if key in self else []
try:
vals = _dict_getitem(self, key.lower())
except KeyError:
return []
else:
if isinstance(vals, tuple):
return [vals[1]]
else:
return vals[1:]
def copy(self):
h = HTTPHeaderDict()
for key in self._data:
for rawkey, value in self._data[key]:
h.add(rawkey, value)
return h
def __eq__(self, other):
if not isinstance(other, Mapping):
return False
other = HTTPHeaderDict(other)
return dict((k1, self[k1]) for k1 in self._data) == \
dict((k2, other[k2]) for k2 in other._data)
def __getitem__(self, key):
values = self._data[key.lower()]
return ', '.join(value[1] for value in values)
def __setitem__(self, key, value):
self._data[key.lower()] = [(key, value)]
def __delitem__(self, key):
del self._data[key.lower()]
def __len__(self):
return len(self._data)
def __iter__(self):
for headers in itervalues(self._data):
yield headers[0][0]
# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist
iget = getlist
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
def copy(self):
clone = type(self)()
for key in self:
val = _dict_getitem(self, key)
if isinstance(val, list):
# Don't need to convert tuples
val = list(val)
_dict_setitem(clone, key, val)
return clone
def iteritems(self):
"""Iterate over all header lines, including duplicate ones."""
for key in self:
vals = _dict_getitem(self, key)
for val in vals[1:]:
yield vals[0], val
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = _dict_getitem(self, key)
yield val[0], ', '.join(val[1:])
def items(self):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message, duplicates=('set-cookie',)): # Python 2
"""Read headers from a Python 2 httplib message object."""
ret = cls(message.items())
# ret now contains only the last header line for each duplicate.
# Importing with all duplicates would be nice, but this would
# mean to repeat most of the raw parsing already done, when the
# message object was created. Extracting only the headers of interest
# separately, the cookies, should be faster and requires less
# extra code.
for key in duplicates:
ret.discard(key)
for val in message.getheaders(key):
ret.add(key, val)
return ret
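Editor's note: the rewritten HTTPHeaderDict subclasses dict directly; each lower-cased key maps to an (original-name, value, ...) tuple, or to a list once a multi-valued header such as Set-Cookie collects several entries. A usage sketch consistent with the docstrings above (the import path assumes the copy vendored under requests; the standalone library exposes it as urllib3._collections):

    from requests.packages.urllib3._collections import HTTPHeaderDict

    h = HTTPHeaderDict()
    h.add('Set-Cookie', 'foo=bar')
    h.add('set-cookie', 'baz=quxx')   # cookies may repeat, so this appends
    h['Content-Length'] = '7'

    print(h['SET-cookie'])            # 'foo=bar, baz=quxx' (case-insensitive, merged)
    print(h.getlist('set-cookie'))    # ['foo=bar', 'baz=quxx'] (individual values)
    print(h['content-length'])        # '7'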

View File

@@ -72,6 +72,21 @@ class ConnectionPool(object):
return '%s(host=%r, port=%r)' % (type(self).__name__,
self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
def close():
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
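Editor's note: the new __enter__/__exit__ pair makes any pool usable as a context manager; close() runs on exit, and exceptions still propagate because __exit__ returns False. A sketch against the standalone library (the vendored import path differs):

    from urllib3 import HTTPConnectionPool

    with HTTPConnectionPool('httpbin.org', port=80, maxsize=2) as pool:
        resp = pool.request('GET', '/get')
        print(resp.status)
    # pool.close() has already run here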
@@ -353,7 +368,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Receive the response from the server
try:
try: # Python 2.7+, use buffering of HTTP responses
try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
@@ -558,6 +573,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = None
raise SSLError(e)
except SSLError:
# Treat SSLError separately from BaseSSLError to preserve
# traceback.
if conn:
conn.close()
conn = None
raise
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
if conn:
# Discard the connection for these exceptions. It will be
@@ -565,14 +588,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.close()
conn = None
stacktrace = sys.exc_info()[2]
if isinstance(e, SocketError) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e)
retries = retries.increment(method, url, error=e,
_pool=self, _stacktrace=stacktrace)
retries = retries.increment(method, url, error=e, _pool=self,
_stacktrace=sys.exc_info()[2])
retries.sleep()
# Keep track of the error for the retry warning.
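Editor's note: as the surrounding code shows, low-level socket failures are wrapped in urllib3 exception types (ProxyError, ProtocolError) before being charged against the Retry budget. From the caller's side that looks roughly like this sketch (10.255.255.1 is used as a typically unroutable address):

    from urllib3 import PoolManager
    from urllib3.exceptions import MaxRetryError

    with PoolManager() as http:
        try:
            # unroutable address: the socket error is wrapped per the code above
            http.request('GET', 'http://10.255.255.1/', retries=0, timeout=1.0)
        except MaxRetryError as exc:
            print('gave up:', exc.reason)   # e.g. a ProtocolError / ConnectTimeoutError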

View File

@@ -8,7 +8,7 @@ except ImportError:
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError
from .exceptions import LocationValueError, MaxRetryError
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry
@@ -64,6 +64,14 @@ class PoolManager(RequestMethods):
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port):
"""
Create a new :class:`ConnectionPool` based on host, port and scheme.
@@ -167,7 +175,14 @@ class PoolManager(RequestMethods):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
kw['retries'] = retries.increment(method, redirect_location)
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
raise
return response
kw['retries'] = retries
kw['redirect'] = redirect
log.info("Redirecting %s -> %s" % (url, redirect_location))
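Editor's note: PoolManager gains the same context-manager support, and redirects are now charged against the Retry budget: once Retry(redirect=n) is exhausted, urlopen raises MaxRetryError unless raise_on_redirect is False. A sketch, with httpbin.org standing in as a redirect chain:

    from urllib3 import PoolManager
    from urllib3.exceptions import MaxRetryError
    from urllib3.util.retry import Retry

    with PoolManager() as http:
        try:
            # five hops but only three allowed -> MaxRetryError
            http.request('GET', 'http://httpbin.org/redirect/5',
                         retries=Retry(redirect=3))
        except MaxRetryError:
            print('redirect budget exhausted')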

View File

@@ -4,12 +4,11 @@ from socket import timeout as SocketTimeout
from ._collections import HTTPHeaderDict
from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
from .packages.six import string_types as basestring, binary_type
from .packages.six import string_types as basestring, binary_type, PY3
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed
class DeflateDecoder(object):
def __init__(self):
@@ -21,6 +20,9 @@ class DeflateDecoder(object):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
if not self._first_try:
return self._obj.decompress(data)
@@ -36,9 +38,23 @@ class DeflateDecoder(object):
self._data = None
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
return self._obj.decompress(data)
def _get_decoder(mode):
if mode == 'gzip':
return zlib.decompressobj(16 + zlib.MAX_WBITS)
return GzipDecoder()
return DeflateDecoder()
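Editor's note: both decoders now hand empty input straight back instead of pushing it through zlib, so empty chunks from a chunked response cannot disturb decoder state. The gzip path, sketched with the same window setting GzipDecoder uses:

    import gzip
    import zlib

    d = zlib.decompressobj(16 + zlib.MAX_WBITS)  # gzip framing, as in GzipDecoder
    body = gzip.compress(b'hello world')         # gzip.compress: Python 3.2+

    assert d.decompress(body) == b'hello world'
    # the patched decompress() returns b'' immediately for empty chunks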
@@ -76,9 +92,10 @@ class HTTPResponse(io.IOBase):
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
self.headers = HTTPHeaderDict()
if headers:
self.headers.update(headers)
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
else:
self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
@@ -202,7 +219,7 @@ class HTTPResponse(io.IOBase):
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if not 'read operation timed out' in str(e): # Defensive:
if 'read operation timed out' not in str(e): # Defensive:
# This shouldn't happen but just in case we're missing an edge
# case, let's avoid swallowing SSL errors.
raise
@@ -267,14 +284,16 @@ class HTTPResponse(io.IOBase):
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
headers = HTTPHeaderDict()
for k, v in r.getheaders():
headers.add(k, v)
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
if PY3: # Python 3
headers = HTTPHeaderDict(headers.items())
else: # Python 2
headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
return ResponseCls(body=r,
resp = ResponseCls(body=r,
headers=headers,
status=r.status,
version=r.version,
@@ -282,6 +301,7 @@ class HTTPResponse(io.IOBase):
strict=strict,
original_response=r,
**response_kw)
return resp
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):

View File

@@ -82,6 +82,7 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
err = _
if sock is not None:
sock.close()
sock = None
if err is not None:
raise err

View File

@@ -1,5 +1,5 @@
from binascii import hexlify, unhexlify
from hashlib import md5, sha1
from hashlib import md5, sha1, sha256
from ..exceptions import SSLError
@@ -29,8 +29,8 @@ try:
except ImportError:
_DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
try:
@@ -96,7 +96,8 @@ def assert_fingerprint(cert, fingerprint):
# this digest.
hashfunc_map = {
16: md5,
20: sha1
20: sha1,
32: sha256,
}
fingerprint = fingerprint.replace(':', '').lower()
@@ -211,7 +212,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
context.verify_mode = cert_reqs
if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED)
# We do our own verification, including fingerprints and alternative
# hostnames. So disable it here
context.check_hostname = False
return context
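Editor's note: assert_fingerprint() selects the digest by pin length, so a 64-hex-character pin now means SHA-256. Computing such a pin for a live certificate looks roughly like this:

    import hashlib
    import ssl

    pem_cert = ssl.get_server_certificate(('example.com', 443))
    der_cert = ssl.PEM_cert_to_DER_cert(pem_cert)  # assert_fingerprint takes DER bytes

    pin = hashlib.sha256(der_cert).hexdigest()     # 64 hex chars -> sha256 entry
    print(pin)  # later: assert_fingerprint(der_cert, pin)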
