From a164a0fd8eadf572025a61ae6c577f6800dfb5f4 Mon Sep 17 00:00:00 2001 From: Alan Hamlett Date: Mon, 10 Aug 2015 18:34:04 -0700 Subject: [PATCH] upgrade wakatime cli to edge version --- plugin/packages/wakatime/base.py | 7 +- plugin/packages/wakatime/cli.py | 21 ++- .../packages/wakatime/languages/__init__.py | 2 +- plugin/packages/wakatime/packages/argparse.py | 24 ++- .../wakatime/packages/requests/__init__.py | 6 +- .../wakatime/packages/requests/adapters.py | 6 +- .../wakatime/packages/requests/api.py | 5 +- .../wakatime/packages/requests/auth.py | 5 +- .../wakatime/packages/requests/cookies.py | 27 ++- .../wakatime/packages/requests/models.py | 84 ++++----- .../packages/requests/packages/__init__.py | 106 +----------- .../requests/packages/urllib3/__init__.py | 9 +- .../requests/packages/urllib3/_collections.py | 45 ++--- .../requests/packages/urllib3/connection.py | 2 + .../packages/urllib3/connectionpool.py | 1 - .../packages/urllib3/contrib/pyopenssl.py | 23 +-- .../requests/packages/urllib3/exceptions.py | 5 + .../requests/packages/urllib3/response.py | 159 +++++++++++++++--- .../requests/packages/urllib3/util/ssl_.py | 38 +++-- .../requests/packages/urllib3/util/url.py | 2 + .../wakatime/packages/requests/sessions.py | 14 +- .../wakatime/packages/requests/utils.py | 7 +- plugin/packages/wakatime/stats.py | 91 +++------- 23 files changed, 356 insertions(+), 333 deletions(-) diff --git a/plugin/packages/wakatime/base.py b/plugin/packages/wakatime/base.py index 8f09bd9..419bba5 100644 --- a/plugin/packages/wakatime/base.py +++ b/plugin/packages/wakatime/base.py @@ -320,11 +320,8 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None, 'type': 'file', } if hidefilenames and targetFile is not None and not notfile: - data['entity'] = data['entity'].rsplit('/', 1)[-1].rsplit('\\', 1)[-1] - if len(data['entity'].strip('.').split('.', 1)) > 1: - data['entity'] = u('HIDDEN.{ext}').format(ext=u(data['entity'].strip('.').rsplit('.', 1)[-1])) - else: - data['entity'] = u('HIDDEN') + extension = u(os.path.splitext(data['entity'])[1]) + data['entity'] = u('HIDDEN{0}').format(extension) if stats.get('lines'): data['lines'] = stats['lines'] if stats.get('language'): diff --git a/plugin/packages/wakatime/cli.py b/plugin/packages/wakatime/cli.py index 30d0071..1b3ee57 100644 --- a/plugin/packages/wakatime/cli.py +++ b/plugin/packages/wakatime/cli.py @@ -11,8 +11,25 @@ import os import sys -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -import wakatime + + +# get path to local wakatime package +package_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +# add local wakatime package to sys.path +sys.path.insert(0, package_folder) + +# import local wakatime package +try: + import wakatime +except TypeError: + # on Windows, non-ASCII characters in import path can be fixed using + # the script path from sys.argv[0]. 
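[Note] A quick standalone sketch of what the rewritten hidefilenames branch in base.py above now does: the manual rsplit gymnastics are replaced by os.path.splitext, so a hidden entity keeps its real extension. The helper name below is invented purely for illustration; the real code wraps the result with wakatime's u() helper to keep it unicode on Python 2.

```python
import os


def obfuscated_entity(path):
    # os.path.splitext()[1] is the extension including its leading dot,
    # or an empty string when the file has no extension at all.
    extension = os.path.splitext(path)[1]
    return 'HIDDEN{0}'.format(extension)


print(obfuscated_entity('/home/alan/project/secret.py'))  # HIDDEN.py
print(obfuscated_entity('Makefile'))                      # HIDDEN
```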
+ # More info at https://github.com/wakatime/wakatime/issues/32 + package_folder = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) + sys.path.insert(0, package_folder) + import wakatime + if __name__ == '__main__': sys.exit(wakatime.main(sys.argv)) diff --git a/plugin/packages/wakatime/languages/__init__.py b/plugin/packages/wakatime/languages/__init__.py index 317336a..1795880 100644 --- a/plugin/packages/wakatime/languages/__init__.py +++ b/plugin/packages/wakatime/languages/__init__.py @@ -83,7 +83,7 @@ class DependencyParser(object): self.lexer = lexer if self.lexer: - module_name = self.lexer.__module__.split('.')[-1] + module_name = self.lexer.__module__.rsplit('.', 1)[-1] class_name = self.lexer.__class__.__name__.replace('Lexer', 'Parser', 1) else: module_name = 'unknown' diff --git a/plugin/packages/wakatime/packages/argparse.py b/plugin/packages/wakatime/packages/argparse.py index 32d948c..5a68b70 100644 --- a/plugin/packages/wakatime/packages/argparse.py +++ b/plugin/packages/wakatime/packages/argparse.py @@ -61,7 +61,12 @@ considered public as object names -- the API of the formatter objects is still considered an implementation detail.) """ -__version__ = '1.2.1' +__version__ = '1.3.0' # we use our own version number independant of the + # one in stdlib and we release this on pypi. + +__external_lib__ = True # to make sure the tests really test THIS lib, + # not the builtin one in Python stdlib + __all__ = [ 'ArgumentParser', 'ArgumentError', @@ -1045,9 +1050,13 @@ class _SubParsersAction(Action): class _ChoicesPseudoAction(Action): - def __init__(self, name, help): + def __init__(self, name, aliases, help): + metavar = dest = name + if aliases: + metavar += ' (%s)' % ', '.join(aliases) sup = super(_SubParsersAction._ChoicesPseudoAction, self) - sup.__init__(option_strings=[], dest=name, help=help) + sup.__init__(option_strings=[], dest=dest, help=help, + metavar=metavar) def __init__(self, option_strings, @@ -1075,15 +1084,22 @@ class _SubParsersAction(Action): if kwargs.get('prog') is None: kwargs['prog'] = '%s %s' % (self._prog_prefix, name) + aliases = kwargs.pop('aliases', ()) + # create a pseudo-action to hold the choice help if 'help' in kwargs: help = kwargs.pop('help') - choice_action = self._ChoicesPseudoAction(name, help) + choice_action = self._ChoicesPseudoAction(name, aliases, help) self._choices_actions.append(choice_action) # create the parser and add it to the map parser = self._parser_class(**kwargs) self._name_parser_map[name] = parser + + # make parser available under aliases also + for alias in aliases: + self._name_parser_map[alias] = parser + return parser def _get_subactions(self): diff --git a/plugin/packages/wakatime/packages/requests/__init__.py b/plugin/packages/wakatime/packages/requests/__init__.py index 446500b..d247128 100644 --- a/plugin/packages/wakatime/packages/requests/__init__.py +++ b/plugin/packages/wakatime/packages/requests/__init__.py @@ -6,7 +6,7 @@ # / """ -requests HTTP library +Requests HTTP library ~~~~~~~~~~~~~~~~~~~~~ Requests is an HTTP library, written in Python, for human beings. Basic GET @@ -42,8 +42,8 @@ is at . 
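[Note] The argparse hunk above adds subparser aliases to the bundled copy, so one parser can be reached under several command names and the help output shows "checkout (co)". A usage sketch — it runs against the bundled module or Python 3's stdlib argparse, while Python 2.7's stdlib argparse does not accept the aliases keyword:

```python
import argparse

parser = argparse.ArgumentParser(prog='vcs')
subparsers = parser.add_subparsers(dest='command')

# 'co' is registered as an alias, so both names map to the same subparser
checkout = subparsers.add_parser('checkout', aliases=['co'],
                                 help='check out a branch')
checkout.add_argument('branch')

args = parser.parse_args(['co', 'develop'])
print(args.command, args.branch)  # -> co develop
```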
""" __title__ = 'requests' -__version__ = '2.6.0' -__build__ = 0x020503 +__version__ = '2.7.0' +__build__ = 0x020700 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2015 Kenneth Reitz' diff --git a/plugin/packages/wakatime/packages/requests/adapters.py b/plugin/packages/wakatime/packages/requests/adapters.py index 02e0dd1..841cbbf 100644 --- a/plugin/packages/wakatime/packages/requests/adapters.py +++ b/plugin/packages/wakatime/packages/requests/adapters.py @@ -35,6 +35,7 @@ from .auth import _basic_auth_str DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None class BaseAdapter(object): @@ -375,7 +376,7 @@ class HTTPAdapter(BaseAdapter): if hasattr(conn, 'proxy_pool'): conn = conn.proxy_pool - low_conn = conn._get_conn(timeout=timeout) + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: low_conn.putrequest(request.method, @@ -407,9 +408,6 @@ class HTTPAdapter(BaseAdapter): # Then, reraise so that we can handle the actual exception. low_conn.close() raise - else: - # All is well, return the connection to the pool. - conn._put_conn(low_conn) except (ProtocolError, socket.error) as err: raise ConnectionError(err, request=request) diff --git a/plugin/packages/wakatime/packages/requests/api.py b/plugin/packages/wakatime/packages/requests/api.py index 98c9229..d40fa38 100644 --- a/plugin/packages/wakatime/packages/requests/api.py +++ b/plugin/packages/wakatime/packages/requests/api.py @@ -55,17 +55,18 @@ def request(method, url, **kwargs): return response -def get(url, **kwargs): +def get(url, params=None, **kwargs): """Sends a GET request. :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) - return request('get', url, **kwargs) + return request('get', url, params=params, **kwargs) def options(url, **kwargs): diff --git a/plugin/packages/wakatime/packages/requests/auth.py b/plugin/packages/wakatime/packages/requests/auth.py index d1c4825..03c3302 100644 --- a/plugin/packages/wakatime/packages/requests/auth.py +++ b/plugin/packages/wakatime/packages/requests/auth.py @@ -103,7 +103,8 @@ class HTTPDigestAuth(AuthBase): # XXX not implemented yet entdig = None p_parsed = urlparse(url) - path = p_parsed.path + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" if p_parsed.query: path += '?' + p_parsed.query @@ -178,7 +179,7 @@ class HTTPDigestAuth(AuthBase): # Consume content and release the original connection # to allow our new request to reuse the same one. r.content - r.raw.release_conn() + r.close() prep = r.request.copy() extract_cookies_to_jar(prep._cookies, r.request, r.raw) prep.prepare_cookies(prep._cookies) diff --git a/plugin/packages/wakatime/packages/requests/cookies.py b/plugin/packages/wakatime/packages/requests/cookies.py index 6969fe5..88b478c 100644 --- a/plugin/packages/wakatime/packages/requests/cookies.py +++ b/plugin/packages/wakatime/packages/requests/cookies.py @@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests. requests.utils imports from here, so be careful with imports. 
""" +import copy import time import collections from .compat import cookielib, urlparse, urlunparse, Morsel @@ -302,7 +303,7 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): """Updates this jar with cookies from another CookieJar or dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: - self.set_cookie(cookie) + self.set_cookie(copy.copy(cookie)) else: super(RequestsCookieJar, self).update(other) @@ -359,6 +360,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): return new_cj +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instane of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + def create_cookie(name, value, **kwargs): """Make a cookie from underspecified parameters. @@ -399,11 +415,14 @@ def morsel_to_cookie(morsel): expires = None if morsel['max-age']: - expires = time.time() + morsel['max-age'] + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = time.mktime( - time.strptime(morsel['expires'], time_template)) - time.timezone + expires = int(time.mktime( + time.strptime(morsel['expires'], time_template)) - time.timezone) return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), diff --git a/plugin/packages/wakatime/packages/requests/models.py b/plugin/packages/wakatime/packages/requests/models.py index cb153aa..4270c64 100644 --- a/plugin/packages/wakatime/packages/requests/models.py +++ b/plugin/packages/wakatime/packages/requests/models.py @@ -15,7 +15,7 @@ from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url @@ -30,7 +30,8 @@ from .utils import ( iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, - is_py2, chardet, json, builtin_str, basestring) + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson from .status_codes import codes #: The set of HTTP status codes that indicate an automatically @@ -42,12 +43,11 @@ REDIRECT_STATI = ( codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) + DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 -json_dumps = json.dumps - class RequestEncodingMixin(object): @property @@ -149,8 +149,7 @@ class RequestEncodingMixin(object): else: fdata = fp.read() - rf = RequestField(name=k, data=fdata, - filename=fn, headers=fh) + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) @@ -207,17 +206,8 @@ class Request(RequestHooksMixin): """ - def __init__(self, - method=None, - url=None, - headers=None, - files=None, - data=None, - params=None, - auth=None, - cookies=None, - hooks=None, - json=None): + def __init__(self, method=None, url=None, headers=None, files=None, 
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. data = [] if data is None else data @@ -296,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, - data=None, params=None, auth=None, cookies=None, hooks=None, - json=None): + data=None, params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) @@ -306,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) + # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. @@ -320,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = self._cookies.copy() if self._cookies is not None else None + p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks return p @@ -357,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): raise InvalidURL(*e.args) if not scheme: - raise MissingSchema("Invalid URL {0!r}: No schema supplied. " - "Perhaps you meant http://{0}?".format(url)) + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) @@ -424,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if json is not None: content_type = 'application/json' - body = json_dumps(json) + body = complexjson.dumps(json) is_stream = all([ hasattr(data, '__iter__'), @@ -501,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_content_length(self.body) def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data.""" + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand.""" if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies @@ -514,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_hooks(self, hooks): """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. 
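[Note] Request and PreparedRequest now accept the json keyword shown above and serialise it with complexjson, also setting the Content-Type header when none is present. A minimal usage sketch — httpbin.org is only an illustrative endpoint, and both calls send the same body:

```python
import json

import requests

payload = {'user': 'alan', 'project': 'wakatime'}

# the json= keyword serialises the payload and sets Content-Type for you
r1 = requests.post('https://httpbin.org/post', json=payload)

# roughly equivalent explicit form
r2 = requests.post('https://httpbin.org/post',
                   data=json.dumps(payload),
                   headers={'Content-Type': 'application/json'})

print(r1.status_code, r2.status_code)
```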
To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) @@ -524,16 +528,8 @@ class Response(object): """ __attrs__ = [ - '_content', - 'status_code', - 'headers', - 'url', - 'history', - 'encoding', - 'reason', - 'cookies', - 'elapsed', - 'request', + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' ] def __init__(self): @@ -653,9 +649,10 @@ class Response(object): If decode_unicode is True, content will be decoded using the best available encoding based on the response. """ + def generate(): - try: - # Special case for urllib3. + # Special case for urllib3. + if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk @@ -665,7 +662,7 @@ class Response(object): raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) - except AttributeError: + else: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) @@ -796,14 +793,16 @@ class Response(object): encoding = guess_json_utf(self.content) if encoding is not None: try: - return json.loads(self.content.decode(encoding), **kwargs) + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, # and the server didn't bother to tell us what codec *was* # used. pass - return json.loads(self.text, **kwargs) + return complexjson.loads(self.text, **kwargs) @property def links(self): @@ -829,10 +828,10 @@ class Response(object): http_error_msg = '' if 400 <= self.status_code < 500: - http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason) + http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif 500 <= self.status_code < 600: - http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason) + http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self) @@ -843,4 +842,7 @@ class Response(object): *Note: Should not normally need to be called explicitly.* """ + if not self._content_consumed: + return self.raw.close() + return self.raw.release_conn() diff --git a/plugin/packages/wakatime/packages/requests/packages/__init__.py b/plugin/packages/wakatime/packages/requests/packages/__init__.py index 4dcf870..d62c4b7 100644 --- a/plugin/packages/wakatime/packages/requests/packages/__init__.py +++ b/plugin/packages/wakatime/packages/requests/packages/__init__.py @@ -1,107 +1,3 @@ -""" -Copyright (c) Donald Stufft, pip, and individual contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""" from __future__ import absolute_import -import sys - - -class VendorAlias(object): - - def __init__(self, package_names): - self._package_names = package_names - self._vendor_name = __name__ - self._vendor_pkg = self._vendor_name + "." - self._vendor_pkgs = [ - self._vendor_pkg + name for name in self._package_names - ] - - def find_module(self, fullname, path=None): - if fullname.startswith(self._vendor_pkg): - return self - - def load_module(self, name): - # Ensure that this only works for the vendored name - if not name.startswith(self._vendor_pkg): - raise ImportError( - "Cannot import %s, must be a subpackage of '%s'." % ( - name, self._vendor_name, - ) - ) - - if not (name == self._vendor_name or - any(name.startswith(pkg) for pkg in self._vendor_pkgs)): - raise ImportError( - "Cannot import %s, must be one of %s." % ( - name, self._vendor_pkgs - ) - ) - - # Check to see if we already have this item in sys.modules, if we do - # then simply return that. - if name in sys.modules: - return sys.modules[name] - - # Check to see if we can import the vendor name - try: - # We do this dance here because we want to try and import this - # module without hitting a recursion error because of a bunch of - # VendorAlias instances on sys.meta_path - real_meta_path = sys.meta_path[:] - try: - sys.meta_path = [ - m for m in sys.meta_path - if not isinstance(m, VendorAlias) - ] - __import__(name) - module = sys.modules[name] - finally: - # Re-add any additions to sys.meta_path that were made while - # during the import we just did, otherwise things like - # requests.packages.urllib3.poolmanager will fail. - for m in sys.meta_path: - if m not in real_meta_path: - real_meta_path.append(m) - - # Restore sys.meta_path with any new items. - sys.meta_path = real_meta_path - except ImportError: - # We can't import the vendor name, so we'll try to import the - # "real" name. - real_name = name[len(self._vendor_pkg):] - try: - __import__(real_name) - module = sys.modules[real_name] - except ImportError: - raise ImportError("No module named '%s'" % (name,)) - - # If we've gotten here we've found the module we're looking for, either - # as part of our vendored package, or as the real name, so we'll add - # it to sys.modules as the vendored name so that we don't have to do - # the lookup again. - sys.modules[name] = module - - # Finally, return the loaded module - return module - - -sys.meta_path.append(VendorAlias(["urllib3", "chardet"])) +from . import urllib3 diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/__init__.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/__init__.py index 0660b9c..f48ac4a 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/__init__.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/__init__.py @@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using. 
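[Note] With the VendorAlias import machinery deleted above, requests.packages becomes a plain re-export of the vendored urllib3. A sketch of what that means in practice, assuming a standalone requests 2.7 install (inside this plugin the same modules live under wakatime.packages.requests):

```python
import requests
from requests.packages import urllib3 as vendored

# both spellings now resolve to the same bundled module object
print(vendored is requests.packages.urllib3)   # True
print(vendored.__version__)                    # 1.10.4 in this release
```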
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.10.2' +__version__ = '1.10.4' from .connectionpool import ( @@ -55,9 +55,12 @@ def add_stderr_logger(level=logging.DEBUG): del NullHandler -# Set security warning to always go off by default. import warnings -warnings.simplefilter('always', exceptions.SecurityWarning) +# SecurityWarning's always go off by default. +warnings.simplefilter('always', exceptions.SecurityWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter('default', exceptions.InsecurePlatformWarning, + append=True) def disable_warnings(category=exceptions.HTTPWarning): """ diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/_collections.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/_collections.py index cc424de..279416c 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/_collections.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/_collections.py @@ -227,20 +227,20 @@ class HTTPHeaderDict(dict): # Need to convert the tuple to list for further extension _dict_setitem(self, key_lower, [vals[0], vals[1], val]) - def extend(*args, **kwargs): + def extend(self, *args, **kwargs): """Generic import function for any type of header-like object. Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ """ - if len(args) > 2: - raise TypeError("update() takes at most 2 positional " + if len(args) > 1: + raise TypeError("extend() takes at most 1 positional " "arguments ({} given)".format(len(args))) - elif not args: - raise TypeError("update() takes at least 1 argument (0 given)") - self = args[0] - other = args[1] if len(args) >= 2 else () + other = args[0] if len(args) >= 1 else () - if isinstance(other, Mapping): + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): for key in other: self.add(key, other[key]) elif hasattr(other, "keys"): @@ -304,17 +304,20 @@ class HTTPHeaderDict(dict): return list(self.iteritems()) @classmethod - def from_httplib(cls, message, duplicates=('set-cookie',)): # Python 2 + def from_httplib(cls, message): # Python 2 """Read headers from a Python 2 httplib message object.""" - ret = cls(message.items()) - # ret now contains only the last header line for each duplicate. - # Importing with all duplicates would be nice, but this would - # mean to repeat most of the raw parsing already done, when the - # message object was created. Extracting only the headers of interest - # separately, the cookies, should be faster and requires less - # extra code. - for key in duplicates: - ret.discard(key) - for val in message.getheaders(key): - ret.add(key, val) - return ret + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. 
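[Note] The extend() fix above makes HTTPHeaderDict merge header sources by add()ing values rather than overwriting them, so repeated headers such as Set-Cookie survive. A small sketch, again assuming a standalone requests install for the import path:

```python
from requests.packages.urllib3._collections import HTTPHeaderDict

h = HTTPHeaderDict()
h.add('Set-Cookie', 'a=1')

# extend() accepts another HTTPHeaderDict, any mapping, or an iterable of
# (name, value) pairs, and adds each one instead of clobbering.
h.extend([('Set-Cookie', 'b=2'), ('Content-Type', 'text/html')])

print(h.getlist('Set-Cookie'))   # ['a=1', 'b=2']
print(h['Set-Cookie'])           # 'a=1, b=2'
```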
+ headers = [] + + for line in message.headers: + if line.startswith((' ', '\t')): + key, value = headers[-1] + headers[-1] = (key, value + '\r\n' + line.rstrip()) + continue + + key, value = line.split(':', 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/connection.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/connection.py index e5de769..2a8c359 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/connection.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/connection.py @@ -260,3 +260,5 @@ if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection +else: + HTTPSConnection = DummyConnection diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/connectionpool.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/connectionpool.py index 0085345..117269a 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/connectionpool.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/connectionpool.py @@ -735,7 +735,6 @@ class HTTPSConnectionPool(HTTPConnectionPool): % (self.num_connections, self.host)) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - # Platform-specific: Python without ssl raise SSLError("Can't connect to HTTPS URL because the SSL " "module is not available.") diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/contrib/pyopenssl.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/contrib/pyopenssl.py index ee657fb..b2c34a8 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/contrib/pyopenssl.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/contrib/pyopenssl.py @@ -38,8 +38,6 @@ Module Variables ---------------- :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. - Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: - ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) @@ -85,22 +83,7 @@ _openssl_verify = { + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM over any AES-CBC for better performance and security, -# - use 3DES as fallback which is secure but slow, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. 
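[Note] The new from_httplib() above re-reads the raw header lines of a Python 2 httplib message so duplicate headers are preserved and RFC 2616 folded continuation lines (leading space or tab) are re-attached to the previous field. The same idea as a standalone function:

```python
def parse_raw_headers(lines):
    headers = []
    for line in lines:
        if line.startswith((' ', '\t')):
            # continuation of the previous header line
            key, value = headers[-1]
            headers[-1] = (key, value + '\r\n' + line.rstrip())
            continue
        key, value = line.split(':', 1)
        headers.append((key, value.strip()))
    return headers


raw = [
    'Set-Cookie: a=1; Path=/\r\n',
    'Set-Cookie: b=2; Path=/\r\n',
    'X-Long-Header: first part\r\n',
    '\tsecond part\r\n',
]
print(parse_raw_headers(raw))
```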
-DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ - "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ - "!aNULL:!MD5:!DSS" +DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS orig_util_HAS_SNI = util.HAS_SNI @@ -299,7 +282,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, try: cnx.do_handshake() except OpenSSL.SSL.WantReadError: - select.select([sock], [], []) + rd, _, _ = select.select([sock], [], [], sock.gettimeout()) + if not rd: + raise timeout('select timed out') continue except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad handshake', e) diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/exceptions.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/exceptions.py index 5d52301..31bda1c 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/exceptions.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/exceptions.py @@ -162,3 +162,8 @@ class SystemTimeWarning(SecurityWarning): class InsecurePlatformWarning(SecurityWarning): "Warned when certain SSL configuration is not available on a platform." pass + + +class ResponseNotChunked(ProtocolError, ValueError): + "Response needs to be chunked in order to read it as chunks." + pass diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/response.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/response.py index 34cd3d7..24140c4 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/response.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/response.py @@ -1,9 +1,15 @@ +try: + import http.client as httplib +except ImportError: + import httplib import zlib import io from socket import timeout as SocketTimeout from ._collections import HTTPHeaderDict -from .exceptions import ProtocolError, DecodeError, ReadTimeoutError +from .exceptions import ( + ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked +) from .packages.six import string_types as basestring, binary_type, PY3 from .connection import HTTPException, BaseSSLError from .util.response import is_fp_closed @@ -117,7 +123,17 @@ class HTTPResponse(io.IOBase): if hasattr(body, 'read'): self._fp = body - if preload_content and not self._body: + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get('transfer-encoding', '').lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # We certainly don't want to preload content when the response is chunked. + if not self.chunked and preload_content and not self._body: self._body = self.read(decode_content=decode_content) def get_redirect_location(self): @@ -157,6 +173,35 @@ class HTTPResponse(io.IOBase): """ return self._fp_bytes_read + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessar. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get('content-encoding', '').lower() + if self._decoder is None and content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. 
+ """ + try: + if decode_content and self._decoder: + data = self._decoder.decompress(data) + except (IOError, zlib.error) as e: + content_encoding = self.headers.get('content-encoding', '').lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, e) + + if flush_decoder and decode_content and self._decoder: + buf = self._decoder.decompress(binary_type()) + data += buf + self._decoder.flush() + + return data + def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional @@ -178,12 +223,7 @@ class HTTPResponse(io.IOBase): after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ - # Note: content-encoding value should be case-insensitive, per RFC 7230 - # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None: - if content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) + self._init_decoder() if decode_content is None: decode_content = self.decode_content @@ -232,17 +272,7 @@ class HTTPResponse(io.IOBase): self._fp_bytes_read += len(data) - try: - if decode_content and self._decoder: - data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, e) - - if flush_decoder and decode_content and self._decoder: - buf = self._decoder.decompress(binary_type()) - data += buf + self._decoder.flush() + data = self._decode(data, decode_content, flush_decoder) if cache_content: self._body = data @@ -269,11 +299,15 @@ class HTTPResponse(io.IOBase): If True, will attempt to decode the body based on the 'content-encoding' header. """ - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) + if self.chunked: + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) - if data: - yield data + if data: + yield data @classmethod def from_httplib(ResponseCls, r, **response_kw): @@ -351,3 +385,82 @@ class HTTPResponse(io.IOBase): else: b[:len(temp)] = temp return len(temp) + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b';', 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise httplib.IncompleteRead(line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. 
+ self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked("Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing.") + + if self._original_response and self._original_response._method.upper() == 'HEAD': + # Don't bother reading the body of a HEAD request. + # FIXME: Can we do this somehow without accessing private httplib _method? + self._original_response.close() + return + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + yield self._decode(chunk, decode_content=decode_content, + flush_decoder=True) + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b'\r\n': + break + + # We read everything; close the "file". + if self._original_response: + self._original_response.close() + self.release_conn() diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/util/ssl_.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/util/ssl_.py index e7e7dfa..b846d42 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/util/ssl_.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/util/ssl_.py @@ -9,10 +9,10 @@ HAS_SNI = False create_default_context = None import errno -import ssl import warnings try: # Test for SSL features + import ssl from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import HAS_SNI # Has SNI? except ImportError: @@ -25,14 +25,24 @@ except ImportError: OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 -try: - from ssl import _DEFAULT_CIPHERS -except ImportError: - _DEFAULT_CIPHERS = ( - 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' - 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' - '!eNULL:!MD5' - ) +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM over any AES-CBC for better performance and security, +# - use 3DES as fallback which is secure but slow, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. +DEFAULT_CIPHERS = ( + 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' + 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' + '!eNULL:!MD5' +) try: from ssl import SSLContext # Modern SSL? 
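[Note] read_chunked() above walks the body one chunk at a time: a hex size line, the chunk payload, a trailing CRLF, and a zero-length chunk to finish. The same wire format on an in-memory stream, stripped of the urllib3 plumbing:

```python
import io


def iter_chunks(fp):
    while True:
        size_line = fp.readline().split(b';', 1)[0]   # drop chunk extensions
        size = int(size_line, 16)
        if size == 0:                                  # last-chunk marker
            return
        yield fp.read(size)
        fp.read(2)                                     # discard trailing CRLF


body = io.BytesIO(b'4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n')
print(b''.join(iter_chunks(body)))   # b'Wikipedia'
```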
@@ -40,7 +50,8 @@ except ImportError: import sys class SSLContext(object): # Platform-specific: Python 2 & 3.1 - supports_set_ciphers = sys.version_info >= (2, 7) + supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or + (3, 2) <= sys.version_info) def __init__(self, protocol_version): self.protocol = protocol_version @@ -167,7 +178,7 @@ def resolve_ssl_version(candidate): return candidate -def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, +def create_urllib3_context(ssl_version=None, cert_reqs=None, options=None, ciphers=None): """All arguments have the same meaning as ``ssl_wrap_socket``. @@ -204,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, """ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + if options is None: options = 0 # SSLv2 is easily broken and is considered harmful and dangerous @@ -217,7 +231,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, context.options |= options if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 - context.set_ciphers(ciphers or _DEFAULT_CIPHERS) + context.set_ciphers(ciphers or DEFAULT_CIPHERS) context.verify_mode = cert_reqs if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 diff --git a/plugin/packages/wakatime/packages/requests/packages/urllib3/util/url.py b/plugin/packages/wakatime/packages/requests/packages/urllib3/util/url.py index b2ec834..e58050c 100644 --- a/plugin/packages/wakatime/packages/requests/packages/urllib3/util/url.py +++ b/plugin/packages/wakatime/packages/requests/packages/urllib3/util/url.py @@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)): def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): + if path and not path.startswith('/'): + path = '/' + path return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) diff --git a/plugin/packages/wakatime/packages/requests/sessions.py b/plugin/packages/wakatime/packages/requests/sessions.py index ef3f22b..820919e 100644 --- a/plugin/packages/wakatime/packages/requests/sessions.py +++ b/plugin/packages/wakatime/packages/requests/sessions.py @@ -90,7 +90,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): class SessionRedirectMixin(object): def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None): + verify=True, cert=None, proxies=None, **adapter_kwargs): """Receives a Response. Returns a generator of Responses.""" i = 0 @@ -193,6 +193,7 @@ class SessionRedirectMixin(object): cert=cert, proxies=proxies, allow_redirects=False, + **adapter_kwargs ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) @@ -560,10 +561,6 @@ class Session(SessionRedirectMixin): # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') - timeout = kwargs.get('timeout') - verify = kwargs.get('verify') - cert = kwargs.get('cert') - proxies = kwargs.get('proxies') hooks = request.hooks # Get the appropriate adapter to use @@ -591,12 +588,7 @@ class Session(SessionRedirectMixin): extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. 
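[Note] create_urllib3_context() above now owns the DEFAULT_CIPHERS string and defers the ssl.CERT_REQUIRED default to call time so the module still imports when ssl is missing. Stripped to the stdlib, the construction looks roughly like this (for Python 2.7.9+/3.2+ where SSLContext exists):

```python
import ssl


def make_context(ciphers=None):
    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)

    # SSLv2 is broken and SSLv3 is vulnerable to POODLE; refuse both.
    context.options |= ssl.OP_NO_SSLv2
    context.options |= ssl.OP_NO_SSLv3
    # TLS compression enables CRIME; disable it where the flag exists.
    context.options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)

    if ciphers:
        context.set_ciphers(ciphers)

    # callers still need to load CA certificates (load_verify_locations or
    # load_default_certs) before this context can actually verify peers
    context.verify_mode = ssl.CERT_REQUIRED
    return context
```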
- gen = self.resolve_redirects(r, request, - stream=stream, - timeout=timeout, - verify=verify, - cert=cert, - proxies=proxies) + gen = self.resolve_redirects(r, request, **kwargs) # Resolve redirects if allowed. history = [resp for resp in gen] if allow_redirects else [] diff --git a/plugin/packages/wakatime/packages/requests/utils.py b/plugin/packages/wakatime/packages/requests/utils.py index 8fba62d..3fd0e41 100644 --- a/plugin/packages/wakatime/packages/requests/utils.py +++ b/plugin/packages/wakatime/packages/requests/utils.py @@ -67,7 +67,7 @@ def super_len(o): return len(o.getvalue()) -def get_netrc_auth(url): +def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" try: @@ -105,8 +105,9 @@ def get_netrc_auth(url): return (_netrc[login_i], _netrc[2]) except (NetrcParseError, IOError): # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth - pass + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise # AppEngine hackiness. except (ImportError, AttributeError): diff --git a/plugin/packages/wakatime/stats.py b/plugin/packages/wakatime/stats.py index 39699a6..8bf8c01 100644 --- a/plugin/packages/wakatime/stats.py +++ b/plugin/packages/wakatime/stats.py @@ -28,56 +28,19 @@ from pygments.util import ClassNotFound log = logging.getLogger('WakaTime') -# extensions taking priority over lexer -EXTENSIONS = { - 'j2': 'HTML', - 'markdown': 'Markdown', - 'md': 'Markdown', - 'mdown': 'Markdown', - 'twig': 'Twig', -} - -# lexers to human readable languages -TRANSLATIONS = { - 'CSS+Genshi Text': 'CSS', - 'CSS+Lasso': 'CSS', - 'HTML+Django/Jinja': 'HTML', - 'HTML+Lasso': 'HTML', - 'JavaScript+Genshi Text': 'JavaScript', - 'JavaScript+Lasso': 'JavaScript', - 'Perl6': 'Perl', - 'RHTML': 'HTML', -} - -# extensions for when no lexer is found -AUXILIARY_EXTENSIONS = { - 'vb': 'VB.net', -} - - def guess_language(file_name): """Guess lexer and language for a file. Returns (language, lexer) tuple where language is a unicode string. """ + language = get_language_from_extension(file_name) + if language: + return language, None + lexer = smart_guess_lexer(file_name) - language = None - - # guess language from file extension - if file_name: - language = get_language_from_extension(file_name, EXTENSIONS) - - # get language from lexer if we didn't have a hard-coded extension rule - if language is None and lexer: - language = u(lexer.name) - - if language is None: - language = get_language_from_extension(file_name, AUXILIARY_EXTENSIONS) - - if language is not None: - language = translate_language(language) + language = u(lexer.name) return language, lexer @@ -93,14 +56,14 @@ def smart_guess_lexer(file_name): text = get_file_contents(file_name) - lexer_1, accuracy_1 = guess_lexer_using_filename(file_name, text) - lexer_2, accuracy_2 = guess_lexer_using_modeline(text) + lexer1, accuracy1 = guess_lexer_using_filename(file_name, text) + lexer2, accuracy2 = guess_lexer_using_modeline(text) - if lexer_1: - lexer = lexer_1 - if (lexer_2 and accuracy_2 and - (not accuracy_1 or accuracy_2 > accuracy_1)): - lexer = lexer_2 + if lexer1: + lexer = lexer1 + if (lexer2 and accuracy2 and + (not accuracy1 or accuracy2 > accuracy1)): + lexer = lexer2 return lexer @@ -156,30 +119,24 @@ def guess_lexer_using_modeline(text): return lexer, accuracy -def get_language_from_extension(file_name, extension_map): - """Returns a matching language for the given file_name using extension_map. 
+def get_language_from_extension(file_name): + """Returns a matching language for the given file extension. """ - extension = file_name.rsplit('.', 1)[-1] if len(file_name.rsplit('.', 1)) > 1 else None - - if extension: - if extension in extension_map: - return extension_map[extension] - if extension.lower() in extension_map: - return extension_map[extension.lower()] + extension = os.path.splitext(file_name)[1].lower() + if extension == '.h': + directory = os.path.dirname(file_name) + available_files = os.listdir(directory) + available_extensions = zip(*map(os.path.splitext, available_files))[1] + available_extensions = [ext.lower() for ext in available_extensions] + if '.cpp' in available_extensions: + return 'C++' + if '.c' in available_extensions: + return 'C' return None -def translate_language(language): - """Turns Pygments lexer class name string into human-readable language. - """ - - if language in TRANSLATIONS: - language = TRANSLATIONS[language] - return language - - def number_lines_in_file(file_name): lines = 0 try:
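[Note] The stats.py rewrite above special-cases .h files: the language is inferred from sibling files in the same directory before falling back to Pygments. One wrinkle worth flagging: zip(*map(...))[1] only subscripts on Python 2, since zip returns an iterator on Python 3. A standalone sketch of the same idea using a plain comprehension instead:

```python
import os


def language_for_header(file_name):
    """Guess C vs C++ for a .h file from its neighbours, else None."""
    if os.path.splitext(file_name)[1].lower() != '.h':
        return None

    directory = os.path.dirname(file_name)
    sibling_extensions = [os.path.splitext(f)[1].lower()
                          for f in os.listdir(directory)]

    if '.cpp' in sibling_extensions:
        return 'C++'
    if '.c' in sibling_extensions:
        return 'C'
    return None
```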