upgrade wakatime cli to v4.1.8

parent 16bbe21be9
commit 192a5c7aa7

28 changed files with 423 additions and 318 deletions
@@ -1,7 +1,7 @@
__title__ = 'wakatime'
__description__ = 'Common interface to the WakaTime api.'
__url__ = 'https://github.com/wakatime/wakatime'
__version_info__ = ('4', '1', '3')
__version_info__ = ('4', '1', '8')
__version__ = '.'.join(__version_info__)
__author__ = 'Alan Hamlett'
__author_email__ = 'alan@wakatime.com'
@@ -14,4 +14,4 @@
__all__ = ['main']


from .base import main
from .main import execute
@@ -32,4 +32,4 @@ except (TypeError, ImportError):


if __name__ == '__main__':
    sys.exit(wakatime.main(sys.argv[1:]))
    sys.exit(wakatime.execute(sys.argv[1:]))
@@ -26,9 +26,12 @@ if is_py2:  # pragma: nocover
            return text.decode('utf-8')
        except:
            try:
                return unicode(text)
                return text.decode(sys.getdefaultencoding())
            except:
                return text
                try:
                    return unicode(text)
                except:
                    return text
    open = codecs.open
    basestring = basestring


@@ -39,8 +42,17 @@ elif is_py3:  # pragma: nocover
        if text is None:
            return None
        if isinstance(text, bytes):
            return text.decode('utf-8')
        return str(text)
            try:
                return text.decode('utf-8')
            except:
                try:
                    return text.decode(sys.getdefaultencoding())
                except:
                    pass
        try:
            return str(text)
        except:
            return text
    open = open
    basestring = (str, bytes)
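The decoding change above is easier to read outside the diff. A minimal standalone sketch of the same fallback order (UTF-8 first, then the interpreter's default encoding, then a best-effort text conversion); the helper name is illustrative, not the module's API:

    import sys

    def to_unicode(text):
        # Mirrors the new fallback order: utf-8, then the default encoding,
        # then a plain str() conversion as a last resort.
        if text is None:
            return None
        if isinstance(text, bytes):
            try:
                return text.decode('utf-8')
            except UnicodeDecodeError:
                try:
                    return text.decode(sys.getdefaultencoding())
                except UnicodeDecodeError:
                    pass
        try:
            return str(text)
        except Exception:
            return text

    print(to_unicode(b'caf\xc3\xa9'))  # café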
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
    wakatime.languages
    ~~~~~~~~~~~~~~~~~~
    wakatime.dependencies
    ~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from a source code file.


@@ -10,10 +10,12 @@
"""

import logging
import re
import sys
import traceback

from ..compat import u, open, import_module
from ..exceptions import NotYetImplemented


log = logging.getLogger('WakaTime')
@@ -24,26 +26,28 @@ class TokenParser(object):
    language, inherit from this class and implement the :meth:`parse` method
    to return a list of dependency strings.
    """
    source_file = None
    lexer = None
    dependencies = []
    tokens = []
    exclude = []

    def __init__(self, source_file, lexer=None):
        self._tokens = None
        self.dependencies = []
        self.source_file = source_file
        self.lexer = lexer
        self.exclude = [re.compile(x, re.IGNORECASE) for x in self.exclude]

    @property
    def tokens(self):
        if self._tokens is None:
            self._tokens = self._extract_tokens()
        return self._tokens

    def parse(self, tokens=[]):
        """ Should return a list of dependencies.
        """
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
        raise Exception('Not yet implemented.')
        raise NotYetImplemented()

    def append(self, dep, truncate=False, separator=None, truncate_to=None,
               strip_whitespace=True):
        if dep == 'as':
            print('***************** as')
        self._save_dependency(
            dep,
            truncate=truncate,

@@ -52,6 +56,9 @@ class TokenParser(object):
            strip_whitespace=strip_whitespace,
        )

    def partial(self, token):
        return u(token).split('.')[-1]

    def _extract_tokens(self):
        if self.lexer:
            try:
@@ -73,13 +80,21 @@ class TokenParser(object):
                separator = u('.')
            separator = u(separator)
            dep = dep.split(separator)
            if truncate_to is None or truncate_to < 0 or truncate_to > len(dep) - 1:
                truncate_to = len(dep) - 1
            dep = dep[0] if len(dep) == 1 else separator.join(dep[0:truncate_to])
            if truncate_to is None or truncate_to < 1:
                truncate_to = 1
            if truncate_to > len(dep):
                truncate_to = len(dep)
            dep = dep[0] if len(dep) == 1 else separator.join(dep[:truncate_to])
        if strip_whitespace:
            dep = dep.strip()
        if dep:
            self.dependencies.append(dep)
        if dep and (not separator or not dep.startswith(separator)):
            should_exclude = False
            for compiled in self.exclude:
                if compiled.search(dep):
                    should_exclude = True
                    break
            if not should_exclude:
                self.dependencies.append(dep)


class DependencyParser(object):
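The reworked truncation above clamps truncate_to into the range 1..len(parts) and keeps that many leading segments, where the old code kept everything up to the second-to-last segment. A small self-contained sketch of the new behaviour (the helper name is made up for illustration):

    def truncate_dependency(dep, separator='.', truncate_to=None):
        # Clamp truncate_to to [1, number of segments], then keep the
        # leading segments, as the updated _save_dependency() does.
        parts = dep.split(separator)
        if truncate_to is None or truncate_to < 1:
            truncate_to = 1
        if truncate_to > len(parts):
            truncate_to = len(parts)
        return parts[0] if len(parts) == 1 else separator.join(parts[:truncate_to])

    print(truncate_dependency('django.db.models', truncate_to=1))  # django
    print(truncate_dependency('django.db.models', truncate_to=2))  # django.db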
packages/wakatime/dependencies/c_cpp.py (new file, 68 lines)

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
"""
    wakatime.languages.c_cpp
    ~~~~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from C++ code.

    :copyright: (c) 2014 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""

from . import TokenParser


class CppParser(TokenParser):
    exclude = [
        r'^stdio\.h$',
        r'^stdlib\.h$',
        r'^string\.h$',
        r'^time\.h$',
    ]

    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if self.partial(token) == 'Preproc':
            self._process_preproc(token, content)
        else:
            self._process_other(token, content)

    def _process_preproc(self, token, content):
        if content.strip().startswith('include ') or content.strip().startswith("include\t"):
            content = content.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
            self.append(content)

    def _process_other(self, token, content):
        pass


class CParser(TokenParser):
    exclude = [
        r'^stdio\.h$',
        r'^stdlib\.h$',
        r'^string\.h$',
        r'^time\.h$',
    ]

    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if self.partial(token) == 'Preproc':
            self._process_preproc(token, content)
        else:
            self._process_other(token, content)

    def _process_preproc(self, token, content):
        if content.strip().startswith('include ') or content.strip().startswith("include\t"):
            content = content.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
            self.append(content)

    def _process_other(self, token, content):
        pass
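Both new parsers reduce an include preprocessor line to a bare header name before calling self.append(). The string handling, shown on its own with a hypothetical helper name:

    def header_from_preproc(content):
        # 'include <stdio.h>' or 'include "mylib.h"' -> 'stdio.h' / 'mylib.h'
        stripped = content.strip()
        if stripped.startswith('include ') or stripped.startswith('include\t'):
            return stripped.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
        return None

    print(header_from_preproc('include <stdio.h>'))   # stdio.h
    print(header_from_preproc('include "mylib.h"'))   # mylib.h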
@@ -26,10 +26,8 @@ class JsonParser(TokenParser):
    state = None
    level = 0

    def parse(self, tokens=[]):
    def parse(self):
        self._process_file_name(os.path.basename(self.source_file))
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies
@@ -10,20 +10,17 @@
"""

from . import TokenParser
from ..compat import u


class CSharpParser(TokenParser):

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if u(token).split('.')[-1] == 'Namespace':
        if self.partial(token) == 'Namespace':
            self._process_namespace(token, content)
        else:
            self._process_other(token, content)
packages/wakatime/dependencies/jvm.py (new file, 96 lines)

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
"""
    wakatime.languages.java
    ~~~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from Java code.

    :copyright: (c) 2014 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""

from . import TokenParser
from ..compat import u


class JavaParser(TokenParser):
    exclude = [
        r'^java\.',
        r'^javax\.',
        r'^import$',
        r'^package$',
        r'^namespace$',
        r'^static$',
    ]
    state = None
    buffer = u('')

    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if self.partial(token) == 'Namespace':
            self._process_namespace(token, content)
        if self.partial(token) == 'Name':
            self._process_name(token, content)
        elif self.partial(token) == 'Attribute':
            self._process_attribute(token, content)
        elif self.partial(token) == 'Operator':
            self._process_operator(token, content)
        else:
            self._process_other(token, content)

    def _process_namespace(self, token, content):
        if u(content) == u('import'):
            self.state = 'import'

        elif self.state == 'import':
            keywords = [
                u('package'),
                u('namespace'),
                u('static'),
            ]
            if u(content) in keywords:
                return
            self.buffer = u('{0}{1}').format(self.buffer, u(content))

        elif self.state == 'import-finished':
            content = content.split(u('.'))

            if len(content) == 1:
                self.append(content[0])

            elif len(content) > 1:
                if len(content[0]) == 3:
                    content = content[1:]
                if content[-1] == u('*'):
                    content = content[:len(content) - 1]

                if len(content) == 1:
                    self.append(content[0])
                elif len(content) > 1:
                    self.append(u('.').join(content[:2]))

            self.state = None

    def _process_name(self, token, content):
        if self.state == 'import':
            self.buffer = u('{0}{1}').format(self.buffer, u(content))

    def _process_attribute(self, token, content):
        if self.state == 'import':
            self.buffer = u('{0}{1}').format(self.buffer, u(content))

    def _process_operator(self, token, content):
        if u(content) == u(';'):
            self.state = 'import-finished'
            self._process_namespace(token, self.buffer)
            self.state = None
            self.buffer = u('')
        elif self.state == 'import':
            self.buffer = u('{0}{1}').format(self.buffer, u(content))

    def _process_other(self, token, content):
        pass
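The parser buffers tokens between import and the closing semicolon, then keeps at most the first two dot-separated segments, dropping a leading three-character segment (com/org/net style) and a trailing wildcard; java. and javax. imports are filtered by the exclude list. A rough standalone sketch of that reduction, not the class itself:

    def java_dependency(import_path):
        # 'com.example.app.Main' -> 'example.app'
        parts = import_path.split('.')
        if len(parts) > 1 and len(parts[0]) == 3:
            parts = parts[1:]          # drop com/org/net style prefixes
        if parts and parts[-1] == '*':
            parts = parts[:-1]         # drop a wildcard import's trailing *
        if len(parts) == 1:
            return parts[0]
        return '.'.join(parts[:2])

    print(java_dependency('com.example.app.Main'))  # example.app
    print(java_dependency('junit.framework.Test'))  # junit.framework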
@@ -17,15 +17,13 @@ class PhpParser(TokenParser):
    state = None
    parens = 0

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if u(token).split('.')[-1] == 'Keyword':
        if self.partial(token) == 'Keyword':
            self._process_keyword(token, content)
        elif u(token) == 'Token.Literal.String.Single' or u(token) == 'Token.Literal.String.Double':
            self._process_literal_string(token, content)

@@ -33,9 +31,9 @@ class PhpParser(TokenParser):
            self._process_name(token, content)
        elif u(token) == 'Token.Name.Function':
            self._process_function(token, content)
        elif u(token).split('.')[-1] == 'Punctuation':
        elif self.partial(token) == 'Punctuation':
            self._process_punctuation(token, content)
        elif u(token).split('.')[-1] == 'Text':
        elif self.partial(token) == 'Text':
            self._process_text(token, content)
        else:
            self._process_other(token, content)
@@ -10,33 +10,30 @@
"""

from . import TokenParser
from ..compat import u


class PythonParser(TokenParser):
    state = None
    parens = 0
    nonpackage = False
    exclude = [
        r'^os$',
        r'^sys\.',
    ]

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if u(token).split('.')[-1] == 'Namespace':
        if self.partial(token) == 'Namespace':
            self._process_namespace(token, content)
        elif u(token).split('.')[-1] == 'Name':
            self._process_name(token, content)
        elif u(token).split('.')[-1] == 'Word':
            self._process_word(token, content)
        elif u(token).split('.')[-1] == 'Operator':
        elif self.partial(token) == 'Operator':
            self._process_operator(token, content)
        elif u(token).split('.')[-1] == 'Punctuation':
        elif self.partial(token) == 'Punctuation':
            self._process_punctuation(token, content)
        elif u(token).split('.')[-1] == 'Text':
        elif self.partial(token) == 'Text':
            self._process_text(token, content)
        else:
            self._process_other(token, content)
@@ -50,38 +47,6 @@ class PythonParser(TokenParser):
        else:
            self._process_import(token, content)

    def _process_name(self, token, content):
        if self.state is not None:
            if self.nonpackage:
                self.nonpackage = False
            else:
                if self.state == 'from':
                    self.append(content, truncate=True, truncate_to=0)
                if self.state == 'from-2' and content != 'import':
                    self.append(content, truncate=True, truncate_to=0)
                elif self.state == 'import':
                    self.append(content, truncate=True, truncate_to=0)
                elif self.state == 'import-2':
                    self.append(content, truncate=True, truncate_to=0)
        else:
            self.state = None

    def _process_word(self, token, content):
        if self.state is not None:
            if self.nonpackage:
                self.nonpackage = False
            else:
                if self.state == 'from':
                    self.append(content, truncate=True, truncate_to=0)
                if self.state == 'from-2' and content != 'import':
                    self.append(content, truncate=True, truncate_to=0)
                elif self.state == 'import':
                    self.append(content, truncate=True, truncate_to=0)
                elif self.state == 'import-2':
                    self.append(content, truncate=True, truncate_to=0)
        else:
            self.state = None

    def _process_operator(self, token, content):
        if self.state is not None:
            if content == '.':
@@ -106,15 +71,15 @@ class PythonParser(TokenParser):
    def _process_import(self, token, content):
        if not self.nonpackage:
            if self.state == 'from':
                self.append(content, truncate=True, truncate_to=0)
                self.append(content, truncate=True, truncate_to=1)
                self.state = 'from-2'
            elif self.state == 'from-2' and content != 'import':
                self.append(content, truncate=True, truncate_to=0)
                self.append(content, truncate=True, truncate_to=1)
            elif self.state == 'import':
                self.append(content, truncate=True, truncate_to=0)
                self.append(content, truncate=True, truncate_to=1)
                self.state = 'import-2'
            elif self.state == 'import-2':
                self.append(content, truncate=True, truncate_to=0)
                self.append(content, truncate=True, truncate_to=1)
            else:
                self.state = None
        self.nonpackage = False
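With append() now counting kept segments instead of an index, truncate_to=1 preserves the old result here: only the top-level package is recorded. A trivial illustration of what that keeps:

    for module_path in ('os.path', 'django.db.models'):
        print('.'.join(module_path.split('.')[:1]))   # os, django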
@@ -71,9 +71,7 @@ KEYWORDS = [

class LassoJavascriptParser(TokenParser):

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies


@@ -99,9 +97,7 @@ class HtmlDjangoParser(TokenParser):
    current_attr = None
    current_attr_value = None

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
    def parse(self):
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies
@@ -22,7 +22,7 @@ FILES = {

class UnknownParser(TokenParser):

    def parse(self, tokens=[]):
    def parse(self):
        self._process_file_name(os.path.basename(self.source_file))
        return self.dependencies
packages/wakatime/exceptions.py (new file, 14 lines)

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*-
"""
    wakatime.exceptions
    ~~~~~~~~~~~~~~~~~~~

    Custom exceptions.

    :copyright: (c) 2015 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""


class NotYetImplemented(Exception):
    """This method needs to be implemented."""
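Raising this instead of a bare Exception lets callers tell "parser not implemented" apart from real failures and skip the file. A hedged sketch of how a caller might use it; the class is redefined locally so the snippet runs on its own:

    class NotYetImplemented(Exception):
        """This method needs to be implemented."""

    class TokenParserSketch(object):
        def parse(self):
            raise NotYetImplemented()

    try:
        TokenParserSketch().parse()
    except NotYetImplemented:
        dependencies = []   # fall back to "no dependencies" for unsupported lexers
    print(dependencies)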
@@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
"""
    wakatime.languages.c_cpp
    ~~~~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from C++ code.

    :copyright: (c) 2014 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""

from . import TokenParser
from ..compat import u


class CppParser(TokenParser):

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if u(token).split('.')[-1] == 'Preproc':
            self._process_preproc(token, content)
        else:
            self._process_other(token, content)

    def _process_preproc(self, token, content):
        if content.strip().startswith('include ') or content.strip().startswith("include\t"):
            content = content.replace('include', '', 1).strip()
            self.append(content)

    def _process_other(self, token, content):
        pass
@@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
"""
    wakatime.languages.java
    ~~~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from Java code.

    :copyright: (c) 2014 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""

from . import TokenParser
from ..compat import u


class JavaParser(TokenParser):

    def parse(self, tokens=[]):
        if not tokens and not self.tokens:
            self.tokens = self._extract_tokens()
        for index, token, content in self.tokens:
            self._process_token(token, content)
        return self.dependencies

    def _process_token(self, token, content):
        if u(token).split('.')[-1] == 'Namespace':
            self._process_namespace(token, content)
        else:
            self._process_other(token, content)

    def _process_namespace(self, token, content):
        if content != 'import' and content != 'package' and content != 'namespace':
            self.append(content, truncate=True)

    def _process_other(self, token, content):
        pass
@@ -16,23 +16,23 @@ import sys
from .compat import u
try:
    from collections import OrderedDict  # pragma: nocover
except ImportError:
    from .packages.ordereddict import OrderedDict  # pragma: nocover
except ImportError:  # pragma: nocover
    from .packages.ordereddict import OrderedDict
try:
    from .packages import simplejson as json  # pragma: nocover
except (ImportError, SyntaxError):
    import json  # pragma: nocover
except (ImportError, SyntaxError):  # pragma: nocover
    import json


class CustomEncoder(json.JSONEncoder):

    def default(self, obj):
        if isinstance(obj, bytes):
            obj = bytes.decode(obj)
        if isinstance(obj, bytes):  # pragma: nocover
            obj = u(obj)
            return json.dumps(obj)
        try:
        try:  # pragma: nocover
            encoded = super(CustomEncoder, self).default(obj)
        except UnicodeDecodeError:
        except UnicodeDecodeError:  # pragma: nocover
            obj = u(obj)
            encoded = super(CustomEncoder, self).default(obj)
        return encoded

@@ -40,11 +40,11 @@ class CustomEncoder(json.JSONEncoder):

class JsonFormatter(logging.Formatter):

    def setup(self, timestamp, isWrite, targetFile, version, plugin, verbose,
    def setup(self, timestamp, isWrite, entity, version, plugin, verbose,
              warnings=False):
        self.timestamp = timestamp
        self.isWrite = isWrite
        self.targetFile = targetFile
        self.entity = entity
        self.version = version
        self.plugin = plugin
        self.verbose = verbose

@@ -61,7 +61,7 @@ class JsonFormatter(logging.Formatter):
        data['caller'] = record.pathname
        data['lineno'] = record.lineno
        data['isWrite'] = self.isWrite
        data['file'] = self.targetFile
        data['file'] = self.entity
        if not self.isWrite:
            del data['isWrite']
        data['level'] = record.levelname

@@ -83,19 +83,9 @@ def set_log_level(logger, args):

def setup_logging(args, version):
    logger = logging.getLogger('WakaTime')
    for handler in logger.handlers:
        logger.removeHandler(handler)
    set_log_level(logger, args)
    if len(logger.handlers) > 0:
        formatter = JsonFormatter(datefmt='%Y/%m/%d %H:%M:%S %z')
        formatter.setup(
            timestamp=args.timestamp,
            isWrite=args.isWrite,
            targetFile=args.targetFile,
            version=version,
            plugin=args.plugin,
            verbose=args.verbose,
        )
        logger.handlers[0].setFormatter(formatter)
        return logger
    logfile = args.logfile
    if not logfile:
        logfile = '~/.wakatime.log'

@@ -104,7 +94,7 @@ def setup_logging(args, version):
    formatter.setup(
        timestamp=args.timestamp,
        isWrite=args.isWrite,
        targetFile=args.targetFile,
        entity=args.entity,
        version=version,
        plugin=args.plugin,
        verbose=args.verbose,

@@ -116,7 +106,7 @@ def setup_logging(args, version):
    warnings_formatter.setup(
        timestamp=args.timestamp,
        isWrite=args.isWrite,
        targetFile=args.targetFile,
        entity=args.entity,
        version=version,
        plugin=args.plugin,
        verbose=args.verbose,

@@ -127,7 +117,7 @@ def setup_logging(args, version):
    logging.getLogger('py.warnings').addHandler(warnings_handler)
    try:
        logging.captureWarnings(True)
    except AttributeError:
    except AttributeError:  # pragma: nocover
        pass  # Python >= 2.7 is needed to capture warnings

    return logger
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
    wakatime.base
    wakatime.main
    ~~~~~~~~~~~~~

    wakatime module entry point.

@@ -39,12 +39,12 @@ from .session_cache import SessionCache
from .stats import get_file_stats
try:
    from .packages import simplejson as json  # pragma: nocover
except (ImportError, SyntaxError):
    import json  # pragma: nocover
except (ImportError, SyntaxError):  # pragma: nocover
    import json
try:
    from .packages import tzlocal  # pragma: nocover
    from .packages import tzlocal
except:  # pragma: nocover
    from .packages import tzlocal3 as tzlocal  # pragma: nocover
    from .packages import tzlocal3 as tzlocal


log = logging.getLogger('WakaTime')
@@ -53,7 +53,11 @@ log = logging.getLogger('WakaTime')
class FileAction(argparse.Action):

    def __call__(self, parser, namespace, values, option_string=None):
        values = os.path.realpath(values)
        try:
            if os.path.isfile(values):
                values = os.path.realpath(values)
        except:  # pragma: nocover
            pass
        setattr(namespace, self.dest, values)
@@ -88,9 +92,12 @@ def parseArguments():
    # define supported command line arguments
    parser = argparse.ArgumentParser(
        description='Common interface for the WakaTime api.')
    parser.add_argument('--file', dest='targetFile', metavar='file',
                        action=FileAction, required=True,
                        help='absolute path to file for current heartbeat')
    parser.add_argument('--entity', dest='entity', metavar='FILE',
                        action=FileAction,
                        help='absolute path to file for the heartbeat; can also be a '+
                             'url, domain, or app when --entitytype is not file')
    parser.add_argument('--file', dest='file', action=FileAction,
                        help=argparse.SUPPRESS)
    parser.add_argument('--key', dest='key',
                        help='your wakatime api key; uses api_key from '+
                             '~/.wakatime.conf by default')

@@ -109,9 +116,9 @@ def parseArguments():
                        help='optional line number; current line being edited')
    parser.add_argument('--cursorpos', dest='cursorpos',
                        help='optional cursor position in the current file')
    parser.add_argument('--notfile', dest='notfile', action='store_true',
                        help='when set, will accept any value for the file. for example, '+
                             'a domain name or other item you want to log time towards.')
    parser.add_argument('--entitytype', dest='entity_type',
                        help='entity type for this heartbeat. can be one of "file", '+
                             '"url", "domain", or "app"; defaults to file.')
    parser.add_argument('--proxy', dest='proxy',
                        help='optional https proxy url; for example: '+
                             'https://user:pass@localhost:8080')

@@ -139,6 +146,8 @@ def parseArguments():
                        help='defaults to ~/.wakatime.log')
    parser.add_argument('--apiurl', dest='api_url',
                        help='heartbeats api url; for debugging with a local server')
    parser.add_argument('--timeout', dest='timeout', type=int,
                        help='number of seconds to wait when sending heartbeats to api')
    parser.add_argument('--config', dest='config',
                        help='defaults to ~/.wakatime.conf')
    parser.add_argument('--verbose', dest='verbose', action='store_true',
@@ -168,6 +177,13 @@ def parseArguments():
            args.key = default_key
        else:
            parser.error('Missing api key')
    if not args.entity_type:
        args.entity_type = 'file'
    if not args.entity:
        if args.file:
            args.entity = args.file
        else:
            parser.error('argument --entity is required')
    if not args.exclude:
        args.exclude = []
    if configs.has_option('settings', 'ignore'):

@@ -175,14 +191,14 @@ def parseArguments():
            for pattern in configs.get('settings', 'ignore').split("\n"):
                if pattern.strip() != '':
                    args.exclude.append(pattern)
        except TypeError:
        except TypeError:  # pragma: nocover
            pass
    if configs.has_option('settings', 'exclude'):
        try:
            for pattern in configs.get('settings', 'exclude').split("\n"):
                if pattern.strip() != '':
                    args.exclude.append(pattern)
        except TypeError:
        except TypeError:  # pragma: nocover
            pass
    if not args.include:
        args.include = []

@@ -191,7 +207,7 @@ def parseArguments():
            for pattern in configs.get('settings', 'include').split("\n"):
                if pattern.strip() != '':
                    args.include.append(pattern)
        except TypeError:
        except TypeError:  # pragma: nocover
            pass
    if args.offline and configs.has_option('settings', 'offline'):
        args.offline = configs.getboolean('settings', 'offline')
@@ -207,17 +223,22 @@ def parseArguments():
        args.logfile = configs.get('settings', 'logfile')
    if not args.api_url and configs.has_option('settings', 'api_url'):
        args.api_url = configs.get('settings', 'api_url')
    if not args.timeout and configs.has_option('settings', 'timeout'):
        try:
            args.timeout = int(configs.get('settings', 'timeout'))
        except ValueError:
            print(traceback.format_exc())

    return args, configs


def should_exclude(fileName, include, exclude):
    if fileName is not None and fileName.strip() != '':
def should_exclude(entity, include, exclude):
    if entity is not None and entity.strip() != '':
        try:
            for pattern in include:
                try:
                    compiled = re.compile(pattern, re.IGNORECASE)
                    if compiled.search(fileName):
                    if compiled.search(entity):
                        return False
                except re.error as ex:
                    log.warning(u('Regex error ({msg}) for include pattern: {pattern}').format(

@@ -230,7 +251,7 @@ def should_exclude(fileName, include, exclude):
            for pattern in exclude:
                try:
                    compiled = re.compile(pattern, re.IGNORECASE)
                    if compiled.search(fileName):
                    if compiled.search(entity):
                        return pattern
                except re.error as ex:
                    log.warning(u('Regex error ({msg}) for exclude pattern: {pattern}').format(
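Renaming fileName to entity does not change the matching rules: a matching include pattern short-circuits to "keep", and the first matching exclude pattern is returned so it can be logged. A standalone sketch of that logic, under the assumption that an empty entity is simply kept:

    import re

    def should_exclude(entity, include, exclude):
        # Returns False to keep the entity, or the exclude pattern that matched.
        if entity is None or entity.strip() == '':
            return False
        for pattern in include:
            if re.compile(pattern, re.IGNORECASE).search(entity):
                return False
        for pattern in exclude:
            if re.compile(pattern, re.IGNORECASE).search(entity):
                return pattern
        return False

    print(should_exclude('/tmp/build/output.log', include=[], exclude=[r'\.log$']))  # \.log$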
@@ -262,21 +283,23 @@ def get_user_agent(plugin):
    return user_agent


def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None, targetFile=None,
                   timestamp=None, isWrite=None, plugin=None, offline=None, notfile=False,
                   hidefilenames=None, proxy=None, api_url=None, **kwargs):
def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None, entity=None,
                   timestamp=None, isWrite=None, plugin=None, offline=None, entity_type='file',
                   hidefilenames=None, proxy=None, api_url=None, timeout=None, **kwargs):
    """Sends heartbeat as POST request to WakaTime api server.
    """

    if not api_url:
        api_url = 'https://wakatime.com/api/v1/heartbeats'
    if not timeout:
        timeout = 30
    log.debug('Sending heartbeat to api at %s' % api_url)
    data = {
        'time': timestamp,
        'entity': targetFile,
        'type': 'file',
        'entity': entity,
        'type': entity_type,
    }
    if hidefilenames and targetFile is not None and not notfile:
    if hidefilenames and entity is not None and entity_type == 'file':
        extension = u(os.path.splitext(data['entity'])[1])
        data['entity'] = u('HIDDEN{0}').format(extension)
    if stats.get('lines'):
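With the rename, the heartbeat payload carries an entity and an entity_type instead of a hard-coded file, and hidefilenames only masks entities of type file. A small sketch of that payload step; the dictionary keys come from the diff, the function name is illustrative:

    import os

    def build_heartbeat(entity, entity_type='file', timestamp=None, hidefilenames=False):
        data = {
            'time': timestamp,
            'entity': entity,
            'type': entity_type,
        }
        if hidefilenames and entity is not None and entity_type == 'file':
            extension = os.path.splitext(data['entity'])[1]
            data['entity'] = 'HIDDEN{0}'.format(extension)
        return data

    print(build_heartbeat('/home/user/secret.py', hidefilenames=True))
    # {'time': None, 'entity': 'HIDDEN.py', 'type': 'file'}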
@@ -328,7 +351,7 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
    response = None
    try:
        response = session.post(api_url, data=request_body, headers=headers,
                                proxies=proxies)
                                proxies=proxies, timeout=timeout)
    except RequestException:
        exception_data = {
            sys.exc_info()[0].__name__: u(sys.exc_info()[1]),

@@ -379,8 +402,9 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
    return False


def main(argv):
    sys.argv = ['wakatime'] + argv
def execute(argv=None):
    if argv:
        sys.argv = ['wakatime'] + argv

    args, configs = parseArguments()
    if configs is None:
@@ -388,27 +412,29 @@ def main(argv):

    setup_logging(args, __version__)

    exclude = should_exclude(args.targetFile, args.include, args.exclude)
    exclude = should_exclude(args.entity, args.include, args.exclude)
    if exclude is not False:
        log.debug(u('File not logged because matches exclude pattern: {pattern}').format(
        log.debug(u('Skipping because matches exclude pattern: {pattern}').format(
            pattern=u(exclude),
        ))
        return 0

    if os.path.isfile(args.targetFile) or args.notfile:
    if args.entity_type != 'file' or os.path.isfile(args.entity):

        stats = get_file_stats(args.targetFile, notfile=args.notfile,
        stats = get_file_stats(args.entity, entity_type=args.entity_type,
                               lineno=args.lineno, cursorpos=args.cursorpos)

        project, branch = None, None
        if not args.notfile:
            project, branch = get_project_info(configs=configs, args=args)
        project = args.project or args.alternate_project
        branch = None
        if args.entity_type == 'file':
            project, branch = get_project_info(configs, args)

        kwargs = vars(args)
        kwargs['project'] = project
        kwargs['branch'] = branch
        kwargs['stats'] = stats
        kwargs['hostname'] = args.hostname or socket.gethostname()
        kwargs['timeout'] = args.timeout

        if send_heartbeat(**kwargs):
            queue = Queue()

@@ -418,7 +444,7 @@ def main(argv):
                    break
                sent = send_heartbeat(
                    project=heartbeat['project'],
                    targetFile=heartbeat['file'],
                    entity=heartbeat['entity'],
                    timestamp=heartbeat['time'],
                    branch=heartbeat['branch'],
                    hostname=kwargs['hostname'],

@@ -428,9 +454,10 @@ def main(argv):
                    plugin=heartbeat['plugin'],
                    offline=args.offline,
                    hidefilenames=args.hidefilenames,
                    notfile=args.notfile,
                    entity_type=heartbeat['type'],
                    proxy=args.proxy,
                    api_url=args.api_url,
                    timeout=args.timeout,
                )
                if not sent:
                    break
@@ -18,7 +18,7 @@ from time import sleep
try:
    import sqlite3
    HAS_SQL = True
except ImportError:
except ImportError:  # pragma: nocover
    HAS_SQL = False

from .compat import u
@@ -28,13 +28,18 @@ log = logging.getLogger('WakaTime')


class Queue(object):
    DB_FILE = os.path.join(os.path.expanduser('~'), '.wakatime.db')
    db_file = os.path.join(os.path.expanduser('~'), '.wakatime.db')
    table_name = 'heartbeat_1'

    def get_db_file(self):
        return self.db_file

    def connect(self):
        conn = sqlite3.connect(self.DB_FILE)
        conn = sqlite3.connect(self.get_db_file())
        c = conn.cursor()
        c.execute('''CREATE TABLE IF NOT EXISTS heartbeat (
            file text,
        c.execute('''CREATE TABLE IF NOT EXISTS {0} (
            entity text,
            type text,
            time real,
            project text,
            branch text,

@@ -42,17 +47,17 @@ class Queue(object):
            stats text,
            misc text,
            plugin text)
        ''')
        '''.format(self.table_name))
        return (conn, c)

    def push(self, data, stats, plugin, misc=None):
        if not HAS_SQL:
        if not HAS_SQL:  # pragma: nocover
            return
        try:
            conn, c = self.connect()
            heartbeat = {
                'file': u(data.get('entity')),
                'entity': u(data.get('entity')),
                'type': u(data.get('type')),
                'time': data.get('time'),
                'project': u(data.get('project')),
                'branch': u(data.get('branch')),

@@ -61,15 +66,14 @@ class Queue(object):
                'misc': u(misc),
                'plugin': u(plugin),
            }
            c.execute('INSERT INTO heartbeat VALUES (:file,:time,:project,:branch,:is_write,:stats,:misc,:plugin)', heartbeat)
            c.execute('INSERT INTO {0} VALUES (:entity,:type,:time,:project,:branch,:is_write,:stats,:misc,:plugin)'.format(self.table_name), heartbeat)
            conn.commit()
            conn.close()
        except sqlite3.Error:
            log.error(traceback.format_exc())

    def pop(self):
        if not HAS_SQL:
        if not HAS_SQL:  # pragma: nocover
            return None
        tries = 3
        wait = 0.1
@@ -83,42 +87,43 @@ class Queue(object):
        while loop and tries > -1:
            try:
                c.execute('BEGIN IMMEDIATE')
                c.execute('SELECT * FROM heartbeat LIMIT 1')
                c.execute('SELECT * FROM {0} LIMIT 1'.format(self.table_name))
                row = c.fetchone()
                if row is not None:
                    values = []
                    clauses = []
                    index = 0
                    for row_name in ['file', 'time', 'project', 'branch', 'is_write']:
                    for row_name in ['entity', 'type', 'time', 'project', 'branch', 'is_write']:
                        if row[index] is not None:
                            clauses.append('{0}=?'.format(row_name))
                            values.append(row[index])
                        else:
                        else:  # pragma: nocover
                            clauses.append('{0} IS NULL'.format(row_name))
                        index += 1
                    if len(values) > 0:
                        c.execute('DELETE FROM heartbeat WHERE {0}'.format(' AND '.join(clauses)), values)
                    else:
                        c.execute('DELETE FROM heartbeat WHERE {0}'.format(' AND '.join(clauses)))
                        c.execute('DELETE FROM {0} WHERE {1}'.format(self.table_name, ' AND '.join(clauses)), values)
                    else:  # pragma: nocover
                        c.execute('DELETE FROM {0} WHERE {1}'.format(self.table_name, ' AND '.join(clauses)))
                conn.commit()
                if row is not None:
                    heartbeat = {
                        'file': row[0],
                        'time': row[1],
                        'project': row[2],
                        'branch': row[3],
                        'is_write': True if row[4] is 1 else False,
                        'stats': row[5],
                        'misc': row[6],
                        'plugin': row[7],
                        'entity': row[0],
                        'type': row[1],
                        'time': row[2],
                        'project': row[3],
                        'branch': row[4],
                        'is_write': True if row[5] is 1 else False,
                        'stats': row[6],
                        'misc': row[7],
                        'plugin': row[8],
                    }
                loop = False
            except sqlite3.Error:
            except sqlite3.Error:  # pragma: nocover
                log.debug(traceback.format_exc())
                sleep(wait)
                tries -= 1
        try:
            conn.close()
        except sqlite3.Error:
        except sqlite3.Error:  # pragma: nocover
            log.debug(traceback.format_exc())
        return heartbeat
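The offline queue now writes to a heartbeat_1 table whose rows start with entity and type, which is why pop() reads row[0] and row[1] before the timestamp. A minimal in-memory sketch of the new schema and a round trip; this is a standalone illustration, not the Queue class, and the is_write column is assumed from the INSERT's named parameters:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    c = conn.cursor()
    c.execute('''CREATE TABLE IF NOT EXISTS heartbeat_1 (
        entity text,
        type text,
        time real,
        project text,
        branch text,
        is_write integer,
        stats text,
        misc text,
        plugin text)
    ''')
    c.execute('INSERT INTO heartbeat_1 VALUES (?,?,?,?,?,?,?,?,?)',
              ('/tmp/example.py', 'file', 1430000000.0, 'demo', 'master', 1, '{}', None, 'vim'))
    row = c.execute('SELECT * FROM heartbeat_1 LIMIT 1').fetchone()
    print({'entity': row[0], 'type': row[1], 'time': row[2]})
    conn.close()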
@@ -33,7 +33,7 @@ REV_CONTROL_PLUGINS = [
]


def get_project_info(configs=None, args=None):
def get_project_info(configs, args):
    """Find the current project and branch.

    First looks for a .wakatime-project file. Second, uses the --project arg.

@@ -50,9 +50,9 @@ def get_project_info(configs=None, args=None):
        plugin_name = plugin_cls.__name__.lower()
        plugin_configs = get_configs_for_plugin(plugin_name, configs)

        project = plugin_cls(args.targetFile, configs=plugin_configs)
        project = plugin_cls(args.entity, configs=plugin_configs)
        if project.process():
            project_name = project.name()
            project_name = project_name or project.name()
            branch_name = project.branch()
            break


@@ -66,7 +66,7 @@ def get_project_info(configs=None, args=None):
        plugin_name = plugin_cls.__name__.lower()
        plugin_configs = get_configs_for_plugin(plugin_name, configs)

        project = plugin_cls(args.targetFile, configs=plugin_configs)
        project = plugin_cls(args.entity, configs=plugin_configs)
        if project.process():
            project_name = project_name or project.name()
            branch_name = branch_name or project.branch()
@@ -11,6 +11,8 @@

import logging

from ..exceptions import NotYetImplemented


log = logging.getLogger('WakaTime')


@@ -25,29 +27,19 @@ class BaseProject(object):
        self.path = path
        self._configs = configs

    def project_type(self):
        """ Returns None if this is the base class.
        Returns the type of project if this is a
        valid project.
        """
        project_type = self.__class__.__name__.lower()
        if project_type == 'baseproject':
            project_type = None
        return project_type

    def process(self):
        """ Processes self.path into a project and
        returns True if project is valid, otherwise
        returns False.
        """
        return False
        raise NotYetImplemented()

    def name(self):
        """ Returns the project's name.
        """
        return None
        raise NotYetImplemented()

    def branch(self):
        """ Returns the current branch.
        """
        return None
        raise NotYetImplemented()
@@ -30,7 +30,7 @@ class Git(BaseProject):
        base = self._project_base()
        if base:
            return u(os.path.basename(base))
        return None
        return None  # pragma: nocover

    def branch(self):
        base = self._project_base()

@@ -39,13 +39,13 @@ class Git(BaseProject):
            try:
                with open(head, 'r', encoding='utf-8') as fh:
                    return u(fh.readline().strip().rsplit('/', 1)[-1])
            except UnicodeDecodeError:
            except UnicodeDecodeError:  # pragma: nocover
                try:
                    with open(head, 'r', encoding=sys.getfilesystemencoding()) as fh:
                        return u(fh.readline().strip().rsplit('/', 1)[-1])
                except:
                    log.exception("Exception:")
            except IOError:
            except IOError:  # pragma: nocover
                log.exception("Exception:")
        return None
@@ -29,7 +29,7 @@ class Mercurial(BaseProject):
    def name(