# -*- coding: utf-8 -*-
"""
    wakatime.dependencies
    ~~~~~~~~~~~~~~~~~~~~~

    Parse dependencies from a source code file.

    :copyright: (c) 2014 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""
|
|
|
|
|
|
|
|
import logging
|
2015-09-29 10:10:32 +00:00
|
|
|
import re
|
2015-08-25 07:51:01 +00:00
|
|
|
import sys
|
2014-12-23 11:22:49 +00:00
|
|
|
|
2014-12-25 07:03:09 +00:00
|
|
|
from ..compat import u, open, import_module
|
2015-09-29 10:10:32 +00:00
|
|
|
from ..exceptions import NotYetImplemented
|
2014-12-23 11:22:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Shared logger for the WakaTime client; handler/level configuration is
# done elsewhere (by the CLI entry point), not in this module.
log = logging.getLogger('WakaTime')
|
|
|
|
|
|
|
|
|
|
|
|
class TokenParser(object):
    """The base class for all dependency parsers. To add support for your
    language, inherit from this class and implement the :meth:`parse` method
    to return a list of dependency strings.
    """

    # Regex pattern strings (matched case-insensitively) for dependencies
    # that should never be reported; subclasses override this list. It is
    # replaced with compiled patterns in __init__.
    exclude = []

    def __init__(self, source_file, lexer=None):
        # Lazily-populated token cache; filled on first access of `tokens`.
        self._tokens = None
        # Dependencies collected so far, in append order.
        self.dependencies = []
        self.source_file = source_file
        self.lexer = lexer
        # Compile the exclusion patterns once, at construction time.
        self.exclude = [re.compile(x, re.IGNORECASE) for x in self.exclude]

    @property
    def tokens(self):
        """Lexer tokens for the source file, extracted lazily on first use."""
        if self._tokens is None:
            self._tokens = self._extract_tokens()
        return self._tokens

    def parse(self, tokens=None):
        """Should return a list of dependencies.

        Subclasses must override this. `tokens` is unused by the base
        implementation (default changed from a shared mutable `[]` to None).
        """
        raise NotYetImplemented()

    def append(self, dep, truncate=False, separator=None, truncate_to=None,
               strip_whitespace=True):
        """Add a single dependency string to ``self.dependencies``.

        :param dep: the raw dependency string.
        :param truncate: when True, split on `separator` and keep only the
            first `truncate_to` components.
        :param separator: split/join token used when truncating; defaults
            to ``'.'``.
        :param truncate_to: number of components kept when truncating
            (clamped to at least 1 and at most the component count).
        :param strip_whitespace: strip surrounding whitespace from the result.
        """
        self._save_dependency(
            dep,
            truncate=truncate,
            truncate_to=truncate_to,
            separator=separator,
            strip_whitespace=strip_whitespace,
        )

    def partial(self, token):
        """Return the last dot-separated component of `token` as unicode."""
        return u(token).split('.')[-1]

    def _extract_tokens(self):
        """Tokenize the source file with the configured lexer.

        Reads at most 512000 characters. Tries utf-8 first, then the
        filesystem encoding; returns an empty list when there is no lexer
        or both reads fail. Failures are swallowed deliberately — this is
        best-effort — but the excepts are narrowed from bare ``except:`` so
        KeyboardInterrupt/SystemExit still propagate.
        """
        if self.lexer:
            try:
                with open(self.source_file, 'r', encoding='utf-8') as fh:
                    return self.lexer.get_tokens_unprocessed(fh.read(512000))
            except Exception:
                pass
            try:
                with open(self.source_file, 'r', encoding=sys.getfilesystemencoding()) as fh:
                    return self.lexer.get_tokens_unprocessed(fh.read(512000))  # pragma: nocover
            except Exception:
                pass
        return []

    def _save_dependency(self, dep, truncate=False, separator=None,
                         truncate_to=None, strip_whitespace=True):
        """Normalize `dep` per the given options and record it.

        The dependency is dropped when it is empty after normalization,
        when it starts with the separator, or when it matches one of the
        compiled `exclude` patterns.
        """
        if truncate:
            if separator is None:
                separator = u('.')
            separator = u(separator)
            dep = dep.split(separator)
            # Clamp truncate_to into the valid range [1, len(dep)].
            if truncate_to is None or truncate_to < 1:
                truncate_to = 1
            if truncate_to > len(dep):
                truncate_to = len(dep)
            dep = dep[0] if len(dep) == 1 else separator.join(dep[:truncate_to])
        if strip_whitespace:
            dep = dep.strip()
        if dep and (not separator or not dep.startswith(separator)):
            # Record the dependency unless an exclusion pattern matches.
            if not any(pattern.search(dep) for pattern in self.exclude):
                self.dependencies.append(dep)
|
2014-12-23 11:22:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
class DependencyParser(object):
    """Find and run the language-specific TokenParser for a source file.

    Maps the Pygments lexer of the file to a parser module/class in this
    package (e.g. ``PythonLexer`` -> ``.python.PythonParser``); when no
    matching parser exists, :meth:`parse` returns an empty list.
    """

    source_file = None
    lexer = None
    parser = None

    def __init__(self, source_file, lexer):
        self.source_file = source_file
        self.lexer = lexer

        if self.lexer:
            # Derive module and class names from the root lexer, e.g.
            # wakatime.dependencies.python / PythonParser.
            module_name = self.root_lexer.__module__.rsplit('.', 1)[-1]
            class_name = self.root_lexer.__class__.__name__.replace('Lexer', 'Parser', 1)
        else:
            module_name = 'unknown'
            class_name = 'UnknownParser'

        # ImportError: no parser module for this language.
        # AttributeError: module exists but lacks the expected class.
        # Either way self.parser stays None and parse() returns [].
        try:
            module = import_module('.%s' % module_name, package=__package__)
            self.parser = getattr(module, class_name)
        except (ImportError, AttributeError):
            # Lazy %-args avoid formatting when debug logging is disabled.
            log.debug('Parsing dependencies not supported for %s.%s',
                      module_name, class_name)

    @property
    def root_lexer(self):
        """The innermost lexer: ``lexer.root_lexer`` when present (delegating
        lexers), otherwise the lexer itself."""
        if hasattr(self.lexer, 'root_lexer'):
            return self.lexer.root_lexer
        return self.lexer

    def parse(self):
        """Return the file's dependencies as a de-duplicated list.

        Empty when no parser was resolved for this language; falsy entries
        are filtered out.
        """
        if self.parser:
            plugin = self.parser(self.source_file, lexer=self.lexer)
            dependencies = plugin.parse()
            return list(filter(bool, set(dependencies)))
        return []
|