tests for c and cpp dependency detection. change api of TokenParser class.

Alan Hamlett 2015-09-26 13:04:35 -07:00
parent 350fc8cc0a
commit 1984fe4228
13 changed files with 186 additions and 40 deletions
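The API change in a nutshell: parse() loses its tokens argument, and token extraction moves behind a lazy tokens property on TokenParser. A minimal before/after sketch using CppParser, a concrete subclass (the base class itself raises NotYetImplemented):

# Before this commit: parse() took an optional tokens list and eagerly
# populated self.tokens itself.
parser = CppParser('tests/samples/codefiles/seeplusplus.cpp')
dependencies = parser.parse(tokens=[])

# After this commit: parse() takes no arguments; tokens is a cached
# property that runs _extract_tokens() on first access only.
parser = CppParser('tests/samples/codefiles/seeplusplus.cpp')
dependencies = parser.parse()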

tests/samples/codefiles/see.c

@@ -0,0 +1,8 @@
+#include <stdio.h>
+#include <openssl/rand.h>
+
+main()
+{
+    printf("Hello World\n");
+    return 0;
+}

tests/samples/codefiles/seeplusplus.cpp

@@ -0,0 +1,8 @@
+#include <stdio.h>
+#include <openssl/rand.h>
+
+main()
+{
+    printf("Hello World\n");
+    return 0;
+}
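Both samples exist to exercise the C and C++ dependency parsers changed below. For context, this is roughly the token stream those parsers iterate over; a sketch assuming a stock pygments install (wakatime appears to vendor its libraries under wakatime.packages, judging by the patched requests path in the tests):

from pygments.lexers import CLexer
from pygments.token import Comment

with open('tests/samples/codefiles/see.c') as fh:
    code = fh.read()

# get_tokens_unprocessed() yields the same (index, token, content)
# triples the parse() methods below loop over.
for index, token, content in CLexer().get_tokens_unprocessed(code):
    if token in Comment.Preproc:
        print(index, token, repr(content))

# The Preproc entries carry 'include <stdio.h>' and
# 'include <openssl/rand.h>', which _process_preproc() inspects.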

tests/test_languages.py

@@ -34,13 +34,15 @@ class LanguagesTestCase(utils.TestCase):
     ]

     def test_token_parser(self):
-        with self.assertRaises(NotYetImplemented):
-            source_file = 'tests/samples/codefiles/see.h'
-            parser = TokenParser(source_file)
-            parser.parse()
+        with utils.mock.patch('wakatime.languages.TokenParser._extract_tokens') as mock_extract_tokens:
+            with self.assertRaises(NotYetImplemented):
+                source_file = 'tests/samples/codefiles/see.h'
+                parser = TokenParser(source_file)
+                parser.parse()
+
+            source_file = 'tests/samples/codefiles/see.h'
+            parser = TokenParser(source_file)
+            parser.tokens
+            mock_extract_tokens.assert_called_once_with()

     def test_python_dependencies_detected(self):
@@ -86,7 +88,6 @@ class LanguagesTestCase(utils.TestCase):
             'flask',
             'jinja',
             'mock',
-            'os',
             'pygments',
             'simplejson',
             'sqlalchemy',
@@ -195,3 +196,109 @@ class LanguagesTestCase(utils.TestCase):
         self.assertEquals(normalize(dependencies), normalize(expected_dependencies))
         self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
         self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
+
+    def test_c_dependencies_detected(self):
+        response = Response()
+        response.status_code = 0
+        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
+
+        now = u(int(time.time()))
+        entity = 'tests/samples/codefiles/see.c'
+        config = 'tests/samples/configs/good_config.cfg'
+
+        args = ['--file', entity, '--config', config, '--time', now]
+        retval = execute(args)
+        self.assertEquals(retval, 102)
+        self.assertEquals(sys.stdout.getvalue(), '')
+        self.assertEquals(sys.stderr.getvalue(), '')
+
+        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
+        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
+        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
+
+        heartbeat = {
+            'language': u('C'),
+            'lines': 8,
+            'entity': os.path.realpath(entity),
+            'project': u(os.path.basename(os.path.realpath('.'))),
+            'dependencies': ANY,
+            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
+            'time': float(now),
+            'type': 'file',
+        }
+        stats = {
+            u('cursorpos'): None,
+            u('dependencies'): ANY,
+            u('language'): u('C'),
+            u('lineno'): None,
+            u('lines'): 8,
+        }
+        expected_dependencies = [
+            'openssl',
+        ]
+
+        def normalize(items):
+            return sorted([u(x) for x in items])
+
+        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
+        dependencies = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['dependencies']
+        self.assertEquals(normalize(dependencies), normalize(expected_dependencies))
+        self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
+        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
+
+    def test_cpp_dependencies_detected(self):
+        response = Response()
+        response.status_code = 0
+        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
+
+        now = u(int(time.time()))
+        entity = 'tests/samples/codefiles/seeplusplus.cpp'
+        config = 'tests/samples/configs/good_config.cfg'
+
+        args = ['--file', entity, '--config', config, '--time', now]
+        retval = execute(args)
+        self.assertEquals(retval, 102)
+        self.assertEquals(sys.stdout.getvalue(), '')
+        self.assertEquals(sys.stderr.getvalue(), '')
+
+        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
+        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
+        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
+
+        heartbeat = {
+            'language': u('C++'),
+            'lines': 8,
+            'entity': os.path.realpath(entity),
+            'project': u(os.path.basename(os.path.realpath('.'))),
+            'dependencies': ANY,
+            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
+            'time': float(now),
+            'type': 'file',
+        }
+        stats = {
+            u('cursorpos'): None,
+            u('dependencies'): ANY,
+            u('language'): u('C++'),
+            u('lineno'): None,
+            u('lines'): 8,
+        }
+        expected_dependencies = [
+            'openssl',
+        ]
+
+        def normalize(items):
+            return sorted([u(x) for x in items])
+
+        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
+        dependencies = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['dependencies']
+        self.assertEquals(normalize(dependencies), normalize(expected_dependencies))
+        self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
+        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
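Both tests pin the dependencies key with mock's ANY placeholder and then compare the real list separately via call_args. A standalone illustration of why that works (unittest.mock shown here; the suite itself reaches mock through utils.mock):

from unittest.mock import ANY, MagicMock

push = MagicMock()
push({'language': 'C', 'dependencies': ['openssl']}, 'payload', None)

# ANY compares equal to anything, so the heartbeat dict matches even
# though the exact dependency list is asserted separately afterwards.
push.assert_called_once_with({'language': 'C', 'dependencies': ANY}, ANY, None)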

wakatime/languages/__init__.py

@@ -29,17 +29,21 @@ class TokenParser(object):
     exclude = []

     def __init__(self, source_file, lexer=None):
-        self.tokens = []
+        self._tokens = None
        self.dependencies = []
        self.source_file = source_file
        self.lexer = lexer
        self.exclude = [re.compile(x, re.IGNORECASE) for x in self.exclude]

+    @property
+    def tokens(self):
+        if self._tokens is None:
+            self._tokens = self._extract_tokens()
+        return self._tokens
+
     def parse(self, tokens=[]):
         """ Should return a list of dependencies.
         """
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
         raise NotYetImplemented()

     def append(self, dep, truncate=False, separator=None, truncate_to=None,
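The new property is plain memoization: _extract_tokens() runs at most once per parser instance, and only if something actually reads tokens. This is exactly the behavior the updated test_token_parser pins down with its mock:

parser = TokenParser('tests/samples/codefiles/see.h')
# Nothing has been lexed yet; _tokens is still None.
first = parser.tokens   # first access triggers the single _extract_tokens() call
second = parser.tokens  # second access is served from the _tokens cache
assert first is second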

wakatime/languages/c_cpp.py

@@ -13,10 +13,14 @@ from . import TokenParser
 class CppParser(TokenParser):
+    exclude = [
+        r'^stdio\.h$',
+        r'^stdlib\.h$',
+        r'^string\.h$',
+        r'^time\.h$',
+    ]
+
-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies
@@ -29,7 +33,35 @@ class CppParser(TokenParser):
     def _process_preproc(self, token, content):
         if content.strip().startswith('include ') or content.strip().startswith("include\t"):
-            content = content.replace('include', '', 1).strip()
+            content = content.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
             self.append(content)

     def _process_other(self, token, content):
         pass
+
+
+class CParser(TokenParser):
+    exclude = [
+        r'^stdio\.h$',
+        r'^stdlib\.h$',
+        r'^string\.h$',
+        r'^time\.h$',
+    ]
+
+    def parse(self):
+        for index, token, content in self.tokens:
+            self._process_token(token, content)
+        return self.dependencies
+
+    def _process_token(self, token, content):
+        if self.partial(token) == 'Preproc':
+            self._process_preproc(token, content)
+        else:
+            self._process_other(token, content)
+
+    def _process_preproc(self, token, content):
+        if content.strip().startswith('include ') or content.strip().startswith("include\t"):
+            content = content.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
+            self.append(content)
+
+    def _process_other(self, token, content):
+        pass
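The lengthened strip chain is what reduces an include directive to a bare header path. A quick trace of the string operations above:

content = 'include <openssl/rand.h>'
content = content.replace('include', '', 1).strip().strip('"').strip('<').strip('>').strip()
print(content)  # -> 'openssl/rand.h'

The tests expect the dependency 'openssl', so append() presumably truncates at the first '/' (its truncate/separator parameters are visible in the base class above), while stdio.h never survives the exclude patterns.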

wakatime/languages/data.py

@@ -26,10 +26,8 @@ class JsonParser(TokenParser):
     state = None
     level = 0

-    def parse(self, tokens=[]):
+    def parse(self):
         self._process_file_name(os.path.basename(self.source_file))
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies

wakatime/languages/dotnet.py

@@ -14,9 +14,7 @@ from . import TokenParser
 class CSharpParser(TokenParser):

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies

wakatime/languages/jvm.py

@@ -25,9 +25,7 @@ class JavaParser(TokenParser):
     state = None
     buffer = u('')

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies

wakatime/languages/php.py

@@ -17,9 +17,7 @@ class PhpParser(TokenParser):
     state = None
     parens = 0

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies

wakatime/languages/python.py

@@ -16,10 +16,12 @@ class PythonParser(TokenParser):
     state = None
     parens = 0
     nonpackage = False
+    exclude = [
+        r'^os$',
+        r'^sys\.',
+    ]

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies
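These two patterns are why 'os' left the expected Python dependencies in the test hunk above. TokenParser.__init__ compiles exclude case-insensitively; how the compiled patterns are applied is not visible in this diff, so re.search is an assumption in this sketch:

import re

exclude = [re.compile(x, re.IGNORECASE) for x in [r'^os$', r'^sys\.']]

for dep in ['os', 'sys.path', 'flask']:
    excluded = any(pattern.search(dep) for pattern in exclude)
    print(dep, 'excluded' if excluded else 'kept')
# os -> excluded, sys.path -> excluded, flask -> kept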

wakatime/languages/templates.py

@@ -71,9 +71,7 @@ KEYWORDS = [
 class LassoJavascriptParser(TokenParser):

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies
@@ -99,9 +97,7 @@ class HtmlDjangoParser(TokenParser):
     current_attr = None
     current_attr_value = None

-    def parse(self, tokens=[]):
-        if not tokens and not self.tokens:
-            self.tokens = self._extract_tokens()
+    def parse(self):
         for index, token, content in self.tokens:
             self._process_token(token, content)
         return self.dependencies

wakatime/languages/unknown.py

@@ -22,7 +22,7 @@ FILES = {
 class UnknownParser(TokenParser):

-    def parse(self, tokens=[]):
+    def parse(self):
         self._process_file_name(os.path.basename(self.source_file))
         return self.dependencies

wakatime/stats.py

@@ -35,11 +35,8 @@ def guess_language(file_name):
     """
     language = get_language_from_extension(file_name)
+    if language:
+        return language, None

     lexer = smart_guess_lexer(file_name)
     if lexer:
         if language is None:
             language = u(lexer.name)

     return language, lexer
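The net effect is a short-circuit: a recognized file extension now decides the language outright and skips lexer guessing entirely, which also means no lexer object is returned in that case. A hypothetical session (the actual extension map lives in get_language_from_extension):

language, lexer = guess_language('main.c')
# extension recognized: returns ('C', None) without building a lexer

language, lexer = guess_language('script.xyz')
# unknown extension: falls through to smart_guess_lexer(), and the
# language is taken from the lexer's name only when still undetermined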