diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py
index 6bb9d2e..f8c2418 100644
--- a/tests/test_dependencies.py
+++ b/tests/test_dependencies.py
@@ -50,6 +50,20 @@ class DependenciesTestCase(utils.TestCase):
             parser.tokens
             mock_extract_tokens.assert_called_once_with()
 
+        parser = TokenParser(None)
+        parser.append('one.two.three', truncate=True, truncate_to=1)
+        parser.append('one.two.three', truncate=True, truncate_to=2)
+        parser.append('one.two.three', truncate=True, truncate_to=3)
+        parser.append('one.two.three', truncate=True, truncate_to=4)
+
+        expected = [
+            'one',
+            'one.two',
+            'one.two.three',
+            'one.two.three',
+        ]
+        self.assertEquals(parser.dependencies, expected)
+
     def test_io_error_when_parsing_dependencies(self):
         response = Response()
         response.status_code = 0
diff --git a/wakatime/dependencies/__init__.py b/wakatime/dependencies/__init__.py
index b02e70c..7a1e972 100644
--- a/wakatime/dependencies/__init__.py
+++ b/wakatime/dependencies/__init__.py
@@ -68,7 +68,7 @@ class TokenParser(object):
                 pass
             try:
                 with open(self.source_file, 'r', encoding=sys.getfilesystemencoding()) as fh:
-                    return self.lexer.get_tokens_unprocessed(fh.read(512000))
+                    return self.lexer.get_tokens_unprocessed(fh.read(512000))  # pragma: nocover
             except:
                 pass
         return []
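
For reference, the new assertions pin down the truncate_to behavior of TokenParser.append as seen from the expected list: a dotted dependency name is split on the separator and only the first truncate_to components are kept, capped at the number of components actually present. Below is a minimal sketch of that rule, not the wakatime implementation itself; truncate_dependency is a hypothetical helper name used only for illustration.

# Sketch of the truncation rule exercised by the test above.
# truncate_dependency is a hypothetical name; the real logic lives inside TokenParser.
def truncate_dependency(dep, truncate_to, separator='.'):
    parts = dep.split(separator)
    # Keep at most truncate_to components; a larger truncate_to is capped at what exists.
    return separator.join(parts[:truncate_to])

assert truncate_dependency('one.two.three', 1) == 'one'
assert truncate_dependency('one.two.three', 2) == 'one.two'
assert truncate_dependency('one.two.three', 3) == 'one.two.three'
assert truncate_dependency('one.two.three', 4) == 'one.two.three'  # capped, matching the expected list

The "# pragma: nocover" comment added in the second hunk tells coverage.py to exclude that filesystem-encoding fallback line from coverage reporting, since it is hard to hit in the test suite.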