Add support for detecting Haxe dependencies
This commit is contained in:
parent
fcdc60afa1
commit
882dc7e4b2
3 changed files with 80 additions and 0 deletions
18
tests/samples/codefiles/haxe.hx
Normal file
18
tests/samples/codefiles/haxe.hx
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
import alpha.ds.StringMap;
import bravo.macro.*;
import Math.random;
#if js
js.Browser.alert("Hello"); // only compiled for the js target
#elseif sys
Sys.println("Hello"); // only compiled for sys targets
#end
import charlie.fromCharCode in f; // aliased import ("in" renames the symbol)
import delta.something;
import delta.another.thing;

class Main {
    static public function main() {
        // instead of: new haxe.ds.StringMap();
        new StringMap();
    }
}
|
|
@ -462,3 +462,17 @@ class DependenciesTestCase(TestCase):
|
||||||
expected_lines=24,
|
expected_lines=24,
|
||||||
entity='kotlin.kt',
|
entity='kotlin.kt',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
    def test_haxe_dependencies_detected(self):
        """Parsing the Haxe sample file yields the expected top-level dependencies.

        Uses the shared helper with the 18-line ``haxe.hx`` fixture; only the
        first namespace segment of each import is expected (e.g. ``delta`` once
        for both ``delta.something`` and ``delta.another.thing``).
        """
        self.shared(
            expected_dependencies=[
                'alpha',
                'bravo',
                'Math',
                'charlie',
                'delta',
            ],
            expected_language='Haxe',
            expected_lines=18,
            entity='haxe.hx',
        )
|
||||||
|
|
48
wakatime/dependencies/haxe.py
Normal file
48
wakatime/dependencies/haxe.py
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
wakatime.dependencies.haxe
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Parse dependencies from Haxe code.
|
||||||
|
|
||||||
|
:copyright: (c) 2018 Alan Hamlett.
|
||||||
|
:license: BSD, see LICENSE for more details.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from . import TokenParser
|
||||||
|
|
||||||
|
|
||||||
|
class HaxeParser(TokenParser):
    """Extract imported module names from Haxe source code.

    Walks the lexer token stream and records the namespace token that
    immediately follows an ``import`` namespace token.
    """

    exclude = [
        r'^haxe$',  # the Haxe standard library itself is not a dependency
    ]
    # Text of the most recently seen Namespace token; reset by any
    # unrelated token so only direct "import <name>" pairs are captured.
    state = None

    def parse(self):
        """Consume every token and return the collected dependency list."""
        for _index, tok, text in self.tokens:
            self._process_token(tok, text)
        return self.dependencies

    def _process_token(self, token, content):
        # Dispatch on the coarse (partial) token category.
        category = self.partial(token)
        if category == 'Namespace':
            self._process_namespace(token, content)
        elif category == 'Text':
            self._process_text(token, content)
        else:
            self._process_other(token, content)

    def _process_namespace(self, token, content):
        # A namespace token directly after "import" is the dependency name.
        if self.state == 'import':
            self.append(self._format(content))
            self.state = None
        else:
            # Remember this token; it may be the "import" keyword.
            self.state = content

    def _process_text(self, token, content):
        # Whitespace between tokens: leave the current state untouched.
        pass

    def _process_other(self, token, content):
        # Any other token breaks a pending "import <name>" sequence.
        self.state = None

    def _format(self, content):
        # Normalize the captured name by trimming surrounding whitespace.
        return content.strip()
|
Loading…
Reference in a new issue