forked from luna/vim-rana-local
upgrade wakatime-cli to v10.6.1
parent fe33dfaf90
commit 1596b04567
5 changed files with 28 additions and 31 deletions
@@ -1,7 +1,7 @@
 __title__ = 'wakatime'
 __description__ = 'Common interface to the WakaTime api.'
 __url__ = 'https://github.com/wakatime/wakatime'
-__version_info__ = ('10', '4', '2')
+__version_info__ = ('10', '6', '1')
 __version__ = '.'.join(__version_info__)
 __author__ = 'Alan Hamlett'
 __author_email__ = 'alan@wakatime.com'
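
The release string is built from this tuple on the following line, so the bump renders as "10.6.1". A quick check of the join:

    # the version string is just the tuple joined with dots
    __version_info__ = ('10', '6', '1')
    __version__ = '.'.join(__version_info__)
    assert __version__ == '10.6.1'
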
@@ -89,8 +89,8 @@ def parse_arguments():
                         help='Category of this heartbeat activity. Can be ' +
                              '"coding", "building", "indexing", ' +
                              '"debugging", "running tests", ' +
-                             '"manual testing", "browsing", ' +
-                             '"code reviewing" or "designing". ' +
+                             '"writing tests", "manual testing", ' +
+                             '"code reviewing", "browsing", or "designing". ' +
                              'Defaults to "coding".')
     parser.add_argument('--proxy', dest='proxy', action=StoreWithoutQuotes,
                         help='Optional proxy configuration. Supports HTTPS '+
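
The help text above documents the values accepted by the --category flag. As a rough illustration of how such a flag behaves, here is a minimal stdlib argparse sketch; it is not the project's parse_arguments() (which also uses the custom StoreWithoutQuotes action), only the general shape:

    # minimal sketch with plain argparse; wakatime's real parser adds a
    # custom action and many more options, this only shows the flag's shape.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--category', default='coding',
                        help='Category of this heartbeat activity. '
                             'Defaults to "coding".')

    args = parser.parse_args(['--category', 'writing tests'])
    assert args.category == 'writing tests'
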
@@ -275,7 +275,7 @@ def parse_arguments():
         except TypeError:  # pragma: nocover
             pass
     if not args.include_only_with_project_file and configs.has_option('settings', 'include_only_with_project_file'):
-        args.include_only_with_project_file = configs.get('settings', 'include_only_with_project_file')
+        args.include_only_with_project_file = configs.get('settings', 'include_only_with_project_file') == 'true'
     if not args.include:
         args.include = []
         if configs.has_option('settings', 'include'):
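
The appended == 'true' is the substantive fix here: ConfigParser.get() returns the option's raw string, and any non-empty string, including 'false', is truthy, so the old assignment effectively enabled the setting whenever it appeared in the config file. A minimal sketch of the difference (Python 3 configparser; the section and option names mirror the hunk):

    # ConfigParser.get() always returns a string, so comparing against
    # 'true' is what turns the setting into a real boolean.
    import configparser

    configs = configparser.ConfigParser()
    configs.read_string("[settings]\ninclude_only_with_project_file = false\n")

    raw = configs.get('settings', 'include_only_with_project_file')
    assert raw == 'false' and bool(raw)      # old assignment: truthy string

    flag = configs.get('settings', 'include_only_with_project_file') == 'true'
    assert flag is False                     # new assignment: proper boolean
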
@@ -53,4 +53,4 @@ DEFAULT_SYNC_OFFLINE_ACTIVITY = 100
 Even when sending more heartbeats, this is the number of heartbeats sent per
 individual https request to the WakaTime API.
 """
-HEARTBEATS_PER_REQUEST = 50
+HEARTBEATS_PER_REQUEST = 25
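
HEARTBEATS_PER_REQUEST caps how many queued heartbeats are bundled into one bulk API request, so lowering it from 50 to 25 means the same backlog is synced in more, smaller requests. The chunking below only illustrates what the constant controls; it is not wakatime's actual sync code:

    # illustrative batching only; the real sync loop lives in the offline
    # queue handling.
    HEARTBEATS_PER_REQUEST = 25

    def batches(heartbeats, size=HEARTBEATS_PER_REQUEST):
        """Yield heartbeats in groups no larger than one API request allows."""
        for start in range(0, len(heartbeats), size):
            yield heartbeats[start:start + size]

    queued = list(range(60))            # pretend 60 offline heartbeats
    assert [len(b) for b in batches(queued)] == [25, 25, 10]
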
@@ -70,6 +70,7 @@ class Heartbeat(object):
         'debugging',
         'running tests',
         'manual testing',
+        'writing tests',
         'browsing',
         'code reviewing',
         'designing',
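
This list is what the new "writing tests" value is checked against; combined with the --category help text above, the full set appears to be the ten categories below. A hypothetical membership check for illustration, not the Heartbeat class's actual sanitization:

    # hypothetical helper; the category list is read off this diff plus the
    # --category help text, and the fallback to 'coding' is an assumption.
    VALID_CATEGORIES = (
        'coding', 'building', 'indexing', 'debugging', 'running tests',
        'manual testing', 'writing tests', 'browsing', 'code reviewing',
        'designing',
    )

    def sanitize_category(category):
        return category if category in VALID_CATEGORIES else 'coding'

    assert sanitize_category('writing tests') == 'writing tests'
    assert sanitize_category('made up') == 'coding'
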
@@ -41,31 +41,28 @@ log = logging.getLogger('WakaTime')

 def get_file_stats(file_name, entity_type='file', lineno=None, cursorpos=None,
                    plugin=None, language=None, local_file=None):
-    if entity_type != 'file':
+    """Returns a hash of information about the entity."""
+
+    language = standardize_language(language, plugin)
     stats = {
-        'language': None,
+        'language': language,
         'dependencies': [],
         'lines': None,
         'lineno': lineno,
         'cursorpos': cursorpos,
     }
-    else:
-        language, lexer = standardize_language(language, plugin)
+
+    if entity_type == 'file':
+        lexer = get_lexer(language)
         if not language:
             language, lexer = guess_language(file_name, local_file)
-
-        language = use_root_language(language, lexer)
-
         parser = DependencyParser(local_file or file_name, lexer)
-        dependencies = parser.parse()
-
-        stats = {
-            'language': language,
-            'dependencies': dependencies,
+        stats.update({
+            'language': use_root_language(language, lexer),
+            'dependencies': parser.parse(),
             'lines': number_lines_in_file(local_file or file_name),
             'lineno': lineno,
             'cursorpos': cursorpos,
-        }
+        })

     return stats
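
Read together, the added lines restructure get_file_stats() to build a default stats dict up front and only overwrite the language and dependency fields when the entity is a file. Assembled from the + lines above (standardize_language, get_lexer, guess_language, use_root_language, DependencyParser and number_lines_in_file all come from the surrounding module), the new function reads roughly like this:

    # assembled from the patch; the helper functions are defined elsewhere
    # in the same module.
    def get_file_stats(file_name, entity_type='file', lineno=None, cursorpos=None,
                       plugin=None, language=None, local_file=None):
        """Returns a hash of information about the entity."""

        language = standardize_language(language, plugin)
        stats = {
            'language': language,
            'dependencies': [],
            'lines': None,
            'lineno': lineno,
            'cursorpos': cursorpos,
        }

        if entity_type == 'file':
            lexer = get_lexer(language)
            if not language:
                language, lexer = guess_language(file_name, local_file)
            parser = DependencyParser(local_file or file_name, lexer)
            stats.update({
                'language': use_root_language(language, lexer),
                'dependencies': parser.parse(),
                'lines': number_lines_in_file(local_file or file_name),
                'lineno': lineno,
                'cursorpos': cursorpos,
            })

        return stats
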
@@ -222,22 +219,21 @@ def number_lines_in_file(file_name):
 def standardize_language(language, plugin):
     """Maps a string to the equivalent Pygments language.

-    Returns a tuple of (language_str, lexer_obj).
+    Returns the standardized language string.
     """

     if not language:
-        return None, None
+        return None

     # standardize language for this plugin
     if plugin:
         plugin = plugin.split(' ')[-1].split('/')[0].split('-')[0]
         standardized = get_language_from_json(language, plugin)
         if standardized is not None:
-            return standardized, get_lexer(standardized)
+            return standardized

     # standardize language against default languages
-    standardized = get_language_from_json(language, 'default')
-    return standardized, get_lexer(standardized)
+    return get_language_from_json(language, 'default')


 def get_lexer(language):
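
This return-type change pairs with the get_file_stats() hunk above: standardize_language() now hands back only the language string, and the caller fetches the lexer itself via get_lexer(). A small usage sketch of the new contract (it assumes the wakatime package is importable; the language and plugin strings are just examples):

    # usage sketch of the new single-value return; names below are examples.
    from wakatime.stats import get_lexer, standardize_language

    language = standardize_language('TypeScript', 'vim/8.1 vim-wakatime/4.0.0')
    if language is not None:           # None when the name cannot be mapped
        lexer = get_lexer(language)    # lexer lookup now happens at the call site
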