sending file stats (total lines in file, language name) to api
This commit is contained in:
parent
0707e2dff0
commit
934cc51a6c
79 changed files with 48613 additions and 2 deletions
240
wakatime/packages/pygments/lexers/__init__.py
Normal file
240
wakatime/packages/pygments/lexers/__init__.py
Normal file
|
@ -0,0 +1,240 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Pygments lexers.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import types
|
||||
import fnmatch
|
||||
from os.path import basename
|
||||
|
||||
from pygments.lexers._mapping import LEXERS
|
||||
from pygments.modeline import get_filetype_from_buffer
|
||||
from pygments.plugin import find_plugin_lexers
|
||||
from pygments.util import ClassNotFound, bytes
|
||||
|
||||
|
||||
# Public helper functions plus every builtin lexer class name; the lexer
# classes themselves are provided lazily by the _automodule shim installed
# at the bottom of this module.
# list(LEXERS) (the keys) is used instead of LEXERS.keys() so that the
# concatenation also works on Python 3, where keys() returns a view object
# that cannot be added to a list.
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
           'guess_lexer'] + list(LEXERS)

# Cache mapping lexer name -> lexer class, filled lazily by _load_lexers().
_lexer_cache = {}
|
||||
|
||||
|
||||
def _load_lexers(module_name):
    """
    Load a lexer (and all others in the module too).
    """
    module = __import__(module_name, None, None, ['__all__'])
    for exported_name in module.__all__:
        lexer_class = getattr(module, exported_name)
        _lexer_cache[lexer_class.name] = lexer_class
|
||||
|
||||
|
||||
def get_all_lexers():
    """
    Return a generator of tuples in the form ``(name, aliases,
    filenames, mimetypes)`` of all known lexers.
    """
    # LEXERS values are (module, name, aliases, filenames, mimetypes)
    # tuples; drop the module component.  values() is used instead of the
    # Python-2-only itervalues() so this also runs on Python 3.
    for item in LEXERS.values():
        yield item[1:]
    # Lexers registered through setuptools entrypoints come last.
    for lexer in find_plugin_lexers():
        yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
|
||||
|
||||
|
||||
def find_lexer_class(name):
    """
    Lookup a lexer class by name (e.g. ``'Python'``).

    Returns None if no lexer with that name is found.
    """
    if name in _lexer_cache:
        return _lexer_cache[name]
    # lookup builtin lexers; values() instead of the Python-2-only
    # itervalues() so this also runs on Python 3
    for module_name, lname, _, _, _ in LEXERS.values():
        if name == lname:
            _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if cls.name == name:
            return cls
|
||||
|
||||
|
||||
def get_lexer_by_name(_alias, **options):
    """
    Get a lexer instance by an alias (e.g. ``'python'``).

    ``options`` are passed to the lexer constructor.
    Raises ClassNotFound if no lexer with that alias is found.
    """
    # lookup builtin lexers; values() instead of the Python-2-only
    # itervalues() so this also runs on Python 3
    for module_name, name, aliases, _, _ in LEXERS.values():
        if _alias in aliases:
            if name not in _lexer_cache:
                _load_lexers(module_name)
            return _lexer_cache[name](**options)
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if _alias in cls.aliases:
            return cls(**options)
    raise ClassNotFound('no lexer for alias %r found' % _alias)
|
||||
|
||||
|
||||
def get_lexer_for_filename(_fn, code=None, **options):
    """
    Get a lexer for a filename.  If multiple lexers match the filename
    pattern, use ``analyse_text()`` to figure out which one is more
    appropriate.

    Raises ClassNotFound if no lexer matches the filename.
    """
    matches = []
    fn = basename(_fn)
    # Collect every builtin lexer with a matching filename pattern.
    # values() instead of the Python-2-only itervalues() so this also
    # runs on Python 3.
    for modname, name, _, filenames, _ in LEXERS.values():
        for filename in filenames:
            if fnmatch.fnmatch(fn, filename):
                if name not in _lexer_cache:
                    _load_lexers(modname)
                matches.append((_lexer_cache[name], filename))
    # ... and every matching plugin lexer.
    for cls in find_plugin_lexers():
        for filename in cls.filenames:
            if fnmatch.fnmatch(fn, filename):
                matches.append((cls, filename))

    if sys.version_info > (3,) and isinstance(code, bytes):
        # decode it, since all analyse_text functions expect unicode
        code = code.decode('latin1')

    def get_rating(info):
        cls, filename = info
        # explicit patterns (without a wildcard) get a bonus
        bonus = 0.5 if '*' not in filename else 0
        # The class _always_ defines analyse_text because it's included in
        # the Lexer class.  The default implementation returns None which
        # gets turned into 0.0.  Run scripts/detect_missing_analyse_text.py
        # to find lexers which need it overridden.
        if code:
            return cls.analyse_text(code) + bonus
        return cls.priority + bonus

    if matches:
        # sort is stable, so ties keep the original discovery order and the
        # highest-rated match ends up last
        matches.sort(key=get_rating)
        return matches[-1][0](**options)
    raise ClassNotFound('no lexer for filename %r found' % _fn)
|
||||
|
||||
|
||||
def get_lexer_for_mimetype(_mime, **options):
    """
    Get a lexer for a mimetype (e.g. ``'text/x-python'``).

    Raises ClassNotFound if no lexer declares that mimetype.
    """
    # values() instead of the Python-2-only itervalues() so this also
    # runs on Python 3
    for modname, name, _, _, mimetypes in LEXERS.values():
        if _mime in mimetypes:
            if name not in _lexer_cache:
                _load_lexers(modname)
            return _lexer_cache[name](**options)
    for cls in find_plugin_lexers():
        if _mime in cls.mimetypes:
            return cls(**options)
    raise ClassNotFound('no lexer for mimetype %r found' % _mime)
|
||||
|
||||
|
||||
def _iter_lexerclasses():
    """
    Yield every known lexer class: builtins first, in sorted key order,
    followed by plugin-provided lexers.
    """
    for key in sorted(LEXERS):
        entry = LEXERS[key]
        module_name, lexer_name = entry[0], entry[1]
        if lexer_name not in _lexer_cache:
            _load_lexers(module_name)
        yield _lexer_cache[lexer_name]
    for plugin_lexer in find_plugin_lexers():
        yield plugin_lexer
|
||||
|
||||
|
||||
def guess_lexer_for_filename(_fn, _text, **options):
    """
    Lookup all lexers that handle those filenames primary (``filenames``)
    or secondary (``alias_filenames``). Then run a text analysis for those
    lexers and choose the best result.

    usage::

        >>> from pygments.lexers import guess_lexer_for_filename
        >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
        <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
        >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
        <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
        >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
        <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
    """
    fn = basename(_fn)
    # `primary` remembers the LAST lexer whose primary `filenames` pattern
    # matched; it is used as a fallback when text analysis is inconclusive.
    primary = None
    matching_lexers = set()
    for lexer in _iter_lexerclasses():
        for filename in lexer.filenames:
            if fnmatch.fnmatch(fn, filename):
                matching_lexers.add(lexer)
                primary = lexer
        # secondary patterns only make the lexer a candidate, not primary
        for filename in lexer.alias_filenames:
            if fnmatch.fnmatch(fn, filename):
                matching_lexers.add(lexer)
    if not matching_lexers:
        raise ClassNotFound('no lexer for filename %r found' % fn)
    if len(matching_lexers) == 1:
        # unambiguous: skip text analysis entirely
        return matching_lexers.pop()(**options)
    result = []
    for lexer in matching_lexers:
        rv = lexer.analyse_text(_text)
        if rv == 1.0:
            # a perfect score is definitive
            return lexer(**options)
        result.append((rv, lexer))
    # Sort (score, lexer_class) tuples so the best score ends up last.
    # NOTE(review): on tied scores this compares the class objects
    # themselves, which only works under Python 2's arbitrary ordering;
    # on Python 3 a tie would raise TypeError — confirm intended runtime.
    result.sort()
    if not result[-1][0] and primary is not None:
        # no candidate scored above zero: fall back to the primary match
        return primary(**options)
    return result[-1][1](**options)
|
||||
|
||||
|
||||
def guess_lexer(_text, **options):
    """
    Guess a lexer by strong distinctions in the text (eg, shebang).
    """
    # A vim modeline, if present, names the filetype outright -- try that
    # before falling back to content analysis.
    ft = get_filetype_from_buffer(_text)
    if ft is not None:
        try:
            return get_lexer_by_name(ft, **options)
        except ClassNotFound:
            pass

    best_score = 0.0
    best_cls = None
    for candidate in _iter_lexerclasses():
        score = candidate.analyse_text(_text)
        if score == 1.0:
            # a perfect score is an unambiguous match; stop searching
            return candidate(**options)
        if score > best_score:
            best_score = score
            best_cls = candidate
    if not best_score or best_cls is None:
        raise ClassNotFound('no lexer matching the text found')
    return best_cls(**options)
|
||||
|
||||
|
||||
class _automodule(types.ModuleType):
    """Module subclass that imports lexer classes lazily on attribute access."""

    def __getattr__(self, name):
        entry = LEXERS.get(name)
        if not entry:
            raise AttributeError(name)
        module_name, lexer_name = entry[0], entry[1]
        _load_lexers(module_name)
        lexer_cls = _lexer_cache[lexer_name]
        # Cache on the instance so __getattr__ is not invoked again
        # for the same name.
        setattr(self, name, lexer_cls)
        return lexer_cls
|
||||
|
||||
|
||||
# Replace this module in sys.modules with an _automodule instance so that
# the lexer classes listed in LEXERS are imported lazily on first attribute
# access instead of eagerly at import time.
oldmod = sys.modules['pygments.lexers']
newmod = _automodule('pygments.lexers')
# Copy everything already defined here (functions, LEXERS, caches, and --
# since this module is currently executing -- oldmod/newmod themselves)
# onto the replacement module.
newmod.__dict__.update(oldmod.__dict__)
sys.modules['pygments.lexers'] = newmod
# Drop the self-references and helper modules that were copied over so
# they don't leak into the replacement module's public namespace.
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
|
1645
wakatime/packages/pygments/lexers/_asybuiltins.py
Normal file
1645
wakatime/packages/pygments/lexers/_asybuiltins.py
Normal file
File diff suppressed because it is too large
Load diff
232
wakatime/packages/pygments/lexers/_clbuiltins.py
Normal file
232
wakatime/packages/pygments/lexers/_clbuiltins.py
Normal file
|
@ -0,0 +1,232 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._clbuiltins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
ANSI Common Lisp builtins.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
# Name tables used by the Common Lisp lexer to classify tokens.  These are
# plain data; keep them sorted roughly alphabetically when editing.
BUILTIN_FUNCTIONS = [ # 638 functions
    '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
    'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
    'adjustable-array-p', 'adjust-array', 'allocate-instance',
    'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
    'apropos-list', 'aref', 'arithmetic-error-operands',
    'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
    'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
    'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
    'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
    'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
    'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
    'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
    'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
    'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
    'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
    'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
    'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
    'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
    'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
    'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
    'characterp', 'char-code', 'char-downcase', 'char-equal',
    'char-greaterp', 'char-int', 'char-lessp', 'char-name',
    'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
    'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
    'close', 'clrhash', 'code-char', 'coerce', 'compile',
    'compiled-function-p', 'compile-file', 'compile-file-pathname',
    'compiler-macro-function', 'complement', 'complex', 'complexp',
    'compute-applicable-methods', 'compute-restarts', 'concatenate',
    'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
    'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
    'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
    'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
    'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
    'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
    'delete-package', 'denominator', 'deposit-field', 'describe',
    'describe-object', 'digit-char', 'digit-char-p', 'directory',
    'directory-namestring', 'disassemble', 'documentation', 'dpb',
    'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
    'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
    'enough-namestring', 'ensure-directories-exist',
    'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
    'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
    'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
    'file-error-pathname', 'file-length', 'file-namestring',
    'file-position', 'file-string-length', 'file-write-date',
    'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
    'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
    'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
    'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
    'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
    'fround', 'ftruncate', 'funcall', 'function-keywords',
    'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
    'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
    'gethash', 'get-internal-real-time', 'get-internal-run-time',
    'get-macro-character', 'get-output-stream-string', 'get-properties',
    'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
    'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
    'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
    'host-namestring', 'identity', 'imagpart', 'import',
    'initialize-instance', 'input-stream-p', 'inspect',
    'integer-decode-float', 'integer-length', 'integerp',
    'interactive-stream-p', 'intern', 'intersection',
    'invalid-method-error', 'invoke-debugger', 'invoke-restart',
    'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
    'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
    'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
    'listen', 'list-length', 'listp', 'load',
    'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
    'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
    'logical-pathname-translations', 'logior', 'lognand', 'lognor',
    'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
    'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
    'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
    'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
    'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
    'make-instance', 'make-instances-obsolete', 'make-list',
    'make-load-form', 'make-load-form-saving-slots', 'make-package',
    'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
    'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
    'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
    'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
    'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
    'merge', 'merge-pathnames', 'method-combination-error',
    'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
    'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
    'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
    'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
    'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
    'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
    'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
    'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
    'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
    'package-name', 'package-nicknames', 'packagep',
    'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
    'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
    'pathname-device', 'pathname-directory', 'pathname-host',
    'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
    'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
    'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
    'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
    'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
    'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
    'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
    'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
    'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
    'read-from-string', 'read-line', 'read-preserving-whitespace',
    'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
    'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
    'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
    'remprop', 'rename-file', 'rename-package', 'replace', 'require',
    'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
    'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
    'search', 'second', 'set', 'set-difference',
    'set-dispatch-macro-character', 'set-exclusive-or',
    'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
    'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
    'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
    'simple-condition-format-arguments', 'simple-condition-format-control',
    'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
    'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
    'slot-unbound', 'slot-value', 'software-type', 'software-version',
    'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
    'standard-char-p', 'store-value', 'stream-element-type',
    'stream-error-stream', 'stream-external-format', 'streamp', 'string',
    'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
    'string-capitalize', 'string-downcase', 'string-equal',
    'string-greaterp', 'string-left-trim', 'string-lessp',
    'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
    'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
    'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
    'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
    'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
    'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
    'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
    'translate-logical-pathname', 'translate-pathname', 'tree-equal',
    'truename', 'truncate', 'two-way-stream-input-stream',
    'two-way-stream-output-stream', 'type-error-datum',
    'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
    'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
    'update-instance-for-different-class',
    'update-instance-for-redefined-class', 'upgraded-array-element-type',
    'upgraded-complex-part-type', 'upper-case-p', 'use-package',
    'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
    'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
    'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
    'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
    'y-or-n-p', 'zerop',
    ]

# The 25 ANSI CL special operators (evaluated by the compiler, not callable).
SPECIAL_FORMS = [
    'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
    'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
    'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
    'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
    'unwind-protect',
    ]

# Standard macros defined by ANSI CL.
MACROS = [
    'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
    'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
    'define-compiler-macro', 'define-condition', 'define-method-combination',
    'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
    'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
    'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
    'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
    'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
    'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
    'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
    'multiple-value-setq', 'nth-value', 'or', 'pop',
    'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
    'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
    'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
    'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
    'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
    'with-condition-restarts', 'with-hash-table-iterator',
    'with-input-from-string', 'with-open-file', 'with-open-stream',
    'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
    'with-slots', 'with-standard-io-syntax',
    ]

# Markers used in lambda lists (parameter lists).
LAMBDA_LIST_KEYWORDS = [
    '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
    '&rest', '&whole',
    ]

# Identifiers valid inside (declare ...) forms.
DECLARATIONS = [
    'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
    'ignorable', 'notinline', 'type',
    ]

# Type specifiers that are not also classes (see BUILTIN_CLASSES below).
BUILTIN_TYPES = [
    'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
    'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
    'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
    'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
    'simple-vector', 'standard-char', 'unsigned-byte',

    # Condition Types
    'arithmetic-error', 'cell-error', 'condition', 'control-error',
    'division-by-zero', 'end-of-file', 'error', 'file-error',
    'floating-point-inexact', 'floating-point-overflow',
    'floating-point-underflow', 'floating-point-invalid-operation',
    'parse-error', 'package-error', 'print-not-readable', 'program-error',
    'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
    'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
    'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
    'undefined-function', 'warning',
    ]

# System classes defined by ANSI CL.
BUILTIN_CLASSES = [
    'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
    'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
    'file-stream', 'float', 'function', 'generic-function', 'hash-table',
    'integer', 'list', 'logical-pathname', 'method-combination', 'method',
    'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
    'real', 'random-state', 'restart', 'sequence', 'standard-class',
    'standard-generic-function', 'standard-method', 'standard-object',
    'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
    'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
    ]
|
5172
wakatime/packages/pygments/lexers/_lassobuiltins.py
Normal file
5172
wakatime/packages/pygments/lexers/_lassobuiltins.py
Normal file
File diff suppressed because it is too large
Load diff
249
wakatime/packages/pygments/lexers/_luabuiltins.py
Normal file
249
wakatime/packages/pygments/lexers/_luabuiltins.py
Normal file
|
@ -0,0 +1,249 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._luabuiltins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This file contains the names and modules of lua functions
|
||||
It is able to re-generate itself, but for adding new functions you
|
||||
probably have to add some callbacks (see function module_callbacks).
|
||||
|
||||
Do not edit the MODULES dict by hand.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
# Mapping of Lua module name -> list of builtin function names, used by the
# Lua lexer.  Auto-generated by the __main__ block below from the online
# Lua reference manual -- do not edit by hand (see the module docstring).
MODULES = {'basic': ['_G',
                     '_VERSION',
                     'assert',
                     'collectgarbage',
                     'dofile',
                     'error',
                     'getfenv',
                     'getmetatable',
                     'ipairs',
                     'load',
                     'loadfile',
                     'loadstring',
                     'next',
                     'pairs',
                     'pcall',
                     'print',
                     'rawequal',
                     'rawget',
                     'rawset',
                     'select',
                     'setfenv',
                     'setmetatable',
                     'tonumber',
                     'tostring',
                     'type',
                     'unpack',
                     'xpcall'],
           'coroutine': ['coroutine.create',
                         'coroutine.resume',
                         'coroutine.running',
                         'coroutine.status',
                         'coroutine.wrap',
                         'coroutine.yield'],
           'debug': ['debug.debug',
                     'debug.getfenv',
                     'debug.gethook',
                     'debug.getinfo',
                     'debug.getlocal',
                     'debug.getmetatable',
                     'debug.getregistry',
                     'debug.getupvalue',
                     'debug.setfenv',
                     'debug.sethook',
                     'debug.setlocal',
                     'debug.setmetatable',
                     'debug.setupvalue',
                     'debug.traceback'],
           'io': ['io.close',
                  'io.flush',
                  'io.input',
                  'io.lines',
                  'io.open',
                  'io.output',
                  'io.popen',
                  'io.read',
                  'io.tmpfile',
                  'io.type',
                  'io.write'],
           'math': ['math.abs',
                    'math.acos',
                    'math.asin',
                    'math.atan2',
                    'math.atan',
                    'math.ceil',
                    'math.cosh',
                    'math.cos',
                    'math.deg',
                    'math.exp',
                    'math.floor',
                    'math.fmod',
                    'math.frexp',
                    'math.huge',
                    'math.ldexp',
                    'math.log10',
                    'math.log',
                    'math.max',
                    'math.min',
                    'math.modf',
                    'math.pi',
                    'math.pow',
                    'math.rad',
                    'math.random',
                    'math.randomseed',
                    'math.sinh',
                    'math.sin',
                    'math.sqrt',
                    'math.tanh',
                    'math.tan'],
           'modules': ['module',
                       'require',
                       'package.cpath',
                       'package.loaded',
                       'package.loadlib',
                       'package.path',
                       'package.preload',
                       'package.seeall'],
           'os': ['os.clock',
                  'os.date',
                  'os.difftime',
                  'os.execute',
                  'os.exit',
                  'os.getenv',
                  'os.remove',
                  'os.rename',
                  'os.setlocale',
                  'os.time',
                  'os.tmpname'],
           'string': ['string.byte',
                      'string.char',
                      'string.dump',
                      'string.find',
                      'string.format',
                      'string.gmatch',
                      'string.gsub',
                      'string.len',
                      'string.lower',
                      'string.match',
                      'string.rep',
                      'string.reverse',
                      'string.sub',
                      'string.upper'],
           'table': ['table.concat',
                     'table.insert',
                     'table.maxn',
                     'table.remove',
                     'table.sort']}
|
||||
|
||||
# Self-regeneration script: scrapes the online Lua reference manual and
# rewrites the MODULES dict above in place.
# NOTE(review): this block is Python 2 only (print statements,
# urllib.urlopen, dict.iteritems) -- run it with a Python 2 interpreter.
if __name__ == '__main__':
    import re
    import urllib
    import pprint

    # you can't generally find out what module a function belongs to if you
    # have only its name. Because of this, here are some callback functions
    # that recognize if a given function belongs to a specific module
    def module_callbacks():
        # Each predicate maps a fully-qualified function name to True when
        # it belongs to the corresponding module.
        def is_in_coroutine_module(name):
            return name.startswith('coroutine.')

        def is_in_modules_module(name):
            if name in ['require', 'module'] or name.startswith('package'):
                return True
            else:
                return False

        def is_in_string_module(name):
            return name.startswith('string.')

        def is_in_table_module(name):
            return name.startswith('table.')

        def is_in_math_module(name):
            return name.startswith('math')

        def is_in_io_module(name):
            return name.startswith('io.')

        def is_in_os_module(name):
            return name.startswith('os.')

        def is_in_debug_module(name):
            return name.startswith('debug.')

        return {'coroutine': is_in_coroutine_module,
                'modules': is_in_modules_module,
                'string': is_in_string_module,
                'table': is_in_table_module,
                'math': is_in_math_module,
                'io': is_in_io_module,
                'os': is_in_os_module,
                'debug': is_in_debug_module}

    def get_newest_version():
        # Scrape www.lua.org/manual/ for the highest documented version
        # number (e.g. '5.2'); returns None if the page layout changed.
        f = urllib.urlopen('http://www.lua.org/manual/')
        r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
        for line in f:
            m = r.match(line)
            if m is not None:
                return m.groups()[0]

    def get_lua_functions(version):
        # Collect all function anchors from the manual index for `version`.
        f = urllib.urlopen('http://www.lua.org/manual/%s/' % version)
        r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
        functions = []
        for line in f:
            m = r.match(line)
            if m is not None:
                functions.append(m.groups()[0])
        return functions

    def get_function_module(name):
        # Ask the explicit callbacks first, then fall back to the name's
        # dotted prefix, then to the 'basic' module.
        for mod, cb in module_callbacks().iteritems():
            if cb(name):
                return mod
        if '.' in name:
            return name.split('.')[0]
        else:
            return 'basic'

    def regenerate(filename, modules):
        # Rewrite this very file, replacing only the MODULES assignment
        # between the module header and the __main__ guard.
        f = open(filename)
        try:
            content = f.read()
        finally:
            f.close()

        header = content[:content.find('MODULES = {')]
        footer = content[content.find("if __name__ == '__main__':"):]

        f = open(filename, 'w')
        f.write(header)
        f.write('MODULES = %s\n\n' % pprint.pformat(modules))
        f.write(footer)
        f.close()

    def run():
        # Download the newest manual, group functions by module, and
        # regenerate this file in place.
        version = get_newest_version()
        print '> Downloading function index for Lua %s' % version
        functions = get_lua_functions(version)
        print '> %d functions found:' % len(functions)

        modules = {}
        for full_function_name in functions:
            print '>> %s' % full_function_name
            m = get_function_module(full_function_name)
            modules.setdefault(m, []).append(full_function_name)

        regenerate(__file__, modules)

    run()
|
350
wakatime/packages/pygments/lexers/_mapping.py
Normal file
350
wakatime/packages/pygments/lexers/_mapping.py
Normal file
|
@ -0,0 +1,350 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._mapping
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
    Lexer mapping definitions. This file is generated by itself. Every time
|
||||
    you change something on a builtin lexer definition, run this script from
|
||||
the lexers folder to update it.
|
||||
|
||||
Do not alter the LEXERS dictionary by hand.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
LEXERS = {
|
||||
'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
|
||||
'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
|
||||
'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
|
||||
'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
|
||||
'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
|
||||
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
|
||||
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
|
||||
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
|
||||
'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
|
||||
'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
|
||||
'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
|
||||
'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
|
||||
'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
|
||||
'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
|
||||
'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
|
||||
'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
|
||||
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
|
||||
'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
|
||||
'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit', 'Autoit'), ('*.au3',), ('text/x-autoit',)),
|
||||
'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
|
||||
'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
|
||||
'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
|
||||
'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
|
||||
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*'), ('application/x-sh', 'application/x-shellscript')),
|
||||
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
|
||||
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
|
||||
'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
|
||||
'BlitzBasicLexer': ('pygments.lexers.compiled', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
|
||||
'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
|
||||
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
|
||||
'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
|
||||
'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
|
||||
'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
|
||||
'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
|
||||
'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
|
||||
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
|
||||
'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
|
||||
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
|
||||
'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()),
|
||||
'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
|
||||
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
|
||||
'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
|
||||
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
|
||||
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
|
||||
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
|
||||
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
|
||||
'ClayLexer': ('pygments.lexers.compiled', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
|
||||
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
|
||||
'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
|
||||
'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
|
||||
'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
|
||||
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
|
||||
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
|
||||
'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
|
||||
'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
|
||||
'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
|
||||
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
|
||||
'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
|
||||
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
|
||||
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
|
||||
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
|
||||
'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)),
|
||||
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
|
||||
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
|
||||
'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
|
||||
'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
|
||||
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
|
||||
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
|
||||
'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
|
||||
'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
|
||||
'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
|
||||
'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
|
||||
'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
|
||||
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
|
||||
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
|
||||
'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
|
||||
'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
|
||||
'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
|
||||
'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
|
||||
'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
|
||||
'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
|
||||
'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
|
||||
'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
|
||||
'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
|
||||
'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
|
||||
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
|
||||
'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
|
||||
'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
|
||||
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
|
||||
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
|
||||
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
|
||||
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
|
||||
'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
|
||||
'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
|
||||
'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
|
||||
'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
|
||||
'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
|
||||
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()),
|
||||
'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
|
||||
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
|
||||
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
|
||||
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
|
||||
'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
|
||||
'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
|
||||
'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
|
||||
'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
|
||||
'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
|
||||
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
|
||||
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
|
||||
'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
|
||||
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
|
||||
'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
|
||||
'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
|
||||
'HaxeLexer': ('pygments.lexers.web', 'Haxe', ('hx', 'Haxe', 'haxe', 'haXe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
|
||||
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
|
||||
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
|
||||
'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
|
||||
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
|
||||
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
|
||||
'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
|
||||
'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
|
||||
'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
|
||||
'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
|
||||
'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
|
||||
'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
|
||||
'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
|
||||
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
|
||||
'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
|
||||
'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
|
||||
'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
|
||||
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
|
||||
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
|
||||
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
|
||||
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
|
||||
'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
|
||||
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
|
||||
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
|
||||
'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
|
||||
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
|
||||
'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()),
|
||||
'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
|
||||
'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
|
||||
'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
|
||||
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
|
||||
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
|
||||
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
|
||||
'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
|
||||
'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
|
||||
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
|
||||
'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
|
||||
'LiterateAgdaLexer': ('pygments.lexers.functional', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
|
||||
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
|
||||
'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
|
||||
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
|
||||
'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
|
||||
'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
|
||||
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
|
||||
'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
|
||||
'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
|
||||
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
|
||||
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
|
||||
'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
|
||||
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
|
||||
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
|
||||
'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
|
||||
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
|
||||
'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
|
||||
'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
|
||||
'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
|
||||
'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
|
||||
'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
|
||||
'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
|
||||
'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
|
||||
'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
|
||||
'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
|
||||
'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
|
||||
'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
|
||||
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
|
||||
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
|
||||
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
|
||||
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
|
||||
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
|
||||
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
|
||||
'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
|
||||
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
|
||||
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
|
||||
'NesCLexer': ('pygments.lexers.compiled', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
|
||||
'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
|
||||
'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
|
||||
'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
|
||||
'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
|
||||
'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()),
|
||||
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
|
||||
'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
|
||||
'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
|
||||
'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
|
||||
'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
|
||||
'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
|
||||
'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
|
||||
'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
|
||||
'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
|
||||
'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6'), ('text/x-perl6', 'application/x-perl6')),
|
||||
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
|
||||
'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
|
||||
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
|
||||
'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
|
||||
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
|
||||
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
|
||||
'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
|
||||
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
|
||||
'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
|
||||
'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
|
||||
'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
|
||||
'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
|
||||
'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
|
||||
'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
|
||||
'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
|
||||
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
|
||||
'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
|
||||
'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
|
||||
'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)),
|
||||
'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
|
||||
'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
|
||||
'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktl'), ('text/x-racket', 'application/x-racket')),
|
||||
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
|
||||
'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
|
||||
'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
|
||||
'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
|
||||
'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
|
||||
'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
|
||||
'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
|
||||
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
|
||||
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
|
||||
'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
|
||||
'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
|
||||
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
|
||||
'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
|
||||
'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'ARexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
|
||||
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
|
||||
'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)),
|
||||
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
|
||||
'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
|
||||
'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
|
||||
'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)),
|
||||
'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
|
||||
'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
|
||||
'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
|
||||
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
|
||||
'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
|
||||
'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
|
||||
'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
|
||||
'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
|
||||
'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
|
||||
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
|
||||
'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
|
||||
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
|
||||
'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
|
||||
'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
|
||||
'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
|
||||
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
|
||||
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
|
||||
'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
|
||||
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
|
||||
'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()),
|
||||
'SwigLexer': ('pygments.lexers.compiled', 'SWIG', ('Swig', 'swig'), ('*.swg', '*.i'), ('text/swig',)),
|
||||
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
|
||||
'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
|
||||
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
|
||||
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
|
||||
'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
|
||||
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
|
||||
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
|
||||
'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
|
||||
'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
|
||||
'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()),
|
||||
'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
|
||||
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
|
||||
'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
|
||||
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
|
||||
'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
|
||||
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
|
||||
'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
|
||||
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
|
||||
'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
|
||||
'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
|
||||
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
|
||||
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
|
||||
'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
|
||||
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
|
||||
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
|
||||
'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
|
||||
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
|
||||
'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
|
||||
}
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
import os
|
||||
|
||||
# lookup lexers
|
||||
found_lexers = []
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||
for filename in os.listdir('.'):
|
||||
if filename.endswith('.py') and not filename.startswith('_'):
|
||||
module_name = 'pygments.lexers.%s' % filename[:-3]
|
||||
print module_name
|
||||
module = __import__(module_name, None, None, [''])
|
||||
for lexer_name in module.__all__:
|
||||
lexer = getattr(module, lexer_name)
|
||||
found_lexers.append(
|
||||
'%r: %r' % (lexer_name,
|
||||
(module_name,
|
||||
lexer.name,
|
||||
tuple(lexer.aliases),
|
||||
tuple(lexer.filenames),
|
||||
tuple(lexer.mimetypes))))
|
||||
# sort them, that should make the diff files for svn smaller
|
||||
found_lexers.sort()
|
||||
|
||||
# extract useful sourcecode from this file
|
||||
f = open(__file__)
|
||||
try:
|
||||
content = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
header = content[:content.find('LEXERS = {')]
|
||||
footer = content[content.find("if __name__ == '__main__':"):]
|
||||
|
||||
# write new file
|
||||
f = open(__file__, 'wb')
|
||||
f.write(header)
|
||||
f.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
|
||||
f.write(footer)
|
||||
f.close()
|
562
wakatime/packages/pygments/lexers/_openedgebuiltins.py
Normal file
562
wakatime/packages/pygments/lexers/_openedgebuiltins.py
Normal file
|
@ -0,0 +1,562 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._openedgebuiltins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Builtin list for the OpenEdgeLexer.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
OPENEDGEKEYWORDS = [
|
||||
'ABSOLUTE', 'ABS', 'ABSO', 'ABSOL', 'ABSOLU', 'ABSOLUT', 'ACCELERATOR',
|
||||
'ACCUM', 'ACCUMULATE', 'ACCUM', 'ACCUMU', 'ACCUMUL', 'ACCUMULA',
|
||||
'ACCUMULAT', 'ACTIVE-FORM', 'ACTIVE-WINDOW', 'ADD', 'ADD-BUFFER',
|
||||
'ADD-CALC-COLUMN', 'ADD-COLUMNS-FROM', 'ADD-EVENTS-PROCEDURE',
|
||||
'ADD-FIELDS-FROM', 'ADD-FIRST', 'ADD-INDEX-FIELD', 'ADD-LAST',
|
||||
'ADD-LIKE-COLUMN', 'ADD-LIKE-FIELD', 'ADD-LIKE-INDEX', 'ADD-NEW-FIELD',
|
||||
'ADD-NEW-INDEX', 'ADD-SCHEMA-LOCATION', 'ADD-SUPER-PROCEDURE', 'ADM-DATA',
|
||||
'ADVISE', 'ALERT-BOX', 'ALIAS', 'ALL', 'ALLOW-COLUMN-SEARCHING',
|
||||
'ALLOW-REPLICATION', 'ALTER', 'ALWAYS-ON-TOP', 'AMBIGUOUS', 'AMBIG',
|
||||
'AMBIGU', 'AMBIGUO', 'AMBIGUOU', 'ANALYZE', 'ANALYZ', 'AND', 'ANSI-ONLY',
|
||||
'ANY', 'ANYWHERE', 'APPEND', 'APPL-ALERT-BOXES', 'APPL-ALERT',
|
||||
'APPL-ALERT-', 'APPL-ALERT-B', 'APPL-ALERT-BO', 'APPL-ALERT-BOX',
|
||||
'APPL-ALERT-BOXE', 'APPL-CONTEXT-ID', 'APPLICATION', 'APPLY',
|
||||
'APPSERVER-INFO', 'APPSERVER-PASSWORD', 'APPSERVER-USERID', 'ARRAY-MESSAGE',
|
||||
'AS', 'ASC', 'ASCENDING', 'ASCE', 'ASCEN', 'ASCEND', 'ASCENDI', 'ASCENDIN',
|
||||
'ASK-OVERWRITE', 'ASSEMBLY', 'ASSIGN', 'ASYNCHRONOUS',
|
||||
'ASYNC-REQUEST-COUNT', 'ASYNC-REQUEST-HANDLE', 'AT', 'ATTACHED-PAIRLIST',
|
||||
'ATTR-SPACE', 'ATTR', 'ATTRI', 'ATTRIB', 'ATTRIBU', 'ATTRIBUT',
|
||||
'AUDIT-CONTROL', 'AUDIT-ENABLED', 'AUDIT-EVENT-CONTEXT', 'AUDIT-POLICY',
|
||||
'AUTHENTICATION-FAILED', 'AUTHORIZATION', 'AUTO-COMPLETION', 'AUTO-COMP',
|
||||
'AUTO-COMPL', 'AUTO-COMPLE', 'AUTO-COMPLET', 'AUTO-COMPLETI',
|
||||
'AUTO-COMPLETIO', 'AUTO-ENDKEY', 'AUTO-END-KEY', 'AUTO-GO', 'AUTO-INDENT',
|
||||
'AUTO-IND', 'AUTO-INDE', 'AUTO-INDEN', 'AUTOMATIC', 'AUTO-RESIZE',
|
||||
'AUTO-RETURN', 'AUTO-RET', 'AUTO-RETU', 'AUTO-RETUR', 'AUTO-SYNCHRONIZE',
|
||||
'AUTO-ZAP', 'AUTO-Z', 'AUTO-ZA', 'AVAILABLE', 'AVAIL', 'AVAILA', 'AVAILAB',
|
||||
'AVAILABL', 'AVAILABLE-FORMATS', 'AVERAGE', 'AVE', 'AVER', 'AVERA',
|
||||
'AVERAG', 'AVG', 'BACKGROUND', 'BACK', 'BACKG', 'BACKGR', 'BACKGRO',
|
||||
'BACKGROU', 'BACKGROUN', 'BACKWARDS', 'BACKWARD', 'BASE64-DECODE',
|
||||
'BASE64-ENCODE', 'BASE-ADE', 'BASE-KEY', 'BATCH-MODE', 'BATCH', 'BATCH-',
|
||||
'BATCH-M', 'BATCH-MO', 'BATCH-MOD', 'BATCH-SIZE', 'BEFORE-HIDE', 'BEFORE-H',
|
||||
'BEFORE-HI', 'BEFORE-HID', 'BEGIN-EVENT-GROUP', 'BEGINS', 'BELL', 'BETWEEN',
|
||||
'BGCOLOR', 'BGC', 'BGCO', 'BGCOL', 'BGCOLO', 'BIG-ENDIAN', 'BINARY', 'BIND',
|
||||
'BIND-WHERE', 'BLANK', 'BLOCK-ITERATION-DISPLAY', 'BORDER-BOTTOM-CHARS',
|
||||
'BORDER-B', 'BORDER-BO', 'BORDER-BOT', 'BORDER-BOTT', 'BORDER-BOTTO',
|
||||
'BORDER-BOTTOM-PIXELS', 'BORDER-BOTTOM-P', 'BORDER-BOTTOM-PI',
|
||||
'BORDER-BOTTOM-PIX', 'BORDER-BOTTOM-PIXE', 'BORDER-BOTTOM-PIXEL',
|
||||
'BORDER-LEFT-CHARS', 'BORDER-L', 'BORDER-LE', 'BORDER-LEF', 'BORDER-LEFT',
|
||||
'BORDER-LEFT-', 'BORDER-LEFT-C', 'BORDER-LEFT-CH', 'BORDER-LEFT-CHA',
|
||||
'BORDER-LEFT-CHAR', 'BORDER-LEFT-PIXELS', 'BORDER-LEFT-P', 'BORDER-LEFT-PI',
|
||||
'BORDER-LEFT-PIX', 'BORDER-LEFT-PIXE', 'BORDER-LEFT-PIXEL',
|
||||
'BORDER-RIGHT-CHARS', 'BORDER-R', 'BORDER-RI', 'BORDER-RIG', 'BORDER-RIGH',
|
||||
'BORDER-RIGHT', 'BORDER-RIGHT-', 'BORDER-RIGHT-C', 'BORDER-RIGHT-CH',
|
||||
'BORDER-RIGHT-CHA', 'BORDER-RIGHT-CHAR', 'BORDER-RIGHT-PIXELS',
|
||||
'BORDER-RIGHT-P', 'BORDER-RIGHT-PI', 'BORDER-RIGHT-PIX',
|
||||
'BORDER-RIGHT-PIXE', 'BORDER-RIGHT-PIXEL', 'BORDER-TOP-CHARS', 'BORDER-T',
|
||||
'BORDER-TO', 'BORDER-TOP', 'BORDER-TOP-', 'BORDER-TOP-C', 'BORDER-TOP-CH',
|
||||
'BORDER-TOP-CHA', 'BORDER-TOP-CHAR', 'BORDER-TOP-PIXELS', 'BORDER-TOP-P',
|
||||
'BORDER-TOP-PI', 'BORDER-TOP-PIX', 'BORDER-TOP-PIXE', 'BORDER-TOP-PIXEL',
|
||||
'BOX', 'BOX-SELECTABLE', 'BOX-SELECT', 'BOX-SELECTA', 'BOX-SELECTAB',
|
||||
'BOX-SELECTABL', 'BREAK', 'BROWSE', 'BUFFER', 'BUFFER-CHARS',
|
||||
'BUFFER-COMPARE', 'BUFFER-COPY', 'BUFFER-CREATE', 'BUFFER-DELETE',
|
||||
'BUFFER-FIELD', 'BUFFER-HANDLE', 'BUFFER-LINES', 'BUFFER-NAME',
|
||||
'BUFFER-RELEASE', 'BUFFER-VALUE', 'BUTTON', 'BUTTONS', 'BUTTON', 'BY',
|
||||
'BY-POINTER', 'BY-VARIANT-POINTER', 'CACHE', 'CACHE-SIZE', 'CALL',
|
||||
'CALL-NAME', 'CALL-TYPE', 'CANCEL-BREAK', 'CANCEL-BUTTON', 'CAN-CREATE',
|
||||
'CAN-DELETE', 'CAN-DO', 'CAN-FIND', 'CAN-QUERY', 'CAN-READ', 'CAN-SET',
|
||||
'CAN-WRITE', 'CAPS', 'CAREFUL-PAINT', 'CASE', 'CASE-SENSITIVE', 'CASE-SEN',
|
||||
'CASE-SENS', 'CASE-SENSI', 'CASE-SENSIT', 'CASE-SENSITI', 'CASE-SENSITIV',
|
||||
'CAST', 'CATCH', 'CDECL', 'CENTERED', 'CENTER', 'CENTERE', 'CHAINED',
|
||||
'CHARACTER_LENGTH', 'CHARSET', 'CHECK', 'CHECKED', 'CHOOSE', 'CHR', 'CLASS',
|
||||
'CLASS-TYPE', 'CLEAR', 'CLEAR-APPL-CONTEXT', 'CLEAR-LOG', 'CLEAR-SELECTION',
|
||||
'CLEAR-SELECT', 'CLEAR-SELECTI', 'CLEAR-SELECTIO', 'CLEAR-SORT-ARROWS',
|
||||
'CLEAR-SORT-ARROW', 'CLIENT-CONNECTION-ID', 'CLIENT-PRINCIPAL',
|
||||
'CLIENT-TTY', 'CLIENT-TYPE', 'CLIENT-WORKSTATION', 'CLIPBOARD', 'CLOSE',
|
||||
'CLOSE-LOG', 'CODE', 'CODEBASE-LOCATOR', 'CODEPAGE', 'CODEPAGE-CONVERT',
|
||||
'COLLATE', 'COL-OF', 'COLON', 'COLON-ALIGNED', 'COLON-ALIGN',
|
||||
'COLON-ALIGNE', 'COLOR', 'COLOR-TABLE', 'COLUMN', 'COL', 'COLU', 'COLUM',
|
||||
'COLUMN-BGCOLOR', 'COLUMN-DCOLOR', 'COLUMN-FGCOLOR', 'COLUMN-FONT',
|
||||
'COLUMN-LABEL', 'COLUMN-LAB', 'COLUMN-LABE', 'COLUMN-MOVABLE', 'COLUMN-OF',
|
||||
'COLUMN-PFCOLOR', 'COLUMN-READ-ONLY', 'COLUMN-RESIZABLE', 'COLUMNS',
|
||||
'COLUMN-SCROLLING', 'COMBO-BOX', 'COMMAND', 'COMPARES', 'COMPILE',
|
||||
'COMPILER', 'COMPLETE', 'COM-SELF', 'CONFIG-NAME', 'CONNECT', 'CONNECTED',
|
||||
'CONSTRUCTOR', 'CONTAINS', 'CONTENTS', 'CONTEXT', 'CONTEXT-HELP',
|
||||
'CONTEXT-HELP-FILE', 'CONTEXT-HELP-ID', 'CONTEXT-POPUP', 'CONTROL',
|
||||
'CONTROL-BOX', 'CONTROL-FRAME', 'CONVERT', 'CONVERT-3D-COLORS',
|
||||
'CONVERT-TO-OFFSET', 'CONVERT-TO-OFFS', 'CONVERT-TO-OFFSE', 'COPY-DATASET',
|
||||
'COPY-LOB', 'COPY-SAX-ATTRIBUTES', 'COPY-TEMP-TABLE', 'COUNT', 'COUNT-OF',
|
||||
'CPCASE', 'CPCOLL', 'CPINTERNAL', 'CPLOG', 'CPPRINT', 'CPRCODEIN',
|
||||
'CPRCODEOUT', 'CPSTREAM', 'CPTERM', 'CRC-VALUE', 'CREATE', 'CREATE-LIKE',
|
||||
'CREATE-LIKE-SEQUENTIAL', 'CREATE-NODE-NAMESPACE',
|
||||
'CREATE-RESULT-LIST-ENTRY', 'CREATE-TEST-FILE', 'CURRENT', 'CURRENT_DATE',
|
||||
'CURRENT_DATE', 'CURRENT-CHANGED', 'CURRENT-COLUMN', 'CURRENT-ENVIRONMENT',
|
||||
'CURRENT-ENV', 'CURRENT-ENVI', 'CURRENT-ENVIR', 'CURRENT-ENVIRO',
|
||||
'CURRENT-ENVIRON', 'CURRENT-ENVIRONM', 'CURRENT-ENVIRONME',
|
||||
'CURRENT-ENVIRONMEN', 'CURRENT-ITERATION', 'CURRENT-LANGUAGE',
|
||||
'CURRENT-LANG', 'CURRENT-LANGU', 'CURRENT-LANGUA', 'CURRENT-LANGUAG',
|
||||
'CURRENT-QUERY', 'CURRENT-RESULT-ROW', 'CURRENT-ROW-MODIFIED',
|
||||
'CURRENT-VALUE', 'CURRENT-WINDOW', 'CURSOR', 'CURS', 'CURSO', 'CURSOR-CHAR',
|
||||
'CURSOR-LINE', 'CURSOR-OFFSET', 'DATABASE', 'DATA-BIND',
|
||||
'DATA-ENTRY-RETURN', 'DATA-ENTRY-RET', 'DATA-ENTRY-RETU',
|
||||
'DATA-ENTRY-RETUR', 'DATA-RELATION', 'DATA-REL', 'DATA-RELA', 'DATA-RELAT',
|
||||
'DATA-RELATI', 'DATA-RELATIO', 'DATASERVERS', 'DATASET', 'DATASET-HANDLE',
|
||||
'DATA-SOURCE', 'DATA-SOURCE-COMPLETE-MAP', 'DATA-SOURCE-MODIFIED',
|
||||
'DATA-SOURCE-ROWID', 'DATA-TYPE', 'DATA-T', 'DATA-TY', 'DATA-TYP',
|
||||
'DATE-FORMAT', 'DATE-F', 'DATE-FO', 'DATE-FOR', 'DATE-FORM', 'DATE-FORMA',
|
||||
'DAY', 'DBCODEPAGE', 'DBCOLLATION', 'DBNAME', 'DBPARAM', 'DB-REFERENCES',
|
||||
'DBRESTRICTIONS', 'DBREST', 'DBRESTR', 'DBRESTRI', 'DBRESTRIC',
|
||||
'DBRESTRICT', 'DBRESTRICTI', 'DBRESTRICTIO', 'DBRESTRICTION', 'DBTASKID',
|
||||
'DBTYPE', 'DBVERSION', 'DBVERS', 'DBVERSI', 'DBVERSIO', 'DCOLOR', 'DDE',
|
||||
'DDE-ERROR', 'DDE-ID', 'DDE-I', 'DDE-ITEM', 'DDE-NAME', 'DDE-TOPIC',
|
||||
'DEBLANK', 'DEBUG', 'DEBU', 'DEBUG-ALERT', 'DEBUGGER', 'DEBUG-LIST',
|
||||
'DECIMALS', 'DECLARE', 'DECLARE-NAMESPACE', 'DECRYPT', 'DEFAULT',
|
||||
'DEFAULT-BUFFER-HANDLE', 'DEFAULT-BUTTON', 'DEFAUT-B', 'DEFAUT-BU',
|
||||
'DEFAUT-BUT', 'DEFAUT-BUTT', 'DEFAUT-BUTTO', 'DEFAULT-COMMIT',
|
||||
'DEFAULT-EXTENSION', 'DEFAULT-EX', 'DEFAULT-EXT', 'DEFAULT-EXTE',
|
||||
'DEFAULT-EXTEN', 'DEFAULT-EXTENS', 'DEFAULT-EXTENSI', 'DEFAULT-EXTENSIO',
|
||||
'DEFAULT-NOXLATE', 'DEFAULT-NOXL', 'DEFAULT-NOXLA', 'DEFAULT-NOXLAT',
|
||||
'DEFAULT-VALUE', 'DEFAULT-WINDOW', 'DEFINED', 'DEFINE-USER-EVENT-MANAGER',
|
||||
'DELETE', 'DEL', 'DELE', 'DELET', 'DELETE-CHARACTER', 'DELETE-CHAR',
|
||||
'DELETE-CHARA', 'DELETE-CHARAC', 'DELETE-CHARACT', 'DELETE-CHARACTE',
|
||||
'DELETE-CURRENT-ROW', 'DELETE-LINE', 'DELETE-RESULT-LIST-ENTRY',
|
||||
'DELETE-SELECTED-ROW', 'DELETE-SELECTED-ROWS', 'DELIMITER', 'DESC',
|
||||
'DESCENDING', 'DESC', 'DESCE', 'DESCEN', 'DESCEND', 'DESCENDI', 'DESCENDIN',
|
||||
'DESELECT-FOCUSED-ROW', 'DESELECTION', 'DESELECT-ROWS',
|
||||
'DESELECT-SELECTED-ROW', 'DESTRUCTOR', 'DIALOG-BOX', 'DICTIONARY', 'DICT',
|
||||
'DICTI', 'DICTIO', 'DICTION', 'DICTIONA', 'DICTIONAR', 'DIR', 'DISABLE',
|
||||
'DISABLE-AUTO-ZAP', 'DISABLED', 'DISABLE-DUMP-TRIGGERS',
|
||||
'DISABLE-LOAD-TRIGGERS', 'DISCONNECT', 'DISCON', 'DISCONN', 'DISCONNE',
|
||||
'DISCONNEC', 'DISP', 'DISPLAY', 'DISP', 'DISPL', 'DISPLA',
|
||||
'DISPLAY-MESSAGE', 'DISPLAY-TYPE', 'DISPLAY-T', 'DISPLAY-TY', 'DISPLAY-TYP',
|
||||
'DISTINCT', 'DO', 'DOMAIN-DESCRIPTION', 'DOMAIN-NAME', 'DOMAIN-TYPE', 'DOS',
|
||||
'DOUBLE', 'DOWN', 'DRAG-ENABLED', 'DROP', 'DROP-DOWN', 'DROP-DOWN-LIST',
|
||||
'DROP-FILE-NOTIFY', 'DROP-TARGET', 'DUMP', 'DYNAMIC', 'DYNAMIC-FUNCTION',
|
||||
'EACH', 'ECHO', 'EDGE-CHARS', 'EDGE', 'EDGE-', 'EDGE-C', 'EDGE-CH',
|
||||
'EDGE-CHA', 'EDGE-CHAR', 'EDGE-PIXELS', 'EDGE-P', 'EDGE-PI', 'EDGE-PIX',
|
||||
'EDGE-PIXE', 'EDGE-PIXEL', 'EDIT-CAN-PASTE', 'EDIT-CAN-UNDO', 'EDIT-CLEAR',
|
||||
'EDIT-COPY', 'EDIT-CUT', 'EDITING', 'EDITOR', 'EDIT-PASTE', 'EDIT-UNDO',
|
||||
'ELSE', 'EMPTY', 'EMPTY-TEMP-TABLE', 'ENABLE', 'ENABLED-FIELDS', 'ENCODE',
|
||||
'ENCRYPT', 'ENCRYPT-AUDIT-MAC-KEY', 'ENCRYPTION-SALT', 'END',
|
||||
'END-DOCUMENT', 'END-ELEMENT', 'END-EVENT-GROUP', 'END-FILE-DROP', 'ENDKEY',
|
||||
'END-KEY', 'END-MOVE', 'END-RESIZE', 'END-ROW-RESIZE', 'END-USER-PROMPT',
|
||||
'ENTERED', 'ENTRY', 'EQ', 'ERROR', 'ERROR-COLUMN', 'ERROR-COL',
|
||||
'ERROR-COLU', 'ERROR-COLUM', 'ERROR-ROW', 'ERROR-STACK-TRACE',
|
||||
'ERROR-STATUS', 'ERROR-STAT', 'ERROR-STATU', 'ESCAPE', 'ETIME',
|
||||
'EVENT-GROUP-ID', 'EVENT-PROCEDURE', 'EVENT-PROCEDURE-CONTEXT', 'EVENTS',
|
||||
'EVENT', 'EVENT-TYPE', 'EVENT-T', 'EVENT-TY', 'EVENT-TYP', 'EXCEPT',
|
||||
'EXCLUSIVE-ID', 'EXCLUSIVE-LOCK', 'EXCLUSIVE', 'EXCLUSIVE-', 'EXCLUSIVE-L',
|
||||
'EXCLUSIVE-LO', 'EXCLUSIVE-LOC', 'EXCLUSIVE-WEB-USER', 'EXECUTE', 'EXISTS',
|
||||
'EXP', 'EXPAND', 'EXPANDABLE', 'EXPLICIT', 'EXPORT', 'EXPORT-PRINCIPAL',
|
||||
'EXTENDED', 'EXTENT', 'EXTERNAL', 'FALSE', 'FETCH', 'FETCH-SELECTED-ROW',
|
||||
'FGCOLOR', 'FGC', 'FGCO', 'FGCOL', 'FGCOLO', 'FIELD', 'FIELDS', 'FIELD',
|
||||
'FILE', 'FILE-CREATE-DATE', 'FILE-CREATE-TIME', 'FILE-INFORMATION',
|
||||
'FILE-INFO', 'FILE-INFOR', 'FILE-INFORM', 'FILE-INFORMA', 'FILE-INFORMAT',
|
||||
'FILE-INFORMATI', 'FILE-INFORMATIO', 'FILE-MOD-DATE', 'FILE-MOD-TIME',
|
||||
'FILENAME', 'FILE-NAME', 'FILE-OFFSET', 'FILE-OFF', 'FILE-OFFS',
|
||||
'FILE-OFFSE', 'FILE-SIZE', 'FILE-TYPE', 'FILL', 'FILLED', 'FILL-IN',
|
||||
'FILTERS', 'FINAL', 'FINALLY', 'FIND', 'FIND-BY-ROWID',
|
||||
'FIND-CASE-SENSITIVE', 'FIND-CURRENT', 'FINDER', 'FIND-FIRST',
|
||||
'FIND-GLOBAL', 'FIND-LAST', 'FIND-NEXT-OCCURRENCE', 'FIND-PREV-OCCURRENCE',
|
||||
'FIND-SELECT', 'FIND-UNIQUE', 'FIND-WRAP-AROUND', 'FIRST',
|
||||
'FIRST-ASYNCH-REQUEST', 'FIRST-CHILD', 'FIRST-COLUMN', 'FIRST-FORM',
|
||||
'FIRST-OBJECT', 'FIRST-OF', 'FIRST-PROCEDURE', 'FIRST-PROC', 'FIRST-PROCE',
|
||||
'FIRST-PROCED', 'FIRST-PROCEDU', 'FIRST-PROCEDUR', 'FIRST-SERVER',
|
||||
'FIRST-TAB-ITEM', 'FIRST-TAB-I', 'FIRST-TAB-IT', 'FIRST-TAB-ITE',
|
||||
'FIT-LAST-COLUMN', 'FIXED-ONLY', 'FLAT-BUTTON', 'FLOAT', 'FOCUS',
|
||||
'FOCUSED-ROW', 'FOCUSED-ROW-SELECTED', 'FONT', 'FONT-TABLE', 'FOR',
|
||||
'FORCE-FILE', 'FOREGROUND', 'FORE', 'FOREG', 'FOREGR', 'FOREGRO',
|
||||
'FOREGROU', 'FOREGROUN', 'FORM', 'FORMAT', 'FORM', 'FORMA', 'FORMATTED',
|
||||
'FORMATTE', 'FORM-LONG-INPUT', 'FORWARD', 'FORWARDS', 'FORWARD', 'FRAGMENT',
|
||||
'FRAGMEN', 'FRAME', 'FRAM', 'FRAME-COL', 'FRAME-DB', 'FRAME-DOWN',
|
||||
'FRAME-FIELD', 'FRAME-FILE', 'FRAME-INDEX', 'FRAME-INDE', 'FRAME-LINE',
|
||||
'FRAME-NAME', 'FRAME-ROW', 'FRAME-SPACING', 'FRAME-SPA', 'FRAME-SPAC',
|
||||
'FRAME-SPACI', 'FRAME-SPACIN', 'FRAME-VALUE', 'FRAME-VAL', 'FRAME-VALU',
|
||||
'FRAME-X', 'FRAME-Y', 'FREQUENCY', 'FROM', 'FROM-CHARS', 'FROM-C',
|
||||
'FROM-CH', 'FROM-CHA', 'FROM-CHAR', 'FROM-CURRENT', 'FROM-CUR', 'FROM-CURR',
|
||||
'FROM-CURRE', 'FROM-CURREN', 'FROM-PIXELS', 'FROM-P', 'FROM-PI', 'FROM-PIX',
|
||||
'FROM-PIXE', 'FROM-PIXEL', 'FULL-HEIGHT-CHARS', 'FULL-HEIGHT',
|
||||
'FULL-HEIGHT-', 'FULL-HEIGHT-C', 'FULL-HEIGHT-CH', 'FULL-HEIGHT-CHA',
|
||||
'FULL-HEIGHT-CHAR', 'FULL-HEIGHT-PIXELS', 'FULL-HEIGHT-P', 'FULL-HEIGHT-PI',
|
||||
'FULL-HEIGHT-PIX', 'FULL-HEIGHT-PIXE', 'FULL-HEIGHT-PIXEL', 'FULL-PATHNAME',
|
||||
'FULL-PATHN', 'FULL-PATHNA', 'FULL-PATHNAM', 'FULL-WIDTH-CHARS',
|
||||
'FULL-WIDTH', 'FULL-WIDTH-', 'FULL-WIDTH-C', 'FULL-WIDTH-CH',
|
||||
'FULL-WIDTH-CHA', 'FULL-WIDTH-CHAR', 'FULL-WIDTH-PIXELS', 'FULL-WIDTH-P',
|
||||
'FULL-WIDTH-PI', 'FULL-WIDTH-PIX', 'FULL-WIDTH-PIXE', 'FULL-WIDTH-PIXEL',
|
||||
'FUNCTION', 'FUNCTION-CALL-TYPE', 'GATEWAYS', 'GATEWAY', 'GE',
|
||||
'GENERATE-MD5', 'GENERATE-PBE-KEY', 'GENERATE-PBE-SALT',
|
||||
'GENERATE-RANDOM-KEY', 'GENERATE-UUID', 'GET', 'GET-ATTR-CALL-TYPE',
|
||||
'GET-ATTRIBUTE-NODE', 'GET-BINARY-DATA', 'GET-BLUE-VALUE', 'GET-BLUE',
|
||||
'GET-BLUE-', 'GET-BLUE-V', 'GET-BLUE-VA', 'GET-BLUE-VAL', 'GET-BLUE-VALU',
|
||||
'GET-BROWSE-COLUMN', 'GET-BUFFER-HANDLEGETBYTE', 'GET-BYTE',
|
||||
'GET-CALLBACK-PROC-CONTEXT', 'GET-CALLBACK-PROC-NAME', 'GET-CGI-LIST',
|
||||
'GET-CGI-LONG-VALUE', 'GET-CGI-VALUE', 'GET-CODEPAGES', 'GET-COLLATIONS',
|
||||
'GET-CONFIG-VALUE', 'GET-CURRENT', 'GET-DOUBLE', 'GET-DROPPED-FILE',
|
||||
'GET-DYNAMIC', 'GET-ERROR-COLUMN', 'GET-ERROR-ROW', 'GET-FILE',
|
||||
'GET-FILE-NAME', 'GET-FILE-OFFSET', 'GET-FILE-OFFSE', 'GET-FIRST',
|
||||
'GET-FLOAT', 'GET-GREEN-VALUE', 'GET-GREEN', 'GET-GREEN-', 'GET-GREEN-V',
|
||||
'GET-GREEN-VA', 'GET-GREEN-VAL', 'GET-GREEN-VALU',
|
||||
'GET-INDEX-BY-NAMESPACE-NAME', 'GET-INDEX-BY-QNAME', 'GET-INT64',
|
||||
'GET-ITERATION', 'GET-KEY-VALUE', 'GET-KEY-VAL', 'GET-KEY-VALU', 'GET-LAST',
|
||||
'GET-LOCALNAME-BY-INDEX', 'GET-LONG', 'GET-MESSAGE', 'GET-NEXT',
|
||||
'GET-NUMBER', 'GET-POINTER-VALUE', 'GET-PREV', 'GET-PRINTERS',
|
||||
'GET-PROPERTY', 'GET-QNAME-BY-INDEX', 'GET-RED-VALUE', 'GET-RED',
|
||||
'GET-RED-', 'GET-RED-V', 'GET-RED-VA', 'GET-RED-VAL', 'GET-RED-VALU',
|
||||
'GET-REPOSITIONED-ROW', 'GET-RGB-VALUE', 'GET-SELECTED-WIDGET',
|
||||
'GET-SELECTED', 'GET-SELECTED-', 'GET-SELECTED-W', 'GET-SELECTED-WI',
|
||||
'GET-SELECTED-WID', 'GET-SELECTED-WIDG', 'GET-SELECTED-WIDGE', 'GET-SHORT',
|
||||
'GET-SIGNATURE', 'GET-SIZE', 'GET-STRING', 'GET-TAB-ITEM',
|
||||
'GET-TEXT-HEIGHT-CHARS', 'GET-TEXT-HEIGHT', 'GET-TEXT-HEIGHT-',
|
||||
'GET-TEXT-HEIGHT-C', 'GET-TEXT-HEIGHT-CH', 'GET-TEXT-HEIGHT-CHA',
|
||||
'GET-TEXT-HEIGHT-CHAR', 'GET-TEXT-HEIGHT-PIXELS', 'GET-TEXT-HEIGHT-P',
|
||||
'GET-TEXT-HEIGHT-PI', 'GET-TEXT-HEIGHT-PIX', 'GET-TEXT-HEIGHT-PIXE',
|
||||
'GET-TEXT-HEIGHT-PIXEL', 'GET-TEXT-WIDTH-CHARS', 'GET-TEXT-WIDTH',
|
||||
'GET-TEXT-WIDTH-', 'GET-TEXT-WIDTH-C', 'GET-TEXT-WIDTH-CH',
|
||||
'GET-TEXT-WIDTH-CHA', 'GET-TEXT-WIDTH-CHAR', 'GET-TEXT-WIDTH-PIXELS',
|
||||
'GET-TEXT-WIDTH-P', 'GET-TEXT-WIDTH-PI', 'GET-TEXT-WIDTH-PIX',
|
||||
'GET-TEXT-WIDTH-PIXE', 'GET-TEXT-WIDTH-PIXEL', 'GET-TYPE-BY-INDEX',
|
||||
'GET-TYPE-BY-NAMESPACE-NAME', 'GET-TYPE-BY-QNAME', 'GET-UNSIGNED-LONG',
|
||||
'GET-UNSIGNED-SHORT', 'GET-URI-BY-INDEX', 'GET-VALUE-BY-INDEX',
|
||||
'GET-VALUE-BY-NAMESPACE-NAME', 'GET-VALUE-BY-QNAME', 'GET-WAIT-STATE',
|
||||
'GLOBAL', 'GO-ON', 'GO-PENDING', 'GO-PEND', 'GO-PENDI', 'GO-PENDIN',
|
||||
'GRANT', 'GRAPHIC-EDGE', 'GRAPHIC-E', 'GRAPHIC-ED', 'GRAPHIC-EDG',
|
||||
'GRID-FACTOR-HORIZONTAL', 'GRID-FACTOR-H', 'GRID-FACTOR-HO',
|
||||
'GRID-FACTOR-HOR', 'GRID-FACTOR-HORI', 'GRID-FACTOR-HORIZ',
|
||||
'GRID-FACTOR-HORIZO', 'GRID-FACTOR-HORIZON', 'GRID-FACTOR-HORIZONT',
|
||||
'GRID-FACTOR-HORIZONTA', 'GRID-FACTOR-VERTICAL', 'GRID-FACTOR-V',
|
||||
'GRID-FACTOR-VE', 'GRID-FACTOR-VER', 'GRID-FACTOR-VERT', 'GRID-FACTOR-VERT',
|
||||
'GRID-FACTOR-VERTI', 'GRID-FACTOR-VERTIC', 'GRID-FACTOR-VERTICA',
|
||||
'GRID-SNAP', 'GRID-UNIT-HEIGHT-CHARS', 'GRID-UNIT-HEIGHT',
|
||||
'GRID-UNIT-HEIGHT-', 'GRID-UNIT-HEIGHT-C', 'GRID-UNIT-HEIGHT-CH',
|
||||
'GRID-UNIT-HEIGHT-CHA', 'GRID-UNIT-HEIGHT-PIXELS', 'GRID-UNIT-HEIGHT-P',
|
||||
'GRID-UNIT-HEIGHT-PI', 'GRID-UNIT-HEIGHT-PIX', 'GRID-UNIT-HEIGHT-PIXE',
|
||||
'GRID-UNIT-HEIGHT-PIXEL', 'GRID-UNIT-WIDTH-CHARS', 'GRID-UNIT-WIDTH',
|
||||
'GRID-UNIT-WIDTH-', 'GRID-UNIT-WIDTH-C', 'GRID-UNIT-WIDTH-CH',
|
||||
'GRID-UNIT-WIDTH-CHA', 'GRID-UNIT-WIDTH-CHAR', 'GRID-UNIT-WIDTH-PIXELS',
|
||||
'GRID-UNIT-WIDTH-P', 'GRID-UNIT-WIDTH-PI', 'GRID-UNIT-WIDTH-PIX',
|
||||
'GRID-UNIT-WIDTH-PIXE', 'GRID-UNIT-WIDTH-PIXEL', 'GRID-VISIBLE', 'GROUP',
|
||||
'GT', 'GUID', 'HANDLER', 'HAS-RECORDS', 'HAVING', 'HEADER', 'HEIGHT-CHARS',
|
||||
'HEIGHT', 'HEIGHT-', 'HEIGHT-C', 'HEIGHT-CH', 'HEIGHT-CHA', 'HEIGHT-CHAR',
|
||||
'HEIGHT-PIXELS', 'HEIGHT-P', 'HEIGHT-PI', 'HEIGHT-PIX', 'HEIGHT-PIXE',
|
||||
'HEIGHT-PIXEL', 'HELP', 'HEX-DECODE', 'HEX-ENCODE', 'HIDDEN', 'HIDE',
|
||||
'HORIZONTAL', 'HORI', 'HORIZ', 'HORIZO', 'HORIZON', 'HORIZONT', 'HORIZONTA',
|
||||
'HOST-BYTE-ORDER', 'HTML-CHARSET', 'HTML-END-OF-LINE', 'HTML-END-OF-PAGE',
|
||||
'HTML-FRAME-BEGIN', 'HTML-FRAME-END', 'HTML-HEADER-BEGIN',
|
||||
'HTML-HEADER-END', 'HTML-TITLE-BEGIN', 'HTML-TITLE-END', 'HWND', 'ICON',
|
||||
'IF', 'IMAGE', 'IMAGE-DOWN', 'IMAGE-INSENSITIVE', 'IMAGE-SIZE',
|
||||
'IMAGE-SIZE-CHARS', 'IMAGE-SIZE-C', 'IMAGE-SIZE-CH', 'IMAGE-SIZE-CHA',
|
||||
'IMAGE-SIZE-CHAR', 'IMAGE-SIZE-PIXELS', 'IMAGE-SIZE-P', 'IMAGE-SIZE-PI',
|
||||
'IMAGE-SIZE-PIX', 'IMAGE-SIZE-PIXE', 'IMAGE-SIZE-PIXEL', 'IMAGE-UP',
|
||||
'IMMEDIATE-DISPLAY', 'IMPLEMENTS', 'IMPORT', 'IMPORT-PRINCIPAL', 'IN',
|
||||
'INCREMENT-EXCLUSIVE-ID', 'INDEX', 'INDEXED-REPOSITION', 'INDEX-HINT',
|
||||
'INDEX-INFORMATION', 'INDICATOR', 'INFORMATION', 'INFO', 'INFOR', 'INFORM',
|
||||
'INFORMA', 'INFORMAT', 'INFORMATI', 'INFORMATIO', 'IN-HANDLE',
|
||||
'INHERIT-BGCOLOR', 'INHERIT-BGC', 'INHERIT-BGCO', 'INHERIT-BGCOL',
|
||||
'INHERIT-BGCOLO', 'INHERIT-FGCOLOR', 'INHERIT-FGC', 'INHERIT-FGCO',
|
||||
'INHERIT-FGCOL', 'INHERIT-FGCOLO', 'INHERITS', 'INITIAL', 'INIT', 'INITI',
|
||||
'INITIA', 'INITIAL-DIR', 'INITIAL-FILTER', 'INITIALIZE-DOCUMENT-TYPE',
|
||||
'INITIATE', 'INNER-CHARS', 'INNER-LINES', 'INPUT', 'INPUT-OUTPUT',
|
||||
'INPUT-O', 'INPUT-OU', 'INPUT-OUT', 'INPUT-OUTP', 'INPUT-OUTPU',
|
||||
'INPUT-VALUE', 'INSERT', 'INSERT-ATTRIBUTE', 'INSERT-BACKTAB', 'INSERT-B',
|
||||
'INSERT-BA', 'INSERT-BAC', 'INSERT-BACK', 'INSERT-BACKT', 'INSERT-BACKTA',
|
||||
'INSERT-FILE', 'INSERT-ROW', 'INSERT-STRING', 'INSERT-TAB', 'INSERT-T',
|
||||
'INSERT-TA', 'INTERFACE', 'INTERNAL-ENTRIES', 'INTO', 'INVOKE', 'IS',
|
||||
'IS-ATTR-SPACE', 'IS-ATTR', 'IS-ATTR-', 'IS-ATTR-S', 'IS-ATTR-SP',
|
||||
'IS-ATTR-SPA', 'IS-ATTR-SPAC', 'IS-CLASS', 'IS-CLAS', 'IS-LEAD-BYTE',
|
||||
'IS-ATTR', 'IS-OPEN', 'IS-PARAMETER-SET', 'IS-ROW-SELECTED', 'IS-SELECTED',
|
||||
'ITEM', 'ITEMS-PER-ROW', 'JOIN', 'JOIN-BY-SQLDB', 'KBLABEL',
|
||||
'KEEP-CONNECTION-OPEN', 'KEEP-FRAME-Z-ORDER', 'KEEP-FRAME-Z',
|
||||
'KEEP-FRAME-Z-', 'KEEP-FRAME-Z-O', 'KEEP-FRAME-Z-OR', 'KEEP-FRAME-Z-ORD',
|
||||
'KEEP-FRAME-Z-ORDE', 'KEEP-MESSAGES', 'KEEP-SECURITY-CACHE',
|
||||
'KEEP-TAB-ORDER', 'KEY', 'KEYCODE', 'KEY-CODE', 'KEYFUNCTION', 'KEYFUNC',
|
||||
'KEYFUNCT', 'KEYFUNCTI', 'KEYFUNCTIO', 'KEY-FUNCTION', 'KEY-FUNC',
|
||||
'KEY-FUNCT', 'KEY-FUNCTI', 'KEY-FUNCTIO', 'KEYLABEL', 'KEY-LABEL', 'KEYS',
|
||||
'KEYWORD', 'KEYWORD-ALL', 'LABEL', 'LABEL-BGCOLOR', 'LABEL-BGC',
|
||||
'LABEL-BGCO', 'LABEL-BGCOL', 'LABEL-BGCOLO', 'LABEL-DCOLOR', 'LABEL-DC',
|
||||
'LABEL-DCO', 'LABEL-DCOL', 'LABEL-DCOLO', 'LABEL-FGCOLOR', 'LABEL-FGC',
|
||||
'LABEL-FGCO', 'LABEL-FGCOL', 'LABEL-FGCOLO', 'LABEL-FONT', 'LABEL-PFCOLOR',
|
||||
'LABEL-PFC', 'LABEL-PFCO', 'LABEL-PFCOL', 'LABEL-PFCOLO', 'LABELS',
|
||||
'LANDSCAPE', 'LANGUAGES', 'LANGUAGE', 'LARGE', 'LARGE-TO-SMALL', 'LAST',
|
||||
'LAST-ASYNCH-REQUEST', 'LAST-BATCH', 'LAST-CHILD', 'LAST-EVENT',
|
||||
'LAST-EVEN', 'LAST-FORM', 'LASTKEY', 'LAST-KEY', 'LAST-OBJECT', 'LAST-OF',
|
||||
'LAST-PROCEDURE', 'LAST-PROCE', 'LAST-PROCED', 'LAST-PROCEDU',
|
||||
'LAST-PROCEDUR', 'LAST-SERVER', 'LAST-TAB-ITEM', 'LAST-TAB-I',
|
||||
'LAST-TAB-IT', 'LAST-TAB-ITE', 'LC', 'LDBNAME', 'LE', 'LEAVE',
|
||||
'LEFT-ALIGNED', 'LEFT-ALIGN', 'LEFT-ALIGNE', 'LEFT-TRIM', 'LENGTH',
|
||||
'LIBRARY', 'LIKE', 'LIKE-SEQUENTIAL', 'LINE', 'LINE-COUNTER', 'LINE-COUNT',
|
||||
'LINE-COUNTE', 'LIST-EVENTS', 'LISTING', 'LISTI', 'LISTIN',
|
||||
'LIST-ITEM-PAIRS', 'LIST-ITEMS', 'LIST-PROPERTY-NAMES', 'LIST-QUERY-ATTRS',
|
||||
'LIST-SET-ATTRS', 'LIST-WIDGETS', 'LITERAL-QUESTION', 'LITTLE-ENDIAN',
|
||||
'LOAD', 'LOAD-DOMAINS', 'LOAD-ICON', 'LOAD-IMAGE', 'LOAD-IMAGE-DOWN',
|
||||
'LOAD-IMAGE-INSENSITIVE', 'LOAD-IMAGE-UP', 'LOAD-MOUSE-POINTER',
|
||||
'LOAD-MOUSE-P', 'LOAD-MOUSE-PO', 'LOAD-MOUSE-POI', 'LOAD-MOUSE-POIN',
|
||||
'LOAD-MOUSE-POINT', 'LOAD-MOUSE-POINTE', 'LOAD-PICTURE', 'LOAD-SMALL-ICON',
|
||||
'LOCAL-NAME', 'LOCATOR-COLUMN-NUMBER', 'LOCATOR-LINE-NUMBER',
|
||||
'LOCATOR-PUBLIC-ID', 'LOCATOR-SYSTEM-ID', 'LOCATOR-TYPE', 'LOCKED',
|
||||
'LOCK-REGISTRATION', 'LOG', 'LOG-AUDIT-EVENT', 'LOGIN-EXPIRATION-TIMESTAMP',
|
||||
'LOGIN-HOST', 'LOGIN-STATE', 'LOG-MANAGER', 'LOGOUT', 'LOOKAHEAD', 'LOOKUP',
|
||||
'LT', 'MACHINE-CLASS', 'MANDATORY', 'MANUAL-HIGHLIGHT', 'MAP',
|
||||
'MARGIN-EXTRA', 'MARGIN-HEIGHT-CHARS', 'MARGIN-HEIGHT', 'MARGIN-HEIGHT-',
|
||||
'MARGIN-HEIGHT-C', 'MARGIN-HEIGHT-CH', 'MARGIN-HEIGHT-CHA',
|
||||
'MARGIN-HEIGHT-CHAR', 'MARGIN-HEIGHT-PIXELS', 'MARGIN-HEIGHT-P',
|
||||
'MARGIN-HEIGHT-PI', 'MARGIN-HEIGHT-PIX', 'MARGIN-HEIGHT-PIXE',
|
||||
'MARGIN-HEIGHT-PIXEL', 'MARGIN-WIDTH-CHARS', 'MARGIN-WIDTH',
|
||||
'MARGIN-WIDTH-', 'MARGIN-WIDTH-C', 'MARGIN-WIDTH-CH', 'MARGIN-WIDTH-CHA',
|
||||
'MARGIN-WIDTH-CHAR', 'MARGIN-WIDTH-PIXELS', 'MARGIN-WIDTH-P',
|
||||
'MARGIN-WIDTH-PI', 'MARGIN-WIDTH-PIX', 'MARGIN-WIDTH-PIXE',
|
||||
'MARGIN-WIDTH-PIXEL', 'MARK-NEW', 'MARK-ROW-STATE', 'MATCHES', 'MAX',
|
||||
'MAX-BUTTON', 'MAX-CHARS', 'MAX-DATA-GUESS', 'MAX-HEIGHT',
|
||||
'MAX-HEIGHT-CHARS', 'MAX-HEIGHT-C', 'MAX-HEIGHT-CH', 'MAX-HEIGHT-CHA',
|
||||
'MAX-HEIGHT-CHAR', 'MAX-HEIGHT-PIXELS', 'MAX-HEIGHT-P', 'MAX-HEIGHT-PI',
|
||||
'MAX-HEIGHT-PIX', 'MAX-HEIGHT-PIXE', 'MAX-HEIGHT-PIXEL', 'MAXIMIZE',
|
||||
'MAXIMUM', 'MAX', 'MAXI', 'MAXIM', 'MAXIMU', 'MAXIMUM-LEVEL', 'MAX-ROWS',
|
||||
'MAX-SIZE', 'MAX-VALUE', 'MAX-VAL', 'MAX-VALU', 'MAX-WIDTH',
|
||||
'MAX-WIDTH-CHARS', 'MAX-WIDTH', 'MAX-WIDTH-', 'MAX-WIDTH-C', 'MAX-WIDTH-CH',
|
||||
'MAX-WIDTH-CHA', 'MAX-WIDTH-CHAR', 'MAX-WIDTH-PIXELS', 'MAX-WIDTH-P',
|
||||
'MAX-WIDTH-PI', 'MAX-WIDTH-PIX', 'MAX-WIDTH-PIXE', 'MAX-WIDTH-PIXEL',
|
||||
'MD5-DIGEST', 'MEMBER', 'MEMPTR-TO-NODE-VALUE', 'MENU', 'MENUBAR',
|
||||
'MENU-BAR', 'MENU-ITEM', 'MENU-KEY', 'MENU-K', 'MENU-KE', 'MENU-MOUSE',
|
||||
'MENU-M', 'MENU-MO', 'MENU-MOU', 'MENU-MOUS', 'MERGE-BY-FIELD', 'MESSAGE',
|
||||
'MESSAGE-AREA', 'MESSAGE-AREA-FONT', 'MESSAGE-LINES', 'METHOD', 'MIN',
|
||||
'MIN-BUTTON', 'MIN-COLUMN-WIDTH-CHARS', 'MIN-COLUMN-WIDTH-C',
|
||||
'MIN-COLUMN-WIDTH-CH', 'MIN-COLUMN-WIDTH-CHA', 'MIN-COLUMN-WIDTH-CHAR',
|
||||
'MIN-COLUMN-WIDTH-PIXELS', 'MIN-COLUMN-WIDTH-P', 'MIN-COLUMN-WIDTH-PI',
|
||||
'MIN-COLUMN-WIDTH-PIX', 'MIN-COLUMN-WIDTH-PIXE', 'MIN-COLUMN-WIDTH-PIXEL',
|
||||
'MIN-HEIGHT-CHARS', 'MIN-HEIGHT', 'MIN-HEIGHT-', 'MIN-HEIGHT-C',
|
||||
'MIN-HEIGHT-CH', 'MIN-HEIGHT-CHA', 'MIN-HEIGHT-CHAR', 'MIN-HEIGHT-PIXELS',
|
||||
'MIN-HEIGHT-P', 'MIN-HEIGHT-PI', 'MIN-HEIGHT-PIX', 'MIN-HEIGHT-PIXE',
|
||||
'MIN-HEIGHT-PIXEL', 'MINIMUM', 'MIN', 'MINI', 'MINIM', 'MINIMU', 'MIN-SIZE',
|
||||
'MIN-VALUE', 'MIN-VAL', 'MIN-VALU', 'MIN-WIDTH-CHARS', 'MIN-WIDTH',
|
||||
'MIN-WIDTH-', 'MIN-WIDTH-C', 'MIN-WIDTH-CH', 'MIN-WIDTH-CHA',
|
||||
'MIN-WIDTH-CHAR', 'MIN-WIDTH-PIXELS', 'MIN-WIDTH-P', 'MIN-WIDTH-PI',
|
||||
'MIN-WIDTH-PIX', 'MIN-WIDTH-PIXE', 'MIN-WIDTH-PIXEL', 'MODIFIED', 'MODULO',
|
||||
'MOD', 'MODU', 'MODUL', 'MONTH', 'MOUSE', 'MOUSE-POINTER', 'MOUSE-P',
|
||||
'MOUSE-PO', 'MOUSE-POI', 'MOUSE-POIN', 'MOUSE-POINT', 'MOUSE-POINTE',
|
||||
'MOVABLE', 'MOVE-AFTER-TAB-ITEM', 'MOVE-AFTER', 'MOVE-AFTER-',
|
||||
'MOVE-AFTER-T', 'MOVE-AFTER-TA', 'MOVE-AFTER-TAB', 'MOVE-AFTER-TAB-',
|
||||
'MOVE-AFTER-TAB-I', 'MOVE-AFTER-TAB-IT', 'MOVE-AFTER-TAB-ITE',
|
||||
'MOVE-BEFORE-TAB-ITEM', 'MOVE-BEFOR', 'MOVE-BEFORE', 'MOVE-BEFORE-',
|
||||
'MOVE-BEFORE-T', 'MOVE-BEFORE-TA', 'MOVE-BEFORE-TAB', 'MOVE-BEFORE-TAB-',
|
||||
'MOVE-BEFORE-TAB-I', 'MOVE-BEFORE-TAB-IT', 'MOVE-BEFORE-TAB-ITE',
|
||||
'MOVE-COLUMN', 'MOVE-COL', 'MOVE-COLU', 'MOVE-COLUM', 'MOVE-TO-BOTTOM',
|
||||
'MOVE-TO-B', 'MOVE-TO-BO', 'MOVE-TO-BOT', 'MOVE-TO-BOTT', 'MOVE-TO-BOTTO',
|
||||
'MOVE-TO-EOF', 'MOVE-TO-TOP', 'MOVE-TO-T', 'MOVE-TO-TO', 'MPE',
|
||||
'MULTI-COMPILE', 'MULTIPLE', 'MULTIPLE-KEY', 'MULTITASKING-INTERVAL',
|
||||
'MUST-EXIST', 'NAME', 'NAMESPACE-PREFIX', 'NAMESPACE-URI', 'NATIVE', 'NE',
|
||||
'NEEDS-APPSERVER-PROMPT', 'NEEDS-PROMPT', 'NEW', 'NEW-INSTANCE', 'NEW-ROW',
|
||||
'NEXT', 'NEXT-COLUMN', 'NEXT-PROMPT', 'NEXT-ROWID', 'NEXT-SIBLING',
|
||||
'NEXT-TAB-ITEM', 'NEXT-TAB-I', 'NEXT-TAB-IT', 'NEXT-TAB-ITE', 'NEXT-VALUE',
|
||||
'NO', 'NO-APPLY', 'NO-ARRAY-MESSAGE', 'NO-ASSIGN', 'NO-ATTR-LIST',
|
||||
'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-L', 'NO-ATTR-LI', 'NO-ATTR-LIS',
|
||||
'NO-ATTR-SPACE', 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-S', 'NO-ATTR-SP',
|
||||
'NO-ATTR-SPA', 'NO-ATTR-SPAC', 'NO-AUTO-VALIDATE', 'NO-BIND-WHERE',
|
||||
'NO-BOX', 'NO-CONSOLE', 'NO-CONVERT', 'NO-CONVERT-3D-COLORS',
|
||||
'NO-CURRENT-VALUE', 'NO-DEBUG', 'NODE-VALUE-TO-MEMPTR', 'NO-DRAG',
|
||||
'NO-ECHO', 'NO-EMPTY-SPACE', 'NO-ERROR', 'NO-FILL', 'NO-F', 'NO-FI',
|
||||
'NO-FIL', 'NO-FOCUS', 'NO-HELP', 'NO-HIDE', 'NO-INDEX-HINT',
|
||||
'NO-INHERIT-BGCOLOR', 'NO-INHERIT-BGC', 'NO-INHERIT-BGCO', 'LABEL-BGCOL',
|
||||
'LABEL-BGCOLO', 'NO-INHERIT-FGCOLOR', 'NO-INHERIT-FGC', 'NO-INHERIT-FGCO',
|
||||
'NO-INHERIT-FGCOL', 'NO-INHERIT-FGCOLO', 'NO-JOIN-BY-SQLDB', 'NO-LABELS',
|
||||
'NO-LABE', 'NO-LOBS', 'NO-LOCK', 'NO-LOOKAHEAD', 'NO-MAP', 'NO-MESSAGE',
|
||||
'NO-MES', 'NO-MESS', 'NO-MESSA', 'NO-MESSAG', 'NONAMESPACE-SCHEMA-LOCATION',
|
||||
'NONE', 'NO-PAUSE', 'NO-PREFETCH', 'NO-PREFE', 'NO-PREFET', 'NO-PREFETC',
|
||||
'NORMALIZE', 'NO-ROW-MARKERS', 'NO-SCROLLBAR-VERTICAL',
|
||||
'NO-SEPARATE-CONNECTION', 'NO-SEPARATORS', 'NOT', 'NO-TAB-STOP',
|
||||
'NOT-ACTIVE', 'NO-UNDERLINE', 'NO-UND', 'NO-UNDE', 'NO-UNDER', 'NO-UNDERL',
|
||||
'NO-UNDERLI', 'NO-UNDERLIN', 'NO-UNDO', 'NO-VALIDATE', 'NO-VAL', 'NO-VALI',
|
||||
'NO-VALID', 'NO-VALIDA', 'NO-VALIDAT', 'NOW', 'NO-WAIT', 'NO-WORD-WRAP',
|
||||
'NULL', 'NUM-ALIASES', 'NUM-ALI', 'NUM-ALIA', 'NUM-ALIAS', 'NUM-ALIASE',
|
||||
'NUM-BUFFERS', 'NUM-BUTTONS', 'NUM-BUT', 'NUM-BUTT', 'NUM-BUTTO',
|
||||
'NUM-BUTTON', 'NUM-COLUMNS', 'NUM-COL', 'NUM-COLU', 'NUM-COLUM',
|
||||
'NUM-COLUMN', 'NUM-COPIES', 'NUM-DBS', 'NUM-DROPPED-FILES', 'NUM-ENTRIES',
|
||||
'NUMERIC', 'NUMERIC-FORMAT', 'NUMERIC-F', 'NUMERIC-FO', 'NUMERIC-FOR',
|
||||
'NUMERIC-FORM', 'NUMERIC-FORMA', 'NUM-FIELDS', 'NUM-FORMATS', 'NUM-ITEMS',
|
||||
'NUM-ITERATIONS', 'NUM-LINES', 'NUM-LOCKED-COLUMNS', 'NUM-LOCKED-COL',
|
||||
'NUM-LOCKED-COLU', 'NUM-LOCKED-COLUM', 'NUM-LOCKED-COLUMN', 'NUM-MESSAGES',
|
||||
'NUM-PARAMETERS', 'NUM-REFERENCES', 'NUM-REPLACED', 'NUM-RESULTS',
|
||||
'NUM-SELECTED-ROWS', 'NUM-SELECTED-WIDGETS', 'NUM-SELECTED',
|
||||
'NUM-SELECTED-', 'NUM-SELECTED-W', 'NUM-SELECTED-WI', 'NUM-SELECTED-WID',
|
||||
'NUM-SELECTED-WIDG', 'NUM-SELECTED-WIDGE', 'NUM-SELECTED-WIDGET',
|
||||
'NUM-TABS', 'NUM-TO-RETAIN', 'NUM-VISIBLE-COLUMNS', 'OCTET-LENGTH', 'OF',
|
||||
'OFF', 'OK', 'OK-CANCEL', 'OLD', 'ON', 'ON-FRAME-BORDER', 'ON-FRAME',
|
||||
'ON-FRAME-', 'ON-FRAME-B', 'ON-FRAME-BO', 'ON-FRAME-BOR', 'ON-FRAME-BORD',
|
||||
'ON-FRAME-BORDE', 'OPEN', 'OPSYS', 'OPTION', 'OR', 'ORDERED-JOIN',
|
||||
'ORDINAL', 'OS-APPEND', 'OS-COMMAND', 'OS-COPY', 'OS-CREATE-DIR',
|
||||
'OS-DELETE', 'OS-DIR', 'OS-DRIVES', 'OS-DRIVE', 'OS-ERROR', 'OS-GETENV',
|
||||
'OS-RENAME', 'OTHERWISE', 'OUTPUT', 'OVERLAY', 'OVERRIDE', 'OWNER', 'PAGE',
|
||||
'PAGE-BOTTOM', 'PAGE-BOT', 'PAGE-BOTT', 'PAGE-BOTTO', 'PAGED',
|
||||
'PAGE-NUMBER', 'PAGE-NUM', 'PAGE-NUMB', 'PAGE-NUMBE', 'PAGE-SIZE',
|
||||
'PAGE-TOP', 'PAGE-WIDTH', 'PAGE-WID', 'PAGE-WIDT', 'PARAMETER', 'PARAM',
|
||||
'PARAME', 'PARAMET', 'PARAMETE', 'PARENT', 'PARSE-STATUS', 'PARTIAL-KEY',
|
||||
'PASCAL', 'PASSWORD-FIELD', 'PATHNAME', 'PAUSE', 'PBE-HASH-ALGORITHM',
|
||||
'PBE-HASH-ALG', 'PBE-HASH-ALGO', 'PBE-HASH-ALGOR', 'PBE-HASH-ALGORI',
|
||||
'PBE-HASH-ALGORIT', 'PBE-HASH-ALGORITH', 'PBE-KEY-ROUNDS', 'PDBNAME',
|
||||
'PERSISTENT', 'PERSIST', 'PERSISTE', 'PERSISTEN',
|
||||
'PERSISTENT-CACHE-DISABLED', 'PFCOLOR', 'PFC', 'PFCO', 'PFCOL', 'PFCOLO',
|
||||
'PIXELS', 'PIXELS-PER-COLUMN', 'PIXELS-PER-COL', 'PIXELS-PER-COLU',
|
||||
'PIXELS-PER-COLUM', 'PIXELS-PER-ROW', 'POPUP-MENU', 'POPUP-M', 'POPUP-ME',
|
||||
'POPUP-MEN', 'POPUP-ONLY', 'POPUP-O', 'POPUP-ON', 'POPUP-ONL', 'PORTRAIT',
|
||||
'POSITION', 'PRECISION', 'PREFER-DATASET', 'PREPARED', 'PREPARE-STRING',
|
||||
'PREPROCESS', 'PREPROC', 'PREPROCE', 'PREPROCES', 'PRESELECT', 'PRESEL',
|
||||
'PRESELE', 'PRESELEC', 'PREV', 'PREV-COLUMN', 'PREV-SIBLING',
|
||||
'PREV-TAB-ITEM', 'PREV-TAB-I', 'PREV-TAB-IT', 'PREV-TAB-ITE', 'PRIMARY',
|
||||
'PRINTER', 'PRINTER-CONTROL-HANDLE', 'PRINTER-HDC', 'PRINTER-NAME',
|
||||
'PRINTER-PORT', 'PRINTER-SETUP', 'PRIVATE', 'PRIVATE-DATA', 'PRIVATE-D',
|
||||
'PRIVATE-DA', 'PRIVATE-DAT', 'PRIVILEGES', 'PROCEDURE', 'PROCE', 'PROCED',
|
||||
'PROCEDU', 'PROCEDUR', 'PROCEDURE-CALL-TYPE', 'PROCESS', 'PROC-HANDLE',
|
||||
'PROC-HA', 'PROC-HAN', 'PROC-HAND', 'PROC-HANDL', 'PROC-STATUS', 'PROC-ST',
|
||||
'PROC-STA', 'PROC-STAT', 'PROC-STATU', 'proc-text', 'proc-text-buffe',
|
||||
'PROFILER', 'PROGRAM-NAME', 'PROGRESS', 'PROGRESS-SOURCE', 'PROGRESS-S',
|
||||
'PROGRESS-SO', 'PROGRESS-SOU', 'PROGRESS-SOUR', 'PROGRESS-SOURC', 'PROMPT',
|
||||
'PROMPT-FOR', 'PROMPT-F', 'PROMPT-FO', 'PROMSGS', 'PROPATH', 'PROPERTY',
|
||||
'PROTECTED', 'PROVERSION', 'PROVERS', 'PROVERSI', 'PROVERSIO', 'PROXY',
|
||||
'PROXY-PASSWORD', 'PROXY-USERID', 'PUBLIC', 'PUBLIC-ID', 'PUBLISH',
|
||||
'PUBLISHED-EVENTS', 'PUT', 'PUTBYTE', 'PUT-BYTE', 'PUT-DOUBLE', 'PUT-FLOAT',
|
||||
'PUT-INT64', 'PUT-KEY-VALUE', 'PUT-KEY-VAL', 'PUT-KEY-VALU', 'PUT-LONG',
|
||||
'PUT-SHORT', 'PUT-STRING', 'PUT-UNSIGNED-LONG', 'QUERY', 'QUERY-CLOSE',
|
||||
'QUERY-OFF-END', 'QUERY-OPEN', 'QUERY-PREPARE', 'QUERY-TUNING', 'QUESTION',
|
||||
'QUIT', 'QUOTER', 'RADIO-BUTTONS', 'RADIO-SET', 'RANDOM', 'RAW-TRANSFER',
|
||||
'RCODE-INFORMATION', 'RCODE-INFO', 'RCODE-INFOR', 'RCODE-INFORM',
|
||||
'RCODE-INFORMA', 'RCODE-INFORMAT', 'RCODE-INFORMATI', 'RCODE-INFORMATIO',
|
||||
'READ-AVAILABLE', 'READ-EXACT-NUM', 'READ-FILE', 'READKEY', 'READ-ONLY',
|
||||
'READ-XML', 'READ-XMLSCHEMA', 'REAL', 'RECORD-LENGTH', 'RECTANGLE', 'RECT',
|
||||
'RECTA', 'RECTAN', 'RECTANG', 'RECTANGL', 'RECURSIVE', 'REFERENCE-ONLY',
|
||||
'REFRESH', 'REFRESHABLE', 'REFRESH-AUDIT-POLICY', 'REGISTER-DOMAIN',
|
||||
'RELEASE', 'REMOTE', 'REMOVE-EVENTS-PROCEDURE', 'REMOVE-SUPER-PROCEDURE',
|
||||
'REPEAT', 'REPLACE', 'REPLACE-SELECTION-TEXT', 'REPOSITION',
|
||||
'REPOSITION-BACKWARD', 'REPOSITION-FORWARD', 'REPOSITION-MODE',
|
||||
'REPOSITION-TO-ROW', 'REPOSITION-TO-ROWID', 'REQUEST', 'RESET', 'RESIZABLE',
|
||||
'RESIZA', 'RESIZAB', 'RESIZABL', 'RESIZE', 'RESTART-ROW', 'RESTART-ROWID',
|
||||
'RETAIN', 'RETAIN-SHAPE', 'RETRY', 'RETRY-CANCEL', 'RETURN',
|
||||
'RETURN-INSERTED', 'RETURN-INS', 'RETURN-INSE', 'RETURN-INSER',
|
||||
'RETURN-INSERT', 'RETURN-INSERTE', 'RETURNS', 'RETURN-TO-START-DIR',
|
||||
'RETURN-TO-START-DI', 'RETURN-VALUE', 'RETURN-VAL', 'RETURN-VALU',
|
||||
'RETURN-VALUE-DATA-TYPE', 'REVERSE-FROM', 'REVERT', 'REVOKE', 'RGB-VALUE',
|
||||
'RIGHT-ALIGNED', 'RETURN-ALIGN', 'RETURN-ALIGNE', 'RIGHT-TRIM', 'R-INDEX',
|
||||
'ROLES', 'ROUND', 'ROUTINE-LEVEL', 'ROW', 'ROW-HEIGHT-CHARS', 'HEIGHT',
|
||||
'ROW-HEIGHT-PIXELS', 'HEIGHT-P', 'ROW-MARKERS', 'ROW-OF', 'ROW-RESIZABLE',
|
||||
'RULE', 'RUN', 'RUN-PROCEDURE', 'SAVE', 'SAVE-AS', 'SAVE-FILE',
|
||||
'SAX-COMPLETE', 'SAX-COMPLE', 'SAX-COMPLET', 'SAX-PARSE', 'SAX-PARSE-FIRST',
|
||||
'SAX-PARSE-NEXT', 'SAX-PARSER-ERROR', 'SAX-RUNNING', 'SAX-UNINITIALIZED',
|
||||
'SAX-WRITE-BEGIN', 'SAX-WRITE-COMPLETE', 'SAX-WRITE-CONTENT',
|
||||
'SAX-WRITE-ELEMENT', 'SAX-WRITE-ERROR', 'SAX-WRITE-IDLE', 'SAX-WRITER',
|
||||
'SAX-WRITE-TAG', 'SCHEMA', 'SCHEMA-LOCATION', 'SCHEMA-MARSHAL',
|
||||
'SCHEMA-PATH', 'SCREEN', 'SCREEN-IO', 'SCREEN-LINES', 'SCREEN-VALUE',
|
||||
'SCREEN-VAL', 'SCREEN-VALU', 'SCROLL', 'SCROLLABLE', 'SCROLLBAR-HORIZONTAL',
|
||||
'SCROLLBAR-H', 'SCROLLBAR-HO', 'SCROLLBAR-HOR', 'SCROLLBAR-HORI',
|
||||
'SCROLLBAR-HORIZ', 'SCROLLBAR-HORIZO', 'SCROLLBAR-HORIZON',
|
||||
'SCROLLBAR-HORIZONT', 'SCROLLBAR-HORIZONTA', 'SCROLL-BARS',
|
||||
'SCROLLBAR-VERTICAL', 'SCROLLBAR-V', 'SCROLLBAR-VE', 'SCROLLBAR-VER',
|
||||
'SCROLLBAR-VERT', 'SCROLLBAR-VERTI', 'SCROLLBAR-VERTIC',
|
||||
'SCROLLBAR-VERTICA', 'SCROLL-DELTA', 'SCROLLED-ROW-POSITION',
|
||||
'SCROLLED-ROW-POS', 'SCROLLED-ROW-POSI', 'SCROLLED-ROW-POSIT',
|
||||
'SCROLLED-ROW-POSITI', 'SCROLLED-ROW-POSITIO', 'SCROLLING', 'SCROLL-OFFSET',
|
||||
'SCROLL-TO-CURRENT-ROW', 'SCROLL-TO-ITEM', 'SCROLL-TO-I', 'SCROLL-TO-IT',
|
||||
'SCROLL-TO-ITE', 'SCROLL-TO-SELECTED-ROW', 'SDBNAME', 'SEAL',
|
||||
'SEAL-TIMESTAMP', 'SEARCH', 'SEARCH-SELF', 'SEARCH-TARGET', 'SECTION',
|
||||
'SECURITY-POLICY', 'SEEK', 'SELECT', 'SELECTABLE', 'SELECT-ALL', 'SELECTED',
|
||||
'SELECT-FOCUSED-ROW', 'SELECTION', 'SELECTION-END', 'SELECTION-LIST',
|
||||
'SELECTION-START', 'SELECTION-TEXT', 'SELECT-NEXT-ROW', 'SELECT-PREV-ROW',
|
||||
'SELECT-ROW', 'SELF', 'SEND', 'send-sql-statement', 'send-sql', 'SENSITIVE',
|
||||
'SEPARATE-CONNECTION', 'SEPARATOR-FGCOLOR', 'SEPARATORS', 'SERVER',
|
||||
'SERVER-CONNECTION-BOUND', 'SERVER-CONNECTION-BOUND-REQUEST',
|
||||
'SERVER-CONNECTION-CONTEXT', 'SERVER-CONNECTION-ID',
|
||||
'SERVER-OPERATING-MODE', 'SESSION', 'SESSION-ID', 'SET', 'SET-APPL-CONTEXT',
|
||||
'SET-ATTR-CALL-TYPE', 'SET-ATTRIBUTE-NODE', 'SET-BLUE-VALUE', 'SET-BLUE',
|
||||
'SET-BLUE-', 'SET-BLUE-V', 'SET-BLUE-VA', 'SET-BLUE-VAL', 'SET-BLUE-VALU',
|
||||
'SET-BREAK', 'SET-BUFFERS', 'SET-CALLBACK', 'SET-CLIENT', 'SET-COMMIT',
|
||||
'SET-CONTENTS', 'SET-CURRENT-VALUE', 'SET-DB-CLIENT', 'SET-DYNAMIC',
|
||||
'SET-EVENT-MANAGER-OPTION', 'SET-GREEN-VALUE', 'SET-GREEN', 'SET-GREEN-',
|
||||
'SET-GREEN-V', 'SET-GREEN-VA', 'SET-GREEN-VAL', 'SET-GREEN-VALU',
|
||||
'SET-INPUT-SOURCE', 'SET-OPTION', 'SET-OUTPUT-DESTINATION', 'SET-PARAMETER',
|
||||
'SET-POINTER-VALUE', 'SET-PROPERTY', 'SET-RED-VALUE', 'SET-RED', 'SET-RED-',
|
||||
'SET-RED-V', 'SET-RED-VA', 'SET-RED-VAL', 'SET-RED-VALU',
|
||||
'SET-REPOSITIONED-ROW', 'SET-RGB-VALUE', 'SET-ROLLBACK', 'SET-SELECTION',
|
||||
'SET-SIZE', 'SET-SORT-ARROW', 'SETUSERID', 'SETUSER', 'SETUSERI',
|
||||
'SET-WAIT-STATE', 'SHA1-DIGEST', 'SHARED', 'SHARE-LOCK', 'SHARE', 'SHARE-',
|
||||
'SHARE-L', 'SHARE-LO', 'SHARE-LOC', 'SHOW-IN-TASKBAR', 'SHOW-STATS',
|
||||
'SHOW-STAT', 'SIDE-LABEL-HANDLE', 'SIDE-LABEL-H', 'SIDE-LABEL-HA',
|
||||
'SIDE-LABEL-HAN', 'SIDE-LABEL-HAND', 'SIDE-LABEL-HANDL', 'SIDE-LABELS',
|
||||
'SIDE-LAB', 'SIDE-LABE', 'SIDE-LABEL', 'SILENT', 'SIMPLE', 'SINGLE', 'SIZE',
|
||||
'SIZE-CHARS', 'SIZE-C', 'SIZE-CH', 'SIZE-CHA', 'SIZE-CHAR', 'SIZE-PIXELS',
|
||||
'SIZE-P', 'SIZE-PI', 'SIZE-PIX', 'SIZE-PIXE', 'SIZE-PIXEL', 'SKIP',
|
||||
'SKIP-DELETED-RECORD', 'SLIDER', 'SMALL-ICON', 'SMALLINT', 'SMALL-TITLE',
|
||||
'SOME', 'SORT', 'SORT-ASCENDING', 'SORT-NUMBER', 'SOURCE',
|
||||
'SOURCE-PROCEDURE', 'SPACE', 'SQL', 'SQRT', 'SSL-SERVER-NAME', 'STANDALONE',
|
||||
'START', 'START-DOCUMENT', 'START-ELEMENT', 'START-MOVE', 'START-RESIZE',
|
||||
'START-ROW-RESIZE', 'STATE-DETAIL', 'STATIC', 'STATUS', 'STATUS-AREA',
|
||||
'STATUS-AREA-FONT', 'STDCALL', 'STOP', 'STOP-PARSING', 'STOPPED', 'STOPPE',
|
||||
'STORED-PROCEDURE', 'STORED-PROC', 'STORED-PROCE', 'STORED-PROCED',
|
||||
'STORED-PROCEDU', 'STORED-PROCEDUR', 'STREAM', 'STREAM-HANDLE', 'STREAM-IO',
|
||||
'STRETCH-TO-FIT', 'STRICT', 'STRING', 'STRING-VALUE', 'STRING-XREF',
|
||||
'SUB-AVERAGE', 'SUB-AVE', 'SUB-AVER', 'SUB-AVERA', 'SUB-AVERAG',
|
||||
'SUB-COUNT', 'SUB-MAXIMUM', 'SUM-MAX', 'SUM-MAXI', 'SUM-MAXIM',
|
||||
'SUM-MAXIMU', 'SUB-MENU', 'SUBSUB-', 'MINIMUM', 'SUB-MIN', 'SUBSCRIBE',
|
||||
'SUBSTITUTE', 'SUBST', 'SUBSTI', 'SUBSTIT', 'SUBSTITU', 'SUBSTITUT',
|
||||
'SUBSTRING', 'SUBSTR', 'SUBSTRI', 'SUBSTRIN', 'SUB-TOTAL', 'SUBTYPE', 'SUM',
|
||||
'SUPER', 'SUPER-PROCEDURES', 'SUPPRESS-NAMESPACE-PROCESSING',
|
||||
'SUPPRESS-WARNINGS', 'SUPPRESS-W', 'SUPPRESS-WA', 'SUPPRESS-WAR',
|
||||
'SUPPRESS-WARN', 'SUPPRESS-WARNI', 'SUPPRESS-WARNIN', 'SUPPRESS-WARNING',
|
||||
'SYMMETRIC-ENCRYPTION-ALGORITHM', 'SYMMETRIC-ENCRYPTION-IV',
|
||||
'SYMMETRIC-ENCRYPTION-KEY', 'SYMMETRIC-SUPPORT', 'SYSTEM-ALERT-BOXES',
|
||||
'SYSTEM-ALERT', 'SYSTEM-ALERT-', 'SYSTEM-ALERT-B', 'SYSTEM-ALERT-BO',
|
||||
'SYSTEM-ALERT-BOX', 'SYSTEM-ALERT-BOXE', 'SYSTEM-DIALOG', 'SYSTEM-HELP',
|
||||
'SYSTEM-ID', 'TABLE', 'TABLE-HANDLE', 'TABLE-NUMBER', 'TAB-POSITION',
|
||||
'TAB-STOP', 'TARGET', 'TARGET-PROCEDURE', 'TEMP-DIRECTORY', 'TEMP-DIR',
|
||||
'TEMP-DIRE', 'TEMP-DIREC', 'TEMP-DIRECT', 'TEMP-DIRECTO', 'TEMP-DIRECTOR',
|
||||
'TEMP-TABLE', 'TEMP-TABLE-PREPARE', 'TERM', 'TERMINAL', 'TERM', 'TERMI',
|
||||
'TERMIN', 'TERMINA', 'TERMINATE', 'TEXT', 'TEXT-CURSOR', 'TEXT-SEG-GROW',
|
||||
'TEXT-SELECTED', 'THEN', 'THIS-OBJECT', 'THIS-PROCEDURE', 'THREE-D',
|
||||
'THROW', 'THROUGH', 'THRU', 'TIC-MARKS', 'TIME', 'TIME-SOURCE', 'TITLE',
|
||||
'TITLE-BGCOLOR', 'TITLE-BGC', 'TITLE-BGCO', 'TITLE-BGCOL', 'TITLE-BGCOLO',
|
||||
'TITLE-DCOLOR', 'TITLE-DC', 'TITLE-DCO', 'TITLE-DCOL', 'TITLE-DCOLO',
|
||||
'TITLE-FGCOLOR', 'TITLE-FGC', 'TITLE-FGCO', 'TITLE-FGCOL', 'TITLE-FGCOLO',
|
||||
'TITLE-FONT', 'TITLE-FO', 'TITLE-FON', 'TO', 'TODAY', 'TOGGLE-BOX',
|
||||
'TOOLTIP', 'TOOLTIPS', 'TOPIC', 'TOP-NAV-QUERY', 'TOP-ONLY', 'TO-ROWID',
|
||||
'TOTAL', 'TRAILING', 'TRANS', 'TRANSACTION', 'TRANSACTION-MODE',
|
||||
'TRANS-INIT-PROCEDURE', 'TRANSPARENT', 'TRIGGER', 'TRIGGERS', 'TRIM',
|
||||
'TRUE', 'TRUNCATE', 'TRUNC', 'TRUNCA', 'TRUNCAT', 'TYPE', 'TYPE-OF',
|
||||
'UNBOX', 'UNBUFFERED', 'UNBUFF', 'UNBUFFE', 'UNBUFFER', 'UNBUFFERE',
|
||||
'UNDERLINE', 'UNDERL', 'UNDERLI', 'UNDERLIN', 'UNDO', 'UNFORMATTED',
|
||||
'UNFORM', 'UNFORMA', 'UNFORMAT', 'UNFORMATT', 'UNFORMATTE', 'UNION',
|
||||
'UNIQUE', 'UNIQUE-ID', 'UNIQUE-MATCH', 'UNIX', 'UNLESS-HIDDEN', 'UNLOAD',
|
||||
'UNSIGNED-LONG', 'UNSUBSCRIBE', 'UP', 'UPDATE', 'UPDATE-ATTRIBUTE', 'URL',
|
||||
'URL-DECODE', 'URL-ENCODE', 'URL-PASSWORD', 'URL-USERID', 'USE',
|
||||
'USE-DICT-EXPS', 'USE-FILENAME', 'USE-INDEX', 'USER', 'USE-REVVIDEO',
|
||||
'USERID', 'USER-ID', 'USE-TEXT', 'USE-UNDERLINE', 'USE-WIDGET-POOL',
|
||||
'USING', 'V6DISPLAY', 'V6FRAME', 'VALIDATE', 'VALIDATE-EXPRESSION',
|
||||
'VALIDATE-MESSAGE', 'VALIDATE-SEAL', 'VALIDATION-ENABLED', 'VALID-EVENT',
|
||||
'VALID-HANDLE', 'VALID-OBJECT', 'VALUE', 'VALUE-CHANGED', 'VALUES',
|
||||
'VARIABLE', 'VAR', 'VARI', 'VARIA', 'VARIAB', 'VARIABL', 'VERBOSE',
|
||||
'VERSION', 'VERTICAL', 'VERT', 'VERTI', 'VERTIC', 'VERTICA', 'VIEW',
|
||||
'VIEW-AS', 'VIEW-FIRST-COLUMN-ON-REOPEN', 'VIRTUAL-HEIGHT-CHARS',
|
||||
'VIRTUAL-HEIGHT', 'VIRTUAL-HEIGHT-', 'VIRTUAL-HEIGHT-C',
|
||||
'VIRTUAL-HEIGHT-CH', 'VIRTUAL-HEIGHT-CHA', 'VIRTUAL-HEIGHT-CHAR',
|
||||
'VIRTUAL-HEIGHT-PIXELS', 'VIRTUAL-HEIGHT-P', 'VIRTUAL-HEIGHT-PI',
|
||||
'VIRTUAL-HEIGHT-PIX', 'VIRTUAL-HEIGHT-PIXE', 'VIRTUAL-HEIGHT-PIXEL',
|
||||
'VIRTUAL-WIDTH-CHARS', 'VIRTUAL-WIDTH', 'VIRTUAL-WIDTH-', 'VIRTUAL-WIDTH-C',
|
||||
'VIRTUAL-WIDTH-CH', 'VIRTUAL-WIDTH-CHA', 'VIRTUAL-WIDTH-CHAR',
|
||||
'VIRTUAL-WIDTH-PIXELS', 'VIRTUAL-WIDTH-P', 'VIRTUAL-WIDTH-PI',
|
||||
'VIRTUAL-WIDTH-PIX', 'VIRTUAL-WIDTH-PIXE', 'VIRTUAL-WIDTH-PIXEL', 'VISIBLE',
|
||||
'VOID', 'WAIT', 'WAIT-FOR', 'WARNING', 'WEB-CONTEXT', 'WEEKDAY', 'WHEN',
|
||||
'WHERE', 'WHILE', 'WIDGET', 'WIDGET-ENTER', 'WIDGET-E', 'WIDGET-EN',
|
||||
'WIDGET-ENT', 'WIDGET-ENTE', 'WIDGET-ID', 'WIDGET-LEAVE', 'WIDGET-L',
|
||||
'WIDGET-LE', 'WIDGET-LEA', 'WIDGET-LEAV', 'WIDGET-POOL', 'WIDTH',
|
||||
'WIDTH-CHARS', 'WIDTH', 'WIDTH-', 'WIDTH-C', 'WIDTH-CH', 'WIDTH-CHA',
|
||||
'WIDTH-CHAR', 'WIDTH-PIXELS', 'WIDTH-P', 'WIDTH-PI', 'WIDTH-PIX',
|
||||
'WIDTH-PIXE', 'WIDTH-PIXEL', 'WINDOW', 'WINDOW-MAXIMIZED', 'WINDOW-MAXIM',
|
||||
'WINDOW-MAXIMI', 'WINDOW-MAXIMIZ', 'WINDOW-MAXIMIZE', 'WINDOW-MINIMIZED',
|
||||
'WINDOW-MINIM', 'WINDOW-MINIMI', 'WINDOW-MINIMIZ', 'WINDOW-MINIMIZE',
|
||||
'WINDOW-NAME', 'WINDOW-NORMAL', 'WINDOW-STATE', 'WINDOW-STA', 'WINDOW-STAT',
|
||||
'WINDOW-SYSTEM', 'WITH', 'WORD-INDEX', 'WORD-WRAP',
|
||||
'WORK-AREA-HEIGHT-PIXELS', 'WORK-AREA-WIDTH-PIXELS', 'WORK-AREA-X',
|
||||
'WORK-AREA-Y', 'WORKFILE', 'WORK-TABLE', 'WORK-TAB', 'WORK-TABL', 'WRITE',
|
||||
'WRITE-CDATA', 'WRITE-CHARACTERS', 'WRITE-COMMENT', 'WRITE-DATA-ELEMENT',
|
||||
'WRITE-EMPTY-ELEMENT', 'WRITE-ENTITY-REF', 'WRITE-EXTERNAL-DTD',
|
||||
'WRITE-FRAGMENT', 'WRITE-MESSAGE', 'WRITE-PROCESSING-INSTRUCTION',
|
||||
'WRITE-STATUS', 'WRITE-XML', 'WRITE-XMLSCHEMA', 'X', 'XCODE',
|
||||
'XML-DATA-TYPE', 'XML-NODE-TYPE', 'XML-SCHEMA-PATH',
|
||||
'XML-SUPPRESS-NAMESPACE-PROCESSING', 'X-OF', 'XREF', 'XREF-XML', 'Y',
|
||||
'YEAR', 'YEAR-OFFSET', 'YES', 'YES-NO', 'YES-NO-CANCEL', 'Y-OF'
|
||||
]
|
3787
wakatime/packages/pygments/lexers/_phpbuiltins.py
Normal file
3787
wakatime/packages/pygments/lexers/_phpbuiltins.py
Normal file
File diff suppressed because it is too large
Load diff
233
wakatime/packages/pygments/lexers/_postgres_builtins.py
Normal file
233
wakatime/packages/pygments/lexers/_postgres_builtins.py
Normal file
|
@ -0,0 +1,233 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._postgres_builtins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Self-updating data files for PostgreSQL lexer.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
import urllib
|
||||
|
||||
# One man's constant is another man's variable.
# Locations of the upstream PostgreSQL documentation sources that
# update_myself() scrapes to regenerate the data lists below.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
def update_myself():
    """Regenerate the DATATYPES, PSEUDO_TYPES and KEYWORDS constants in
    this very module by scraping the upstream PostgreSQL documentation.
    """
    # Both the datatype and pseudo-type lists come from the same document,
    # so fetch it once and materialize it (it is iterated twice).
    datatype_lines = list(fetch(DATATYPES_URL))
    datatypes = parse_datatypes(datatype_lines)
    pseudo_types = parse_pseudos(datatype_lines)

    keywords = parse_keywords(fetch(KEYWORDS_URL))

    for const_name, values in (('DATATYPES', datatypes),
                               ('PSEUDO_TYPES', pseudo_types),
                               ('KEYWORDS', keywords)):
        update_consts(__file__, const_name, values)
def parse_keywords(f):
    """Return the sorted list of SQL keywords found in *f*.

    *f* is a file-like object containing the keywords.sgml documentation
    source; each keyword appears as a ``<token>`` entry followed by its
    reservation-category entry.
    """
    pattern = re.compile(
        r'\s*<entry><token>([^<]+)</token></entry>\s*'
        r'<entry>([^<]+)</entry>')
    keywords = [match.group(1) for match in pattern.finditer(f.read())]

    # An empty result means the document format changed under us.
    if not keywords:
        raise ValueError('no keyword found')

    keywords.sort()
    return keywords
def parse_datatypes(f):
    """Return the sorted list of type names found in *f*.

    *f* is an iterable of lines from the datatype.sgml documentation
    source; scanning stops at the first ``<sect1`` marker.
    """
    found = set()
    for line in f:
        if '<sect1' in line:
            break
        if '<entry><type>' not in line:
            continue

        # Parse a string such as
        #     time [ (<replaceable>p</replaceable>) ] [ without time zone ]
        # into types "time" and "without time zone":
        # drop the <replaceable> placeholders first, then every other tag.
        line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
        line = re.sub("<[^>]+>", "", line)

        # Split on the square brackets and keep only the fragments that do
        # not contain parentheses (those held the dropped placeholders).
        fragments = []
        for outer in line.split('['):
            for fragment in outer.split(']'):
                if "(" not in fragment:
                    fragments.append(fragment)

        # Each fragment may list several comma-separated types; normalize
        # internal whitespace and skip empty leftovers.
        for fragment in fragments:
            for candidate in fragment.split(','):
                candidate = candidate.strip()
                if candidate:
                    found.add(" ".join(candidate.split()))

    return sorted(found)
def parse_pseudos(f):
    """Return the list of pseudo-type names found in *f*.

    *f* is an iterable of lines from the datatype.sgml documentation
    source; only entries inside the ``datatype-pseudotypes-table`` are
    collected, in document order.

    Raises ValueError if the table, its end, or any entry is missing.
    """
    re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
    re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
    re_end = re.compile(r'\s*</table>')

    lines = iter(f)

    # Skip ahead until the pseudo-types table opens.
    for line in lines:
        if re_start.match(line) is not None:
            break
    else:
        raise ValueError('pseudo datatypes table not found')

    # Collect entries until the table closes.
    names = []
    for line in lines:
        entry = re_entry.match(line)
        if entry is not None:
            names.append(entry.group(1))

        if re_end.match(line) is not None:
            break
    else:
        raise ValueError('end of pseudo datatypes table not found')

    if not names:
        raise ValueError('pseudo datatypes not found')

    return names
def fetch(url):
    """Open *url* and return a file-like response object.

    The original code called ``urllib.urlopen``, which only exists on
    Python 2; fall back between the two stdlib layouts so this helper
    keeps working on Python 3 as well.
    """
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib import urlopen  # Python 2
    return urlopen(url)
def update_consts(filename, constname, content):
    """Rewrite the body of the list constant *constname* inside *filename*.

    The lines between ``constname = [`` and its closing ``]`` are replaced
    with the items of *content* (a sequence of strings), ``repr()``-quoted
    and packed into indented lines of at most ~75 characters.

    Raises ValueError when the constant cannot be located unambiguously.
    """
    # Use context managers so the file handle is closed even on error
    # (the original left both handles open).
    with open(filename) as f:
        lines = f.readlines()

    # Line to start/end inserting
    re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
    re_end = re.compile(r'^\s*\]\s*$')
    start = [n for n, l in enumerate(lines) if re_start.match(l)]
    if not start:
        raise ValueError("couldn't find line containing '%s = ['" % constname)
    if len(start) > 1:
        raise ValueError("too many lines containing '%s = ['" % constname)
    start = start[0] + 1

    end = [n for n, l in enumerate(lines) if n >= start and re_end.match(l)]
    if not end:
        raise ValueError("couldn't find line containing ']' after %s " % constname)
    end = end[0]

    # Pack the new content in lines not too long
    content = [repr(item) for item in content]
    new_lines = [[]]
    for item in content:
        # 2 chars of ", " per existing item, 4 for indent and trailing ",".
        if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
            new_lines.append([])
        new_lines[-1].append(item)

    lines[start:end] = ["    %s,\n" % ", ".join(items) for items in new_lines]

    with open(filename, 'w') as f:
        f.write(''.join(lines))
# Autogenerated: please edit them if you like wasting your time.

# PostgreSQL reserved and non-reserved keywords, scraped from the
# keywords.sgml appendix of the PostgreSQL documentation source by
# update_myself().
KEYWORDS = [
    'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
    'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
    'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
    'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
    'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
    'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
    'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
    'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
    'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
    'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
    'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
    'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
    'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
    'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
    'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
    'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
    'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
    'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
    'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
    'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
    'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
    'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
    'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
    'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
    'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
    'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
    'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
    'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
    'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
    'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
    'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
    'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
    'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
    'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
    'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
    'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
    'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
    'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
    'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
    'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
    'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
    'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
    'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
    'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
    'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
    'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
    'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
    'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
    'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
    'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
    'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
    'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
    'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
    'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
    'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
    'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
    'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
    'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
    'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
    'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
    'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
    'YES', 'ZONE',
]
# Built-in PostgreSQL type names (including abbreviations such as int4)
# scraped from datatype.sgml by update_myself().
DATATYPES = [
    'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
    'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
    'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
    'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
    'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
    'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
    'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
    'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
    'with time zone', 'without time zone', 'xml',
]

# Pseudo-types usable as argument/result types (pg_catalog pseudo-types).
PSEUDO_TYPES = [
    'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
    'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
    'trigger', 'void', 'opaque',
]

# Remove 'trigger' from types
PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))

# Extra keywords recognized only inside PL/pgSQL function bodies.
PLPGSQL_KEYWORDS = [
    'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
    'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
    'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
]

# Running this module directly re-scrapes the upstream docs and rewrites
# the constants above in place.
if __name__ == '__main__':
    update_myself()
557
wakatime/packages/pygments/lexers/_robotframeworklexer.py
Normal file
557
wakatime/packages/pygments/lexers/_robotframeworklexer.py
Normal file
|
@ -0,0 +1,557 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._robotframeworklexer
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexer for Robot Framework.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
# Copyright 2012 Nokia Siemens Networks Oyj
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import Lexer
|
||||
from pygments.token import Token
|
||||
|
||||
|
||||
# Shorthand aliases mapping Robot Framework data categories onto
# standard Pygments token types.
HEADING = Token.Generic.Heading
SETTING = Token.Keyword.Namespace
IMPORT = Token.Name.Namespace
TC_KW_NAME = Token.Generic.Subheading
KEYWORD = Token.Name.Function
ARGUMENT = Token.String
VARIABLE = Token.Name.Variable
COMMENT = Token.Comment
# Cell separators and variable-syntax punctuation share the same token.
SEPARATOR = Token.Punctuation
SYNTAX = Token.Punctuation
GHERKIN = Token.Generic.Emph
ERROR = Token.Error
def normalize(string, remove=''):
    """Lower-case *string* and strip all spaces plus every character
    listed in *remove* (e.g. the '*' decoration of table headers)."""
    result = string.lower()
    for unwanted in remove + ' ':
        result = result.replace(unwanted, '')
    return result
class RobotFrameworkLexer(Lexer):
    """
    For `Robot Framework <http://robotframework.org>`_ test data.

    Supports both space and pipe separated plain text formats.

    *New in Pygments 1.6.*
    """
    name = 'RobotFramework'
    aliases = ['RobotFramework', 'robotframework']
    filenames = ['*.txt', '*.robot']
    mimetypes = ['text/x-robotframework']

    def __init__(self, **options):
        # Robot Framework data is whitespace-aligned; force the tab size
        # and encoding regardless of what the caller passed.
        options['tabsize'] = 2
        options['encoding'] = 'UTF-8'
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Tokenize row by row; each resulting cell value may additionally
        # contain ${...}-style variables, handled by the VariableTokenizer.
        row_tokenizer = RowTokenizer()
        var_tokenizer = VariableTokenizer()
        index = 0
        for row in text.splitlines():
            for value, token in row_tokenizer.tokenize(row):
                for value, token in var_tokenizer.tokenize(value, token):
                    if value:
                        # NOTE: 'unicode' builtin — this vendored module
                        # targets Python 2.
                        yield index, token, unicode(value)
                    index += len(value)
class VariableTokenizer(object):
    """Split ``${...}``/``@{...}``/``%{...}`` variables out of cell values
    and assign them the VARIABLE token (recursively, since variable bases
    and indices may themselves contain variables)."""

    def tokenize(self, string, token):
        # VariableSplitter is defined later in this file; var.start < 0
        # means no variable was found. Comments and errors are passed
        # through unsplit.
        var = VariableSplitter(string, identifiers='$@%')
        if var.start < 0 or token in (COMMENT, ERROR):
            yield string, token
            return
        for value, token in self._tokenize(var, string, token):
            if value:
                yield value, token

    def _tokenize(self, var, string, orig_token):
        # Emit: text before the variable, the variable syntax and body,
        # the optional [index] part, then recurse on the remainder.
        before = string[:var.start]
        yield before, orig_token
        yield var.identifier + '{', SYNTAX
        for value, token in self.tokenize(var.base, VARIABLE):
            yield value, token
        yield '}', SYNTAX
        if var.index:
            yield '[', SYNTAX
            for value, token in self.tokenize(var.index, VARIABLE):
                yield value, token
            yield ']', SYNTAX
        for value, token in self.tokenize(string[var.end:], orig_token):
            yield value, token
class RowTokenizer(object):
    # Splits a data row into cells and dispatches each cell to the
    # tokenizer of the currently active table (Settings, Variables,
    # Test Cases or Keywords).

    def __init__(self):
        # Before any '*** Table ***' header everything is unknown.
        self._table = UnknownTable()
        self._splitter = RowSplitter()
        testcases = TestCaseTable()
        # The setting table forwards a 'Test Template' value to the
        # test case table via this callback.
        settings = SettingTable(testcases.set_default_template)
        variables = VariableTable()
        keywords = KeywordTable()
        # Accept singular, plural and alias forms of the header names.
        self._tables = {'settings': settings, 'setting': settings,
                        'metadata': settings,
                        'variables': variables, 'variable': variables,
                        'testcases': testcases, 'testcase': testcases,
                        'keywords': keywords, 'keyword': keywords,
                        'userkeywords': keywords, 'userkeyword': keywords}

    def tokenize(self, row):
        commented = False
        heading = False
        for index, value in enumerate(self._splitter.split(row)):
            # First value, and every second after that, is a separator.
            index, separator = divmod(index-1, 2)
            if value.startswith('#'):
                commented = True  # rest of the row is a comment
            elif index == 0 and value.startswith('*'):
                # A '*** Table ***' header switches the active table.
                self._table = self._start_table(value)
                heading = True
            for value, token in self._tokenize(value, index, commented,
                                               separator, heading):
                yield value, token
        self._table.end_row()

    def _start_table(self, header):
        name = normalize(header, remove='*')
        return self._tables.get(name, UnknownTable())

    def _tokenize(self, value, index, commented, separator, heading):
        if commented:
            yield value, COMMENT
        elif separator:
            yield value, SEPARATOR
        elif heading:
            yield value, HEADING
        else:
            for value, token in self._table.tokenize(value, index):
                yield value, token
|
||||
|
||||
|
||||
class RowSplitter(object):
|
||||
_space_splitter = re.compile('( {2,})')
|
||||
_pipe_splitter = re.compile('((?:^| +)\|(?: +|$))')
|
||||
|
||||
def split(self, row):
|
||||
splitter = (row.startswith('| ') and self._split_from_pipes
|
||||
or self._split_from_spaces)
|
||||
for value in splitter(row):
|
||||
yield value
|
||||
yield '\n'
|
||||
|
||||
def _split_from_spaces(self, row):
|
||||
yield '' # Start with (pseudo)separator similarly as with pipes
|
||||
for value in self._space_splitter.split(row):
|
||||
yield value
|
||||
|
||||
def _split_from_pipes(self, row):
|
||||
_, separator, rest = self._pipe_splitter.split(row, 1)
|
||||
yield separator
|
||||
while self._pipe_splitter.search(rest):
|
||||
cell, separator, rest = self._pipe_splitter.split(rest, 1)
|
||||
yield cell
|
||||
yield separator
|
||||
yield rest
|
||||
|
||||
|
||||
class Tokenizer(object):
    # Subclasses list one token type per cell position here; the last
    # entry is reused for all following cells on the row.
    _tokens = None

    def __init__(self):
        self._index = 0  # position of the next cell on the current row

    def tokenize(self, value):
        """Return the tokenization of one cell, advancing the position.

        The result is a list of (value, token) pairs; a plain token type
        returned by `_tokenize` is normalized into that form.
        """
        values_and_tokens = self._tokenize(value, self._index)
        self._index += 1
        if isinstance(values_and_tokens, type(Token)):
            values_and_tokens = [(value, values_and_tokens)]
        return values_and_tokens

    def _tokenize(self, value, index):
        # Clamp so cells past the declared positions reuse the last token.
        index = min(index, len(self._tokens) - 1)
        return self._tokens[index]

    def _is_assign(self, value):
        # True when the whole cell is a variable assignment target such
        # as '${var}' or '${var} =' (one trailing '=' is allowed).
        if value.endswith('='):
            value = value[:-1].strip()
        var = VariableSplitter(value, identifiers='$@')
        return var.start == 0 and var.end == len(value)
|
||||
|
||||
|
||||
class Comment(Tokenizer):
    # Every cell of a commented row is a comment.
    _tokens = (COMMENT,)
|
||||
|
||||
|
||||
class Setting(Tokenizer):
    # First cell is the setting name, the remaining cells its arguments.
    _tokens = (SETTING, ARGUMENT)
    # Settings whose arguments form a keyword call (normalized names).
    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
                         'suitepostcondition', 'testsetup', 'testprecondition',
                         'testteardown', 'testpostcondition', 'testtemplate')
    # Settings that import libraries, resources or variable files.
    _import_settings = ('library', 'resource', 'variables')
    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
                       'testtimeout')
    # Set when the first cell names a keyword/import setting; remaining
    # cells are then delegated to it.
    _custom_tokenizer = None

    def __init__(self, template_setter=None):
        """`template_setter`, if given, is called with the value of a
        'Test Template' setting so the test case table can switch to
        templated tokenization."""
        Tokenizer.__init__(self)
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 1 and self._template_setter:
            self._template_setter(value)
        if index == 0:
            normalized = normalize(value)
            if normalized in self._keyword_settings:
                self._custom_tokenizer = KeywordCall(support_assign=False)
            elif normalized in self._import_settings:
                self._custom_tokenizer = ImportSetting()
            elif normalized not in self._other_settings:
                return ERROR  # unknown setting name
        elif self._custom_tokenizer:
            return self._custom_tokenizer.tokenize(value)
        return Tokenizer._tokenize(self, value, index)
|
||||
|
||||
|
||||
class ImportSetting(Tokenizer):
    # 'Library' / 'Resource' / 'Variables' setting: name plus arguments.
    _tokens = (IMPORT, ARGUMENT)
|
||||
|
||||
|
||||
class TestCaseSetting(Setting):
    """Settings used inside a test case, written as '[Setting]'."""
    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
                         'template')
    _import_settings = ()  # imports are not valid inside test cases
    _other_settings = ('documentation', 'tags', 'timeout')

    def _tokenize(self, value, index):
        if index == 0:
            # Tokenize the name without the surrounding brackets and emit
            # the brackets themselves as syntax markers.  (Local renamed
            # from `type` to stop shadowing the builtin.)
            token = Setting._tokenize(self, value[1:-1], index)
            return [('[', SYNTAX), (value[1:-1], token), (']', SYNTAX)]
        return Setting._tokenize(self, value, index)
|
||||
|
||||
|
||||
class KeywordSetting(TestCaseSetting):
    # Settings available inside user keywords.
    _keyword_settings = ('teardown',)
    _other_settings = ('documentation', 'arguments', 'return', 'timeout')
|
||||
|
||||
|
||||
class Variable(Tokenizer):
    # A variable table row: name cell followed by value cells.
    _tokens = (SYNTAX, ARGUMENT)

    def _tokenize(self, value, index):
        # The first cell must look like '${name}' (or '${name} ='),
        # otherwise the row is in error.
        if index == 0 and not self._is_assign(value):
            return ERROR
        return Tokenizer._tokenize(self, value, index)
|
||||
|
||||
|
||||
class KeywordCall(Tokenizer):
    # Keyword name cell followed by argument cells.
    _tokens = (KEYWORD, ARGUMENT)

    def __init__(self, support_assign=True):
        """When `support_assign` is true, leading '${var} =' cells are
        consumed before the actual keyword name."""
        Tokenizer.__init__(self)
        self._keyword_found = not support_assign
        self._assigns = 0

    def _tokenize(self, value, index):
        if not self._keyword_found and self._is_assign(value):
            self._assigns += 1
            return SYNTAX  # VariableTokenizer tokenizes this later.
        if self._keyword_found:
            # Shift the index so arguments start right after the keyword
            # regardless of how many assignment cells preceded it.
            return Tokenizer._tokenize(self, value, index - self._assigns)
        self._keyword_found = True
        return GherkinTokenizer().tokenize(value, KEYWORD)
|
||||
|
||||
|
||||
class GherkinTokenizer(object):
    """Split an optional Gherkin prefix (Given/When/Then/And) off a
    keyword name so it can be highlighted separately from the name."""

    _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)

    def tokenize(self, value, token):
        match = self._gherkin_prefix.match(value)
        if match:
            split_at = match.end()
            return [(value[:split_at], GHERKIN), (value[split_at:], token)]
        return [(value, token)]
|
||||
|
||||
|
||||
class TemplatedKeywordCall(Tokenizer):
    # With a template in use every cell is a plain argument.
    _tokens = (ARGUMENT,)
|
||||
|
||||
|
||||
class ForLoop(Tokenizer):
    """Tokenizer for ':FOR' loop rows.

    Header cells are syntax markers until an 'IN' / 'IN RANGE' marker is
    seen; every cell after the marker is a loop argument.
    """

    def __init__(self):
        Tokenizer.__init__(self)
        self._in_arguments = False

    def _tokenize(self, value, index):
        if self._in_arguments:
            token = ARGUMENT
        else:
            token = SYNTAX
        if value.upper() in ('IN', 'IN RANGE'):
            self._in_arguments = True
        return token
|
||||
|
||||
|
||||
class _Table(object):
    # Tokenizer class used for the cells of this table type.
    _tokenizer_class = None

    def __init__(self, prev_tokenizer=None):
        self._tokenizer = self._tokenizer_class()
        # Tokenizer of the previous row; restored on '...' continuation.
        self._prev_tokenizer = prev_tokenizer
        self._prev_values_on_row = []

    def tokenize(self, value, index):
        if self._continues(value, index):
            # '...' continues the previous row with its tokenizer.
            self._tokenizer = self._prev_tokenizer
            yield value, SYNTAX
        else:
            for value_and_token in self._tokenize(value, index):
                yield value_and_token
        self._prev_values_on_row.append(value)

    def _continues(self, value, index):
        # A row continues when the cell is '...' and all cells before it
        # on this row were empty.
        return value == '...' and all(self._is_empty(t)
                                      for t in self._prev_values_on_row)

    def _is_empty(self, value):
        # '\' is Robot Framework's explicit empty-cell marker.
        return value in ('', '\\')

    def _tokenize(self, value, index):
        return self._tokenizer.tokenize(value)

    def end_row(self):
        # Reset state for the next row, remembering this row's tokenizer
        # so a following '...' row can continue it.
        self.__init__(prev_tokenizer=self._tokenizer)
|
||||
|
||||
|
||||
class UnknownTable(_Table):
    # Active before any recognized '*** Table ***' header: everything is
    # a comment and rows never continue.
    _tokenizer_class = Comment

    def _continues(self, value, index):
        return False
|
||||
|
||||
|
||||
class VariableTable(_Table):
    # '*** Variables ***' table.
    _tokenizer_class = Variable
|
||||
|
||||
|
||||
class SettingTable(_Table):
    # '*** Settings ***' table.
    _tokenizer_class = Setting

    def __init__(self, template_setter, prev_tokenizer=None):
        """`template_setter` records a 'Test Template' value with the
        test case table so it can switch to templated tokenization."""
        _Table.__init__(self, prev_tokenizer)
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 0 and normalize(value) == 'testtemplate':
            # Route the template name to the test case table.
            self._tokenizer = Setting(self._template_setter)
        return _Table._tokenize(self, value, index)

    def end_row(self):
        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
|
||||
|
||||
|
||||
class TestCaseTable(_Table):
    _setting_class = TestCaseSetting
    # Template state: None = not decided for the current test, False =
    # a '[Template]' setting was seen but its value not yet processed,
    # otherwise the boolean result of _is_template_set().
    _test_template = None
    _default_template = None

    @property
    def _tokenizer_class(self):
        # Templated tests take plain arguments instead of keyword calls.
        if self._test_template or (self._default_template and
                                   self._test_template is not False):
            return TemplatedKeywordCall
        return KeywordCall

    def _continues(self, value, index):
        # The test case name cell (index 0) can never be a continuation.
        return index > 0 and _Table._continues(self, value, index)

    def _tokenize(self, value, index):
        if index == 0:
            if value:
                # A new test case starts; its template is unknown again.
                self._test_template = None
            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
        if index == 1 and self._is_setting(value):
            if self._is_template(value):
                self._test_template = False
                self._tokenizer = self._setting_class(self.set_test_template)
            else:
                self._tokenizer = self._setting_class()
        if index == 1 and self._is_for_loop(value):
            self._tokenizer = ForLoop()
        if index == 1 and self._is_empty(value):
            return [(value, SYNTAX)]
        return _Table._tokenize(self, value, index)

    def _is_setting(self, value):
        return value.startswith('[') and value.endswith(']')

    def _is_template(self, value):
        return normalize(value) == '[template]'

    def _is_for_loop(self, value):
        return value.startswith(':') and normalize(value, remove=':') == 'for'

    def set_test_template(self, template):
        self._test_template = self._is_template_set(template)

    def set_default_template(self, template):
        self._default_template = self._is_template_set(template)

    def _is_template_set(self, template):
        # 'NONE', '${EMPTY}', '\' or an empty value disables templating.
        return normalize(template) not in ('', '\\', 'none', '${empty}')
|
||||
|
||||
|
||||
class KeywordTable(TestCaseTable):
    # '*** Keywords ***' table: like test cases but without templates.
    _tokenizer_class = KeywordCall
    _setting_class = KeywordSetting

    def _is_template(self, value):
        return False
|
||||
|
||||
|
||||
# Following code copied directly from Robot Framework 2.7.5.
|
||||
|
||||
class VariableSplitter:
    # Finds the first variable (e.g. '${var}' or '@{list}[0]') in a
    # string and records its position and parts.  Copied from Robot
    # Framework 2.7.5.

    def __init__(self, string, identifiers):
        """Locate the first variable in `string`.

        `identifiers` lists the accepted variable kind characters (e.g.
        '$@%').  After construction `start` is -1 when no variable was
        found; otherwise `start`/`end` delimit the variable in `string`,
        `identifier` holds its kind character, `base` the name inside
        the curly braces and `index` the optional list index.
        """
        self.identifier = None
        self.base = None
        self.index = None
        self.start = -1
        self.end = -1
        self._identifiers = identifiers
        self._may_have_internal_variables = False
        try:
            self._split(string)
        except ValueError:
            pass  # no complete variable found; `start` stays -1
        else:
            self._finalize()

    def get_replaced_base(self, variables):
        # Resolve variables nested inside the base name, if any.
        if self._may_have_internal_variables:
            return variables.replace_string(self.base)
        return self.base

    def _finalize(self):
        self.identifier = self._variable_chars[0]
        # _variable_chars is ['$', '{', ...name..., '}'].
        self.base = ''.join(self._variable_chars[2:-1])
        self.end = self.start + len(self._variable_chars)
        if self._has_list_variable_index():
            # Strip the surrounding '[' and ']'.
            self.index = ''.join(self._list_variable_index_chars[1:-1])
            self.end += len(self._list_variable_index_chars)

    def _has_list_variable_index(self):
        return self._list_variable_index_chars\
            and self._list_variable_index_chars[-1] == ']'

    def _split(self, string):
        # Character-by-character state machine: the current state is a
        # bound method and states raise StopIteration to signal that the
        # variable is complete (caught below; this is an ordinary method,
        # not a generator).
        start_index, max_index = self._find_variable(string)
        self.start = start_index
        self._open_curly = 1
        self._state = self._variable_state
        self._variable_chars = [string[start_index], '{']
        self._list_variable_index_chars = []
        self._string = string
        start_index += 2
        for index, char in enumerate(string[start_index:]):
            index += start_index  # Giving start to enumerate only in Py 2.6+
            try:
                self._state(char, index)
            except StopIteration:
                return
            if index == max_index and not self._scanning_list_variable_index():
                return

    def _scanning_list_variable_index(self):
        return self._state in [self._waiting_list_variable_index_state,
                               self._list_variable_index_state]

    def _find_variable(self, string):
        # Work backwards from the last unescaped '}'; raise ValueError
        # when the string holds no complete variable.
        max_end_index = string.rfind('}')
        if max_end_index == -1:
            raise ValueError('No variable end found')
        if self._is_escaped(string, max_end_index):
            return self._find_variable(string[:max_end_index])
        start_index = self._find_start_index(string, 1, max_end_index)
        if start_index == -1:
            raise ValueError('No variable start found')
        return start_index, max_end_index

    def _find_start_index(self, string, start, end):
        index = string.find('{', start, end) - 1
        if index < 0:
            return -1
        if self._start_index_is_ok(string, index):
            return index
        return self._find_start_index(string, index+2, end)

    def _start_index_is_ok(self, string, index):
        # '{' must be preceded by an unescaped identifier character.
        return string[index] in self._identifiers\
            and not self._is_escaped(string, index)

    def _is_escaped(self, string, index):
        # An odd number of preceding backslashes means escaped.
        escaped = False
        while index > 0 and string[index-1] == '\\':
            index -= 1
            escaped = not escaped
        return escaped

    def _variable_state(self, char, index):
        self._variable_chars.append(char)
        if char == '}' and not self._is_escaped(self._string, index):
            self._open_curly -= 1
            if self._open_curly == 0:
                if not self._is_list_variable():
                    raise StopIteration
                # '@' variables may be followed by a '[index]' part.
                self._state = self._waiting_list_variable_index_state
        elif char in self._identifiers:
            self._state = self._internal_variable_start_state

    def _is_list_variable(self):
        return self._variable_chars[0] == '@'

    def _internal_variable_start_state(self, char, index):
        self._state = self._variable_state
        if char == '{':
            # Nested variable such as '${outer${inner}}'.
            self._variable_chars.append(char)
            self._open_curly += 1
            self._may_have_internal_variables = True
        else:
            self._variable_state(char, index)

    def _waiting_list_variable_index_state(self, char, index):
        if char != '[':
            raise StopIteration
        self._list_variable_index_chars.append(char)
        self._state = self._list_variable_index_state

    def _list_variable_index_state(self, char, index):
        self._list_variable_index_chars.append(char)
        if char == ']':
            raise StopIteration
|
40
wakatime/packages/pygments/lexers/_scilab_builtins.py
Normal file
40
wakatime/packages/pygments/lexers/_scilab_builtins.py
Normal file
File diff suppressed because one or more lines are too long
1072
wakatime/packages/pygments/lexers/_sourcemodbuiltins.py
Normal file
1072
wakatime/packages/pygments/lexers/_sourcemodbuiltins.py
Normal file
File diff suppressed because it is too large
Load diff
360
wakatime/packages/pygments/lexers/_stan_builtins.py
Normal file
360
wakatime/packages/pygments/lexers/_stan_builtins.py
Normal file
|
@ -0,0 +1,360 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._stan_builtins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This file contains the names of functions for Stan used by
|
||||
``pygments.lexers.math.StanLexer``.
|
||||
|
||||
:copyright: Copyright 2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
# Keywords and container/data types of the Stan modeling language.
KEYWORDS = ['else', 'for', 'if', 'in', 'lower', 'lp__', 'print', 'upper',
            'while']

TYPES = ['corr_matrix', 'cov_matrix', 'int', 'matrix', 'ordered',
         'positive_ordered', 'real', 'row_vector', 'simplex',
         'unit_vector', 'vector']
|
||||
|
||||
# Built-in functions of the Stan modeling language.
FUNCTIONS = [
    'Phi', 'Phi_approx', 'abs', 'acos', 'acosh', 'asin', 'asinh', 'atan',
    'atan2', 'atanh', 'bernoulli_cdf', 'bernoulli_log',
    'bernoulli_logit_log', 'bernoulli_rng', 'beta_binomial_cdf',
    'beta_binomial_log', 'beta_binomial_rng', 'beta_cdf', 'beta_log',
    'beta_rng', 'binary_log_loss', 'binomial_cdf',
    'binomial_coefficient_log', 'binomial_log', 'binomial_logit_log',
    'binomial_rng', 'block', 'categorical_log', 'categorical_rng',
    'cauchy_cdf', 'cauchy_log', 'cauchy_rng', 'cbrt', 'ceil',
    'chi_square_log', 'chi_square_rng', 'cholesky_decompose', 'col',
    'cols', 'cos', 'cosh', 'crossprod', 'cumulative_sum', 'determinant',
    'diag_matrix', 'diag_post_multiply', 'diag_pre_multiply', 'diagonal',
    'dims', 'dirichlet_log', 'dirichlet_rng', 'dot_product', 'dot_self',
    'double_exponential_log', 'double_exponential_rng', 'e',
    'eigenvalues_sym', 'eigenvectors_sym', 'epsilon', 'erf', 'erfc',
    'exp', 'exp2', 'exp_mod_normal_cdf', 'exp_mod_normal_log',
    'exp_mod_normal_rng', 'expm1', 'exponential_cdf', 'exponential_log',
    'exponential_rng', 'fabs', 'fdim', 'floor', 'fma', 'fmax', 'fmin',
    'fmod', 'gamma_log', 'gamma_rng', 'gumbel_cdf', 'gumbel_log',
    'gumbel_rng', 'hypergeometric_log', 'hypergeometric_rng', 'hypot',
    'if_else', 'int_step', 'inv_chi_square_cdf', 'inv_chi_square_log',
    'inv_chi_square_rng', 'inv_cloglog', 'inv_gamma_cdf',
    'inv_gamma_log', 'inv_gamma_rng', 'inv_logit', 'inv_wishart_log',
    'inv_wishart_rng', 'inverse', 'lbeta', 'lgamma',
    'lkj_corr_cholesky_log', 'lkj_corr_cholesky_rng', 'lkj_corr_log',
    'lkj_corr_rng', 'lkj_cov_log', 'lmgamma', 'log', 'log10', 'log1m',
    'log1m_inv_logit', 'log1p', 'log1p_exp', 'log2', 'log_determinant',
    'log_inv_logit', 'log_sum_exp', 'logistic_cdf', 'logistic_log',
    'logistic_rng', 'logit', 'lognormal_cdf', 'lognormal_log',
    'lognormal_rng', 'max', 'mdivide_left_tri_low',
    'mdivide_right_tri_low', 'mean', 'min', 'multi_normal_cholesky_log',
    'multi_normal_log', 'multi_normal_prec_log', 'multi_normal_rng',
    'multi_student_t_log', 'multi_student_t_rng', 'multinomial_cdf',
    'multinomial_log', 'multinomial_rng', 'multiply_log',
    'multiply_lower_tri_self_transpose', 'neg_binomial_cdf',
    'neg_binomial_log', 'neg_binomial_rng', 'negative_epsilon',
    'negative_infinity', 'normal_cdf', 'normal_log', 'normal_rng',
    'not_a_number', 'ordered_logistic_log', 'ordered_logistic_rng',
    'owens_t', 'pareto_cdf', 'pareto_log', 'pareto_rng', 'pi',
    'poisson_cdf', 'poisson_log', 'poisson_log_log', 'poisson_rng',
    'positive_infinity', 'pow', 'prod', 'rep_array', 'rep_matrix',
    'rep_row_vector', 'rep_vector', 'round', 'row', 'rows',
    'scaled_inv_chi_square_cdf', 'scaled_inv_chi_square_log',
    'scaled_inv_chi_square_rng', 'sd', 'sin', 'singular_values', 'sinh',
    'size', 'skew_normal_cdf', 'skew_normal_log', 'skew_normal_rng',
    'softmax', 'sqrt', 'sqrt2', 'square', 'step', 'student_t_cdf',
    'student_t_log', 'student_t_rng', 'sum', 'tan', 'tanh', 'tcrossprod',
    'tgamma', 'trace', 'trunc', 'uniform_log', 'uniform_rng', 'variance',
    'weibull_cdf', 'weibull_log', 'weibull_rng', 'wishart_log',
    'wishart_rng']
|
||||
|
||||
# Distribution names usable in Stan sampling statements ('y ~ normal(...)').
DISTRIBUTIONS = [
    'bernoulli', 'bernoulli_logit', 'beta', 'beta_binomial', 'binomial',
    'binomial_coefficient', 'binomial_logit', 'categorical', 'cauchy',
    'chi_square', 'dirichlet', 'double_exponential', 'exp_mod_normal',
    'exponential', 'gamma', 'gumbel', 'hypergeometric', 'inv_chi_square',
    'inv_gamma', 'inv_wishart', 'lkj_corr', 'lkj_corr_cholesky',
    'lkj_cov', 'logistic', 'lognormal', 'multi_normal',
    'multi_normal_cholesky', 'multi_normal_prec', 'multi_student_t',
    'multinomial', 'multiply', 'neg_binomial', 'normal',
    'ordered_logistic', 'pareto', 'poisson', 'poisson_log',
    'scaled_inv_chi_square', 'skew_normal', 'student_t', 'uniform',
    'weibull', 'wishart']
|
||||
|
||||
# C++ reserved words that cannot be used as identifiers in Stan programs.
# The duplicate entries for 'false' and 'true' in the original list have
# been removed; membership semantics are unchanged.
RESERVED = [
    'alignas', 'alignof', 'and', 'and_eq', 'asm', 'auto', 'bitand',
    'bitor', 'bool', 'break', 'case', 'catch', 'char', 'char16_t',
    'char32_t', 'class', 'compl', 'const', 'const_cast', 'constexpr',
    'continue', 'decltype', 'default', 'delete', 'do', 'double',
    'dynamic_cast', 'enum', 'explicit', 'export', 'extern', 'false',
    'float', 'friend', 'goto', 'inline', 'int', 'long', 'mutable',
    'namespace', 'new', 'noexcept', 'not', 'not_eq', 'nullptr',
    'operator', 'or', 'or_eq', 'private', 'protected', 'public',
    'register', 'reinterpret_cast', 'repeat', 'return', 'short',
    'signed', 'sizeof', 'static', 'static_assert', 'static_cast',
    'struct', 'switch', 'template', 'then', 'this', 'thread_local',
    'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'union',
    'unsigned', 'until', 'using', 'virtual', 'void', 'volatile',
    'wchar_t', 'xor', 'xor_eq']
|
||||
|
13
wakatime/packages/pygments/lexers/_vimbuiltins.py
Normal file
13
wakatime/packages/pygments/lexers/_vimbuiltins.py
Normal file
File diff suppressed because one or more lines are too long
2290
wakatime/packages/pygments/lexers/agile.py
Normal file
2290
wakatime/packages/pygments/lexers/agile.py
Normal file
File diff suppressed because it is too large
Load diff
398
wakatime/packages/pygments/lexers/asm.py
Normal file
398
wakatime/packages/pygments/lexers/asm.py
Normal file
|
@ -0,0 +1,398 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.asm
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for assembly languages.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
|
||||
from pygments.lexers.compiled import DLexer, CppLexer, CLexer
|
||||
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
|
||||
Other, Keyword, Operator
|
||||
|
||||
__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
|
||||
'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'Ca65Lexer']
|
||||
|
||||
|
||||
class GasLexer(RegexLexer):
    """
    For Gas (AT&T) assembly code.
    """
    name = 'GAS'
    aliases = ['gas', 'asm']
    filenames = ['*.s', '*.S']
    mimetypes = ['text/x-gas']

    #: optional Comment or Whitespace
    string = r'"(\\"|[^"])*"'
    char = r'[a-zA-Z$._0-9@-]'
    identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
    number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'

    tokens = {
        'root': [
            include('whitespace'),
            # 'label:'
            (identifier + ':', Name.Label),
            # assembler directive, e.g. '.section'; arguments follow
            (r'\.' + identifier, Name.Attribute, 'directive-args'),
            # instruction prefixes
            (r'lock|rep(n?z)?|data\d+', Name.Attribute),
            # mnemonic; operands follow
            (identifier, Name.Function, 'instruction-args'),
            (r'[\r\n]+', Text)
        ],
        'directive-args': [
            (identifier, Name.Constant),
            (string, String),
            ('@' + identifier, Name.Attribute),
            (number, Number.Integer),
            (r'[\r\n]+', Text, '#pop'),

            (r'#.*?$', Comment, '#pop'),

            include('punctuation'),
            include('whitespace')
        ],
        'instruction-args': [
            # For objdump-disassembled code, shouldn't occur in
            # actual assembler input
            ('([a-z0-9]+)( )(<)('+identifier+')(>)',
                bygroups(Number.Hex, Text, Punctuation, Name.Constant,
                         Punctuation)),
            ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
                bygroups(Number.Hex, Text, Punctuation, Name.Constant,
                         Punctuation, Number.Integer, Punctuation)),

            # Address constants
            (identifier, Name.Constant),
            (number, Number.Integer),
            # Registers
            ('%' + identifier, Name.Variable),
            # Numeric constants
            ('$'+number, Number.Integer),
            (r"$'(.|\\')'", String.Char),
            (r'[\r\n]+', Text, '#pop'),
            (r'#.*?$', Comment, '#pop'),
            include('punctuation'),
            include('whitespace')
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'#.*?\n', Comment)
        ],
        'punctuation': [
            (r'[-*,.():]+', Punctuation)
        ]
    }

    def analyse_text(text):
        # No `self`: Pygments calls analyse_text as a static method.
        if re.match(r'^\.(text|data|section)', text, re.M):
            return True
        elif re.match(r'^\.\w+', text, re.M):
            return 0.1
|
||||
|
||||
|
||||
class ObjdumpLexer(RegexLexer):
    """
    For the output of 'objdump -dr'
    """
    name = 'objdump'
    aliases = ['objdump']
    filenames = ['*.objdump']
    mimetypes = ['text/x-objdump']

    # NOTE: intentionally loose -- matches any alphanumeric, not only
    # real hexadecimal digits.
    hex = r'[0-9A-Za-z]'

    tokens = {
        'root': [
            # File name & format:
            ('(.*?)(:)( +file format )(.*?)$',
                bygroups(Name.Label, Punctuation, Text, String)),
            # Section header
            ('(Disassembly of section )(.*?)(:)$',
                bygroups(Text, Name.Label, Punctuation)),
            # Function labels
            # (With offset)
            ('('+hex+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation, Number.Hex, Punctuation)),
            # (Without offset)
            ('('+hex+'+)( )(<)(.*?)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation)),
            # Code line with disassembled instructions
            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *\t)([a-zA-Z].*?)$',
                bygroups(Text, Name.Label, Text, Number.Hex, Text,
                         using(GasLexer))),
            # Code line with ascii
            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *)(.*?)$',
                bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
            # Continued code line, only raw opcodes without disassembled
            # instruction
            ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)$',
                bygroups(Text, Name.Label, Text, Number.Hex)),
            # Skipped a few bytes
            (r'\t\.\.\.$', Text),
            # Relocation line
            # (With offset)
            (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x' + hex + '+)$',
                bygroups(Text, Name.Label, Text, Name.Property, Text,
                         Name.Constant, Punctuation, Number.Hex)),
            # (Without offset)
            (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)$',
                bygroups(Text, Name.Label, Text, Name.Property, Text,
                         Name.Constant)),
            # Anything unrecognized passes through untouched.
            (r'[^\n]+\n', Other)
        ]
    }
|
||||
|
||||
|
||||
class DObjdumpLexer(DelegatingLexer):
    """
    For the output of 'objdump -Sr on compiled D files'
    """
    name = 'd-objdump'
    aliases = ['d-objdump']
    filenames = ['*.d-objdump']
    mimetypes = ['text/x-d-objdump']

    def __init__(self, **options):
        # Interleaved D source is lexed with DLexer, everything else
        # with ObjdumpLexer.
        super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options)
|
||||
|
||||
|
||||
class CppObjdumpLexer(DelegatingLexer):
    """
    For the output of 'objdump -Sr on compiled C++ files'
    """
    name = 'cpp-objdump'
    # NOTE: 'c++-objdumb' is a historical (misspelled) alias kept for
    # backwards compatibility.
    aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
    filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
    mimetypes = ['text/x-cpp-objdump']

    def __init__(self, **options):
        # Interleaved C++ source is lexed with CppLexer, everything else
        # with ObjdumpLexer.
        super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options)
|
||||
|
||||
|
||||
class CObjdumpLexer(DelegatingLexer):
    """
    For the output of 'objdump -Sr on compiled C files'
    """
    name = 'c-objdump'
    aliases = ['c-objdump']
    filenames = ['*.c-objdump']
    mimetypes = ['text/x-c-objdump']

    def __init__(self, **options):
        # Interleaved C source is lexed with CLexer, everything else
        # with ObjdumpLexer.
        super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
|
||||
|
||||
|
||||
class LlvmLexer(RegexLexer):
    """
    For LLVM assembly code.
    """
    name = 'LLVM'
    aliases = ['llvm']
    filenames = ['*.ll']
    mimetypes = ['text/x-llvm']

    #: optional Comment or Whitespace
    string = r'"[^"]*?"'
    identifier = r'([-a-zA-Z$._][-a-zA-Z$._0-9]*|' + string + ')'

    tokens = {
        'root': [
            include('whitespace'),

            # Before keywords, because keywords are valid label names :(...
            # (raw string: '\s' in a plain literal is an invalid escape)
            (identifier + r'\s*:', Name.Label),

            include('keyword'),

            (r'%' + identifier, Name.Variable),#Name.Identifier.Local),
            (r'@' + identifier, Name.Variable.Global),#Name.Identifier.Global),
            (r'%\d+', Name.Variable.Anonymous),#Name.Identifier.Anonymous),
            (r'@\d+', Name.Variable.Global),#Name.Identifier.Anonymous),
            (r'!' + identifier, Name.Variable),
            (r'!\d+', Name.Variable.Anonymous),
            (r'c?' + string, String),

            (r'0[xX][a-fA-F0-9]+', Number),
            (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),

            (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
        ],
        'whitespace': [
            (r'(\n|\s)+', Text),
            (r';.*?\n', Comment)
        ],
        'keyword': [
            # Regular keywords
            (r'(begin|end'
             r'|true|false'
             r'|declare|define'
             r'|global|constant'

             r'|private|linker_private|internal|available_externally|linkonce'
             r'|linkonce_odr|weak|weak_odr|appending|dllimport|dllexport'
             r'|common|default|hidden|protected|extern_weak|external'
             r'|thread_local|zeroinitializer|undef|null|to|tail|target|triple'
             r'|datalayout|volatile|nuw|nsw|nnan|ninf|nsz|arcp|fast|exact|inbounds'
             r'|align|addrspace|section|alias|module|asm|sideeffect|gc|dbg'

             r'|ccc|fastcc|coldcc|x86_stdcallcc|x86_fastcallcc|arm_apcscc'
             r'|arm_aapcscc|arm_aapcs_vfpcc|ptx_device|ptx_kernel'

             r'|cc|c'

             r'|signext|zeroext|inreg|sret|nounwind|noreturn|noalias|nocapture'
             r'|byval|nest|readnone|readonly'

             r'|inlinehint|noinline|alwaysinline|optsize|ssp|sspreq|noredzone'
             r'|noimplicitfloat|naked'

             r'|type|opaque'

             r'|eq|ne|slt|sgt|sle'
             r'|sge|ult|ugt|ule|uge'
             r'|oeq|one|olt|ogt|ole'
             r'|oge|ord|uno|ueq|une'
             r'|x'

             # instructions
             r'|add|fadd|sub|fsub|mul|fmul|udiv|sdiv|fdiv|urem|srem|frem|shl'
             r'|lshr|ashr|and|or|xor|icmp|fcmp'

             # BUG FIX: the original omitted the '|' before 'fptosi',
             # fusing 'fptoui' and 'fptosi' into the bogus alternative
             # 'fptouifptosi' so neither conversion was highlighted.
             r'|phi|call|trunc|zext|sext|fptrunc|fpext|uitofp|sitofp|fptoui'
             r'|fptosi|inttoptr|ptrtoint|bitcast|select|va_arg|ret|br|switch'
             r'|invoke|unwind|unreachable'

             r'|malloc|alloca|free|load|store|getelementptr'

             r'|extractelement|insertelement|shufflevector|getresult'
             r'|extractvalue|insertvalue'

             r')\b', Keyword),

            # Types
            (r'void|float|double|x86_fp80|fp128|ppc_fp128|label|metadata',
             Keyword.Type),

            # Integer types
            (r'i[1-9]\d*', Keyword)
        ]
    }
|
||||
|
||||
|
||||
class NasmLexer(RegexLexer):
    """
    For Nasm (Intel) assembly code.
    """
    name = 'NASM'
    aliases = ['nasm']
    filenames = ['*.asm', '*.ASM']
    mimetypes = ['text/x-nasm']

    # Building blocks reused by the token rules below.
    identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?#@~]*'
    # Hex literals: 0x… prefix, $0… prefix, or trailing-'h' form.
    # NOTE(review): the '$' in the second alternative is unescaped, so the
    # regex engine treats it as an end anchor rather than NASM's literal
    # '$0FF' prefix — confirm whether that branch can ever match as intended.
    hexn = r'(?:0[xX][0-9a-fA-F]+|$0[0-9a-fA-F]*|[0-9]+[0-9a-fA-F]*h)'
    octn = r'[0-7]+q'   # octal, trailing 'q'
    binn = r'[01]+b'    # binary, trailing 'b'
    decn = r'[0-9]+'
    floatn = decn + r'\.e?' + decn
    # Double-, single- and backquoted strings; newlines terminate them.
    string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
    declkw = r'(?:res|d)[bwdqt]|times'  # data declaration / reservation pseudo-ops
    register = (r'r[0-9][0-5]?[bwd]|'
                r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
                r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
    wordop = r'seg|wrt|strict'
    type = r'byte|[dq]?word'
    directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
                  r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
                  r'EXPORT|LIBRARY|MODULE')

    flags = re.IGNORECASE | re.MULTILINE
    tokens = {
        'root': [
            include('whitespace'),
            # '%…' at line start: NASM preprocessor directive.
            (r'^\s*%', Comment.Preproc, 'preproc'),
            (identifier + ':', Name.Label),
            # "<name> equ <value>" constant definition; operands follow.
            (r'(%s)(\s+)(equ)' % identifier,
             bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
             'instruction-args'),
            (directives, Keyword, 'instruction-args'),
            (declkw, Keyword.Declaration, 'instruction-args'),
            # Any other leading identifier is treated as a mnemonic.
            (identifier, Name.Function, 'instruction-args'),
            (r'[\r\n]+', Text)
        ],
        'instruction-args': [
            (string, String),
            (hexn, Number.Hex),
            (octn, Number.Oct),
            (binn, Number),
            (floatn, Number.Float),
            (decn, Number.Integer),
            include('punctuation'),
            (register, Name.Builtin),
            (identifier, Name.Variable),
            # End of the physical line ends the operand list.
            (r'[\r\n]+', Text, '#pop'),
            include('whitespace')
        ],
        'preproc': [
            (r'[^;\n]+', Comment.Preproc),
            (r';.*?\n', Comment.Single, '#pop'),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'whitespace': [
            (r'\n', Text),
            (r'[ \t]+', Text),
            (r';.*', Comment.Single)  # ';' starts a comment to end of line
        ],
        'punctuation': [
            (r'[,():\[\]]+', Punctuation),
            (r'[&|^<>+*/%~-]+', Operator),
            (r'[$]+', Keyword.Constant),  # '$' / '$$' location counters
            (wordop, Operator.Word),
            (type, Keyword.Type)
        ],
    }
class Ca65Lexer(RegexLexer):
    """
    For ca65 assembler sources.

    *New in Pygments 1.6.*
    """
    name = 'ca65'
    aliases = ['ca65']
    filenames = ['*.s']

    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r';.*', Comment.Single),   # ';' comments run to end of line
            (r'\s+', Text),
            (r'[a-z_.@$][\w.@$]*:', Name.Label),
            # 6502 mnemonics.
            (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
             r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
             r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
             r'|bit)\b', Keyword),
            (r'\.[a-z0-9_]+', Keyword.Pseudo),  # ca65 control commands (.org, .byte, …)
            (r'[-+~*/^&|!<>=]', Operator),
            (r'"[^"\n]*.', String),
            (r"'[^'\n]*.", String.Char),
            (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
            (r'\d+|%[01]+', Number.Integer),
            (r'[#,.:()=]', Punctuation),
            (r'[a-z_.@$][\w.@$]*', Name),
        ]
    }

    def analyse_text(self, text):
        # ca65 line comments start with ";", so a line opening with a
        # semicolon is a strong hint this is ca65 source.  (An earlier
        # comment here mentioned GAS "#" comments, which does not match
        # the regex below.)
        if re.match(r'^\s*;', text, re.MULTILINE):
            return 0.9
wakatime/packages/pygments/lexers/compiled.py — new file, 3723 lines (diff suppressed because it is too large)
wakatime/packages/pygments/lexers/dalvik.py — new file, 104 lines
@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.dalvik
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Pygments lexers for Dalvik VM-related languages.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from pygments.lexer import RegexLexer, include, bygroups
|
||||
from pygments.token import Keyword, Text, Comment, Name, String, Number, \
|
||||
Punctuation
|
||||
|
||||
__all__ = ['SmaliLexer']
|
||||
|
||||
|
||||
class SmaliLexer(RegexLexer):
    """
    For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
    code.

    *New in Pygments 1.6.*
    """
    name = 'Smali'
    aliases = ['smali']
    filenames = ['*.smali']
    mimetypes = ['text/smali']

    tokens = {
        # 'root' only delegates; the order of the includes matters, since
        # the first matching sub-state wins (labels before fields, fields
        # before methods, and so on).
        'root': [
            include('comment'),
            include('label'),
            include('field'),
            include('method'),
            include('class'),
            include('directive'),
            include('access-modifier'),
            include('instruction'),
            include('literal'),
            include('punctuation'),
            include('type'),
            include('whitespace')
        ],
        'directive': [
            # Assembler directives at the start of a (possibly indented) line.
            (r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
             r'enum|method|registers|locals|array-data|packed-switch|'
             r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
             r'epilogue|source)', Keyword),
            (r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
             'packed-switch|sparse-switch|parameter|local)', Keyword),
            (r'^[ \t]*\.restart local', Keyword),
        ],
        'access-modifier': [
            (r'(public|private|protected|static|final|synchronized|bridge|'
             r'varargs|native|abstract|strictfp|synthetic|constructor|'
             r'declared-synchronized|interface|enum|annotation|volatile|'
             r'transient)', Keyword),
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
        ],
        'instruction': [
            (r'\b[vp]\d+\b', Name.Builtin), # registers
            (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
        ],
        'literal': [
            (r'".*"', String),
            (r'0x[0-9A-Fa-f]+t?', Number.Hex),
            (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'[0-9]+L?', Number.Integer),
        ],
        'field': [
            # "name:" — field name followed by its type descriptor.
            (r'(\$?\b)([A-Za-z0-9_$]*)(:)',
             bygroups(Punctuation, Name.Variable, Punctuation)),
        ],
        'method': [
            (r'<(?:cl)?init>', Name.Function), # constructor
            (r'(\$?\b)([A-Za-z0-9_$]*)(\()',
             bygroups(Punctuation, Name.Function, Punctuation)),
        ],
        'label': [
            (r':[A-Za-z0-9_]+', Name.Label),
        ],
        'class': [
            # class names in the form Lcom/namespace/ClassName;
            # I only want to color the ClassName part, so the namespace part is
            # treated as 'Text'
            (r'(L)((?:[A-Za-z0-9_$]+/)*)([A-Za-z0-9_$]+)(;)',
             bygroups(Keyword.Type, Text, Name.Class, Text)),
        ],
        'punctuation': [
            (r'->', Punctuation),
            (r'[{},\(\):=\.-]', Punctuation),
        ],
        'type': [
            # Primitive type descriptors (Z=boolean, I=int, …, [=array).
            (r'[ZBSCIJFDV\[]+', Keyword.Type),
        ],
        'comment': [
            (r'#.*?\n', Comment),
        ],
    }
wakatime/packages/pygments/lexers/dotnet.py — new file, 671 lines
@@ -0,0 +1,671 @@
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.dotnet
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for .net languages.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
|
||||
using, this
|
||||
from pygments.token import Punctuation, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
|
||||
from pygments.util import get_choice_opt
|
||||
from pygments import unistring as uni
|
||||
|
||||
from pygments.lexers.web import XmlLexer
|
||||
|
||||
__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
|
||||
'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
|
||||
|
||||
|
||||
class CSharpLexer(RegexLexer):
    """
    For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
    source code.

    Additional options accepted:

    `unicodelevel`
      Determines which Unicode characters this lexer allows for identifiers.
      The possible values are:

      * ``none`` -- only the ASCII letters and numbers are allowed. This
        is the fastest selection.
      * ``basic`` -- all Unicode characters from the specification except
        category ``Lo`` are allowed.
      * ``full`` -- all Unicode characters as specified in the C# specs
        are allowed. Note that this means a considerable slowdown since the
        ``Lo`` category has more than 40,000 characters in it!

      The default value is ``basic``.

    *New in Pygments 0.8.*
    """

    name = 'C#'
    aliases = ['csharp', 'c#']
    filenames = ['*.cs']
    mimetypes = ['text/x-csharp'] # inferred

    flags = re.MULTILINE | re.DOTALL | re.UNICODE

    # for the range of allowed unicode characters in identifiers,
    # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf

    # One identifier regex per `unicodelevel` option value.
    levels = {
        'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
        'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
                  '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
                  uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
        'full': ('@?(?:_|[^' +
                 uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
                 + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
                                        'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
    }

    # `tokens` maps variant name -> token definition; `token_variants`
    # tells the RegexLexer metaclass to process each variant separately.
    tokens = {}
    token_variants = True

    # Build one full token definition per identifier-unicode level.
    for levelname, cs_ident in levels.items():
        tokens[levelname] = {
            'root': [
                # method names
                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
                 r'(' + cs_ident + ')' # method name
                 r'(\s*)(\()', # signature start
                 bygroups(using(this), Name.Function, Text, Punctuation)),
                (r'^\s*\[.*?\]', Name.Attribute),
                (r'[^\S\n]+', Text),
                (r'\\\n', Text), # line continuation
                (r'//.*?\n', Comment.Single),
                (r'/[*].*?[*]/', Comment.Multiline),
                (r'\n', Text),
                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
                (r'[{}]', Punctuation),
                # @"..." verbatim string (doubled quotes escape).
                (r'@"(""|[^"])*"', String),
                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
                (r"'\\.'|'[^\\]'", String.Char),
                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
                 r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
                (r'#[ \t]*(if|endif|else|elif|define|undef|'
                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
                 Comment.Preproc),
                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
                 Keyword)),
                (r'(abstract|as|async|await|base|break|case|catch|'
                 r'checked|const|continue|default|delegate|'
                 r'do|else|enum|event|explicit|extern|false|finally|'
                 r'fixed|for|foreach|goto|if|implicit|in|interface|'
                 r'internal|is|lock|new|null|operator|'
                 r'out|override|params|private|protected|public|readonly|'
                 r'ref|return|sealed|sizeof|stackalloc|static|'
                 r'switch|this|throw|true|try|typeof|'
                 r'unchecked|unsafe|virtual|void|while|'
                 r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
                 r'descending|from|group|into|orderby|select|where|'
                 r'join|equals)\b', Keyword),
                (r'(global)(::)', bygroups(Keyword, Punctuation)),
                (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
                 r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
                (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
                (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
                (cs_ident, Name),
            ],
            'class': [
                (cs_ident, Name.Class, '#pop')
            ],
            'namespace': [
                (r'(?=\()', Text, '#pop'), # using (resource)
                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
            ]
        }

    def __init__(self, **options):
        """Select the token variant matching the `unicodelevel` option."""
        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
        # _all_tokens / process_tokendef come from the RegexLexer metaclass;
        # _all_tokens caches the processed definitions per variant name.
        if level not in self._all_tokens:
            # compile the regexes now
            self._tokens = self.__class__.process_tokendef(level)
        else:
            self._tokens = self._all_tokens[level]

        RegexLexer.__init__(self, **options)
class NemerleLexer(RegexLexer):
    """
    For `Nemerle <http://nemerle.org>`_ source code.

    Additional options accepted:

    `unicodelevel`
      Determines which Unicode characters this lexer allows for identifiers.
      The possible values are:

      * ``none`` -- only the ASCII letters and numbers are allowed. This
        is the fastest selection.
      * ``basic`` -- all Unicode characters from the specification except
        category ``Lo`` are allowed.
      * ``full`` -- all Unicode characters as specified in the C# specs
        are allowed. Note that this means a considerable slowdown since the
        ``Lo`` category has more than 40,000 characters in it!

      The default value is ``basic``.

    *New in Pygments 1.5.*
    """

    name = 'Nemerle'
    aliases = ['nemerle']
    filenames = ['*.n']
    mimetypes = ['text/x-nemerle'] # inferred

    flags = re.MULTILINE | re.DOTALL | re.UNICODE

    # for the range of allowed unicode characters in identifiers, see
    # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf

    # One identifier regex per `unicodelevel` option value (same scheme as
    # CSharpLexer above).
    levels = dict(
        none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
        basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
                 '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
                 uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
        full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
                                            'Nl') + '])'
                + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
                                       'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
    )

    # `tokens` maps variant name -> token definition; `token_variants`
    # tells the RegexLexer metaclass to process each variant separately.
    tokens = {}
    token_variants = True

    for levelname, cs_ident in levels.items():
        tokens[levelname] = {
            'root': [
                # method names
                (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
                 r'(' + cs_ident + ')' # method name
                 r'(\s*)(\()', # signature start
                 bygroups(using(this), Name.Function, Text, Punctuation)),
                (r'^\s*\[.*?\]', Name.Attribute),
                (r'[^\S\n]+', Text),
                (r'\\\n', Text), # line continuation
                (r'//.*?\n', Comment.Single),
                (r'/[*].*?[*]/', Comment.Multiline),
                (r'\n', Text),
                # $"..." / $<#...#> interpolated ("splice") strings and
                # <#...#> recursive strings.
                (r'\$\s*"', String, 'splice-string'),
                (r'\$\s*<#', String, 'splice-string2'),
                (r'<#', String, 'recursive-string'),

                (r'(<\[)\s*(' + cs_ident + ':)?', Keyword),
                (r'\]\>', Keyword),

                # quasiquotation only
                (r'\$' + cs_ident, Name),
                (r'(\$)(\()', bygroups(Name, Punctuation),
                 'splice-string-content'),

                (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
                (r'[{}]', Punctuation),
                (r'@"(""|[^"])*"', String),
                (r'"(\\\\|\\"|[^"\n])*["\n]', String),
                (r"'\\.'|'[^\\]'", String.Char),
                (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
                (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
                (r'#[ \t]*(if|endif|else|elif|define|undef|'
                 r'line|error|warning|region|endregion|pragma)\b.*?\n',
                 Comment.Preproc),
                (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
                 Keyword)),
                (r'(abstract|and|as|base|catch|def|delegate|'
                 r'enum|event|extern|false|finally|'
                 r'fun|implements|interface|internal|'
                 r'is|macro|match|matches|module|mutable|new|'
                 r'null|out|override|params|partial|private|'
                 r'protected|public|ref|sealed|static|'
                 r'syntax|this|throw|true|try|type|typeof|'
                 r'virtual|volatile|when|where|with|'
                 r'assert|assert2|async|break|checked|continue|do|else|'
                 r'ensures|for|foreach|if|late|lock|new|nolate|'
                 r'otherwise|regexp|repeat|requires|return|surroundwith|'
                 r'unchecked|unless|using|while|yield)\b', Keyword),
                (r'(global)(::)', bygroups(Keyword, Punctuation)),
                (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
                 r'short|string|uint|ulong|ushort|void|array|list)\b\??',
                 Keyword.Type),
                (r'(:>?)\s*(' + cs_ident + r'\??)',
                 bygroups(Punctuation, Keyword.Type)),
                (r'(class|struct|variant|module)(\s+)',
                 bygroups(Keyword, Text), 'class'),
                (r'(namespace|using)(\s+)', bygroups(Keyword, Text),
                 'namespace'),
                (cs_ident, Name),
            ],
            'class': [
                (cs_ident, Name.Class, '#pop')
            ],
            'namespace': [
                (r'(?=\()', Text, '#pop'), # using (resource)
                ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
            ],
            # $"..." — quoted string with $ident / $(...) splices.
            'splice-string': [
                (r'[^"$]', String),
                (r'\$' + cs_ident, Name),
                (r'(\$)(\()', bygroups(Name, Punctuation),
                 'splice-string-content'),
                (r'\\"', String),
                (r'"', String, '#pop')
            ],
            # $<#...#> — recursive string form of the above.
            'splice-string2': [
                (r'[^#<>$]', String),
                (r'\$' + cs_ident, Name),
                (r'(\$)(\()', bygroups(Name, Punctuation),
                 'splice-string-content'),
                (r'<#', String, '#push'),
                (r'#>', String, '#pop')
            ],
            # <#...#> — may nest, hence the #push.
            'recursive-string': [
                (r'[^#<>]', String),
                (r'<#', String, '#push'),
                (r'#>', String, '#pop')
            ],
            # Inside $( ... ): parentheses may nest.
            'splice-string-content': [
                (r'if|match', Keyword),
                (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
                (cs_ident, Name),
                (r'\d+', Number),
                (r'\(', Punctuation, '#push'),
                (r'\)', Punctuation, '#pop')
            ]
        }

    def __init__(self, **options):
        """Select the token variant matching the `unicodelevel` option."""
        level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
                               'basic')
        # Same caching scheme as CSharpLexer: _all_tokens is maintained by
        # the RegexLexer metaclass, keyed by variant name.
        if level not in self._all_tokens:
            # compile the regexes now
            self._tokens = self.__class__.process_tokendef(level)
        else:
            self._tokens = self._all_tokens[level]

        RegexLexer.__init__(self, **options)
class BooLexer(RegexLexer):
    """
    For `Boo <http://boo.codehaus.org/>`_ source code.
    """

    name = 'Boo'
    aliases = ['boo']
    filenames = ['*.boo']
    mimetypes = ['text/x-boo']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(#|//).*$', Comment.Single),
            (r'/[*]', Comment.Multiline, 'comment'),
            (r'[]{}:(),.;[]', Punctuation),
            (r'\\\n', Text),   # explicit line continuation
            (r'\\', Text),
            (r'(in|is|and|or|not)\b', Operator.Word),
            # /regex/ and @/regex/ literals.
            (r'/(\\\\|\\/|[^/\s])/', String.Regex),
            (r'@/(\\\\|\\/|[^/])*/', String.Regex),
            (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
            (r'(as|abstract|callable|constructor|destructor|do|import|'
             r'enum|event|final|get|interface|internal|of|override|'
             r'partial|private|protected|public|return|set|static|'
             r'struct|transient|virtual|yield|super|and|break|cast|'
             r'continue|elif|else|ensure|except|for|given|goto|if|in|'
             r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
             r'while|from|as)\b', Keyword),
            # Anonymous "def (args)" closures keep 'def' a plain keyword.
            (r'def(?=\s+\(.*?\))', Keyword),
            (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
            (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
            # Builtins only when not preceded by '.' (i.e. not attributes).
            (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
             r'assert|checked|enumerate|filter|getter|len|lock|map|'
             r'matrix|max|min|normalArrayIndexing|print|property|range|'
             r'rawArrayIndexing|required|typeof|unchecked|using|'
             r'yieldAll|zip)\b', Name.Builtin),
            (r'"""(\\\\|\\"|.*?)"""', String.Double),
            (r'"(\\\\|\\"|[^"]*?)"', String.Double),
            (r"'(\\\\|\\'|[^']*?)'", String.Single),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
            (r'[0-9][0-9\.]*(ms?|d|h|s)', Number),  # timespan literals
            (r'0\d+', Number.Oct),
            (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'\d+L', Number.Integer.Long),
            (r'\d+', Number.Integer),
        ],
        # /* ... */ comments may nest, hence the #push.
        'comment': [
            ('/[*]', Comment.Multiline, '#push'),
            ('[*]/', Comment.Multiline, '#pop'),
            ('[^/*]', Comment.Multiline),
            ('[*/]', Comment.Multiline)
        ],
        'funcname': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
        ],
        'classname': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'namespace': [
            ('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
        ]
    }
class VbNetLexer(RegexLexer):
    """
    For
    `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
    source code.
    """

    name = 'VB.net'
    aliases = ['vb.net', 'vbnet']
    filenames = ['*.vb', '*.bas']
    mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)

    flags = re.MULTILINE | re.IGNORECASE
    tokens = {
        'root': [
            (r'^\s*<.*?>', Name.Attribute),
            (r'\s+', Text),
            (r'\n', Text),
            # REM and apostrophe comments run to end of line.
            (r'rem\b.*?\n', Comment),
            (r"'.*?\n", Comment),
            (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
             r'#ExternalSource.*?\n|#End\s+ExternalSource|'
             r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
             Comment.Preproc),
            (r'[\(\){}!#,.:]', Punctuation),
            (r'Option\s+(Strict|Explicit|Compare)\s+'
             r'(On|Off|Binary|Text)', Keyword.Declaration),
            # Keywords; the (?<!\.) lookbehind avoids member accesses
            # like obj.If being taken for keywords.
            (r'(?<!\.)(AddHandler|Alias|'
             r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
             r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
             r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
             r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
             r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
             r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
             r'Implements|Inherits|Interface|'
             r'Let|Lib|Loop|Me|MustInherit|'
             r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
             r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
             r'Operator|Option|Optional|Overloads|Overridable|'
             r'Overrides|ParamArray|Partial|Private|Protected|'
             r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
             r'Return|Select|Set|Shadows|Shared|Single|'
             r'Static|Step|Stop|SyncLock|Then|'
             r'Throw|To|True|Try|TryCast|Wend|'
             r'Using|When|While|Widening|With|WithEvents|'
             r'WriteOnly)\b', Keyword),
            (r'(?<!\.)End\b', Keyword, 'end'),
            (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
            (r'(?<!\.)(Function|Sub|Property)(\s+)',
             bygroups(Keyword, Text), 'funcname'),
            (r'(?<!\.)(Class|Structure|Enum)(\s+)',
             bygroups(Keyword, Text), 'classname'),
            (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
             bygroups(Keyword, Text), 'namespace'),
            (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
             r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
             r'UShort)\b', Keyword.Type),
            (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
             r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
            (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
             r'<=|>=|<>|[-&*/\\^+=<>]',
             Operator),
            ('"', String, 'string'),
            ('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
            ('#.*?#', Literal.Date),  # #...# date literal
            (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
            (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
            (r'_\n', Text), # Line continuation
        ],
        'string': [
            (r'""', String),            # doubled quote = escaped quote
            (r'"C?', String, '#pop'),   # closing quote, optional char suffix
            (r'[^"]+', String),
        ],
        'dim': [
            (r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
            (r'', Text, '#pop'), # any other syntax
        ],
        'funcname': [
            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
        ],
        'classname': [
            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
        ],
        'namespace': [
            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
        ],
        # After "End": pick up the construct being closed, if any.
        'end': [
            (r'\s+', Text),
            (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
             Keyword, '#pop'),
            (r'', Text, '#pop'),
        ]
    }
class GenericAspxLexer(RegexLexer):
    """
    Lexer for ASP.NET pages.

    Splits a page into markup (handled by XmlLexer) and embedded code
    regions, which it emits as ``Other`` so a DelegatingLexer can hand
    them to a language-specific lexer.
    """

    name = 'aspx-gen'
    filenames = []
    mimetypes = []

    flags = re.DOTALL

    tokens = {
        'root': [
            # <% ... %> code/expression blocks: contents tokenized as Other.
            (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
            # <script ...>...</script>: tags as XML, body as Other.
            (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
                                                        Other,
                                                        using(XmlLexer))),
            # Everything else is plain markup.
            (r'(.+?)(?=<)', using(XmlLexer)),
            (r'.+', using(XmlLexer)),
        ],
    }
#TODO support multiple languages within the same source file
class CSharpAspxLexer(DelegatingLexer):
    """
    Lexer for highlighting C# within ASP.NET pages.
    """

    name = 'aspx-cs'
    aliases = ['aspx-cs']
    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
    mimetypes = []

    def __init__(self, **options):
        # GenericAspxLexer handles the markup; the Other regions it emits
        # are re-lexed by CSharpLexer.
        super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
                                              **options)

    def analyse_text(text):
        # Defined without `self` on purpose: the pygments lexer metaclass
        # wraps analyse_text into a static analyser taking only the text.
        # Returns a confidence in [0, 1] (None/implicit 0 when no match).
        if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
            return 0.2
        elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
            return 0.15
class VbNetAspxLexer(DelegatingLexer):
    """
    Lexer for highlighting Visual Basic.net within ASP.NET pages.
    """

    name = 'aspx-vb'
    aliases = ['aspx-vb']
    filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
    mimetypes = []

    def __init__(self, **options):
        # GenericAspxLexer handles the markup; the Other regions it emits
        # are re-lexed by VbNetLexer.
        super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
                                             **options)

    def analyse_text(text):
        # Defined without `self` on purpose: the pygments lexer metaclass
        # wraps analyse_text into a static analyser taking only the text.
        if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
            return 0.2
        elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
            return 0.15
# Very close to functional.OcamlLexer
class FSharpLexer(RegexLexer):
    """
    For the F# language (version 3.0).

    *New in Pygments 1.5.*
    """

    name = 'FSharp'
    aliases = ['fsharp']
    filenames = ['*.fs', '*.fsi']
    mimetypes = ['text/x-fsharp']

    keywords = [
        'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
        'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
        'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
        'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
        'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
        'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
        'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
        'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
        'while', 'with', 'yield!', 'yield',
    ]
    # Reserved words; cannot hurt to color them as keywords too.
    keywords += [
        'atomic', 'break', 'checked', 'component', 'const', 'constraint',
        'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
        'functor', 'include', 'method', 'mixin', 'object', 'parallel',
        'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
        'virtual', 'volatile',
    ]
    # Pre-escaped operator/punctuation fragments, joined with '|' below;
    # longer alternatives come first so they win over their prefixes.
    keyopts = [
        '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
        '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
        '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
        '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>',
    ]

    operators = r'[!$%&*+\./:<=>?@^|~-]'
    word_operators = ['and', 'or', 'not']
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'
    primitives = [
        'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
        'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
        'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
        'list', 'exn', 'obj', 'enum',
    ]

    # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
    # http://fsharp.org/about/files/spec.pdf for reference. Good luck.

    tokens = {
        'escape-sequence': [
            (r'\\[\\\"\'ntbrafv]', String.Escape),
            (r'\\[0-9]{3}', String.Escape),
            (r'\\u[0-9a-fA-F]{4}', String.Escape),
            (r'\\U[0-9a-fA-F]{8}', String.Escape),
        ],
        'root': [
            (r'\s+', Text),
            (r'\(\)|\[\]', Name.Builtin.Pseudo),
            # Capitalized identifier followed by '.' opens a dotted path.
            (r'\b(?<!\.)([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
             Name.Namespace, 'dotted'),
            (r'\b([A-Z][A-Za-z0-9_\']*)', Name),
            (r'///.*?\n', String.Doc),
            (r'//.*?\n', Comment.Single),
            (r'\(\*(?!\))', Comment, 'comment'),

            (r'@"', String, 'lstring'),     # verbatim string
            (r'"""', String, 'tqs'),        # triple-quoted string
            (r'"', String, 'string'),

            (r'\b(open|module)(\s+)([a-zA-Z0-9_.]+)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'\b(let!?)(\s+)([a-zA-Z0-9_]+)',
             bygroups(Keyword, Text, Name.Variable)),
            (r'\b(type)(\s+)([a-zA-Z0-9_]+)',
             bygroups(Keyword, Text, Name.Class)),
            (r'\b(member|override)(\s+)([a-zA-Z0-9_]+)(\.)([a-zA-Z0-9_]+)',
             bygroups(Keyword, Text, Name, Punctuation, Name.Function)),
            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
            (r'(%s)' % '|'.join(keyopts), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
            (r'#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n',
             Comment.Preproc),

            (r"[^\W\d][\w']*", Name),

            (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
            (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
            (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
            (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Binary),
            # NOTE(review): the '.' here is unescaped (matches any char) and
            # the exponent group is not optional, so floats without an
            # exponent (e.g. "3.14") fall through to the integer rule —
            # confirm whether that is intended.
            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
             Number.Float),

            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword), # a stray quote is another syntax element

            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        # Dotted namespace/member path entered from 'root'.
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][A-Za-z0-9_\']*', Name, '#pop'),
            (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
        ],
        # (* ... *) comments may nest, hence the #push.
        'comment': [
            (r'[^(*)@"]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            # comments cannot be closed within strings in comments
            (r'@"', String, 'lstring'),
            (r'"""', String, 'tqs'),
            (r'"', String, 'string'),
            (r'[(*)@]', Comment),
        ],
        'string': [
            (r'[^\\"]+', String),
            include('escape-sequence'),
            (r'\\\n', String),
            (r'\n', String), # newlines are allowed in any string
            (r'"B?', String, '#pop'),   # optional 'B' byte-string suffix
        ],
        # @"..." verbatim string: "" is an escaped quote, no backslash escapes.
        'lstring': [
            (r'[^"]+', String),
            (r'\n', String),
            (r'""', String),
            (r'"B?', String, '#pop'),
        ],
        # """...""" triple-quoted string: single quotes pass through.
        'tqs': [
            (r'[^"]+', String),
            (r'\n', String),
            (r'"""B?', String, '#pop'),
            (r'"', String),
        ],
    }
wakatime/packages/pygments/lexers/foxpro.py — new file, 428 lines
@@ -0,0 +1,428 @@
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.foxpro
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Simple lexer for Microsoft Visual FoxPro source code.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer
|
||||
from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
|
||||
Name, String
|
||||
|
||||
__all__ = ['FoxProLexer']
|
||||
|
||||
|
||||
class FoxProLexer(RegexLexer):
|
||||
"""Lexer for Microsoft Visual FoxPro language.
|
||||
|
||||
FoxPro syntax allows to shorten all keywords and function names
|
||||
to 4 characters. Shortened forms are not recognized by this lexer.
|
||||
|
||||
*New in Pygments 1.6.*
|
||||
"""
|
||||
|
||||
name = 'FoxPro'
|
||||
aliases = ['Clipper', 'XBase']
|
||||
filenames = ['*.PRG', '*.prg']
|
||||
mimetype = []
|
||||
|
||||
flags = re.IGNORECASE | re.MULTILINE
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r';\s*\n', Punctuation), # consume newline
|
||||
(r'(^|\n)\s*', Text, 'newline'),
|
||||
|
||||
# Square brackets may be used for array indices
|
||||
# and for string literal. Look for arrays
|
||||
# before matching string literals.
|
||||
(r'(?<=\w)\[[0-9, ]+\]', Text),
|
||||
(r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
|
||||
(r'(^\s*\*|&&|&&).*?\n', Comment.Single),
|
||||
|
||||
(r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
|
||||
r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
|
||||
r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
|
||||
r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
|
||||
r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
|
||||
r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
|
||||
r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
|
||||
r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
|
||||
r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
|
||||
r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
|
||||
r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
|
||||
r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
|
||||
r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
|
||||
r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
|
||||
r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
|
||||
r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
|
||||
r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
|
||||
r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
|
||||
r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
|
||||
r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
|
||||
r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
|
||||
r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
|
||||
r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
|
||||
r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
|
||||
r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
|
||||
r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
|
||||
r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
|
||||
r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
|
||||
r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
|
||||
r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
|
||||
r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
|
||||
r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
|
||||
r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
|
||||
r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
|
||||
r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
|
||||
r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
|
||||
r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
|
||||
r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
|
||||
r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
|
||||
r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
|
||||
r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
|
||||
r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
|
||||
r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
|
||||
r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
|
||||
r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
|
||||
r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
|
||||
r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
|
||||
r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
|
||||
r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
|
||||
r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
|
||||
r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
|
||||
r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
|
||||
r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
|
||||
r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
|
||||
r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
|
||||
r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
|
||||
r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
|
||||
r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
|
||||
r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
|
||||
r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
|
||||
r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
|
||||
r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
|
||||
r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
|
||||
r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
|
||||
r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
|
||||
r'YEAR)(?=\s*\()', Name.Function),
|
||||
|
||||
(r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
|
||||
r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
|
||||
r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
|
||||
r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
|
||||
r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
|
||||
r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
|
||||
r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
|
||||
r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
|
||||
r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
|
||||
r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
|
||||
r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
|
||||
r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
|
||||
r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
|
||||
|
||||
(r'THISFORMSET|THISFORM|THIS', Name.Builtin),
|
||||
|
||||
(r'Application|CheckBox|Collection|Column|ComboBox|'
|
||||
r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
|
||||
r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
|
||||
r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
|
||||
r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
|
||||
r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
|
||||
r'Project|Relation|ReportListener|Separator|Servers|Server|'
|
||||
r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
|
||||
r'XMLAdapter|XMLField|XMLTable', Name.Class),
|
||||
|
||||
(r'm\.[a-z_]\w*', Name.Variable),
|
||||
(r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
|
||||
|
||||
(r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
|
||||
r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
|
||||
r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
|
||||
r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
|
||||
r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
|
||||
r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
|
||||
r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
|
||||
r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
|
||||
r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
|
||||
r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
|
||||
r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
|
||||
r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
|
||||
r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
|
||||
r'BreakOnError|BufferModeOverride|BufferMode|'
|
||||
r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
|
||||
r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
|
||||
r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
|
||||
r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
|
||||
r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
|
||||
r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
|
||||
r'ContinuousScroll|ControlBox|ControlCount|Controls|'
|
||||
r'ControlSource|ConversionFunc|Count|CurrentControl|'
|
||||
r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
|
||||
r'CursorSchema|CursorSource|CursorStatus|Curvature|'
|
||||
r'Database|DataSessionID|DataSession|DataSourceType|'
|
||||
r'DataSource|DataType|DateFormat|DateMark|Debug|'
|
||||
r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
|
||||
r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
|
||||
r'DeleteCmd|DeleteMark|Description|Desktop|'
|
||||
r'Details|DisabledBackColor|DisabledForeColor|'
|
||||
r'DisabledItemBackColor|DisabledItemForeColor|'
|
||||
r'DisabledPicture|DisableEncode|DisplayCount|'
|
||||
r'DisplayValue|Dockable|Docked|DockPosition|'
|
||||
r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
|
||||
r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
|
||||
r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
|
||||
r'DynamicFontItalic|DynamicFontStrikethru|'
|
||||
r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
|
||||
r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
|
||||
r'DynamicLineHeight|EditorOptions|Enabled|'
|
||||
r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
|
||||
r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
|
||||
r'FetchMemoDataSource|FetchMemo|FetchSize|'
|
||||
r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
|
||||
r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
|
||||
r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
|
||||
r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
|
||||
r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
|
||||
r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
|
||||
r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
|
||||
r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
|
||||
r'HelpContextID|HideSelection|HighlightBackColor|'
|
||||
r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
|
||||
r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
|
||||
r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
|
||||
r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
|
||||
r'InsertCmdDataSource|InsertCmdRefreshCmd|'
|
||||
r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
|
||||
r'InsertCmd|Instancing|IntegralHeight|'
|
||||
r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
|
||||
r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
|
||||
r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
|
||||
r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
|
||||
r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
|
||||
r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
|
||||
r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
|
||||
r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
|
||||
r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
|
||||
r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
|
||||
r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
|
||||
r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
|
||||
r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
|
||||
r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
|
||||
r'NumberOfElements|Object|OLEClass|OLEDragMode|'
|
||||
r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
|
||||
r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
|
||||
r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
|
||||
r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
|
||||
r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
|
||||
r'OutputPageCount|OutputType|PageCount|PageHeight|'
|
||||
r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
|
||||
r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
|
||||
r'Parent|Partition|PasswordChar|PictureMargin|'
|
||||
r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
|
||||
r'PictureVal|Picture|Prepared|'
|
||||
r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
|
||||
r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
|
||||
r'ProjectHookLibrary|ProjectHook|QuietMode|'
|
||||
r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
|
||||
r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
|
||||
r'RecordSource|RefreshAlias|'
|
||||
r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
|
||||
r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
|
||||
r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
|
||||
r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
|
||||
r'Rotation|RowColChange|RowHeight|RowSourceType|'
|
||||
r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
|
||||
r'Seconds|SelectCmd|SelectedID|'
|
||||
r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
|
||||
r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
|
||||
r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
|
||||
r'ServerClass|ServerHelpFile|ServerName|'
|
||||
r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
|
||||
r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
|
||||
r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
|
||||
r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
|
||||
r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
|
||||
r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
|
||||
r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
|
||||
r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
|
||||
r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
|
||||
r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
|
||||
r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
|
||||
r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
|
||||
r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
|
||||
r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
|
||||
r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
|
||||
r'VersionCompany|VersionCopyright|VersionDescription|'
|
||||
r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
|
||||
r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
|
||||
r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
|
||||
r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
|
||||
r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
|
||||
r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
|
||||
r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
|
||||
r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
|
||||
r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
|
||||
r'XSDtype|ZoomBox)', Name.Attribute),
|
||||
|
||||
(r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
|
||||
r'AddProperty|AddTableSchema|AddToSCC|Add|'
|
||||
r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
|
||||
r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
|
||||
r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
|
||||
r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
|
||||
r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
|
||||
r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
|
||||
r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
|
||||
r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
|
||||
r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
|
||||
r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
|
||||
r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
|
||||
r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
|
||||
r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
|
||||
r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
|
||||
r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
|
||||
r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
|
||||
r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
|
||||
r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
|
||||
r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
|
||||
|
||||
(r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
|
||||
r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
|
||||
r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
|
||||
r'AfterCursorUpdate|AfterDelete|AfterInsert|'
|
||||
r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
|
||||
r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
|
||||
r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
|
||||
r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
|
||||
r'BeforeInsert|BeforeDock|BeforeOpenTables|'
|
||||
r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
|
||||
r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
|
||||
r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
|
||||
r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
|
||||
r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
|
||||
r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
|
||||
r'dbc_AfterDropOffline|dbc_AfterDropTable|'
|
||||
r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
|
||||
r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
|
||||
r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
|
||||
r'dbc_AfterRenameTable|dbc_AfterRenameView|'
|
||||
r'dbc_AfterValidateData|dbc_BeforeAddTable|'
|
||||
r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
|
||||
r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
|
||||
r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
|
||||
r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
|
||||
r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
|
||||
r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
|
||||
r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
|
||||
r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
|
||||
r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
|
||||
r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
|
||||
r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
|
||||
r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
|
||||
r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
|
||||
r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
|
||||
r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
|
||||
r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
|
||||
r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
|
||||
r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
|
||||
r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
|
||||
r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
|
||||
r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
|
||||
r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
|
||||
r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
|
||||
r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
|
||||
|
||||
(r'\s+', Text),
|
||||
# everything else is not colored
|
||||
(r'.', Text),
|
||||
],
|
||||
'newline': [
|
||||
(r'\*.*?$', Comment.Single, '#pop'),
|
||||
(r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
|
||||
r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
|
||||
r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
|
||||
r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
|
||||
r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
|
||||
r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
|
||||
r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
|
||||
r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
|
||||
r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
|
||||
r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
|
||||
r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
|
||||
r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
|
||||
r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
|
||||
r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
|
||||
r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
|
||||
r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
|
||||
r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
|
||||
r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
|
||||
r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
|
||||
r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
|
||||
r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
|
||||
r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
|
||||
r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
|
||||
r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
|
||||
r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
|
||||
r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
|
||||
r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
|
||||
r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
|
||||
r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
|
||||
r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
|
||||
r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
|
||||
r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
|
||||
r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
|
||||
r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
|
||||
r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
|
||||
r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
|
||||
r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
|
||||
r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
|
||||
r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
|
||||
r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
|
||||
r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
|
||||
r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
|
||||
r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
|
||||
r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
|
||||
r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
|
||||
r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
|
||||
r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
|
||||
r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
|
||||
r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
|
||||
r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
|
||||
r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
|
||||
r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
|
||||
r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
|
||||
r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
|
||||
r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
|
||||
r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
|
||||
r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
|
||||
r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
|
||||
r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
|
||||
r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
|
||||
r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
|
||||
r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
|
||||
r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
|
||||
r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
|
||||
r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
|
||||
r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
|
||||
r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
|
||||
r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
|
||||
r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
|
||||
r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
|
||||
r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
|
||||
r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
|
||||
r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
|
||||
r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
|
||||
Keyword.Reserved, '#pop'),
|
||||
(r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
|
||||
Comment.Preproc, '#pop'),
|
||||
(r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
|
||||
(r'.', Text, '#pop'),
|
||||
],
|
||||
}
|
2731
wakatime/packages/pygments/lexers/functional.py
Normal file
2731
wakatime/packages/pygments/lexers/functional.py
Normal file
File diff suppressed because it is too large
Load diff
356
wakatime/packages/pygments/lexers/hdl.py
Normal file
356
wakatime/packages/pygments/lexers/hdl.py
Normal file
|
@ -0,0 +1,356 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.hdl
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for hardware descriptor languages.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
from pygments.lexer import RegexLexer, bygroups, include, using, this
|
||||
from pygments.token import \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
|
||||
Error
|
||||
|
||||
__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
|
||||
|
||||
|
||||
class VerilogLexer(RegexLexer):
|
||||
"""
|
||||
For verilog source code with preprocessor directives.
|
||||
|
||||
*New in Pygments 1.4.*
|
||||
"""
|
||||
name = 'verilog'
|
||||
aliases = ['verilog', 'v']
|
||||
filenames = ['*.v']
|
||||
mimetypes = ['text/x-verilog']
|
||||
|
||||
#: optional Comment or Whitespace
|
||||
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'^\s*`define', Comment.Preproc, 'macro'),
|
||||
(r'\n', Text),
|
||||
(r'\s+', Text),
|
||||
(r'\\\n', Text), # line continuation
|
||||
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
|
||||
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
|
||||
(r'[{}#@]', Punctuation),
|
||||
(r'L?"', String, 'string'),
|
||||
(r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
|
||||
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
|
||||
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
|
||||
(r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
|
||||
(r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary
|
||||
(r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
|
||||
(r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
|
||||
(r'\'[01xz]', Number),
|
||||
(r'\d+[Ll]?', Number.Integer),
|
||||
(r'\*/', Error),
|
||||
(r'[~!%^&*+=|?:<>/-]', Operator),
|
||||
(r'[()\[\],.;\']', Punctuation),
|
||||
(r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
|
||||
|
||||
(r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
|
||||
(r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
|
||||
'import'),
|
||||
|
||||
(r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
|
||||
r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
|
||||
r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
|
||||
r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
|
||||
r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
|
||||
r'generate|genvar|highz0|highz1|if|initial|inout|input|'
|
||||
r'integer|join|large|localparam|macromodule|medium|module|'
|
||||
r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
|
||||
r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
|
||||
r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
|
||||
r'rtranif1|scalared|signed|small|specify|specparam|strength|'
|
||||
r'string|strong0|strong1|struct|table|task|'
|
||||
r'tran|tranif0|tranif1|type|typedef|'
|
||||
r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
|
||||
r'xnor|xor)\b', Keyword),
|
||||
|
||||
(r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|'
|
||||
r'else|elsif|endcelldefine|endif|endprotect|endprotected|'
|
||||
r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|'
|
||||
r'noremove_gatenames|noremove_netnames|nounconnected_drive|'
|
||||
r'protect|protected|remove_gatenames|remove_netnames|resetall|'
|
||||
r'timescale|unconnected_drive|undef)\b', Comment.Preproc),
|
||||
|
||||
(r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|'
|
||||
r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|'
|
||||
r'getpattern|history|incsave|input|itor|key|list|log|'
|
||||
r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|'
|
||||
r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|'
|
||||
r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|'
|
||||
r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|'
|
||||
r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin),
|
||||
|
||||
(r'(byte|shortint|int|longint|integer|time|'
|
||||
r'bit|logic|reg|'
|
||||
r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
|
||||
r'shortreal|real|realtime)\b', Keyword.Type),
|
||||
('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
|
||||
('[a-zA-Z_][a-zA-Z0-9_]*', Name),
|
||||
],
|
||||
'string': [
|
||||
(r'"', String, '#pop'),
|
||||
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
|
||||
(r'[^\\"\n]+', String), # all other characters
|
||||
(r'\\\n', String), # line continuation
|
||||
(r'\\', String), # stray backslash
|
||||
],
|
||||
'macro': [
|
||||
(r'[^/\n]+', Comment.Preproc),
|
||||
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
|
||||
(r'//.*?\n', Comment.Single, '#pop'),
|
||||
(r'/', Comment.Preproc),
|
||||
(r'(?<=\\)\n', Comment.Preproc),
|
||||
(r'\n', Comment.Preproc, '#pop'),
|
||||
],
|
||||
'import': [
|
||||
(r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
|
||||
]
|
||||
}
|
||||
|
||||
def get_tokens_unprocessed(self, text):
|
||||
for index, token, value in \
|
||||
RegexLexer.get_tokens_unprocessed(self, text):
|
||||
# Convention: mark all upper case names as constants
|
||||
if token is Name:
|
||||
if value.isupper():
|
||||
token = Name.Constant
|
||||
yield index, token, value
|
||||
|
||||
|
||||
class SystemVerilogLexer(RegexLexer):
    """
    Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
    1800-2009 standard.

    *New in Pygments 1.5.*
    """
    name = 'systemverilog'
    aliases = ['systemverilog', 'sv']
    filenames = ['*.sv', '*.svh']
    mimetypes = ['text/x-systemverilog']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            # preprocessor-style macro definition; body handled in 'macro'
            (r'^\s*`define', Comment.Preproc, 'macro'),
            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'),

            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'[{}#@]', Punctuation),
            (r'L?"', String, 'string'),
            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
            # NOTE(review): the leading "([0-9]+)|" alternative lets a bare
            # decimal match these base-literal rules; looks wrong but is kept
            # as-is to preserve upstream behaviour — confirm against upstream.
            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),  # should be binary
            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
            (r'\'[01xz]', Number),
            (r'\d+[Ll]?', Number.Integer),
            (r'\*/', Error),  # stray close-comment
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.;\']', Punctuation),
            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),

            # IEEE 1800-2009 keyword set
            (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
             r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
             r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
             r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
             r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
             r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
             r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
             r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
             r'endprogram|endproperty|endsequence|endspecify|endtable|'
             r'endtask|enum|event|eventually|expect|export|extends|extern|'
             r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
             r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
             r'ignore_bins|illegal_bins|implies|import|incdir|include|'
             r'initial|inout|input|inside|instance|int|integer|interface|'
             r'intersect|join|join_any|join_none|large|let|liblist|library|'
             r'local|localparam|logic|longint|macromodule|matches|medium|'
             r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
             r'not|notif0|notif1|null|or|output|package|packed|parameter|'
             r'pmos|posedge|primitive|priority|program|property|protected|'
             r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
             r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
             r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
             r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
             r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
             r'showcancelled|signed|small|solve|specify|specparam|static|'
             r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
             r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
             r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
             r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
             r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
             r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
             r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword),

            # compiler directives
            (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
             r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
             r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
             r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
             Comment.Preproc),

            # system tasks and functions
            (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
             r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
             r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
             r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
             r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
             r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
             r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
             r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
             r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
             r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
             r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
             r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
             r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
             r'\$writememh|\$writeo)\b', Name.Builtin),

            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
            # BUG FIX: the original alternation ran "...wand|wor" directly into
            # "shortreal", producing the bogus alternative "worshortreal" and
            # breaking recognition of both words; a '|' separator was missing.
            (r'(byte|shortint|int|longint|integer|time|'
             r'bit|logic|reg|'
             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
             r'shortreal|real|realtime)\b', Keyword.Type),
            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'classname': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),  # macro continuation
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'import': [
            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
        ]
    }

    def get_tokens_unprocessed(self, text):
        """Yield base tokens, re-tagging ALL-CAPS names as constants."""
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
            # Convention: mark all upper case names as constants
            if token is Name:
                if value.isupper():
                    token = Name.Constant
            yield index, token, value

    def analyse_text(text):
        # Weak heuristic: files opening with a comment might be (System)Verilog.
        if text.startswith('//') or text.startswith('/*'):
            return 0.5
|
||||
|
||||
|
||||
class VhdlLexer(RegexLexer):
    """
    For VHDL source code.

    *New in Pygments 1.5.*
    """
    name = 'vhdl'
    aliases = ['vhdl']
    filenames = ['*.vhdl', '*.vhd']
    mimetypes = ['text/x-vhdl']
    # VHDL is case-insensitive; '^'/'$' must work per line.
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
            (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),  # std_logic literals
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute),  # 'event, 'range, ...
            (r'[()\[\],.;\']', Punctuation),
            (r'"[^\n\\]*"', String),

            (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
            (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Class)),
            (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)'
             r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)',
             bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
                      Name.Class, Text, Keyword)),

            # 'end' may close many constructs; lex what follows in 'endblock'
            (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),

            include('types'),
            include('keywords'),
            include('numbers'),

            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'endblock': [
            include('keywords'),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class),
            (r'(\s+)', Text),
            (r';', Punctuation, '#pop'),
        ],
        'types': [
            (r'(boolean|bit|character|severity_level|integer|time|delay_length|'
             r'natural|positive|string|bit_vector|file_open_kind|'
             r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|'
             r'std_logic_vector)\b', Keyword.Type),
        ],
        'keywords': [
            (r'(abs|access|after|alias|all|and|'
             r'architecture|array|assert|attribute|begin|block|'
             r'body|buffer|bus|case|component|configuration|'
             r'constant|disconnect|downto|else|elsif|end|'
             r'entity|exit|file|for|function|generate|'
             r'generic|group|guarded|if|impure|in|'
             r'inertial|inout|is|label|library|linkage|'
             r'literal|loop|map|mod|nand|new|'
             r'next|nor|not|null|of|on|'
             r'open|or|others|out|package|port|'
             r'postponed|procedure|process|pure|range|record|'
             r'register|reject|return|rol|ror|select|'
             r'severity|signal|shared|sla|sli|sra|'
             r'srl|subtype|then|to|transport|type|'
             r'units|until|use|variable|wait|when|'
             r'while|with|xnor|xor)\b', Keyword),
        ],
        'numbers': [
            (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer),  # based literals, e.g. 16#FF#
            (r'[0-1_]+(\.[0-1_])', Number.Integer),
            (r'\d+', Number.Integer),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
            # NOTE(review): H"..." is a hex bit-string literal but is tagged
            # Number.Oct here (as is B"..."); kept as-is to match upstream.
            (r'H"[0-9a-fA-F_]+"', Number.Oct),
            (r'O"[0-7_]+"', Number.Oct),
            (r'B"[0-1_]+"', Number.Oct),
        ],
    }
|
1112
wakatime/packages/pygments/lexers/jvm.py
Normal file
1112
wakatime/packages/pygments/lexers/jvm.py
Normal file
File diff suppressed because it is too large
Load diff
1918
wakatime/packages/pygments/lexers/math.py
Normal file
1918
wakatime/packages/pygments/lexers/math.py
Normal file
File diff suppressed because it is too large
Load diff
3778
wakatime/packages/pygments/lexers/other.py
Normal file
3778
wakatime/packages/pygments/lexers/other.py
Normal file
File diff suppressed because it is too large
Load diff
778
wakatime/packages/pygments/lexers/parsers.py
Normal file
778
wakatime/packages/pygments/lexers/parsers.py
Normal file
|
@ -0,0 +1,778 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.parsers
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for parser generators.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, DelegatingLexer, \
|
||||
include, bygroups, using
|
||||
from pygments.token import Punctuation, Other, Text, Comment, Operator, \
|
||||
Keyword, Name, String, Number, Whitespace
|
||||
from pygments.lexers.compiled import JavaLexer, CLexer, CppLexer, \
|
||||
ObjectiveCLexer, DLexer
|
||||
from pygments.lexers.dotnet import CSharpLexer
|
||||
from pygments.lexers.agile import RubyLexer, PythonLexer, PerlLexer
|
||||
from pygments.lexers.web import ActionScriptLexer
|
||||
|
||||
|
||||
__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
|
||||
'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
|
||||
'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
|
||||
'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
|
||||
#'AntlrCLexer',
|
||||
'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
|
||||
'AntlrJavaLexer', "AntlrActionScriptLexer",
|
||||
'TreetopLexer']
|
||||
|
||||
|
||||
class RagelLexer(RegexLexer):
    """
    A pure `Ragel <http://www.complang.org/ragel/>`_ lexer. Use this for
    fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
    (or one of the language-specific subclasses).

    *New in Pygments 1.1.*
    """

    name = 'Ragel'
    aliases = ['ragel']
    filenames = []

    tokens = {
        'whitespace': [
            (r'\s+', Whitespace)
        ],
        'comments': [
            (r'\#.*$', Comment),
        ],
        'keywords': [
            (r'(access|action|alphtype)\b', Keyword),
            (r'(getkey|write|machine|include)\b', Keyword),
            (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
            (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
        ],
        'numbers': [
            (r'0x[0-9A-Fa-f]+', Number.Hex),
            (r'[+-]?[0-9]+', Number.Integer),
        ],
        'literals': [
            (r'"(\\\\|\\"|[^"])*"', String),  # double quote string
            (r"'(\\\\|\\'|[^'])*'", String),  # single quote string
            (r'\[(\\\\|\\\]|[^\]])*\]', String),  # square bracket literals
            (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex),  # regular expressions
        ],
        'identifiers': [
            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
        ],
        'operators': [
            (r',', Operator),  # Join
            (r'\||&|--?', Operator),  # Union, Intersection and Subtraction
            (r'\.|<:|:>>?', Operator),  # Concatention
            (r':', Operator),  # Label
            (r'->', Operator),  # Epsilon Transition
            (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator),  # EOF Actions
            (r'(>|\$|%|<|@|<>)(!|err\b)', Operator),  # Global Error Actions
            (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator),  # Local Error Actions
            (r'(>|\$|%|<|@|<>)(~|to\b)', Operator),  # To-State Actions
            (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator),  # From-State Actions
            (r'>|@|\$|%', Operator),  # Transition Actions and Priorities
            (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator),  # Repetition
            (r'!|\^', Operator),  # Negation
            (r'\(|\)', Operator),  # Grouping
        ],
        'root': [
            # order matters: literals must win before identifiers/operators
            include('literals'),
            include('whitespace'),
            include('comments'),
            include('keywords'),
            include('numbers'),
            include('identifiers'),
            include('operators'),
            (r'{', Punctuation, 'host'),  # embedded host-language action block
            (r'=', Operator),
            (r';', Punctuation),
        ],
        'host': [
            (r'(' + r'|'.join((  # keep host code in largest possible chunks
                r'[^{}\'"/#]+',  # exclude unsafe characters
                r'[^\\][\\][{}]',  # allow escaped { or }

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r'//.*$\n?',  # single line comment
                r'/\*(.|\n)*?\*/',  # multi-line javadoc-style comment
                r'\#.*$\n?',  # ruby comment

                # regular expression: There's no reason for it to start
                # with a * and this stops confusion with comments.
                r'/(?!\*)(\\\\|\\/|[^/])*/',

                # / is safe now that we've handled regex and javadoc comments
                r'/',
            )) + r')+', Other),

            (r'{', Punctuation, '#push'),  # nested braces in host code
            (r'}', Punctuation, '#pop'),
        ],
    }
|
||||
|
||||
|
||||
class RagelEmbeddedLexer(RegexLexer):
    """
    A lexer for `Ragel`_ embedded in a host language file.

    This will only highlight Ragel statements. If you want host language
    highlighting then call the language-specific Ragel lexer.

    *New in Pygments 1.1.*
    """

    name = 'Embedded Ragel'
    aliases = ['ragel-em']
    filenames = ['*.rl']

    tokens = {
        'root': [
            (r'(' + r'|'.join((  # keep host code in largest possible chunks
                r'[^%\'"/#]+',  # exclude unsafe characters
                r'%(?=[^%]|$)',  # a single % sign is okay, just not 2 of them

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r'/\*(.|\n)*?\*/',  # multi-line javadoc-style comment
                r'//.*$\n?',  # single line comment
                r'\#.*$\n?',  # ruby/ragel comment
                r'/(?!\*)(\\\\|\\/|[^/])*/',  # regular expression

                # / is safe now that we've handled regex and javadoc comments
                r'/',
            )) + r')+', Other),

            # Single Line FSM.
            # Please don't put a quoted newline in a single line FSM.
            # That's just mean. It will break this.
            (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
                                                     using(RagelLexer),
                                                     Punctuation, Text)),

            # Multi Line FSM.
            (r'(%%%%|%%){', Punctuation, 'multi-line-fsm'),
        ],
        'multi-line-fsm': [
            (r'(' + r'|'.join((  # keep ragel code in largest possible chunks.
                r'(' + r'|'.join((
                    r'[^}\'"\[/#]',  # exclude unsafe characters
                    r'}(?=[^%]|$)',  # } is okay as long as it's not followed by %
                    r'}%(?=[^%]|$)',  # ...well, one %'s okay, just not two...
                    r'[^\\][\\][{}]',  # ...and } is okay if it's escaped

                    # allow / if it's preceded with one of these symbols
                    # (ragel EOF actions)
                    r'(>|\$|%|<|@|<>)/',

                    # specifically allow regex followed immediately by *
                    # so it doesn't get mistaken for a comment
                    r'/(?!\*)(\\\\|\\/|[^/])*/\*',

                    # allow / as long as it's not followed by another / or by a *
                    r'/(?=[^/\*]|$)',

                    # We want to match as many of these as we can in one block.
                    # Not sure if we need the + sign here,
                    # does it help performance?
                )) + r')+',

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r"\[(\\\\|\\\]|[^\]])*\]",  # square bracket literal
                r'/\*(.|\n)*?\*/',  # multi-line javadoc-style comment
                r'//.*$\n?',  # single line comment
                r'\#.*$\n?',  # ruby/ragel comment
            )) + r')+', using(RagelLexer)),

            (r'}%%', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        # Explicit marker wins outright; otherwise a weak default score.
        return '@LANG: indep' in text or 0.1
|
||||
|
||||
|
||||
class RagelRubyLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in Ruby source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in Ruby Host'
    aliases = ['ragel-ruby', 'ragel-rb']
    filenames = ['*.rl']

    def __init__(self, **options):
        # Ruby lexes the host code; the embedded lexer handles Ragel parts.
        super(RagelRubyLexer, self).__init__(
            RubyLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: ruby' in text
|
||||
|
||||
|
||||
class RagelCLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in C source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in C Host'
    aliases = ['ragel-c']
    filenames = ['*.rl']

    def __init__(self, **options):
        # C lexes the host code; the embedded lexer handles Ragel parts.
        super(RagelCLexer, self).__init__(
            CLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: c' in text
|
||||
|
||||
|
||||
class RagelDLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in D source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in D Host'
    aliases = ['ragel-d']
    filenames = ['*.rl']

    def __init__(self, **options):
        # D lexes the host code; the embedded lexer handles Ragel parts.
        super(RagelDLexer, self).__init__(
            DLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: d' in text
|
||||
|
||||
|
||||
class RagelCppLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in C++ source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in CPP Host'
    aliases = ['ragel-cpp']
    filenames = ['*.rl']

    def __init__(self, **options):
        # C++ lexes the host code; the embedded lexer handles Ragel parts.
        super(RagelCppLexer, self).__init__(
            CppLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: c++' in text
|
||||
|
||||
|
||||
class RagelObjectiveCLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in Objective-C source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in Objective C Host'
    aliases = ['ragel-objc']
    filenames = ['*.rl']

    def __init__(self, **options):
        # Objective-C lexes the host code; the embedded lexer handles Ragel.
        super(RagelObjectiveCLexer, self).__init__(
            ObjectiveCLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: objc' in text
|
||||
|
||||
|
||||
class RagelJavaLexer(DelegatingLexer):
    """
    Highlights `Ragel`_ state machines embedded in Java source files.

    *New in Pygments 1.1.*
    """

    name = 'Ragel in Java Host'
    aliases = ['ragel-java']
    filenames = ['*.rl']

    def __init__(self, **options):
        # Java lexes the host code; the embedded lexer handles Ragel parts.
        super(RagelJavaLexer, self).__init__(
            JavaLexer, RagelEmbeddedLexer, **options)

    def analyse_text(text):
        return '@LANG: java' in text
|
||||
|
||||
|
||||
class AntlrLexer(RegexLexer):
    """
    Generic `ANTLR`_ Lexer.
    Should not be called directly, instead
    use DelegatingLexer for your target language.

    *New in Pygments 1.1.*

    .. _ANTLR: http://www.antlr.org/
    """

    name = 'ANTLR'
    aliases = ['antlr']
    filenames = []

    # helper regex fragments composed into the rules below
    _id = r'[A-Za-z][A-Za-z_0-9]*'
    _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*'
    _RULE_REF = r'[a-z][A-Za-z_0-9]*'
    _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
    _INT = r'[0-9]+'

    tokens = {
        'whitespace': [
            (r'\s+', Whitespace),
        ],
        'comments': [
            (r'//.*$', Comment),
            (r'/\*(.|\n)*?\*/', Comment),
        ],
        'root': [
            include('whitespace'),
            include('comments'),

            # grammar declaration, e.g. "parser grammar Foo;"
            (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
             bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
                      Punctuation)),
            # optionsSpec
            (r'options\b', Keyword, 'options'),
            # tokensSpec
            (r'tokens\b', Keyword, 'tokens'),
            # attrScope
            (r'(scope)(\s*)(' + _id + ')(\s*)({)',
             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
                      Punctuation), 'action'),
            # exception
            (r'(catch|finally)\b', Keyword, 'exception'),
            # action
            (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)({)',
             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
                      Name.Label, Whitespace, Punctuation), 'action'),
            # rule
            (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', \
             bygroups(Keyword, Whitespace, Name.Label, Punctuation),
             ('rule-alts', 'rule-prelims')),
        ],
        'exception': [
            (r'\n', Whitespace, '#pop'),
            (r'\s', Whitespace),
            include('comments'),

            (r'\[', Punctuation, 'nested-arg-action'),
            (r'\{', Punctuation, 'action'),
        ],
        'rule-prelims': [
            include('whitespace'),
            include('comments'),

            (r'returns\b', Keyword),
            (r'\[', Punctuation, 'nested-arg-action'),
            (r'\{', Punctuation, 'action'),
            # throwsSpec
            (r'(throws)(\s+)(' + _id + ')',
             bygroups(Keyword, Whitespace, Name.Label)),
            (r'(,)(\s*)(' + _id + ')',
             bygroups(Punctuation, Whitespace, Name.Label)),  # Additional throws
            # optionsSpec
            (r'options\b', Keyword, 'options'),
            # ruleScopeSpec - scope followed by target language code or name of action
            # TODO finish implementing other possibilities for scope
            # L173 ANTLRv3.g from ANTLR book
            (r'(scope)(\s+)({)', bygroups(Keyword, Whitespace, Punctuation),
             'action'),
            (r'(scope)(\s+)(' + _id + ')(\s*)(;)',
             bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
            # ruleAction
            (r'(@' + _id + ')(\s*)({)',
             bygroups(Name.Label, Whitespace, Punctuation), 'action'),
            # finished prelims, go to rule alts!
            (r':', Punctuation, '#pop')
        ],
        'rule-alts': [
            include('whitespace'),
            include('comments'),

            # These might need to go in a separate 'block' state triggered by (
            (r'options\b', Keyword, 'options'),
            (r':', Punctuation),

            # literals
            (r"'(\\\\|\\'|[^'])*'", String),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r'<<([^>]|>[^>])>>', String),
            # identifiers
            # Tokens start with capital letter.
            (r'\$?[A-Z_][A-Za-z_0-9]*', Name.Constant),
            # Rules start with small letter.
            (r'\$?[a-z_][A-Za-z_0-9]*', Name.Variable),
            # operators
            (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
            (r',', Punctuation),
            (r'\[', Punctuation, 'nested-arg-action'),
            (r'\{', Punctuation, 'action'),
            (r';', Punctuation, '#pop')
        ],
        'tokens': [
            include('whitespace'),
            include('comments'),
            (r'{', Punctuation),
            (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
             + ')?(\s*)(;)',
             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
                      String, Whitespace, Punctuation)),
            (r'}', Punctuation, '#pop'),
        ],
        'options': [
            include('whitespace'),
            include('comments'),
            (r'{', Punctuation),
            (r'(' + _id + r')(\s*)(=)(\s*)(' +
             '|'.join((_id, _STRING_LITERAL, _INT, '\*'))+ ')(\s*)(;)',
             bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
                      Text, Whitespace, Punctuation)),
            (r'}', Punctuation, '#pop'),
        ],
        'action': [
            (r'(' + r'|'.join((  # keep host code in largest possible chunks
                r'[^\${}\'"/\\]+',  # exclude unsafe characters

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r'//.*$\n?',  # single line comment
                r'/\*(.|\n)*?\*/',  # multi-line javadoc-style comment

                # regular expression: There's no reason for it to start
                # with a * and this stops confusion with comments.
                r'/(?!\*)(\\\\|\\/|[^/])*/',

                # backslashes are okay, as long as we are not backslashing a %
                r'\\(?!%)',

                # Now that we've handled regex and javadoc comments
                # it's safe to let / through.
                r'/',
            )) + r')+', Other),
            (r'(\\)(%)', bygroups(Punctuation, Other)),
            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
             bygroups(Name.Variable, Punctuation, Name.Property)),
            (r'{', Punctuation, '#push'),
            (r'}', Punctuation, '#pop'),
        ],
        'nested-arg-action': [
            (r'(' + r'|'.join((  # keep host code in largest possible chunks.
                r'[^\$\[\]\'"/]+',  # exclude unsafe characters

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r'//.*$\n?',  # single line comment
                r'/\*(.|\n)*?\*/',  # multi-line javadoc-style comment

                # regular expression: There's no reason for it to start
                # with a * and this stops confusion with comments.
                r'/(?!\*)(\\\\|\\/|[^/])*/',

                # Now that we've handled regex and javadoc comments
                # it's safe to let / through.
                r'/',
            )) + r')+', Other),


            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
             bygroups(Name.Variable, Punctuation, Name.Property)),
            (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
        ]
    }

    def analyse_text(text):
        # A grammar declaration near a line start is a strong ANTLR signal.
        return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
|
||||
|
||||
# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
|
||||
|
||||
# TH: I'm not aware of any language features of C++ that will cause
|
||||
# incorrect lexing of C files. Antlr doesn't appear to make a distinction,
|
||||
# so just assume they're C++. No idea how to make Objective C work in the
|
||||
# future.
|
||||
|
||||
#class AntlrCLexer(DelegatingLexer):
|
||||
# """
|
||||
# ANTLR with C Target
|
||||
#
|
||||
# *New in Pygments 1.1*
|
||||
# """
|
||||
#
|
||||
# name = 'ANTLR With C Target'
|
||||
# aliases = ['antlr-c']
|
||||
# filenames = ['*.G', '*.g']
|
||||
#
|
||||
# def __init__(self, **options):
|
||||
# super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options)
|
||||
#
|
||||
# def analyse_text(text):
|
||||
# return re.match(r'^\s*language\s*=\s*C\s*;', text)
|
||||
|
||||
class AntlrCppLexer(DelegatingLexer):
    """
    Lexer for `ANTLR`_ grammars whose embedded actions are C++.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With CPP Target'
    aliases = ['antlr-cpp']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # C++ lexes action code; the ANTLR lexer handles the grammar itself.
        super(AntlrCppLexer, self).__init__(
            CppLexer, AntlrLexer, **options)

    def analyse_text(text):
        # C target files are treated as C++ (see module comment above).
        looks_like_antlr = AntlrLexer.analyse_text(text)
        return looks_like_antlr and \
               re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
|
||||
|
||||
|
||||
class AntlrObjectiveCLexer(DelegatingLexer):
    """
    `ANTLR`_ with Objective-C Target

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With ObjectiveC Target'
    aliases = ['antlr-objc']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Objective-C lexes action code; AntlrLexer handles the grammar.
        super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer,
                                                   AntlrLexer, **options)

    def analyse_text(text):
        # BUG FIX: re.M was missing here, unlike every sibling target lexer.
        # The "language = ObjC;" option never sits on the first line of a
        # grammar file, so without MULTILINE the '^' anchor could not match.
        return AntlrLexer.analyse_text(text) and \
               re.search(r'^\s*language\s*=\s*ObjC\s*;', text, re.M)
|
||||
|
||||
|
||||
class AntlrCSharpLexer(DelegatingLexer):
    """
    Lexer for `ANTLR`_ grammars whose embedded actions are C#.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With C# Target'
    aliases = ['antlr-csharp', 'antlr-c#']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # C# lexes action code; the ANTLR lexer handles the grammar itself.
        super(AntlrCSharpLexer, self).__init__(
            CSharpLexer, AntlrLexer, **options)

    def analyse_text(text):
        looks_like_antlr = AntlrLexer.analyse_text(text)
        return looks_like_antlr and \
               re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
|
||||
|
||||
|
||||
class AntlrPythonLexer(DelegatingLexer):
    """
    Lexer for `ANTLR`_ grammars whose embedded actions are Python.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With Python Target'
    aliases = ['antlr-python']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Python lexes action code; the ANTLR lexer handles the grammar.
        super(AntlrPythonLexer, self).__init__(
            PythonLexer, AntlrLexer, **options)

    def analyse_text(text):
        looks_like_antlr = AntlrLexer.analyse_text(text)
        return looks_like_antlr and \
               re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
|
||||
|
||||
|
||||
class AntlrJavaLexer(DelegatingLexer):
    """
    Lexer for `ANTLR`_ grammars whose embedded actions are Java.

    *New in Pygments 1.1*
    """

    name = 'ANTLR With Java Target'
    aliases = ['antlr-java']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Java lexes action code; the ANTLR lexer handles the grammar itself.
        super(AntlrJavaLexer, self).__init__(
            JavaLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Antlr language is Java by default
        return AntlrLexer.analyse_text(text) and 0.9
|
||||
|
||||
|
||||
class AntlrRubyLexer(DelegatingLexer):
    """
    Lexer for `ANTLR`_ grammars whose embedded actions are Ruby.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With Ruby Target'
    aliases = ['antlr-ruby', 'antlr-rb']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Ruby lexes action code; the ANTLR lexer handles the grammar itself.
        super(AntlrRubyLexer, self).__init__(
            RubyLexer, AntlrLexer, **options)

    def analyse_text(text):
        looks_like_antlr = AntlrLexer.analyse_text(text)
        return looks_like_antlr and \
               re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
|
||||
|
||||
|
||||
class AntlrPerlLexer(DelegatingLexer):
    """
    `ANTLR`_ grammars that declare Perl as their target language.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With Perl Target'
    aliases = ['antlr-perl']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Embedded actions go through the Perl lexer.
        super(AntlrPerlLexer, self).__init__(
            PerlLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Require both the ANTLR heuristics and an explicit Perl5 target.
        looks_like_antlr = AntlrLexer.analyse_text(text)
        target_decl = re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
        return looks_like_antlr and target_decl
||||
|
||||
|
||||
class AntlrActionScriptLexer(DelegatingLexer):
    """
    `ANTLR`_ grammars that declare ActionScript as their target language.

    *New in Pygments 1.1.*
    """

    name = 'ANTLR With ActionScript Target'
    aliases = ['antlr-as', 'antlr-actionscript']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Embedded actions go through the ActionScript lexer.
        super(AntlrActionScriptLexer, self).__init__(
            ActionScriptLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Require both the ANTLR heuristics and an explicit
        # ActionScript target declaration.
        looks_like_antlr = AntlrLexer.analyse_text(text)
        target_decl = re.search(r'^\s*language\s*=\s*ActionScript\s*;',
                                text, re.M)
        return looks_like_antlr and target_decl
||||
|
||||
class TreetopBaseLexer(RegexLexer):
    """
    A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
    Not for direct use; use TreetopLexer instead.

    *New in Pygments 1.6.*
    """

    tokens = {
        # Top level of a .treetop file: requires, module and grammar decls.
        'root': [
            include('space'),
            (r'require[ \t]+[^\n\r]+[\n\r]', Other),
            (r'module\b', Keyword.Namespace, 'module'),
            (r'grammar\b', Keyword, 'grammar'),
        ],
        # Inside a Ruby `module ... end` wrapper; modules may nest.
        'module': [
            include('space'),
            include('end'),
            (r'module\b', Keyword, '#push'),
            (r'grammar\b', Keyword, 'grammar'),
            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Namespace),
        ],
        # Body of a `grammar ... end` block: rules and includes.
        'grammar': [
            include('space'),
            include('end'),
            (r'rule\b', Keyword, 'rule'),
            (r'include\b', Keyword, 'include'),
            (r'[A-Z][A-Za-z_0-9]*', Name),
        ],
        # Target of an `include` directive (a constant path), then pop.
        'include': [
            include('space'),
            (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Class, '#pop'),
        ],
        # Body of a `rule ... end` block: the PEG expression syntax.
        'rule': [
            include('space'),
            include('end'),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            # label: prefix on a sub-expression
            (r'([A-Za-z_][A-Za-z_0-9]*)(:)', bygroups(Name.Label, Punctuation)),
            (r'[A-Za-z_][A-Za-z_0-9]*', Name),
            (r'[()]', Punctuation),
            # PEG operators: optional/repeat/choice/predicates
            (r'[?+*/&!~]', Operator),
            # character class, including POSIX [:class:] forms
            (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
            # repetition range like 1..3
            (r'([0-9]*)(\.\.)([0-9]*)',
             bygroups(Number.Integer, Operator, Number.Integer)),
            # <ModuleName> extension annotation
            (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
            (r'{', Punctuation, 'inline_module'),
            (r'\.', String.Regex),
        ],
        # Inline Ruby block attached to an expression; braces must balance.
        'inline_module': [
            (r'{', Other, 'ruby'),
            (r'}', Punctuation, '#pop'),
            (r'[^{}]+', Other),
        ],
        # Nested braces inside the inline Ruby code (content lexed later
        # by the delegating TreetopLexer).
        'ruby': [
            (r'{', Other, '#push'),
            (r'}', Other, '#pop'),
            (r'[^{}]+', Other),
        ],
        # Whitespace and # comments, shared by most states.
        'space': [
            (r'[ \t\n\r]+', Whitespace),
            (r'#[^\n]*', Comment.Single),
        ],
        # `end` keyword closing module/grammar/rule blocks.
        'end': [
            (r'end\b', Keyword, '#pop'),
        ],
    }
|
||||
|
||||
class TreetopLexer(DelegatingLexer):
    """
    A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.

    *New in Pygments 1.6.*
    """

    name = 'Treetop'
    aliases = ['treetop']
    filenames = ['*.treetop', '*.tt']

    def __init__(self, **options):
        # TreetopBaseLexer marks inline-module content as Other; the
        # Ruby lexer then highlights those embedded code blocks.
        super(TreetopLexer, self).__init__(
            RubyLexer, TreetopBaseLexer, **options)
|
424
wakatime/packages/pygments/lexers/shell.py
Normal file
424
wakatime/packages/pygments/lexers/shell.py
Normal file
|
@ -0,0 +1,424 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.shell
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for various shells.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
|
||||
from pygments.token import Punctuation, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Generic
|
||||
from pygments.util import shebang_matches
|
||||
|
||||
|
||||
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
|
||||
'PowerShellLexer', 'ShellSessionLexer']
|
||||
|
||||
line_re = re.compile('.*?\n')
|
||||
|
||||
|
||||
class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|)sh shell scripts.

    *New in Pygments 0.6.*
    """

    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*']
    mimetypes = ['application/x-sh', 'application/x-shellscript']

    tokens = {
        'root': [
            include('basic'),
            # arithmetic / command / parameter expansion openers
            (r'\$\(\(', Keyword, 'math'),
            (r'\$\(', Keyword, 'paren'),
            (r'\${#?', Keyword, 'curly'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        # Keywords, builtins, assignments, operators, heredocs.
        'basic': [
            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
             r'select|continue|until|esac|elif)\s*\b',
             Keyword),
            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),
            # variable assignment: name, optional space, '='
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            # heredoc: match up to the repeated delimiter (\2)
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r'&&|\|\|', Operator),
        ],
        # Quoted strings, words, numbers and plain variable references.
        'data': [
            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r';', Punctuation),
            (r'&', Punctuation),
            (r'\|', Punctuation),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),
            (r'<', Text),
        ],
        # Inside ${...}: modifiers then fall back to root rules.
        'curly': [
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        # Inside $( ... ) command substitution.
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        # Inside $(( ... )) arithmetic expansion.
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
            (r'\d+', Number),
            include('root'),
        ],
        # Inside `...` command substitution.
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }

    def analyse_text(text):
        # Shebang is conclusive; a leading "$ " hints at a session
        # transcript, which gets a weak score.
        # NOTE(review): the shebang pattern covers ba/z/plain sh but not
        # ksh, even though 'ksh' is in aliases — confirm intent.
        if shebang_matches(text, r'(ba|z|)sh'):
            return 1
        if text.startswith('$ '):
            return 0.2
|
||||
|
||||
|
||||
class BashSessionLexer(Lexer):
    """
    Lexer for simplistic shell sessions.

    *New in Pygments 1.1.*
    """

    name = 'Bash Session'
    aliases = ['console']
    filenames = ['*.sh-session']
    mimetypes = ['application/x-shell-session']

    def get_tokens_unprocessed(self, text):
        # Split the transcript line by line: lines with a recognized
        # prompt are accumulated as shell code (lexed by BashLexer with
        # the prompts re-inserted); other lines are emitted as output.
        bashlexer = BashLexer(**self.options)

        pos = 0          # offset of the first line of the current code run
        curcode = ''     # accumulated command text (prompts stripped)
        insertions = []  # (offset-in-curcode, prompt tokens) pairs

        for match in line_re.finditer(text):
            line = match.group()
            # group(1) = prompt (user@host style, ending in $/#/%),
            # group(2) = the command text after it
            m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
                         r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()

                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            elif line.startswith('>'):
                # '>' continuation prompt for multi-line commands
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:1])]))
                curcode += line[1:]
            else:
                # Plain output line: first flush any pending command.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a command left pending at end of input.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
|
||||
|
||||
|
||||
class ShellSessionLexer(Lexer):
    """
    Lexer for shell sessions that works with different command prompts

    *New in Pygments 1.6.*
    """

    name = 'Shell Session'
    aliases = ['shell-session']
    filenames = ['*.shell-session']
    mimetypes = ['application/x-sh-session']

    def get_tokens_unprocessed(self, text):
        # Same strategy as BashSessionLexer but with a looser prompt
        # regex ("user@host ... $/#/%") and no continuation-line rule.
        bashlexer = BashLexer(**self.options)

        pos = 0          # offset of the first line of the current code run
        curcode = ''     # accumulated command text (prompts stripped)
        insertions = []  # (offset-in-curcode, prompt tokens) pairs

        for match in line_re.finditer(text):
            line = match.group()
            # group(1) = prompt, group(2) = command text after it
            m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()

                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            else:
                # Output line: flush any pending command first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a command left pending at end of input.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
|
||||
|
||||
|
||||
class BatchLexer(RegexLexer):
    """
    Lexer for the DOS/Windows Batch file format.

    *New in Pygments 0.7.*
    """
    name = 'Batchfile'
    aliases = ['bat', 'dosbatch', 'winbatch']
    filenames = ['*.bat', '*.cmd']
    mimetypes = ['application/x-dos-batch']

    # Batch keywords are case-insensitive; rules may span lines.
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            # Lines can start with @ to prevent echo
            (r'^\s*@', Punctuation),
            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            # If made more specific, make sure you still allow expansions
            # like %~$VAR:zlt
            (r'%%?[~$:\w]+%?', Name.Variable),
            (r'::.*', Comment),  # Technically :: only works at BOL
            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
            # comparison operators used by IF
            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
            include('basic'),
            (r'.', Text),
        ],
        # Arguments of an echo command.
        # NOTE(review): no rule in 'root' pushes this state — looks
        # unreachable as written; confirm against upstream.
        'echo': [
            # Escapes only valid within echo args?
            (r'\^\^|\^<|\^>|\^\|', String.Escape),
            (r'\n', Text, '#pop'),
            include('basic'),
            (r'[^\'"^]+', Text),
        ],
        # Literals, switches, labels and redirections shared by states.
        'basic': [
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            (r'`.*?`', String.Backtick),
            (r'-?\d+', Number),
            (r',', Punctuation),
            (r'=', Operator),
            (r'/\S+', Name),       # /switch
            (r':\w+', Name.Label),  # :label
            (r'\w:\w+', Text),      # drive-qualified path fragment
            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
        ],
    }
|
||||
|
||||
|
||||
class TcshLexer(RegexLexer):
    """
    Lexer for tcsh scripts.

    *New in Pygments 0.10.*
    """

    name = 'Tcsh'
    aliases = ['tcsh', 'csh']
    filenames = ['*.tcsh', '*.csh']
    mimetypes = ['application/x-csh']

    tokens = {
        'root': [
            include('basic'),
            # command and parameter expansion openers
            (r'\$\(', Keyword, 'paren'),
            (r'\${#?', Keyword, 'curly'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        # Keywords, builtins, assignments, operators, heredocs.
        'basic': [
            (r'\b(if|endif|else|while|then|foreach|case|default|'
             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
             Keyword),
            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
             r'source|stop|suspend|source|suspend|telltc|time|'
             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
             r'ver|wait|warp|watchlog|where|which)\s*\b',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),
            # variable assignment: name, optional space, '='
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]+', Operator),
            # heredoc: match up to the repeated delimiter (\2)
            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
        ],
        # Quoted strings, plain words, numbers, variable references.
        'data': [
            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),
        ],
        # Inside ${...}.
        'curly': [
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        # Inside $( ... ).
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        # Inside `...` command substitution.
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }
|
||||
|
||||
|
||||
class PowerShellLexer(RegexLexer):
    """
    For Windows PowerShell code.

    *New in Pygments 1.5.*
    """
    name = 'PowerShell'
    aliases = ['powershell', 'posh', 'ps1', 'psm1']
    filenames = ['*.ps1','*.psm1']
    mimetypes = ['text/x-powershell']

    # PowerShell is case-insensitive; '.' must span newlines for the
    # multiline constructs below.
    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE

    # Language keywords and common attribute/parameter names.
    keywords = (
        'while validateset validaterange validatepattern validatelength '
        'validatecount until trap switch return ref process param parameter in '
        'if global: function foreach for finally filter end elseif else '
        'dynamicparam do default continue cmdletbinding break begin alias \\? '
        '% #script #private #local #global mandatory parametersetname position '
        'valuefrompipeline valuefrompipelinebypropertyname '
        'valuefromremainingarguments helpmessage try catch throw').split()

    # Comparison/logical operators, written as -op in source.
    operators = (
        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
        'lt match ne not notcontains notlike notmatch or regex replace '
        'wildcard').split()

    # Approved cmdlet verbs (the Verb- part of Verb-Noun names).
    verbs = (
        'write where wait use update unregister undo trace test tee take '
        'suspend stop start split sort skip show set send select scroll resume '
        'restore restart resolve resize reset rename remove register receive '
        'read push pop ping out new move measure limit join invoke import '
        'group get format foreach export expand exit enter enable disconnect '
        'disable debug cxnew copy convertto convertfrom convert connect '
        'complete compare clear checkpoint aggregate add').split()

    # Comment-based help sections (.SYNOPSIS, .PARAMETER, ...).
    commenthelp = (
        'component description example externalhelp forwardhelpcategory '
        'forwardhelptargetname functionality inputs link '
        'notes outputs parameter remotehelprunspace role synopsis').split()

    tokens = {
        'root': [
            # we need to count pairs of parentheses for correct highlight
            # of '$(...)' blocks in strings
            (r'\(', Punctuation, 'child'),
            (r'\s+', Text),
            # comment-based help: '# .Synopsis ...' style
            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
             bygroups(Comment, String.Doc, Comment)),
            (r'#[^\n]*?$', Comment),
            # NOTE(review): '(<|<)' is a redundant alternation (both
            # branches are '<'); likely an HTML-unescape artifact of
            # '(&lt;|<)' — behavior is simply matching '<#'.
            (r'(<|<)#', Comment.Multiline, 'multline'),
            (r'@"\n', String.Heredoc, 'heredoc-double'),
            (r"@'\n.*?\n'@", String.Heredoc),
            # escaped syntax
            (r'`[\'"$@-]', Punctuation),
            (r'"', String.Double, 'string'),
            (r"'([^']|'')*'", String.Single),
            (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
             Name.Variable),
            (r'(%s)\b' % '|'.join(keywords), Keyword),
            (r'-(%s)\b' % '|'.join(operators), Operator),
            (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
            (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant),  # .net [type]s
            (r'-[a-z_][a-z0-9_]*', Name),
            (r'\w+', Name),
            (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
        ],
        # Inside a parenthesized expression; nests via 'root'.
        'child': [
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
        # <# ... #> block comment, with embedded .help sections.
        'multline': [
            (r'[^#&.]+', Comment.Multiline),
            # NOTE(review): '#(>|>)' is the same redundant-alternation
            # artifact; it just matches '#>'.
            (r'#(>|>)', Comment.Multiline, '#pop'),
            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
            (r'[#&.]', Comment.Multiline),
        ],
        # Double-quoted string with `escapes and $(...) interpolation.
        'string': [
            (r"`[0abfnrtv'\"\$]", String.Escape),
            (r'[^$`"]+', String.Double),
            (r'\$\(', Punctuation, 'child'),
            (r'""', String.Double),
            (r'[`$]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        # @" ... "@ here-string, also supporting $(...) interpolation.
        'heredoc-double': [
            (r'\n"@', String.Heredoc, '#pop'),
            (r'\$\(', Punctuation, 'child'),
            # NOTE(review): the trailing '"]' in this pattern looks
            # accidental (it requires a literal '"]'); confirm upstream.
            (r'[^@\n]+"]', String.Heredoc),
            (r".", String.Heredoc),
        ]
    }
|
100
wakatime/packages/pygments/lexers/special.py
Normal file
100
wakatime/packages/pygments/lexers/special.py
Normal file
|
@ -0,0 +1,100 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.special
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Special lexers.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
import cStringIO
|
||||
|
||||
from pygments.lexer import Lexer
|
||||
from pygments.token import Token, Error, Text
|
||||
from pygments.util import get_choice_opt, b
|
||||
|
||||
|
||||
__all__ = ['TextLexer', 'RawTokenLexer']
|
||||
|
||||
|
||||
class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']

    def get_tokens_unprocessed(self, text):
        # The whole input is one plain-text token at offset 0.
        yield 0, Text, text
|
||||
|
||||
|
||||
# Cache mapping token-name byte strings (e.g. "Token.Text") to the
# corresponding token objects; shared across RawTokenLexer instances.
_ttype_cache = {}

# Matches one line including its trailing newline. Built from bytes
# because raw token streams are processed without Unicode decoding.
line_re = re.compile(b('.*?\n'))
|
||||
|
||||
class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`. This
    lexer raises exceptions during parsing if the token stream in the
    file is malformed.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = ['raw']
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        # Validate the compress option against the supported values.
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        # NOTE(review): Python 2 only — relies on `unicode` and
        # `cStringIO`, neither of which exists on Python 3.
        if isinstance(text, unicode):
            # raw token stream never has any non-ASCII characters
            text = text.encode('ascii')
        if self.compress == 'gz':
            import gzip
            gzipfile = gzip.GzipFile('', 'rb', 9, cStringIO.StringIO(text))
            text = gzipfile.read()
        elif self.compress == 'bz2':
            import bz2
            text = bz2.decompress(text)

        # do not call Lexer.get_tokens() because we do not want Unicode
        # decoding to occur, and stripping is not optional.
        text = text.strip(b('\n')) + b('\n')
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        # Each input line is "<Token.Type>\t<repr of value>"; rebuild
        # the (offset, tokentype, value) triples from it.
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().split(b('\t'), 1)
            except ValueError:
                # Malformed line: emit it verbatim as an Error token.
                val = match.group().decode(self.encoding)
                ttype = Error
            else:
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    # Walk the dotted name ("Token.Literal.String...")
                    # down the token hierarchy, validating each part.
                    ttype = Token
                    ttypes = ttypestr.split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                # strip the surrounding quotes of the repr and unescape
                val = val[2:-2].decode('unicode-escape')
            yield length, ttype, val
            length += len(val)
|
559
wakatime/packages/pygments/lexers/sql.py
Normal file
559
wakatime/packages/pygments/lexers/sql.py
Normal file
|
@ -0,0 +1,559 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.sql
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for various SQL dialects and related interactive sessions.
|
||||
|
||||
Postgres specific lexers:
|
||||
|
||||
`PostgresLexer`
|
||||
A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
|
||||
lexer are:
|
||||
|
||||
- keywords and data types list parsed from the PG docs (run the
|
||||
`_postgres_builtins` module to update them);
|
||||
- Content of $-strings parsed using a specific lexer, e.g. the content
|
||||
of a PL/Python function is parsed using the Python lexer;
|
||||
- parse PG specific constructs: E-strings, $-strings, U&-strings,
|
||||
different operators and punctuation.
|
||||
|
||||
`PlPgsqlLexer`
|
||||
A lexer for the PL/pgSQL language. Adds a few specific construct on
|
||||
top of the PG SQL lexer (such as <<label>>).
|
||||
|
||||
`PostgresConsoleLexer`
|
||||
A lexer to highlight an interactive psql session:
|
||||
|
||||
- identifies the prompt and does its best to detect the end of command
|
||||
in multiline statement where not all the lines are prefixed by a
|
||||
prompt, telling them apart from the output;
|
||||
- highlights errors in the output and notification levels;
|
||||
- handles psql backslash commands.
|
||||
|
||||
The ``tests/examplefiles`` contains a few test files with data to be
|
||||
parsed by these lexers.
|
||||
|
||||
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
|
||||
from pygments.token import Punctuation, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Generic
|
||||
from pygments.lexers import get_lexer_by_name, ClassNotFound
|
||||
|
||||
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
|
||||
PSEUDO_TYPES, PLPGSQL_KEYWORDS
|
||||
|
||||
|
||||
__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
|
||||
'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
|
||||
|
||||
line_re = re.compile('.*?\n')
|
||||
|
||||
# Finds a "LANGUAGE plpgsql"-style clause near a $-quoted body.
language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)

def language_callback(lexer, match):
    """Parse the content of a $-string using a lexer

    The lexer is chosen looking for a nearby LANGUAGE.
    """
    l = None
    # Look for LANGUAGE just after the $-string first...
    m = language_re.match(lexer.text[match.end():match.end()+100])
    if m is not None:
        l = lexer._get_lexer(m.group(1))
    else:
        # ...otherwise take the last LANGUAGE mentioned shortly before it.
        m = list(language_re.finditer(
            lexer.text[max(0, match.start()-100):match.start()]))
        if m:
            l = lexer._get_lexer(m[-1].group(1))

    if l:
        # Delimiters stay String; the body is re-lexed with the
        # language-specific lexer.
        yield (match.start(1), String, match.group(1))
        for x in l.get_tokens_unprocessed(match.group(2)):
            yield x
        yield (match.start(3), String, match.group(3))

    else:
        # No language found: emit the whole $-string as a plain string.
        yield (match.start(), String, match.group())
|
||||
|
||||
|
||||
class PostgresBase(object):
    """Base class for Postgres-related lexers.

    This is implemented as a mixin to avoid the Lexer metaclass kicking in.
    this way the different lexer don't have a common Lexer ancestor. If they
    had, _tokens could be created on this ancestor and not updated for the
    other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
    seem to suggest that regexp lexers are not really subclassable.
    """
    def get_tokens_unprocessed(self, text, *args):
        # Have a copy of the entire text to be used by `language_callback`.
        self.text = text
        for x in super(PostgresBase, self).get_tokens_unprocessed(
                text, *args):
            yield x

    def _get_lexer(self, lang):
        # Resolve a LANGUAGE name (e.g. 'plpythonu') to a Pygments lexer,
        # trying progressively stripped variants of the name.
        if lang.lower() == 'sql':
            return get_lexer_by_name('postgresql', **self.options)

        tries = [ lang ]
        if lang.startswith('pl'):
            tries.append(lang[2:])      # plpython -> python
        if lang.endswith('u'):
            tries.append(lang[:-1])     # 'u' = untrusted variant
        if lang.startswith('pl') and lang.endswith('u'):
            tries.append(lang[2:-1])    # plpythonu -> python

        for l in tries:
            try:
                return get_lexer_by_name(l, **self.options)
            except ClassNotFound:
                pass
        else:
            # TODO: better logging
            # print >>sys.stderr, "language not found:", lang
            return None
|
||||
|
||||
|
||||
class PostgresLexer(PostgresBase, RegexLexer):
    """
    Lexer for the PostgreSQL dialect of SQL.

    *New in Pygments 1.5.*
    """

    name = 'PostgreSQL SQL dialect'
    aliases = ['postgresql', 'postgres']
    mimetypes = ['text/x-postgresql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*?\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            # data types (multi-word names allow flexible whitespace)
            (r'(' + '|'.join([s.replace(" ", "\s+")
                              for s in DATATYPES + PSEUDO_TYPES])
                  + r')\b', Name.Builtin),
            (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
            (r'[+*/<>=~!@#%^&|`?-]+', Operator),
            (r'::', Operator),  # cast
            (r'\$\d+', Name.Variable),  # positional parameter
            (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
            (r'[0-9]+', Number.Integer),
            # standard, E- and U&-quoted strings ('' is an escaped quote)
            (r"(E|U&)?'(''|[^'])*'", String.Single),
            (r'(U&)?"(""|[^"])*"', String.Name),  # quoted identifier
            # $tag$ ... $tag$ body, re-lexed per nearby LANGUAGE clause
            (r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),

            # psql variable in SQL
            (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),

            (r'[;:()\[\]\{\},\.]', Punctuation),
        ],
        # /* ... */ comments, which nest in PostgreSQL.
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/\*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ],
    }
|
||||
|
||||
|
||||
class PlPgsqlLexer(PostgresBase, RegexLexer):
    """
    Handle the extra syntax in Pl/pgSQL language.

    *New in Pygments 1.5.*
    """
    name = 'PL/pgSQL'
    aliases = ['plpgsql']
    mimetypes = ['text/x-plpgsql']

    flags = re.IGNORECASE
    # Deep-enough copy of the SQL lexer's states so they can be edited
    # below without mutating PostgresLexer.tokens.
    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())

    # extend the keywords list
    for i, pattern in enumerate(tokens['root']):
        if pattern[1] == Keyword:
            tokens['root'][i] = (
                r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
                Keyword)
            del i
            break
    else:
        assert 0, "SQL keywords not found"

    # Add specific PL/pgSQL rules (before the SQL ones)
    tokens['root'][:0] = [
        (r'\%[a-z][a-z0-9_]*\b', Name.Builtin),     # actually, a datatype
        (r':=', Operator),
        (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
        (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo),   # #variable_conflict
    ]
|
||||
|
||||
|
||||
class PsqlRegexLexer(PostgresBase, RegexLexer):
    """
    Extend the PostgresLexer adding support specific for psql commands.

    This is not a complete psql lexer yet as it lacks prompt support
    and output rendering.
    """

    name = 'PostgreSQL console - regexp based lexer'
    aliases = []    # not public

    flags = re.IGNORECASE
    # Copy the SQL states so the additions below stay local to this class.
    tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())

    # A backslash command (e.g. \d, \copy) switches to a dedicated state.
    tokens['root'].append(
        (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
    tokens['psql-command'] = [
        # NOTE(review): pushes 'root' rather than '#pop' at end of line,
        # so the stack grows per command — confirm this is intentional.
        (r'\n', Text, 'root'),
        (r'\s+', Text),
        (r'\\[^\s]+', Keyword.Pseudo),
        # psql variable reference, optionally quoted
        (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
        (r"'(''|[^'])*'", String.Single),
        (r"`([^`])*`", String.Backtick),
        (r"[^\s]+", String.Symbol),
    ]
|
||||
|
||||
# Regular expressions used by PostgresConsoleLexer to carve a psql
# session transcript into prompts, commands, output and server messages.

# A psql prompt: optional database name, then one of =-($'" followed by
# '#' (superuser) or '>' (regular user).
re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
# End of a SQL command: a semicolon, optionally followed by a -- comment.
re_end_command = re.compile(r';\s*(--.*?)?$')
# A psql backslash command with its surrounding whitespace, e.g. " \d t ".
# An earlier duplicate definition (r'\s*\\') was immediately shadowed by
# this one and has been removed.
re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
# Severities that switch the output token to Generic.Error.
re_error = re.compile(r'(ERROR|FATAL):')
# Any server message: severity prefix (group 1) and the rest of the line
# including its newline (group 2).
re_message = re.compile(
    r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
    r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
|
||||
|
||||
|
||||
class lookahead(object):
    """Wrap an iterator and allow pushing back an item."""
    # NOTE(review): implements the Python 2 iterator protocol (`next`,
    # `self.iter.next()`); would need `__next__` on Python 3.
    def __init__(self, x):
        self.iter = iter(x)
        self._nextitem = None   # pushed-back item, returned first if set
    def __iter__(self):
        return self
    def send(self, i):
        # Push `i` back so the next call to next() returns it again.
        self._nextitem = i
        return i
    def next(self):
        if self._nextitem is not None:
            ni = self._nextitem
            self._nextitem = None
            return ni
        return self.iter.next()
|
||||
|
||||
|
||||
class PostgresConsoleLexer(Lexer):
    """
    Lexer for psql sessions.

    *New in Pygments 1.5.*
    """

    name = 'PostgreSQL console (psql)'
    aliases = ['psql', 'postgresql-console', 'postgres-console']
    mimetypes = ['text/x-postgresql-psql']

    def get_tokens_unprocessed(self, data):
        sql = PsqlRegexLexer(**self.options)

        lines = lookahead(line_re.findall(data))

        # prompt-output cycle
        while 1:

            # consume the lines of the command: start with an optional prompt
            # and continue until the end of command is detected
            curcode = ''
            insertions = []
            while 1:
                try:
                    line = lines.next()
                except StopIteration:
                    # allow the emission of partially collected items
                    # the repl loop will be broken below
                    break

                # Identify a shell prompt in case of psql commandline example
                if line.startswith('$') and not curcode:
                    lexer = get_lexer_by_name('console', **self.options)
                    for x in lexer.get_tokens_unprocessed(line):
                        yield x
                    break

                # Identify a psql prompt
                mprompt = re_prompt.match(line)
                if mprompt is not None:
                    # Record the prompt for re-insertion, keep only the
                    # command text in curcode.
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt, mprompt.group())]))
                    curcode += line[len(mprompt.group()):]
                else:
                    curcode += line

                # Check if this is the end of the command
                # TODO: better handle multiline comments at the end with
                # a lexer with an external state?
                if re_psql_command.match(curcode) \
                   or re_end_command.search(curcode):
                    break

            # Emit the combined stream of command and prompt(s)
            for item in do_insertions(insertions,
                                      sql.get_tokens_unprocessed(curcode)):
                yield item

            # Emit the output lines
            out_token = Generic.Output
            while 1:
                # NOTE(review): StopIteration from lines.next() here ends
                # the generator — that is how the outer `while 1` exits.
                line = lines.next()
                mprompt = re_prompt.match(line)
                if mprompt is not None:
                    # push the line back to have it processed by the prompt
                    lines.send(line)
                    break

                mmsg = re_message.match(line)
                if mmsg is not None:
                    # Errors escalate the remaining output to Generic.Error.
                    if mmsg.group(1).startswith("ERROR") \
                       or mmsg.group(1).startswith("FATAL"):
                        out_token = Generic.Error
                    yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
                    yield (mmsg.start(2), out_token, mmsg.group(2))
                else:
                    yield (0, out_token, line)
|
||||
|
||||
|
||||
class SqlLexer(RegexLexer):
    """
    Lexer for Structured Query Language. Currently, this lexer does
    not recognize any special syntax except ANSI SQL.
    """

    name = 'SQL'
    aliases = ['sql']
    filenames = ['*.sql']
    mimetypes = ['text/x-sql']

    # Keywords match case-insensitively; the word lists below are uppercase
    # only by convention.
    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*?\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            # ANSI SQL keywords.  NOTE(review): several entries inherited
            # from upstream were misspelled (COALSECE, TRANSATION, ...) and
            # could therefore never match real SQL; they have been corrected
            # here.  The stray space that used to precede TEMP made that
            # alternative unreachable (the \s+ rule above always consumed
            # the space first) and has been removed.
            (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
             r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
             r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
             r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
             r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
             r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
             r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
             r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
             r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
             r'COALESCE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
             r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
             r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
             r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
             r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
             r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
             r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONDING|COUNT|'
             r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
             r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
             r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
             r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
             r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
             r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
             r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
             r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
             r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
             r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
             r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
             r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
             r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
             r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
             r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
             r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
             r'INCLUDING|INCREMENT|INDEX|INDICATOR|INFIX|INHERITS|INITIALIZE|'
             r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
             r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
             r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
             r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
             r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
             r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
             r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
             r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
             r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
             r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
             r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
             r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
             r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMETER_NAME|'
             r'PARAMETER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
             r'PARAMETER_SPECIFIC_NAME|PARAMETER_SPECIFIC_SCHEMA|PARTIAL|'
             r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
             r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
             r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
             r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
             r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
             r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
             r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
             r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVEPOINT|SCALE|SCHEMA|'
             r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
             r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
             r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
             r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
             r'SQLEXCEPTION|SQLSTATE|SQLWARNING|STABLE|START|STATE|STATEMENT|'
             r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
             r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
             r'SYSTEM_USER|TABLE|TABLE_NAME|TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
             r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
             r'TRAILING|TRANSACTION|TRANSACTIONS_COMMITTED|'
             r'TRANSACTIONS_ROLLED_BACK|TRANSACTION_ACTIVE|TRANSFORM|'
             r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
             r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
             r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
             r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
             r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
             r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
             r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
             r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
            # Built-in data types.
            (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
             r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
             r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
             Name.Builtin),
            (r'[+*/<>=~!@#%^&|`?-]', Operator),
            (r'[0-9]+', Number.Integer),
            # TODO: Backslash escapes?
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Symbol),  # not a real string literal in ANSI SQL
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'[;:()\[\],\.]', Punctuation)
        ],
        # /* ... */ comments may nest; each '/*' pushes this state again.
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/\*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ]
    }
|
||||
|
||||
|
||||
class MySqlLexer(RegexLexer):
    """
    Special lexer for MySQL.
    """

    name = 'MySQL'
    aliases = ['mysql']
    mimetypes = ['text/x-mysql']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(#|--\s+).*?\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            # The float rule must come before the integer rule, otherwise
            # the integer rule consumes the digits before the decimal point
            # and e.g. ``1.5`` lexes as Integer '.' Integer.  The exponent
            # (and its sign) are optional so ``1.5``, ``1.5e10`` and
            # ``1.5e-10`` are all floats.
            (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
            (r'[0-9]+', Number.Integer),
            # TODO: add backslash escapes
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Double),
            (r"`(``|[^`])*`", String.Symbol),
            (r'[+*/<>=~!@#%^&|`?-]', Operator),
            # Column/data types, optionally followed by an opening paren
            # for a length/precision spec, e.g. ``varchar(255)``.
            (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
             r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
             r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
             r'precision|real|numeric|dec|decimal|timestamp|year|char|'
             r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
             bygroups(Keyword.Type, Text, Punctuation)),
            (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
             r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
             r'character|check|collate|column|condition|constraint|continue|'
             r'convert|create|cross|current_date|current_time|'
             r'current_timestamp|current_user|cursor|database|databases|'
             r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
             r'declare|default|delayed|delete|desc|describe|deterministic|'
             r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
             r'enclosed|escaped|exists|exit|explain|fetch|float|float4|float8'
             r'|for|force|foreign|from|fulltext|grant|group|having|'
             r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
             r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
             r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
             r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
             r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
             r'minute_microsecond|minute_second|mod|modifies|natural|'
             r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
             r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
             r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
             r'replace|require|restrict|return|revoke|right|rlike|schema|'
             r'schemas|second_microsecond|select|sensitive|separator|set|'
             r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
             r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
             r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
             r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
             r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
             r'varying|when|where|while|with|write|x509|xor|year_month|'
             r'zerofill)\b', Keyword),
            # TODO: this list is not complete
            (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
            (r'(true|false|null)', Name.Constant),
            # An identifier directly followed by '(' is a function call.
            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
            # User/system variables: @var, @@global.var, ...
            (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
            (r'[;:()\[\],\.]', Punctuation)
        ],
        # /* ... */ comments may nest; each '/*' pushes this state again.
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/\*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ]
    }
|
||||
|
||||
|
||||
class SqliteConsoleLexer(Lexer):
    """
    Lexer for example sessions using sqlite3.

    *New in Pygments 0.11.*
    """

    name = 'sqlite3con'
    aliases = ['sqlite3']
    filenames = ['*.sqlite3-console']
    mimetypes = ['text/x-sqlite3-console']

    def get_tokens_unprocessed(self, data):
        # The SQL typed at the prompts is highlighted with a full SqlLexer;
        # the prompts themselves are spliced back in via do_insertions().
        sql = SqlLexer(**self.options)

        curcode = ''
        insertions = []
        for match in line_re.finditer(data):
            line = match.group()
            # Both the primary and the continuation prompt are 8 chars wide.
            if line.startswith('sqlite> ') or line.startswith('   ...> '):
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:8])]))
                curcode += line[8:]
                continue
            # A non-prompt line: first flush any SQL collected so far ...
            if curcode:
                for item in do_insertions(insertions,
                                          sql.get_tokens_unprocessed(curcode)):
                    yield item
                curcode = ''
                insertions = []
            # ... then emit the line itself, as a traceback for error
            # messages and as plain output otherwise.
            if line.startswith('SQL error: '):
                token = Generic.Traceback
            else:
                token = Generic.Output
            yield (match.start(), token, line)
        # Flush a trailing command that was not followed by output.
        if curcode:
            for item in do_insertions(insertions,
                                      sql.get_tokens_unprocessed(curcode)):
                yield item
|
1742
wakatime/packages/pygments/lexers/templates.py
Normal file
1742
wakatime/packages/pygments/lexers/templates.py
Normal file
File diff suppressed because it is too large
Load diff
1893
wakatime/packages/pygments/lexers/text.py
Normal file
1893
wakatime/packages/pygments/lexers/text.py
Normal file
File diff suppressed because it is too large
Load diff
4045
wakatime/packages/pygments/lexers/web.py
Normal file
4045
wakatime/packages/pygments/lexers/web.py
Normal file
File diff suppressed because it is too large
Load diff
Loading…
Add table
Add a link
Reference in a new issue