upgrade pygments to v2.2.0
parent e5ee05a14f
commit fd6f5779d4
204 changed files with 9566 additions and 2387 deletions
@@ -440,7 +440,6 @@ class DependenciesTestCase(utils.TestCase):
u('lines'): 20,
}
expected_dependencies = [
'googlecode.javacv',
'colorfulwolf.webcamapplet',
'foobar',
]

@@ -22,20 +22,18 @@
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

__version__ = '2.1.3'
__docformat__ = 'restructuredtext'

__all__ = ['lex', 'format', 'highlight']


import sys

from pygments.util import StringIO, BytesIO

__version__ = '2.2.0'
__docformat__ = 'restructuredtext'

__all__ = ['lex', 'format', 'highlight']


def lex(code, lexer):
"""
@@ -44,9 +42,9 @@ def lex(code, lexer):
try:
return lexer.get_tokens(code)
except TypeError as err:
if isinstance(err.args[0], str) and \
('unbound method get_tokens' in err.args[0] or
'missing 1 required positional argument' in err.args[0]):
if (isinstance(err.args[0], str) and
('unbound method get_tokens' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
raise TypeError('lex() argument must be a lexer instance, '
'not a class')
raise
@@ -68,9 +66,9 @@ def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
else:
formatter.format(tokens, outfile)
except TypeError as err:
if isinstance(err.args[0], str) and \
('unbound method format' in err.args[0] or
'missing 1 required positional argument' in err.args[0]):
if (isinstance(err.args[0], str) and
('unbound method format' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
raise TypeError('format() argument must be a formatter instance, '
'not a class')
raise

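
For context, the condition rewritten above (backslash continuation replaced by parentheses, behaviour unchanged) is what makes lex() and format() raise a helpful TypeError when they are handed a lexer or formatter class instead of an instance. A minimal usage sketch, with an arbitrary snippet of highlighted code:

    from pygments import highlight, lex, format
    from pygments.lexers import PythonLexer
    from pygments.formatters import TerminalFormatter

    code = "print('hello')"

    # Pass instances; passing the classes themselves (PythonLexer instead
    # of PythonLexer()) triggers the TypeError raised above.
    tokens = lex(code, PythonLexer())
    print(format(tokens, TerminalFormatter()))

    # highlight() is the usual one-call convenience wrapper.
    print(highlight(code, PythonLexer(), TerminalFormatter()))
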
@@ -5,12 +5,13 @@

Command line interface.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

from __future__ import print_function

import os
import sys
import getopt
from textwrap import dedent
@@ -19,19 +20,20 @@ from pygments import __version__, highlight
from pygments.util import ClassNotFound, OptionError, docstring_headline, \
guess_decode, guess_decode_from_terminal, terminal_encoding
from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
get_lexer_for_filename, find_lexer_class_for_filename
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
from pygments.lexers.special import TextLexer
from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
get_formatter_for_filename, find_formatter_class
load_formatter_from_file, get_formatter_for_filename, find_formatter_class
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.filters import get_all_filters, find_filter_class
from pygments.styles import get_all_styles, get_style_by_name


USAGE = """\
Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
[-O <options>] [-P <option=value>] [-s] [-v] [-o <outfile>] [<infile>]
[-O <options>] [-P <option=value>] [-s] [-v] [-x] [-o <outfile>] [<infile>]

%s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
%s -L [<which> ...]
@@ -57,6 +59,14 @@ Likewise, <formatter> is a formatter name, and will be guessed from
the extension of the output file name. If no output file is given,
the terminal formatter will be used by default.

The additional option -x allows custom lexers and formatters to be
loaded from a .py file relative to the current working directory. For
example, ``-l ./customlexer.py -x``. By default, this option expects a
file with a class named CustomLexer or CustomFormatter; you can also
specify your own class name with a colon (``-l ./lexer.py:MyLexer``).
Users should be very careful not to use this option with untrusted files,
because it will import and run them.

With the -O option, you can give the lexer and formatter a comma-
separated list of options, e.g. ``-O bg=light,python=cool``.

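
To illustrate the -x flag documented above, here is a minimal sketch of a lexer file it could load; the file name customlexer.py and the toy token rules are placeholders, not part of this commit:

    # customlexer.py
    # invoked as: pygmentize -x -l ./customlexer.py -f html -o out.html input.txt
    # (use ./customlexer.py:MyLexer to pick a class not named CustomLexer)
    from pygments.lexer import RegexLexer
    from pygments.token import Comment, Text


    class CustomLexer(RegexLexer):
        """Toy lexer: '#' comments, everything else plain text."""
        name = 'Custom'
        aliases = ['custom']
        filenames = ['*.custom']

        tokens = {
            'root': [
                (r'#[^\n]*', Comment.Single),
                (r'[^\n#]+', Text),
                (r'[#\n]', Text),
            ],
        }
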
@@ -223,7 +233,7 @@ def main_inner(popts, args, usage):
return 0

if opts.pop('-V', None) is not None:
print('Pygments version %s, (c) 2006-2015 by Georg Brandl.' % __version__)
print('Pygments version %s, (c) 2006-2017 by Georg Brandl.' % __version__)
return 0

# handle ``pygmentize -L``
@@ -314,17 +324,35 @@ def main_inner(popts, args, usage):
F_opts = _parse_filters(F_opts)
opts.pop('-F', None)

allow_custom_lexer_formatter = False
# -x: allow custom (eXternal) lexers and formatters
if opts.pop('-x', None) is not None:
allow_custom_lexer_formatter = True

# select lexer
lexer = None

# given by name?
lexername = opts.pop('-l', None)
if lexername:
try:
lexer = get_lexer_by_name(lexername, **parsed_opts)
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
# custom lexer, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in lexername:
try:
if ':' in lexername:
filename, name = lexername.rsplit(':', 1)
lexer = load_lexer_from_file(filename, name,
**parsed_opts)
else:
lexer = load_lexer_from_file(lexername, **parsed_opts)
except ClassNotFound as err:
print('Error:', err, file=sys.stderr)
return 1
else:
try:
lexer = get_lexer_by_name(lexername, **parsed_opts)
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1

# read input code
code = None
@@ -401,11 +429,24 @@ def main_inner(popts, args, usage):
outfn = opts.pop('-o', None)
fmter = opts.pop('-f', None)
if fmter:
try:
fmter = get_formatter_by_name(fmter, **parsed_opts)
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
# custom formatter, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in fmter:
try:
if ':' in fmter:
file, fmtername = fmter.rsplit(':', 1)
fmter = load_formatter_from_file(file, fmtername,
**parsed_opts)
else:
fmter = load_formatter_from_file(fmter, **parsed_opts)
except ClassNotFound as err:
print('Error:', err, file=sys.stderr)
return 1
else:
try:
fmter = get_formatter_by_name(fmter, **parsed_opts)
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1

if outfn:
if not fmter:
@@ -421,7 +462,10 @@ def main_inner(popts, args, usage):
return 1
else:
if not fmter:
fmter = TerminalFormatter(**parsed_opts)
if '256' in os.environ.get('TERM', ''):
fmter = Terminal256Formatter(**parsed_opts)
else:
fmter = TerminalFormatter(**parsed_opts)
if sys.version_info > (3,):
# Python 3: we have to use .buffer to get a binary stream
outfile = sys.stdout.buffer
@@ -495,7 +539,7 @@ def main(args=sys.argv):
usage = USAGE % ((args[0],) * 6)

try:
popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:vhVHgs")
popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:vhVHgsx")
except getopt.GetoptError:
print(usage, file=sys.stderr)
return 2

@@ -5,25 +5,25 @@
|
|||
|
||||
Format colored console output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
esc = "\x1b["
|
||||
|
||||
codes = {}
|
||||
codes[""] = ""
|
||||
codes["reset"] = esc + "39;49;00m"
|
||||
codes[""] = ""
|
||||
codes["reset"] = esc + "39;49;00m"
|
||||
|
||||
codes["bold"] = esc + "01m"
|
||||
codes["faint"] = esc + "02m"
|
||||
codes["standout"] = esc + "03m"
|
||||
codes["bold"] = esc + "01m"
|
||||
codes["faint"] = esc + "02m"
|
||||
codes["standout"] = esc + "03m"
|
||||
codes["underline"] = esc + "04m"
|
||||
codes["blink"] = esc + "05m"
|
||||
codes["overline"] = esc + "06m"
|
||||
codes["blink"] = esc + "05m"
|
||||
codes["overline"] = esc + "06m"
|
||||
|
||||
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
|
||||
"purple", "teal", "lightgray"]
|
||||
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
|
||||
"purple", "teal", "lightgray"]
|
||||
light_colors = ["darkgray", "red", "green", "yellow", "blue",
|
||||
"fuchsia", "turquoise", "white"]
|
||||
|
||||
|
@@ -35,10 +35,10 @@ for d, l in zip(dark_colors, light_colors):
|
|||
|
||||
del d, l, x
|
||||
|
||||
codes["darkteal"] = codes["turquoise"]
|
||||
codes["darkteal"] = codes["turquoise"]
|
||||
codes["darkyellow"] = codes["brown"]
|
||||
codes["fuscia"] = codes["fuchsia"]
|
||||
codes["white"] = codes["bold"]
|
||||
codes["fuscia"] = codes["fuchsia"]
|
||||
codes["white"] = codes["bold"]
|
||||
|
||||
|
||||
def reset_color():
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Module that implements the default filter.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -34,10 +34,10 @@ def simplefilter(f):
|
|||
yield ttype, value.lower()
|
||||
"""
|
||||
return type(f.__name__, (FunctionFilter,), {
|
||||
'function': f,
|
||||
'__module__': getattr(f, '__module__'),
|
||||
'__doc__': f.__doc__
|
||||
})
|
||||
'__module__': getattr(f, '__module__'),
|
||||
'__doc__': f.__doc__,
|
||||
'function': f,
|
||||
})
|
||||
|
||||
|
||||
class Filter(object):
|
||||
|
|
|
@@ -6,7 +6,7 @@
|
|||
Module containing filter lookup functions and default
|
||||
filters.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Base formatter class.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -65,7 +65,7 @@ class Formatter(object):
|
|||
|
||||
def __init__(self, **options):
|
||||
self.style = _lookup_style(options.get('style', 'default'))
|
||||
self.full = get_bool_opt(options, 'full', False)
|
||||
self.full = get_bool_opt(options, 'full', False)
|
||||
self.title = options.get('title', '')
|
||||
self.encoding = options.get('encoding', None) or None
|
||||
if self.encoding in ('guess', 'chardet'):
|
||||
|
|
|
@@ -5,7 +5,7 @@

Pygments formatters.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@@ -20,7 +20,7 @@ from pygments.plugin import find_plugin_formatters
from pygments.util import ClassNotFound, itervalues

__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
'get_all_formatters'] + list(FORMATTERS)
'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)

_formatter_cache = {} # classes by name
_pattern_cache = {}

@@ -79,6 +79,41 @@ def get_formatter_by_name(_alias, **options):
return cls(**options)


def load_formatter_from_file(filename, formattername="CustomFormatter",
**options):
"""Load a formatter from a file.

This method expects a file located relative to the current working
directory, which contains a class named CustomFormatter. By default,
it expects the Formatter to be named CustomFormatter; you can specify
your own class name as the second argument to this function.

Users should be very careful with the input, because this method
is equivalent to running eval on the input file.

Raises ClassNotFound if there are any problems importing the Formatter.

.. versionadded:: 2.2
"""
try:
# This empty dict will contain the namespace for the exec'd file
custom_namespace = {}
exec(open(filename, 'rb').read(), custom_namespace)
# Retrieve the class `formattername` from that namespace
if formattername not in custom_namespace:
raise ClassNotFound('no valid %s class found in %s' %
(formattername, filename))
formatter_class = custom_namespace[formattername]
# And finally instantiate it with the options
return formatter_class(**options)
except IOError as err:
raise ClassNotFound('cannot read %s' % filename)
except ClassNotFound as err:
raise
except Exception as err:
raise ClassNotFound('error when loading custom formatter: %s' % err)


def get_formatter_for_filename(fn, **options):
"""Lookup and instantiate a formatter by filename pattern.

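
A usage sketch for the new load_formatter_from_file() helper (the file name myformatter.py and the option values are hypothetical):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import load_formatter_from_file

    # Executes ./myformatter.py and instantiates the CustomFormatter class
    # defined there; keyword options go straight to the formatter's __init__.
    fmter = load_formatter_from_file('myformatter.py', style='colorful')

    # A differently named class can be selected explicitly:
    # fmter = load_formatter_from_file('myformatter.py', 'MyFormatter')

    print(highlight("print('hi')", PythonLexer(), fmter))
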
@@ -9,7 +9,7 @@
|
|||
|
||||
Do not alter the FORMATTERS dictionary by hand.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
BBcode formatter.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Formatter for HTML output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,10 +5,11 @@
|
|||
|
||||
Formatter for Pixmap output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pygments.formatter import Formatter
|
||||
|
@@ -47,6 +48,7 @@ STYLES = {
|
|||
# A sane default for modern systems
|
||||
DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
|
||||
DEFAULT_FONT_NAME_WIN = 'Courier New'
|
||||
DEFAULT_FONT_NAME_MAC = 'Courier New'
|
||||
|
||||
|
||||
class PilNotAvailable(ImportError):
|
||||
|
@@ -71,6 +73,10 @@ class FontManager(object):
|
|||
if not font_name:
|
||||
self.font_name = DEFAULT_FONT_NAME_WIN
|
||||
self._create_win()
|
||||
elif sys.platform.startswith('darwin'):
|
||||
if not font_name:
|
||||
self.font_name = DEFAULT_FONT_NAME_MAC
|
||||
self._create_mac()
|
||||
else:
|
||||
if not font_name:
|
||||
self.font_name = DEFAULT_FONT_NAME_NIX
|
||||
|
@@ -111,6 +117,37 @@ class FontManager(object):
|
|||
else:
|
||||
self.fonts[style] = self.fonts['NORMAL']
|
||||
|
||||
def _get_mac_font_path(self, font_map, name, style):
|
||||
return font_map.get((name + ' ' + style).strip().lower())
|
||||
|
||||
def _create_mac(self):
|
||||
font_map = {}
|
||||
for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
|
||||
'/Library/Fonts/', '/System/Library/Fonts/'):
|
||||
font_map.update(
|
||||
((os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
|
||||
for f in os.listdir(font_dir) if f.lower().endswith('ttf')))
|
||||
|
||||
for name in STYLES['NORMAL']:
|
||||
path = self._get_mac_font_path(font_map, self.font_name, name)
|
||||
if path is not None:
|
||||
self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
|
||||
break
|
||||
else:
|
||||
raise FontNotFound('No usable fonts named: "%s"' %
|
||||
self.font_name)
|
||||
for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
|
||||
for stylename in STYLES[style]:
|
||||
path = self._get_mac_font_path(font_map, self.font_name, stylename)
|
||||
if path is not None:
|
||||
self.fonts[style] = ImageFont.truetype(path, self.font_size)
|
||||
break
|
||||
else:
|
||||
if style == 'BOLDITALIC':
|
||||
self.fonts[style] = self.fonts['BOLD']
|
||||
else:
|
||||
self.fonts[style] = self.fonts['NORMAL']
|
||||
|
||||
def _lookup_win(self, key, basename, styles, fail=False):
|
||||
for suffix in ('', ' (TrueType)'):
|
||||
for style in styles:
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Formatter for IRC output
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Formatter for LaTeX fancyvrb output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Other formatters: NullFormatter, RawTokenFormatter.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
A formatter that generates RTF files.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Formatter for SVG output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Formatter for terminal output with ANSI sequences.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -11,7 +11,7 @@
|
|||
|
||||
Formatter version 1.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -27,6 +27,8 @@
|
|||
import sys
|
||||
|
||||
from pygments.formatter import Formatter
|
||||
from pygments.console import codes
|
||||
from pygments.style import ansicolors
|
||||
|
||||
|
||||
__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
|
||||
|
@@ -47,9 +49,21 @@ class EscapeSequence:
|
|||
def color_string(self):
|
||||
attrs = []
|
||||
if self.fg is not None:
|
||||
attrs.extend(("38", "5", "%i" % self.fg))
|
||||
if self.fg in ansicolors:
|
||||
esc = codes[self.fg[5:]]
|
||||
if ';01m' in esc:
|
||||
self.bold = True
|
||||
# extract fg color code.
|
||||
attrs.append(esc[2:4])
|
||||
else:
|
||||
attrs.extend(("38", "5", "%i" % self.fg))
|
||||
if self.bg is not None:
|
||||
attrs.extend(("48", "5", "%i" % self.bg))
|
||||
if self.bg in ansicolors:
|
||||
esc = codes[self.bg[5:]]
|
||||
# extract fg color code, add 10 for bg.
|
||||
attrs.append(str(int(esc[2:4])+10))
|
||||
else:
|
||||
attrs.extend(("48", "5", "%i" % self.bg))
|
||||
if self.bold:
|
||||
attrs.append("01")
|
||||
if self.underline:
|
||||
|
@@ -91,6 +105,11 @@ class Terminal256Formatter(Formatter):

.. versionadded:: 0.9

.. versionchanged:: 2.2
If the used style defines foreground colors in the form ``#ansi*``, then
`Terminal256Formatter` will map these to non extended foreground color.
See :ref:`AnsiTerminalStyle` for more information.

Options accepted:

`style`
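
For illustration, a minimal style sketch that uses the ``#ansi*`` color names this change maps onto plain terminal colors (the style class, token choices, and the exact ``#ansi...`` spellings follow the Pygments 2.2 convention and are illustrative only):

    from pygments.style import Style
    from pygments.token import Comment, Keyword

    class MyAnsiStyle(Style):
        # Terminal256Formatter emits the plain ANSI escape for these colors
        # instead of approximating them with extended 256-color codes.
        styles = {
            Keyword: '#ansidarkblue',
            Comment: 'italic #ansilightgray',
        }

    # highlight(code, PythonLexer(), Terminal256Formatter(style=MyAnsiStyle))
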
@@ -169,6 +188,10 @@ class Terminal256Formatter(Formatter):
|
|||
|
||||
def _color_index(self, color):
|
||||
index = self.best_match.get(color, None)
|
||||
if color in ansicolors:
|
||||
# strip the `#ansi` part and look up code
|
||||
index = color
|
||||
self.best_match[color] = index
|
||||
if index is None:
|
||||
try:
|
||||
rgb = int(str(color), 16)
|
||||
|
@@ -185,9 +208,14 @@
|
|||
def _setup_styles(self):
|
||||
for ttype, ndef in self.style:
|
||||
escape = EscapeSequence()
|
||||
if ndef['color']:
|
||||
# get foreground from ansicolor if set
|
||||
if ndef['ansicolor']:
|
||||
escape.fg = self._color_index(ndef['ansicolor'])
|
||||
elif ndef['color']:
|
||||
escape.fg = self._color_index(ndef['color'])
|
||||
if ndef['bgcolor']:
|
||||
if ndef['bgansicolor']:
|
||||
escape.bg = self._color_index(ndef['bgansicolor'])
|
||||
elif ndef['bgcolor']:
|
||||
escape.bg = self._color_index(ndef['bgcolor'])
|
||||
if self.usebold and ndef['bold']:
|
||||
escape.bold = True
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Base lexer classes.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -319,8 +319,8 @@ def bygroups(*args):
|
|||
if data is not None:
|
||||
if ctx:
|
||||
ctx.pos = match.start(i + 1)
|
||||
for item in action(
|
||||
lexer, _PseudoMatch(match.start(i + 1), data), ctx):
|
||||
for item in action(lexer,
|
||||
_PseudoMatch(match.start(i + 1), data), ctx):
|
||||
if item:
|
||||
yield item
|
||||
if ctx:
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Pygments lexers.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -22,7 +22,7 @@ from pygments.util import ClassNotFound, itervalues, guess_decode


__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
'guess_lexer'] + list(LEXERS)
'guess_lexer', 'load_lexer_from_file'] + list(LEXERS)

_lexer_cache = {}
_pattern_cache = {}

@@ -72,6 +72,28 @@ def find_lexer_class(name):
return cls


def find_lexer_class_by_name(_alias):
"""Lookup a lexer class by alias.

Like `get_lexer_by_name`, but does not instantiate the class.

.. versionadded:: 2.2
"""
if not _alias:
raise ClassNotFound('no lexer for alias %r found' % _alias)
# lookup builtin lexers
for module_name, name, aliases, _, _ in itervalues(LEXERS):
if _alias.lower() in aliases:
if name not in _lexer_cache:
_load_lexers(module_name)
return _lexer_cache[name]
# continue with lexers from setuptools entrypoints
for cls in find_plugin_lexers():
if _alias.lower() in cls.aliases:
return cls
raise ClassNotFound('no lexer for alias %r found' % _alias)


def get_lexer_by_name(_alias, **options):
"""Get a lexer by an alias.

@@ -93,6 +115,40 @@ def get_lexer_by_name(_alias, **options):
raise ClassNotFound('no lexer for alias %r found' % _alias)


def load_lexer_from_file(filename, lexername="CustomLexer", **options):
"""Load a lexer from a file.

This method expects a file located relative to the current working
directory, which contains a Lexer class. By default, it expects the
Lexer to be name CustomLexer; you can specify your own class name
as the second argument to this function.

Users should be very careful with the input, because this method
is equivalent to running eval on the input file.

Raises ClassNotFound if there are any problems importing the Lexer.

.. versionadded:: 2.2
"""
try:
# This empty dict will contain the namespace for the exec'd file
custom_namespace = {}
exec(open(filename, 'rb').read(), custom_namespace)
# Retrieve the class `lexername` from that namespace
if lexername not in custom_namespace:
raise ClassNotFound('no valid %s class found in %s' %
(lexername, filename))
lexer_class = custom_namespace[lexername]
# And finally instantiate it with the options
return lexer_class(**options)
except IOError as err:
raise ClassNotFound('cannot read %s' % filename)
except ClassNotFound as err:
raise
except Exception as err:
raise ClassNotFound('error when loading custom lexer: %s' % err)


def find_lexer_class_for_filename(_fn, code=None):
"""Get a lexer for a filename.

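
A usage sketch for the two new lookup helpers added here (the file name mylexer.py is hypothetical; stripnl is a standard lexer option):

    from pygments.lexers import find_lexer_class_by_name, load_lexer_from_file

    # Returns the lexer class itself (no instance), or raises ClassNotFound.
    PyLexer = find_lexer_class_by_name('python')
    lexer = PyLexer(stripnl=False)

    # Executes ./mylexer.py and instantiates its CustomLexer class.
    custom = load_lexer_from_file('mylexer.py', stripnl=False)
    # Or pick a specific class defined in that file:
    # custom = load_lexer_from_file('mylexer.py', 'MyLexer', stripnl=False)
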
@@ -127,8 +183,8 @@ def find_lexer_class_for_filename(_fn, code=None):
|
|||
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
|
||||
# to find lexers which need it overridden.
|
||||
if code:
|
||||
return cls.analyse_text(code) + bonus
|
||||
return cls.priority + bonus
|
||||
return cls.analyse_text(code) + bonus, cls.__name__
|
||||
return cls.priority + bonus, cls.__name__
|
||||
|
||||
if matches:
|
||||
matches.sort(key=get_rating)
|
||||
|
|
|
@@ -10,7 +10,7 @@
|
|||
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
|
||||
for function and variable names.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
ANSI Common Lisp builtins.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -8,7 +8,7 @@
|
|||
|
||||
File may be also used as standalone generator for aboves.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -3,7 +3,7 @@
|
|||
pygments.lexers._csound_builtins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Built-in Lasso types, traits, methods, and members.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -471,6 +471,10 @@ BUILTINS = {
|
|||
'curl_netrc_ignored',
|
||||
'curl_netrc_optional',
|
||||
'curl_netrc_required',
|
||||
'curl_sslversion_default',
|
||||
'curl_sslversion_sslv2',
|
||||
'curl_sslversion_sslv3',
|
||||
'curl_sslversion_tlsv1',
|
||||
'curl_version_asynchdns',
|
||||
'curl_version_debug',
|
||||
'curl_version_gssnegotiate',
|
||||
|
@@ -1102,6 +1106,7 @@ BUILTINS = {
|
|||
'json_open_array',
|
||||
'json_open_object',
|
||||
'json_period',
|
||||
'json_positive',
|
||||
'json_quote_double',
|
||||
'json_rpccall',
|
||||
'json_serialize',
|
||||
|
@@ -1229,6 +1234,7 @@ BUILTINS = {
|
|||
'lcapi_loadmodules',
|
||||
'lcapi_updatedatasourceslist',
|
||||
'ldap_scope_base',
|
||||
'ldap_scope_children',
|
||||
'ldap_scope_onelevel',
|
||||
'ldap_scope_subtree',
|
||||
'library_once',
|
||||
|
@@ -4044,6 +4050,7 @@ MEMBERS = {
|
|||
'iscntrl',
|
||||
'isdigit',
|
||||
'isdir',
|
||||
'isdirectory',
|
||||
'isempty',
|
||||
'isemptyelement',
|
||||
'isfirststep',
|
||||
|
|
|
@@ -9,60 +9,71 @@
|
|||
|
||||
Do not edit the MODULES dict by hand.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
MODULES = {'basic': ('_G',
|
||||
'_VERSION',
|
||||
'assert',
|
||||
'collectgarbage',
|
||||
'dofile',
|
||||
'error',
|
||||
'getfenv',
|
||||
'getmetatable',
|
||||
'ipairs',
|
||||
'load',
|
||||
'loadfile',
|
||||
'loadstring',
|
||||
'next',
|
||||
'pairs',
|
||||
'pcall',
|
||||
'print',
|
||||
'rawequal',
|
||||
'rawget',
|
||||
'rawlen',
|
||||
'rawset',
|
||||
'select',
|
||||
'setfenv',
|
||||
'setmetatable',
|
||||
'tonumber',
|
||||
'tostring',
|
||||
'type',
|
||||
'unpack',
|
||||
'xpcall'),
|
||||
'bit32': ('bit32.arshift',
|
||||
'bit32.band',
|
||||
'bit32.bnot',
|
||||
'bit32.bor',
|
||||
'bit32.btest',
|
||||
'bit32.bxor',
|
||||
'bit32.extract',
|
||||
'bit32.lrotate',
|
||||
'bit32.lshift',
|
||||
'bit32.replace',
|
||||
'bit32.rrotate',
|
||||
'bit32.rshift'),
|
||||
'coroutine': ('coroutine.create',
|
||||
'coroutine.isyieldable',
|
||||
'coroutine.resume',
|
||||
'coroutine.running',
|
||||
'coroutine.status',
|
||||
'coroutine.wrap',
|
||||
'coroutine.yield'),
|
||||
'debug': ('debug.debug',
|
||||
'debug.getfenv',
|
||||
'debug.gethook',
|
||||
'debug.getinfo',
|
||||
'debug.getlocal',
|
||||
'debug.getmetatable',
|
||||
'debug.getregistry',
|
||||
'debug.getupvalue',
|
||||
'debug.setfenv',
|
||||
'debug.getuservalue',
|
||||
'debug.sethook',
|
||||
'debug.setlocal',
|
||||
'debug.setmetatable',
|
||||
'debug.setupvalue',
|
||||
'debug.traceback'),
|
||||
'debug.setuservalue',
|
||||
'debug.traceback',
|
||||
'debug.upvalueid',
|
||||
'debug.upvaluejoin'),
|
||||
'io': ('io.close',
|
||||
'io.flush',
|
||||
'io.input',
|
||||
|
@@ -71,17 +82,20 @@ MODULES = {'basic': ('_G',
|
|||
'io.output',
|
||||
'io.popen',
|
||||
'io.read',
|
||||
'io.stderr',
|
||||
'io.stdin',
|
||||
'io.stdout',
|
||||
'io.tmpfile',
|
||||
'io.type',
|
||||
'io.write'),
|
||||
'math': ('math.abs',
|
||||
'math.acos',
|
||||
'math.asin',
|
||||
'math.atan2',
|
||||
'math.atan',
|
||||
'math.atan2',
|
||||
'math.ceil',
|
||||
'math.cosh',
|
||||
'math.cos',
|
||||
'math.cosh',
|
||||
'math.deg',
|
||||
'math.exp',
|
||||
'math.floor',
|
||||
|
@@ -89,29 +103,34 @@ MODULES = {'basic': ('_G',
|
|||
'math.frexp',
|
||||
'math.huge',
|
||||
'math.ldexp',
|
||||
'math.log10',
|
||||
'math.log',
|
||||
'math.max',
|
||||
'math.maxinteger',
|
||||
'math.min',
|
||||
'math.mininteger',
|
||||
'math.modf',
|
||||
'math.pi',
|
||||
'math.pow',
|
||||
'math.rad',
|
||||
'math.random',
|
||||
'math.randomseed',
|
||||
'math.sinh',
|
||||
'math.sin',
|
||||
'math.sinh',
|
||||
'math.sqrt',
|
||||
'math.tan',
|
||||
'math.tanh',
|
||||
'math.tan'),
|
||||
'modules': ('module',
|
||||
'require',
|
||||
'math.tointeger',
|
||||
'math.type',
|
||||
'math.ult'),
|
||||
'modules': ('package.config',
|
||||
'package.cpath',
|
||||
'package.loaded',
|
||||
'package.loadlib',
|
||||
'package.path',
|
||||
'package.preload',
|
||||
'package.seeall'),
|
||||
'package.searchers',
|
||||
'package.searchpath',
|
||||
'require'),
|
||||
'os': ('os.clock',
|
||||
'os.date',
|
||||
'os.difftime',
|
||||
|
@@ -133,19 +152,37 @@ MODULES = {'basic': ('_G',
|
|||
'string.len',
|
||||
'string.lower',
|
||||
'string.match',
|
||||
'string.pack',
|
||||
'string.packsize',
|
||||
'string.rep',
|
||||
'string.reverse',
|
||||
'string.sub',
|
||||
'string.unpack',
|
||||
'string.upper'),
|
||||
'table': ('table.concat',
|
||||
'table.insert',
|
||||
'table.maxn',
|
||||
'table.move',
|
||||
'table.pack',
|
||||
'table.remove',
|
||||
'table.sort')}
|
||||
|
||||
'table.sort',
|
||||
'table.unpack'),
|
||||
'utf8': ('utf8.char',
|
||||
'utf8.charpattern',
|
||||
'utf8.codepoint',
|
||||
'utf8.codes',
|
||||
'utf8.len',
|
||||
'utf8.offset')}
|
||||
|
||||
if __name__ == '__main__': # pragma: no cover
|
||||
import re
|
||||
import sys
|
||||
|
||||
# urllib ends up wanting to import a module called 'math' -- if
|
||||
# pygments/lexers is in the path, this ends badly.
|
||||
for i in range(len(sys.path)-1, -1, -1):
|
||||
if sys.path[i].endswith('/lexers'):
|
||||
del sys.path[i]
|
||||
|
||||
try:
|
||||
from urllib import urlopen
|
||||
except ImportError:
|
||||
|
@@ -196,7 +233,7 @@ if __name__ == '__main__': # pragma: no cover
|
|||
|
||||
def get_newest_version():
|
||||
f = urlopen('http://www.lua.org/manual/')
|
||||
r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
|
||||
r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
|
||||
for line in f:
|
||||
m = r.match(line)
|
||||
if m is not None:
|
||||
|
@@ -204,7 +241,7 @@ if __name__ == '__main__': # pragma: no cover
|
|||
|
||||
def get_lua_functions(version):
|
||||
f = urlopen('http://www.lua.org/manual/%s/' % version)
|
||||
r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
|
||||
r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
|
||||
functions = []
|
||||
for line in f:
|
||||
m = r.match(line)
|
||||
|
@@ -236,15 +273,22 @@ if __name__ == '__main__': # pragma: no cover
|
|||
|
||||
def run():
|
||||
version = get_newest_version()
|
||||
print('> Downloading function index for Lua %s' % version)
|
||||
functions = get_lua_functions(version)
|
||||
print('> %d functions found:' % len(functions))
|
||||
functions = set()
|
||||
for v in ('5.2', version):
|
||||
print('> Downloading function index for Lua %s' % v)
|
||||
f = get_lua_functions(v)
|
||||
print('> %d functions found, %d new:' %
|
||||
(len(f), len(set(f) - functions)))
|
||||
functions |= set(f)
|
||||
|
||||
functions = sorted(functions)
|
||||
|
||||
modules = {}
|
||||
for full_function_name in functions:
|
||||
print('>> %s' % full_function_name)
|
||||
m = get_function_module(full_function_name)
|
||||
modules.setdefault(m, []).append(full_function_name)
|
||||
modules = {k: tuple(v) for k, v in modules.iteritems()}
|
||||
|
||||
regenerate(__file__, modules)
|
||||
|
||||
|
|
|
@@ -9,7 +9,7 @@
|
|||
|
||||
Do not alter the LEXERS dictionary by hand.
|
||||
|
||||
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -24,8 +24,12 @@ LEXERS = {
|
|||
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
|
||||
'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
|
||||
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
|
||||
'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
|
||||
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
|
||||
'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
|
||||
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
|
||||
'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
|
||||
'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
|
||||
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
|
||||
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
|
||||
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
|
||||
|
@@ -45,16 +49,18 @@ LEXERS = {
|
|||
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
|
||||
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
|
||||
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
|
||||
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
|
||||
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
|
||||
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
|
||||
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
|
||||
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
|
||||
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
|
||||
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
|
||||
'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)),
|
||||
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
|
||||
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
|
||||
'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
|
||||
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
|
||||
'BoogieLexer': ('pygments.lexers.esoteric', 'Boogie', ('boogie',), ('*.bpl',), ()),
|
||||
'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
|
||||
'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
|
||||
'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()),
|
||||
'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
|
||||
|
@@ -67,6 +73,8 @@ LEXERS = {
|
|||
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
|
||||
'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
|
||||
'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
|
||||
'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
|
||||
'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
|
||||
'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
|
||||
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
|
||||
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
|
||||
|
@@ -78,6 +86,7 @@ LEXERS = {
|
|||
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
|
||||
'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
|
||||
'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
|
||||
'CleanLexer': ('pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
|
||||
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
|
||||
'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
|
||||
'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
|
||||
|
@@ -94,6 +103,7 @@ LEXERS = {
|
|||
'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
|
||||
'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
|
||||
'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
|
||||
'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
|
||||
'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
|
||||
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc',), ()),
|
||||
'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
|
||||
|
@@ -111,7 +121,7 @@ LEXERS = {
|
|||
'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
|
||||
'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
|
||||
'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
|
||||
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
|
||||
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
|
||||
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
|
||||
'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
|
||||
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
|
||||
|
@@ -144,6 +154,8 @@ LEXERS = {
|
|||
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
|
||||
'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
|
||||
'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
|
||||
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
|
||||
'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
|
||||
'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
|
||||
'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
|
||||
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
|
||||
|
@@ -168,6 +180,7 @@ LEXERS = {
|
|||
'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
|
||||
'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
|
||||
'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
|
||||
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
|
||||
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
|
||||
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
|
||||
'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
|
||||
|
@@ -189,7 +202,6 @@ LEXERS = {
|
|||
'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
|
||||
'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
|
||||
'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
|
||||
'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)),
|
||||
'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
|
||||
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
|
||||
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
|
||||
|
@@ -200,11 +212,14 @@ LEXERS = {
|
|||
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
|
||||
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
|
||||
'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
|
||||
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
|
||||
'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)),
|
||||
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
|
||||
'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
|
||||
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
|
||||
'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
|
||||
'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
|
||||
'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
|
||||
'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
|
||||
'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
|
||||
'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
|
||||
|
@@ -238,6 +253,7 @@ LEXERS = {
|
|||
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
|
||||
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
|
||||
'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
|
||||
'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)),
|
||||
'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
|
||||
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
|
||||
'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
|
||||
|
@@ -248,6 +264,7 @@ LEXERS = {
|
|||
'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
|
||||
'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
|
||||
'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
|
||||
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
|
||||
'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
|
||||
'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
|
||||
'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
|
||||
|
@@ -264,17 +281,19 @@ LEXERS = {
|
|||
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
|
||||
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
|
||||
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
|
||||
'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
|
||||
'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
|
||||
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
|
||||
'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
|
||||
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
|
||||
'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
|
||||
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
|
||||
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
|
||||
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
|
||||
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
|
||||
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
|
||||
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
|
||||
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
|
||||
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
|
||||
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
|
||||
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
|
||||
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
|
||||
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
|
||||
'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
|
||||
|
@@ -307,6 +326,7 @@ LEXERS = {
|
|||
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
|
||||
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
|
||||
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
|
||||
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
|
||||
'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
|
||||
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
|
||||
'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
|
||||
|
@@ -318,6 +338,7 @@ LEXERS = {
|
|||
'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
|
||||
'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
|
||||
'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
|
||||
'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()),
|
||||
'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
|
||||
'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
|
||||
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
|
||||
|
@@ -347,6 +368,7 @@ LEXERS = {
|
|||
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
|
||||
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
|
||||
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs', '*.rs.in'), ('text/rust',)),
|
||||
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
|
||||
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
|
||||
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
|
||||
'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
|
||||
|
@@ -356,11 +378,13 @@ LEXERS = {
|
|||
'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
|
||||
'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
|
||||
'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
|
||||
'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
|
||||
'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
|
||||
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
|
||||
'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
|
||||
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
|
||||
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
|
||||
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
|
||||
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
|
||||
'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
|
||||
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
|
||||
|
@ -369,12 +393,14 @@ LEXERS = {
|
|||
'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
|
||||
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
|
||||
'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
|
||||
'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
|
||||
'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
|
||||
'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
|
||||
'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
|
||||
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
|
||||
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
|
||||
'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
|
||||
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
|
||||
'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
|
||||
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
|
||||
'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
|
||||
|
@ -386,12 +412,18 @@ LEXERS = {
|
|||
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
|
||||
'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
|
||||
'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
|
||||
'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
|
||||
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
|
||||
'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
|
||||
'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
|
||||
'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
|
||||
'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts',), ('text/x-typescript',)),
|
||||
'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)),
|
||||
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
|
||||
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
|
||||
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.ts', '*.txt'), ('text/x-typoscript',)),
|
||||
'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
|
||||
'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
|
||||
'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
|
||||
'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
|
||||
'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
|
||||
'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
|
||||
|
@ -403,6 +435,8 @@ LEXERS = {
|
|||
'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
|
||||
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
|
||||
'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
|
||||
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
|
||||
'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
|
||||
'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
|
||||
'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
|
||||
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
|
||||
|
@ -410,8 +444,10 @@ LEXERS = {
|
|||
'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
|
||||
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
|
||||
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
|
||||
'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
|
||||
'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
|
||||
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
|
||||
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
|
||||
'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
|
||||
'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
|
||||
'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
|
||||
|
|
|
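Not part of the diff: each LEXERS entry above maps a lexer class to (module, pretty name, aliases, filename patterns, mimetypes). A minimal sketch of how the registry is consumed, assuming Pygments 2.2.0 is importable:

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

get_lexer_by_name('tsql')           # resolved through the new TransactSqlLexer entry
get_lexer_for_filename('demo.sas')  # matched via the '*.sas' pattern in the SASLexer tuple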
@ -5,7 +5,7 @@
|
|||
|
||||
Builtins for the MqlLexer.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
types = (
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Builtin list for the OpenEdgeLexer.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
internet connection. don't run that at home, use
|
||||
a server ;-)
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Self-updating data files for PostgreSQL lexer.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Builtin list for the ScilabLexer.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
Do not edit the FUNCTIONS list by hand.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
This file contains the names of functions for Stan used by
|
||||
``pygments.lexers.math.StanLexer``. This is for Stan language version 2.8.0.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
419
wakatime/packages/pygments/lexers/_stata_builtins.py
Normal file
|
@ -0,0 +1,419 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers._stata_builtins
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Builtins for Stata
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
||||
builtins_base = (
|
||||
"if", "else", "in", "foreach", "for", "forv", "forva",
|
||||
"forval", "forvalu", "forvalue", "forvalues", "by", "bys",
|
||||
"bysort", "quietly", "qui", "about", "ac",
|
||||
"ac_7", "acprplot", "acprplot_7", "adjust", "ado", "adopath",
|
||||
"adoupdate", "alpha", "ameans", "an", "ano", "anov", "anova",
|
||||
"anova_estat", "anova_terms", "anovadef", "aorder", "ap", "app",
|
||||
"appe", "appen", "append", "arch", "arch_dr", "arch_estat",
|
||||
"arch_p", "archlm", "areg", "areg_p", "args", "arima",
|
||||
"arima_dr", "arima_estat", "arima_p", "as", "asmprobit",
|
||||
"asmprobit_estat", "asmprobit_lf", "asmprobit_mfx__dlg",
|
||||
"asmprobit_p", "ass", "asse", "asser", "assert", "avplot",
|
||||
"avplot_7", "avplots", "avplots_7", "bcskew0", "bgodfrey",
|
||||
"binreg", "bip0_lf", "biplot", "bipp_lf", "bipr_lf",
|
||||
"bipr_p", "biprobit", "bitest", "bitesti", "bitowt", "blogit",
|
||||
"bmemsize", "boot", "bootsamp", "bootstrap", "bootstrap_8",
|
||||
"boxco_l", "boxco_p", "boxcox", "boxcox_6", "boxcox_p",
|
||||
"bprobit", "br", "break", "brier", "bro", "brow", "brows",
|
||||
"browse", "brr", "brrstat", "bs", "bs_7", "bsampl_w",
|
||||
"bsample", "bsample_7", "bsqreg", "bstat", "bstat_7", "bstat_8",
|
||||
"bstrap", "bstrap_7", "ca", "ca_estat", "ca_p", "cabiplot",
|
||||
"camat", "canon", "canon_8", "canon_8_p", "canon_estat",
|
||||
"canon_p", "cap", "caprojection", "capt", "captu", "captur",
|
||||
"capture", "cat", "cc", "cchart", "cchart_7", "cci",
|
||||
"cd", "censobs_table", "centile", "cf", "char", "chdir",
|
||||
"checkdlgfiles", "checkestimationsample", "checkhlpfiles",
|
||||
"checksum", "chelp", "ci", "cii", "cl", "class", "classutil",
|
||||
"clear", "cli", "clis", "clist", "clo", "clog", "clog_lf",
|
||||
"clog_p", "clogi", "clogi_sw", "clogit", "clogit_lf",
|
||||
"clogit_p", "clogitp", "clogl_sw", "cloglog", "clonevar",
|
||||
"clslistarray", "cluster", "cluster_measures", "cluster_stop",
|
||||
"cluster_tree", "cluster_tree_8", "clustermat", "cmdlog",
|
||||
"cnr", "cnre", "cnreg", "cnreg_p", "cnreg_sw", "cnsreg",
|
||||
"codebook", "collaps4", "collapse", "colormult_nb",
|
||||
"colormult_nw", "compare", "compress", "conf", "confi",
|
||||
"confir", "confirm", "conren", "cons", "const", "constr",
|
||||
"constra", "constrai", "constrain", "constraint", "continue",
|
||||
"contract", "copy", "copyright", "copysource", "cor", "corc",
|
||||
"corr", "corr2data", "corr_anti", "corr_kmo", "corr_smc",
|
||||
"corre", "correl", "correla", "correlat", "correlate",
|
||||
"corrgram", "cou", "coun", "count", "cox", "cox_p", "cox_sw",
|
||||
"coxbase", "coxhaz", "coxvar", "cprplot", "cprplot_7",
|
||||
"crc", "cret", "cretu", "cretur", "creturn", "cross", "cs",
|
||||
"cscript", "cscript_log", "csi", "ct", "ct_is", "ctset",
|
||||
"ctst_5", "ctst_st", "cttost", "cumsp", "cumsp_7", "cumul",
|
||||
"cusum", "cusum_7", "cutil", "d", "datasig", "datasign",
|
||||
"datasigna", "datasignat", "datasignatu", "datasignatur",
|
||||
"datasignature", "datetof", "db", "dbeta", "de", "dec",
|
||||
"deco", "decod", "decode", "deff", "des", "desc", "descr",
|
||||
"descri", "describ", "describe", "destring", "dfbeta",
|
||||
"dfgls", "dfuller", "di", "di_g", "dir", "dirstats", "dis",
|
||||
"discard", "disp", "disp_res", "disp_s", "displ", "displa",
|
||||
"display", "distinct", "do", "doe", "doed", "doedi",
|
||||
"doedit", "dotplot", "dotplot_7", "dprobit", "drawnorm",
|
||||
"drop", "ds", "ds_util", "dstdize", "duplicates", "durbina",
|
||||
"dwstat", "dydx", "e", "ed", "edi", "edit", "egen",
|
||||
"eivreg", "emdef", "en", "enc", "enco", "encod", "encode",
|
||||
"eq", "erase", "ereg", "ereg_lf", "ereg_p", "ereg_sw",
|
||||
"ereghet", "ereghet_glf", "ereghet_glf_sh", "ereghet_gp",
|
||||
"ereghet_ilf", "ereghet_ilf_sh", "ereghet_ip", "eret",
|
||||
"eretu", "eretur", "ereturn", "err", "erro", "error", "est",
|
||||
"est_cfexist", "est_cfname", "est_clickable", "est_expand",
|
||||
"est_hold", "est_table", "est_unhold", "est_unholdok",
|
||||
"estat", "estat_default", "estat_summ", "estat_vce_only",
|
||||
"esti", "estimates", "etodow", "etof", "etomdy", "ex",
|
||||
"exi", "exit", "expand", "expandcl", "fac", "fact", "facto",
|
||||
"factor", "factor_estat", "factor_p", "factor_pca_rotated",
|
||||
"factor_rotate", "factormat", "fcast", "fcast_compute",
|
||||
"fcast_graph", "fdades", "fdadesc", "fdadescr", "fdadescri",
|
||||
"fdadescrib", "fdadescribe", "fdasav", "fdasave", "fdause",
|
||||
"fh_st", "open", "read", "close",
|
||||
"file", "filefilter", "fillin", "find_hlp_file", "findfile",
|
||||
"findit", "findit_7", "fit", "fl", "fli", "flis", "flist",
|
||||
"for5_0", "form", "forma", "format", "fpredict", "frac_154",
|
||||
"frac_adj", "frac_chk", "frac_cox", "frac_ddp", "frac_dis",
|
||||
"frac_dv", "frac_in", "frac_mun", "frac_pp", "frac_pq",
|
||||
"frac_pv", "frac_wgt", "frac_xo", "fracgen", "fracplot",
|
||||
"fracplot_7", "fracpoly", "fracpred", "fron_ex", "fron_hn",
|
||||
"fron_p", "fron_tn", "fron_tn2", "frontier", "ftodate", "ftoe",
|
||||
"ftomdy", "ftowdate", "g", "gamhet_glf", "gamhet_gp",
|
||||
"gamhet_ilf", "gamhet_ip", "gamma", "gamma_d2", "gamma_p",
|
||||
"gamma_sw", "gammahet", "gdi_hexagon", "gdi_spokes", "ge",
|
||||
"gen", "gene", "gener", "genera", "generat", "generate",
|
||||
"genrank", "genstd", "genvmean", "gettoken", "gl", "gladder",
|
||||
"gladder_7", "glim_l01", "glim_l02", "glim_l03", "glim_l04",
|
||||
"glim_l05", "glim_l06", "glim_l07", "glim_l08", "glim_l09",
|
||||
"glim_l10", "glim_l11", "glim_l12", "glim_lf", "glim_mu",
|
||||
"glim_nw1", "glim_nw2", "glim_nw3", "glim_p", "glim_v1",
|
||||
"glim_v2", "glim_v3", "glim_v4", "glim_v5", "glim_v6",
|
||||
"glim_v7", "glm", "glm_6", "glm_p", "glm_sw", "glmpred", "glo",
|
||||
"glob", "globa", "global", "glogit", "glogit_8", "glogit_p",
|
||||
"gmeans", "gnbre_lf", "gnbreg", "gnbreg_5", "gnbreg_p",
|
||||
"gomp_lf", "gompe_sw", "gomper_p", "gompertz", "gompertzhet",
|
||||
"gomphet_glf", "gomphet_glf_sh", "gomphet_gp", "gomphet_ilf",
|
||||
"gomphet_ilf_sh", "gomphet_ip", "gphdot", "gphpen",
|
||||
"gphprint", "gprefs", "gprobi_p", "gprobit", "gprobit_8", "gr",
|
||||
"gr7", "gr_copy", "gr_current", "gr_db", "gr_describe",
|
||||
"gr_dir", "gr_draw", "gr_draw_replay", "gr_drop", "gr_edit",
|
||||
"gr_editviewopts", "gr_example", "gr_example2", "gr_export",
|
||||
"gr_print", "gr_qscheme", "gr_query", "gr_read", "gr_rename",
|
||||
"gr_replay", "gr_save", "gr_set", "gr_setscheme", "gr_table",
|
||||
"gr_undo", "gr_use", "graph", "graph7", "grebar", "greigen",
|
||||
"greigen_7", "greigen_8", "grmeanby", "grmeanby_7",
|
||||
"gs_fileinfo", "gs_filetype", "gs_graphinfo", "gs_stat",
|
||||
"gsort", "gwood", "h", "hadimvo", "hareg", "hausman",
|
||||
"haver", "he", "heck_d2", "heckma_p", "heckman", "heckp_lf",
|
||||
"heckpr_p", "heckprob", "hel", "help", "hereg", "hetpr_lf",
|
||||
"hetpr_p", "hetprob", "hettest", "hexdump", "hilite",
|
||||
"hist", "hist_7", "histogram", "hlogit", "hlu", "hmeans",
|
||||
"hotel", "hotelling", "hprobit", "hreg", "hsearch", "icd9",
|
||||
"icd9_ff", "icd9p", "iis", "impute", "imtest", "inbase",
|
||||
"include", "inf", "infi", "infil", "infile", "infix", "inp",
|
||||
"inpu", "input", "ins", "insheet", "insp", "inspe",
|
||||
"inspec", "inspect", "integ", "inten", "intreg", "intreg_7",
|
||||
"intreg_p", "intrg2_ll", "intrg_ll", "intrg_ll2", "ipolate",
|
||||
"iqreg", "ir", "irf", "irf_create", "irfm", "iri", "is_svy",
|
||||
"is_svysum", "isid", "istdize", "ivprob_1_lf", "ivprob_lf",
|
||||
"ivprobit", "ivprobit_p", "ivreg", "ivreg_footnote",
|
||||
"ivtob_1_lf", "ivtob_lf", "ivtobit", "ivtobit_p", "jackknife",
|
||||
"jacknife", "jknife", "jknife_6", "jknife_8", "jkstat",
|
||||
"joinby", "kalarma1", "kap", "kap_3", "kapmeier", "kappa",
|
||||
"kapwgt", "kdensity", "kdensity_7", "keep", "ksm", "ksmirnov",
|
||||
"ktau", "kwallis", "l", "la", "lab", "labe", "label",
|
||||
"labelbook", "ladder", "levels", "levelsof", "leverage",
|
||||
"lfit", "lfit_p", "li", "lincom", "line", "linktest",
|
||||
"lis", "list", "lloghet_glf", "lloghet_glf_sh", "lloghet_gp",
|
||||
"lloghet_ilf", "lloghet_ilf_sh", "lloghet_ip", "llogi_sw",
|
||||
"llogis_p", "llogist", "llogistic", "llogistichet",
|
||||
"lnorm_lf", "lnorm_sw", "lnorma_p", "lnormal", "lnormalhet",
|
||||
"lnormhet_glf", "lnormhet_glf_sh", "lnormhet_gp",
|
||||
"lnormhet_ilf", "lnormhet_ilf_sh", "lnormhet_ip", "lnskew0",
|
||||
"loadingplot", "loc", "loca", "local", "log", "logi",
|
||||
"logis_lf", "logistic", "logistic_p", "logit", "logit_estat",
|
||||
"logit_p", "loglogs", "logrank", "loneway", "lookfor",
|
||||
"lookup", "lowess", "lowess_7", "lpredict", "lrecomp", "lroc",
|
||||
"lroc_7", "lrtest", "ls", "lsens", "lsens_7", "lsens_x",
|
||||
"lstat", "ltable", "ltable_7", "ltriang", "lv", "lvr2plot",
|
||||
"lvr2plot_7", "m", "ma", "mac", "macr", "macro", "makecns",
|
||||
"man", "manova", "manova_estat", "manova_p", "manovatest",
|
||||
"mantel", "mark", "markin", "markout", "marksample", "mat",
|
||||
"mat_capp", "mat_order", "mat_put_rr", "mat_rapp", "mata",
|
||||
"mata_clear", "mata_describe", "mata_drop", "mata_matdescribe",
|
||||
"mata_matsave", "mata_matuse", "mata_memory", "mata_mlib",
|
||||
"mata_mosave", "mata_rename", "mata_which", "matalabel",
|
||||
"matcproc", "matlist", "matname", "matr", "matri",
|
||||
"matrix", "matrix_input__dlg", "matstrik", "mcc", "mcci",
|
||||
"md0_", "md1_", "md1debug_", "md2_", "md2debug_", "mds",
|
||||
"mds_estat", "mds_p", "mdsconfig", "mdslong", "mdsmat",
|
||||
"mdsshepard", "mdytoe", "mdytof", "me_derd", "mean",
|
||||
"means", "median", "memory", "memsize", "meqparse", "mer",
|
||||
"merg", "merge", "mfp", "mfx", "mhelp", "mhodds", "minbound",
|
||||
"mixed_ll", "mixed_ll_reparm", "mkassert", "mkdir",
|
||||
"mkmat", "mkspline", "ml", "ml_5", "ml_adjs", "ml_bhhhs",
|
||||
"ml_c_d", "ml_check", "ml_clear", "ml_cnt", "ml_debug",
|
||||
"ml_defd", "ml_e0", "ml_e0_bfgs", "ml_e0_cycle", "ml_e0_dfp",
|
||||
"ml_e0i", "ml_e1", "ml_e1_bfgs", "ml_e1_bhhh", "ml_e1_cycle",
|
||||
"ml_e1_dfp", "ml_e2", "ml_e2_cycle", "ml_ebfg0", "ml_ebfr0",
|
||||
"ml_ebfr1", "ml_ebh0q", "ml_ebhh0", "ml_ebhr0", "ml_ebr0i",
|
||||
"ml_ecr0i", "ml_edfp0", "ml_edfr0", "ml_edfr1", "ml_edr0i",
|
||||
"ml_eds", "ml_eer0i", "ml_egr0i", "ml_elf", "ml_elf_bfgs",
|
||||
"ml_elf_bhhh", "ml_elf_cycle", "ml_elf_dfp", "ml_elfi",
|
||||
"ml_elfs", "ml_enr0i", "ml_enrr0", "ml_erdu0", "ml_erdu0_bfgs",
|
||||
"ml_erdu0_bhhh", "ml_erdu0_bhhhq", "ml_erdu0_cycle",
|
||||
"ml_erdu0_dfp", "ml_erdu0_nrbfgs", "ml_exde", "ml_footnote",
|
||||
"ml_geqnr", "ml_grad0", "ml_graph", "ml_hbhhh", "ml_hd0",
|
||||
"ml_hold", "ml_init", "ml_inv", "ml_log", "ml_max",
|
||||
"ml_mlout", "ml_mlout_8", "ml_model", "ml_nb0", "ml_opt",
|
||||
"ml_p", "ml_plot", "ml_query", "ml_rdgrd", "ml_repor",
|
||||
"ml_s_e", "ml_score", "ml_searc", "ml_technique", "ml_unhold",
|
||||
"mleval", "mlf_", "mlmatbysum", "mlmatsum", "mlog", "mlogi",
|
||||
"mlogit", "mlogit_footnote", "mlogit_p", "mlopts", "mlsum",
|
||||
"mlvecsum", "mnl0_", "mor", "more", "mov", "move", "mprobit",
|
||||
"mprobit_lf", "mprobit_p", "mrdu0_", "mrdu1_", "mvdecode",
|
||||
"mvencode", "mvreg", "mvreg_estat", "n", "nbreg",
|
||||
"nbreg_al", "nbreg_lf", "nbreg_p", "nbreg_sw", "nestreg", "net",
|
||||
"newey", "newey_7", "newey_p", "news", "nl", "nl_7", "nl_9",
|
||||
"nl_9_p", "nl_p", "nl_p_7", "nlcom", "nlcom_p", "nlexp2",
|
||||
"nlexp2_7", "nlexp2a", "nlexp2a_7", "nlexp3", "nlexp3_7",
|
||||
"nlgom3", "nlgom3_7", "nlgom4", "nlgom4_7", "nlinit", "nllog3",
|
||||
"nllog3_7", "nllog4", "nllog4_7", "nlog_rd", "nlogit",
|
||||
"nlogit_p", "nlogitgen", "nlogittree", "nlpred", "no",
|
||||
"nobreak", "noi", "nois", "noisi", "noisil", "noisily", "note",
|
||||
"notes", "notes_dlg", "nptrend", "numlabel", "numlist", "odbc",
|
||||
"old_ver", "olo", "olog", "ologi", "ologi_sw", "ologit",
|
||||
"ologit_p", "ologitp", "on", "one", "onew", "onewa", "oneway",
|
||||
"op_colnm", "op_comp", "op_diff", "op_inv", "op_str", "opr",
|
||||
"opro", "oprob", "oprob_sw", "oprobi", "oprobi_p", "oprobit",
|
||||
"oprobitp", "opts_exclusive", "order", "orthog", "orthpoly",
|
||||
"ou", "out", "outf", "outfi", "outfil", "outfile", "outs",
|
||||
"outsh", "outshe", "outshee", "outsheet", "ovtest", "pac",
|
||||
"pac_7", "palette", "parse", "parse_dissim", "pause", "pca",
|
||||
"pca_8", "pca_display", "pca_estat", "pca_p", "pca_rotate",
|
||||
"pcamat", "pchart", "pchart_7", "pchi", "pchi_7", "pcorr",
|
||||
"pctile", "pentium", "pergram", "pergram_7", "permute",
|
||||
"permute_8", "personal", "peto_st", "pkcollapse", "pkcross",
|
||||
"pkequiv", "pkexamine", "pkexamine_7", "pkshape", "pksumm",
|
||||
"pksumm_7", "pl", "plo", "plot", "plugin", "pnorm",
|
||||
"pnorm_7", "poisgof", "poiss_lf", "poiss_sw", "poisso_p",
|
||||
"poisson", "poisson_estat", "post", "postclose", "postfile",
|
||||
"postutil", "pperron", "pr", "prais", "prais_e", "prais_e2",
|
||||
"prais_p", "predict", "predictnl", "preserve", "print",
|
||||
"pro", "prob", "probi", "probit", "probit_estat", "probit_p",
|
||||
"proc_time", "procoverlay", "procrustes", "procrustes_estat",
|
||||
"procrustes_p", "profiler", "prog", "progr", "progra",
|
||||
"program", "prop", "proportion", "prtest", "prtesti", "pwcorr",
|
||||
"pwd", "q", "s", "qby", "qbys", "qchi", "qchi_7", "qladder",
|
||||
"qladder_7", "qnorm", "qnorm_7", "qqplot", "qqplot_7", "qreg",
|
||||
"qreg_c", "qreg_p", "qreg_sw", "qu", "quadchk", "quantile",
|
||||
"quantile_7", "que", "quer", "query", "range", "ranksum",
|
||||
"ratio", "rchart", "rchart_7", "rcof", "recast", "reclink",
|
||||
"recode", "reg", "reg3", "reg3_p", "regdw", "regr", "regre",
|
||||
"regre_p2", "regres", "regres_p", "regress", "regress_estat",
|
||||
"regriv_p", "remap", "ren", "rena", "renam", "rename",
|
||||
"renpfix", "repeat", "replace", "report", "reshape",
|
||||
"restore", "ret", "retu", "retur", "return", "rm", "rmdir",
|
||||
"robvar", "roccomp", "roccomp_7", "roccomp_8", "rocf_lf",
|
||||
"rocfit", "rocfit_8", "rocgold", "rocplot", "rocplot_7",
|
||||
"roctab", "roctab_7", "rolling", "rologit", "rologit_p",
|
||||
"rot", "rota", "rotat", "rotate", "rotatemat", "rreg",
|
||||
"rreg_p", "ru", "run", "runtest", "rvfplot", "rvfplot_7",
|
||||
"rvpplot", "rvpplot_7", "sa", "safesum", "sample",
|
||||
"sampsi", "sav", "save", "savedresults", "saveold", "sc",
|
||||
"sca", "scal", "scala", "scalar", "scatter", "scm_mine",
|
||||
"sco", "scob_lf", "scob_p", "scobi_sw", "scobit", "scor",
|
||||
"score", "scoreplot", "scoreplot_help", "scree", "screeplot",
|
||||
"screeplot_help", "sdtest", "sdtesti", "se", "search",
|
||||
"separate", "seperate", "serrbar", "serrbar_7", "serset", "set",
|
||||
"set_defaults", "sfrancia", "sh", "she", "shel", "shell",
|
||||
"shewhart", "shewhart_7", "signestimationsample", "signrank",
|
||||
"signtest", "simul", "simul_7", "simulate", "simulate_8",
|
||||
"sktest", "sleep", "slogit", "slogit_d2", "slogit_p", "smooth",
|
||||
"snapspan", "so", "sor", "sort", "spearman", "spikeplot",
|
||||
"spikeplot_7", "spikeplt", "spline_x", "split", "sqreg",
|
||||
"sqreg_p", "sret", "sretu", "sretur", "sreturn", "ssc", "st",
|
||||
"st_ct", "st_hc", "st_hcd", "st_hcd_sh", "st_is", "st_issys",
|
||||
"st_note", "st_promo", "st_set", "st_show", "st_smpl",
|
||||
"st_subid", "stack", "statsby", "statsby_8", "stbase", "stci",
|
||||
"stci_7", "stcox", "stcox_estat", "stcox_fr", "stcox_fr_ll",
|
||||
"stcox_p", "stcox_sw", "stcoxkm", "stcoxkm_7", "stcstat",
|
||||
"stcurv", "stcurve", "stcurve_7", "stdes", "stem", "stepwise",
|
||||
"stereg", "stfill", "stgen", "stir", "stjoin", "stmc", "stmh",
|
||||
"stphplot", "stphplot_7", "stphtest", "stphtest_7",
|
||||
"stptime", "strate", "strate_7", "streg", "streg_sw", "streset",
|
||||
"sts", "sts_7", "stset", "stsplit", "stsum", "sttocc",
|
||||
"sttoct", "stvary", "stweib", "su", "suest", "suest_8",
|
||||
"sum", "summ", "summa", "summar", "summari", "summariz",
|
||||
"summarize", "sunflower", "sureg", "survcurv", "survsum",
|
||||
"svar", "svar_p", "svmat", "svy", "svy_disp", "svy_dreg",
|
||||
"svy_est", "svy_est_7", "svy_estat", "svy_get", "svy_gnbreg_p",
|
||||
"svy_head", "svy_header", "svy_heckman_p", "svy_heckprob_p",
|
||||
"svy_intreg_p", "svy_ivreg_p", "svy_logistic_p", "svy_logit_p",
|
||||
"svy_mlogit_p", "svy_nbreg_p", "svy_ologit_p", "svy_oprobit_p",
|
||||
"svy_poisson_p", "svy_probit_p", "svy_regress_p", "svy_sub",
|
||||
"svy_sub_7", "svy_x", "svy_x_7", "svy_x_p", "svydes",
|
||||
"svydes_8", "svygen", "svygnbreg", "svyheckman", "svyheckprob",
|
||||
"svyintreg", "svyintreg_7", "svyintrg", "svyivreg", "svylc",
|
||||
"svylog_p", "svylogit", "svymarkout", "svymarkout_8",
|
||||
"svymean", "svymlog", "svymlogit", "svynbreg", "svyolog",
|
||||
"svyologit", "svyoprob", "svyoprobit", "svyopts",
|
||||
"svypois", "svypois_7", "svypoisson", "svyprobit", "svyprobt",
|
||||
"svyprop", "svyprop_7", "svyratio", "svyreg", "svyreg_p",
|
||||
"svyregress", "svyset", "svyset_7", "svyset_8", "svytab",
|
||||
"svytab_7", "svytest", "svytotal", "sw", "sw_8", "swcnreg",
|
||||
"swcox", "swereg", "swilk", "swlogis", "swlogit",
|
||||
"swologit", "swoprbt", "swpois", "swprobit", "swqreg",
|
||||
"swtobit", "swweib", "symmetry", "symmi", "symplot",
|
||||
"symplot_7", "syntax", "sysdescribe", "sysdir", "sysuse",
|
||||
"szroeter", "ta", "tab", "tab1", "tab2", "tab_or", "tabd",
|
||||
"tabdi", "tabdis", "tabdisp", "tabi", "table", "tabodds",
|
||||
"tabodds_7", "tabstat", "tabu", "tabul", "tabula", "tabulat",
|
||||
"tabulate", "te", "tempfile", "tempname", "tempvar", "tes",
|
||||
"test", "testnl", "testparm", "teststd", "tetrachoric",
|
||||
"time_it", "timer", "tis", "tob", "tobi", "tobit", "tobit_p",
|
||||
"tobit_sw", "token", "tokeni", "tokeniz", "tokenize",
|
||||
"tostring", "total", "translate", "translator", "transmap",
|
||||
"treat_ll", "treatr_p", "treatreg", "trim", "trnb_cons",
|
||||
"trnb_mean", "trpoiss_d2", "trunc_ll", "truncr_p", "truncreg",
|
||||
"tsappend", "tset", "tsfill", "tsline", "tsline_ex",
|
||||
"tsreport", "tsrevar", "tsrline", "tsset", "tssmooth",
|
||||
"tsunab", "ttest", "ttesti", "tut_chk", "tut_wait", "tutorial",
|
||||
"tw", "tware_st", "two", "twoway", "twoway__fpfit_serset",
|
||||
"twoway__function_gen", "twoway__histogram_gen",
|
||||
"twoway__ipoint_serset", "twoway__ipoints_serset",
|
||||
"twoway__kdensity_gen", "twoway__lfit_serset",
|
||||
"twoway__normgen_gen", "twoway__pci_serset",
|
||||
"twoway__qfit_serset", "twoway__scatteri_serset",
|
||||
"twoway__sunflower_gen", "twoway_ksm_serset", "ty", "typ",
|
||||
"type", "typeof", "u", "unab", "unabbrev", "unabcmd",
|
||||
"update", "us", "use", "uselabel", "var", "var_mkcompanion",
|
||||
"var_p", "varbasic", "varfcast", "vargranger", "varirf",
|
||||
"varirf_add", "varirf_cgraph", "varirf_create", "varirf_ctable",
|
||||
"varirf_describe", "varirf_dir", "varirf_drop", "varirf_erase",
|
||||
"varirf_graph", "varirf_ograph", "varirf_rename", "varirf_set",
|
||||
"varirf_table", "varlist", "varlmar", "varnorm", "varsoc",
|
||||
"varstable", "varstable_w", "varstable_w2", "varwle",
|
||||
"vce", "vec", "vec_fevd", "vec_mkphi", "vec_p", "vec_p_w",
|
||||
"vecirf_create", "veclmar", "veclmar_w", "vecnorm",
|
||||
"vecnorm_w", "vecrank", "vecstable", "verinst", "vers",
|
||||
"versi", "versio", "version", "view", "viewsource", "vif",
|
||||
"vwls", "wdatetof", "webdescribe", "webseek", "webuse",
|
||||
"weib1_lf", "weib2_lf", "weib_lf", "weib_lf0", "weibhet_glf",
|
||||
"weibhet_glf_sh", "weibhet_glfa", "weibhet_glfa_sh",
|
||||
"weibhet_gp", "weibhet_ilf", "weibhet_ilf_sh", "weibhet_ilfa",
|
||||
"weibhet_ilfa_sh", "weibhet_ip", "weibu_sw", "weibul_p",
|
||||
"weibull", "weibull_c", "weibull_s", "weibullhet",
|
||||
"wh", "whelp", "whi", "which", "whil", "while", "wilc_st",
|
||||
"wilcoxon", "win", "wind", "windo", "window", "winexec",
|
||||
"wntestb", "wntestb_7", "wntestq", "xchart", "xchart_7",
|
||||
"xcorr", "xcorr_7", "xi", "xi_6", "xmlsav", "xmlsave",
|
||||
"xmluse", "xpose", "xsh", "xshe", "xshel", "xshell",
|
||||
"xt_iis", "xt_tis", "xtab_p", "xtabond", "xtbin_p",
|
||||
"xtclog", "xtcloglog", "xtcloglog_8", "xtcloglog_d2",
|
||||
"xtcloglog_pa_p", "xtcloglog_re_p", "xtcnt_p", "xtcorr",
|
||||
"xtdata", "xtdes", "xtfront_p", "xtfrontier", "xtgee",
|
||||
"xtgee_elink", "xtgee_estat", "xtgee_makeivar", "xtgee_p",
|
||||
"xtgee_plink", "xtgls", "xtgls_p", "xthaus", "xthausman",
|
||||
"xtht_p", "xthtaylor", "xtile", "xtint_p", "xtintreg",
|
||||
"xtintreg_8", "xtintreg_d2", "xtintreg_p", "xtivp_1",
|
||||
"xtivp_2", "xtivreg", "xtline", "xtline_ex", "xtlogit",
|
||||
"xtlogit_8", "xtlogit_d2", "xtlogit_fe_p", "xtlogit_pa_p",
|
||||
"xtlogit_re_p", "xtmixed", "xtmixed_estat", "xtmixed_p",
|
||||
"xtnb_fe", "xtnb_lf", "xtnbreg", "xtnbreg_pa_p",
|
||||
"xtnbreg_refe_p", "xtpcse", "xtpcse_p", "xtpois", "xtpoisson",
|
||||
"xtpoisson_d2", "xtpoisson_pa_p", "xtpoisson_refe_p", "xtpred",
|
||||
"xtprobit", "xtprobit_8", "xtprobit_d2", "xtprobit_re_p",
|
||||
"xtps_fe", "xtps_lf", "xtps_ren", "xtps_ren_8", "xtrar_p",
|
||||
"xtrc", "xtrc_p", "xtrchh", "xtrefe_p", "xtreg", "xtreg_be",
|
||||
"xtreg_fe", "xtreg_ml", "xtreg_pa_p", "xtreg_re",
|
||||
"xtregar", "xtrere_p", "xtset", "xtsf_ll", "xtsf_llti",
|
||||
"xtsum", "xttab", "xttest0", "xttobit", "xttobit_8",
|
||||
"xttobit_p", "xttrans", "yx", "yxview__barlike_draw",
|
||||
"yxview_area_draw", "yxview_bar_draw", "yxview_dot_draw",
|
||||
"yxview_dropline_draw", "yxview_function_draw",
|
||||
"yxview_iarrow_draw", "yxview_ilabels_draw",
|
||||
"yxview_normal_draw", "yxview_pcarrow_draw",
|
||||
"yxview_pcbarrow_draw", "yxview_pccapsym_draw",
|
||||
"yxview_pcscatter_draw", "yxview_pcspike_draw",
|
||||
"yxview_rarea_draw", "yxview_rbar_draw", "yxview_rbarm_draw",
|
||||
"yxview_rcap_draw", "yxview_rcapsym_draw",
|
||||
"yxview_rconnected_draw", "yxview_rline_draw",
|
||||
"yxview_rscatter_draw", "yxview_rspike_draw",
|
||||
"yxview_spike_draw", "yxview_sunflower_draw", "zap_s", "zinb",
|
||||
"zinb_llf", "zinb_plf", "zip", "zip_llf", "zip_p", "zip_plf",
|
||||
"zt_ct_5", "zt_hc_5", "zt_hcd_5", "zt_is_5", "zt_iss_5",
|
||||
"zt_sho_5", "zt_smp_5", "ztbase_5", "ztcox_5", "ztdes_5",
|
||||
"ztereg_5", "ztfill_5", "ztgen_5", "ztir_5", "ztjoin_5", "ztnb",
|
||||
"ztnb_p", "ztp", "ztp_p", "zts_5", "ztset_5", "ztspli_5",
|
||||
"ztsum_5", "zttoct_5", "ztvary_5", "ztweib_5"
|
||||
)
|
||||
|
||||
builtins_functions = (
|
||||
"Cdhms", "Chms", "Clock", "Cmdyhms", "Cofc", "Cofd", "F",
|
||||
"Fden", "Ftail", "I", "J", "_caller", "abbrev", "abs", "acos",
|
||||
"acosh", "asin", "asinh", "atan", "atan2", "atanh",
|
||||
"autocode", "betaden", "binomial", "binomialp", "binomialtail",
|
||||
"binormal", "bofd", "byteorder", "c", "ceil", "char",
|
||||
"chi2", "chi2den", "chi2tail", "cholesky", "chop", "clip",
|
||||
"clock", "cloglog", "cofC", "cofd", "colnumb", "colsof", "comb",
|
||||
"cond", "corr", "cos", "cosh", "d", "daily", "date", "day",
|
||||
"det", "dgammapda", "dgammapdada", "dgammapdadx", "dgammapdx",
|
||||
"dgammapdxdx", "dhms", "diag", "diag0cnt", "digamma",
|
||||
"dofC", "dofb", "dofc", "dofh", "dofm", "dofq", "dofw",
|
||||
"dofy", "dow", "doy", "dunnettprob", "e", "el", "epsdouble",
|
||||
"epsfloat", "exp", "fileexists", "fileread", "filereaderror",
|
||||
"filewrite", "float", "floor", "fmtwidth", "gammaden",
|
||||
"gammap", "gammaptail", "get", "group", "h", "hadamard",
|
||||
"halfyear", "halfyearly", "has_eprop", "hh", "hhC", "hms",
|
||||
"hofd", "hours", "hypergeometric", "hypergeometricp", "ibeta",
|
||||
"ibetatail", "index", "indexnot", "inlist", "inrange", "int",
|
||||
"inv", "invF", "invFtail", "invbinomial", "invbinomialtail",
|
||||
"invchi2", "invchi2tail", "invcloglog", "invdunnettprob",
|
||||
"invgammap", "invgammaptail", "invibeta", "invibetatail",
|
||||
"invlogit", "invnFtail", "invnbinomial", "invnbinomialtail",
|
||||
"invnchi2", "invnchi2tail", "invnibeta", "invnorm", "invnormal",
|
||||
"invnttail", "invpoisson", "invpoissontail", "invsym", "invt",
|
||||
"invttail", "invtukeyprob", "irecode", "issym", "issymmetric",
|
||||
"itrim", "length", "ln", "lnfact", "lnfactorial", "lngamma",
|
||||
"lnnormal", "lnnormalden", "log", "log10", "logit", "lower",
|
||||
"ltrim", "m", "match", "matmissing", "matrix", "matuniform",
|
||||
"max", "maxbyte", "maxdouble", "maxfloat", "maxint", "maxlong",
|
||||
"mdy", "mdyhms", "mi", "min", "minbyte", "mindouble",
|
||||
"minfloat", "minint", "minlong", "minutes", "missing", "mm",
|
||||
"mmC", "mod", "mofd", "month", "monthly", "mreldif",
|
||||
"msofhours", "msofminutes", "msofseconds", "nF", "nFden",
|
||||
"nFtail", "nbetaden", "nbinomial", "nbinomialp", "nbinomialtail",
|
||||
"nchi2", "nchi2den", "nchi2tail", "nibeta", "norm", "normal",
|
||||
"normalden", "normd", "npnF", "npnchi2", "npnt", "nt", "ntden",
|
||||
"nttail", "nullmat", "plural", "poisson", "poissonp",
|
||||
"poissontail", "proper", "q", "qofd", "quarter", "quarterly",
|
||||
"r", "rbeta", "rbinomial", "rchi2", "real", "recode", "regexm",
|
||||
"regexr", "regexs", "reldif", "replay", "return", "reverse",
|
||||
"rgamma", "rhypergeometric", "rnbinomial", "rnormal", "round",
|
||||
"rownumb", "rowsof", "rpoisson", "rt", "rtrim", "runiform", "s",
|
||||
"scalar", "seconds", "sign", "sin", "sinh", "smallestdouble",
|
||||
"soundex", "soundex_nara", "sqrt", "ss", "ssC", "strcat",
|
||||
"strdup", "string", "strlen", "strlower", "strltrim", "strmatch",
|
||||
"strofreal", "strpos", "strproper", "strreverse", "strrtrim",
|
||||
"strtoname", "strtrim", "strupper", "subinstr", "subinword",
|
||||
"substr", "sum", "sweep", "syminv", "t", "tC", "tan", "tanh",
|
||||
"tc", "td", "tden", "th", "tin", "tm", "tq", "trace",
|
||||
"trigamma", "trim", "trunc", "ttail", "tukeyprob", "tw",
|
||||
"twithin", "uniform", "upper", "vec", "vecdiag", "w", "week",
|
||||
"weekly", "wofd", "word", "wordcount", "year", "yearly",
|
||||
"yh", "ym", "yofd", "yq", "yw"
|
||||
)
|
||||
|
||||
|
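Not part of the diff: builtin tuples such as builtins_base above are typically folded into a lexer's token rules through the words() helper. A hedged sketch (MiniStataLexer and its three keywords are illustrative, not the real StataLexer rules):

from pygments.lexer import RegexLexer, words
from pygments.token import Keyword, Name, Text

class MiniStataLexer(RegexLexer):
    name = 'MiniStata'
    tokens = {
        'root': [
            # the real lexer would pass a tuple like builtins_base here
            (words(('regress', 'summarize', 'generate'),
                   prefix=r'\b', suffix=r'\b'), Keyword),
            (r'\w+', Name),
            (r'\s+', Text),
        ],
    }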
1004
wakatime/packages/pygments/lexers/_tsql_builtins.py
Normal file
File diff suppressed because it is too large
|
@ -5,7 +5,7 @@
|
|||
|
||||
This file is autogenerated by scripts/get_vimkw.py
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for ActionScript and MXML.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Just export lexer classes previously contained in this module.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for computer algebra systems.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -104,9 +104,9 @@ class MathematicaLexer(RegexLexer):
|
|||
(r'#\d*', Name.Variable),
|
||||
(r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
|
||||
|
||||
(r'-?[0-9]+\.[0-9]*', Number.Float),
|
||||
(r'-?[0-9]*\.[0-9]+', Number.Float),
|
||||
(r'-?[0-9]+', Number.Integer),
|
||||
(r'-?\d+\.\d*', Number.Float),
|
||||
(r'-?\d*\.\d+', Number.Float),
|
||||
(r'-?\d+', Number.Integer),
|
||||
|
||||
(words(operators), Operator),
|
||||
(words(punctuation), Punctuation),
|
||||
|
|
|
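Not part of the diff: the replacement patterns above only swap [0-9] for \d, so they accept the same numeric literals. A quick sanity check:

import re

assert re.match(r'-?\d+\.\d*', '-3.14')
assert re.match(r'-?\d*\.\d+', '.5')
assert re.match(r'-?\d+', '42')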
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for AmbientTalk language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
87
wakatime/packages/pygments/lexers/ampl.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.ampl
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for the AMPL language. <http://ampl.com/>
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, using, this, words
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation
|
||||
|
||||
__all__ = ['AmplLexer']
|
||||
|
||||
|
||||
class AmplLexer(RegexLexer):
|
||||
"""
|
||||
For AMPL source code.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'Ampl'
|
||||
aliases = ['ampl']
|
||||
filenames = ['*.run']
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'\n', Text),
|
||||
(r'\s+', Text.Whitespace),
|
||||
(r'#.*?\n', Comment.Single),
|
||||
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
|
||||
(words((
|
||||
'call', 'cd', 'close', 'commands', 'data', 'delete', 'display',
|
||||
'drop', 'end', 'environ', 'exit', 'expand', 'include', 'load',
|
||||
'model', 'objective', 'option', 'problem', 'purge', 'quit',
|
||||
'redeclare', 'reload', 'remove', 'reset', 'restore', 'shell',
|
||||
'show', 'solexpand', 'solution', 'solve', 'update', 'unload',
|
||||
'xref', 'coeff', 'coef', 'cover', 'obj', 'interval', 'default',
|
||||
'from', 'to', 'to_come', 'net_in', 'net_out', 'dimen',
|
||||
'dimension', 'check', 'complements', 'write', 'function',
|
||||
'pipe', 'format', 'if', 'then', 'else', 'in', 'while', 'repeat',
|
||||
'for'), suffix=r'\b'), Keyword.Reserved),
|
||||
(r'(integer|binary|symbolic|ordered|circular|reversed|INOUT|IN|OUT|LOCAL)',
|
||||
Keyword.Type),
|
||||
(r'\".*?\"', String.Double),
|
||||
(r'\'.*?\'', String.Single),
|
||||
(r'[()\[\]{},;:]+', Punctuation),
|
||||
(r'\b(\w+)(\.)(astatus|init0|init|lb0|lb1|lb2|lb|lrc|'
|
||||
r'lslack|rc|relax|slack|sstatus|status|ub0|ub1|ub2|'
|
||||
r'ub|urc|uslack|val)',
|
||||
bygroups(Name.Variable, Punctuation, Keyword.Reserved)),
|
||||
(r'(set|param|var|arc|minimize|maximize|subject to|s\.t\.|subj to|'
|
||||
r'node|table|suffix|read table|write table)(\s+)(\w+)',
|
||||
bygroups(Keyword.Declaration, Text, Name.Variable)),
|
||||
(r'(param)(\s*)(:)(\s*)(\w+)(\s*)(:)(\s*)((\w|\s)+)',
|
||||
bygroups(Keyword.Declaration, Text, Punctuation, Text,
|
||||
Name.Variable, Text, Punctuation, Text, Name.Variable)),
|
||||
(r'(let|fix|unfix)(\s*)((?:\{.*\})?)(\s*)(\w+)',
|
||||
bygroups(Keyword.Declaration, Text, using(this), Text, Name.Variable)),
|
||||
(words((
|
||||
'abs', 'acos', 'acosh', 'alias', 'asin', 'asinh', 'atan', 'atan2',
|
||||
'atanh', 'ceil', 'ctime', 'cos', 'exp', 'floor', 'log', 'log10',
|
||||
'max', 'min', 'precision', 'round', 'sin', 'sinh', 'sqrt', 'tan',
|
||||
'tanh', 'time', 'trunc', 'Beta', 'Cauchy', 'Exponential', 'Gamma',
|
||||
'Irand224', 'Normal', 'Normal01', 'Poisson', 'Uniform', 'Uniform01',
|
||||
'num', 'num0', 'ichar', 'char', 'length', 'substr', 'sprintf',
|
||||
'match', 'sub', 'gsub', 'print', 'printf', 'next', 'nextw', 'prev',
|
||||
'prevw', 'first', 'last', 'ord', 'ord0', 'card', 'arity',
|
||||
'indexarity'), prefix=r'\b', suffix=r'\b'), Name.Builtin),
|
||||
(r'(\+|\-|\*|/|\*\*|=|<=|>=|==|\||\^|<|>|\!|\.\.|:=|\&|\!=|<<|>>)',
|
||||
Operator),
|
||||
(words((
|
||||
'or', 'exists', 'forall', 'and', 'in', 'not', 'within', 'union',
|
||||
'diff', 'difference', 'symdiff', 'inter', 'intersect',
|
||||
'intersection', 'cross', 'setof', 'by', 'less', 'sum', 'prod',
|
||||
'product', 'div', 'mod'), suffix=r'\b'),
|
||||
Keyword.Reserved), # Operator.Name but not enough emphasized with that
|
||||
(r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
|
||||
(r'\d+([eE][+-]?\d+)?', Number.Integer),
|
||||
(r'[+-]?Infinity', Number.Integer),
|
||||
(r'(\w+|(\.(?!\.)))', Text)
|
||||
]
|
||||
|
||||
}
|
|
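Not part of the diff: a usage sketch for the new AMPL lexer, assuming Pygments 2.2.0 is importable:

from pygments import highlight
from pygments.lexers.ampl import AmplLexer
from pygments.formatters import TerminalFormatter

code = 'var x >= 0;\nminimize obj: x;'
print(highlight(code, AmplLexer(), TerminalFormatter()))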
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for APL.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
Contributed by Thomas Beale <https://github.com/wolandscat>,
|
||||
<https://bitbucket.org/thomas_beale>.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,21 +5,22 @@
|
|||
|
||||
Lexers for assembly languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
|
||||
from pygments.lexer import RegexLexer, include, bygroups, using, words, \
|
||||
DelegatingLexer
|
||||
from pygments.lexers.c_cpp import CppLexer, CLexer
|
||||
from pygments.lexers.d import DLexer
|
||||
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
|
||||
Other, Keyword, Operator
|
||||
|
||||
__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
|
||||
'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer',
|
||||
'Ca65Lexer']
|
||||
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer',
|
||||
'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer']
|
||||
|
||||
|
||||
class GasLexer(RegexLexer):
|
||||
|
@ -53,8 +54,6 @@ class GasLexer(RegexLexer):
|
|||
(number, Number.Integer),
|
||||
(r'[\r\n]+', Text, '#pop'),
|
||||
|
||||
(r'#.*?$', Comment, '#pop'),
|
||||
|
||||
include('punctuation'),
|
||||
include('whitespace')
|
||||
],
|
||||
|
@ -77,14 +76,14 @@ class GasLexer(RegexLexer):
|
|||
('$'+number, Number.Integer),
|
||||
(r"$'(.|\\')'", String.Char),
|
||||
(r'[\r\n]+', Text, '#pop'),
|
||||
(r'#.*?$', Comment, '#pop'),
|
||||
|
||||
include('punctuation'),
|
||||
include('whitespace')
|
||||
],
|
||||
'whitespace': [
|
||||
(r'\n', Text),
|
||||
(r'\s+', Text),
|
||||
(r'#.*?\n', Comment)
|
||||
(r'[;#].*?\n', Comment)
|
||||
],
|
||||
'punctuation': [
|
||||
(r'[-*,.()\[\]!:]+', Punctuation)
|
||||
|
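Not part of the diff: the widened whitespace rule now treats ';' lines as comments alongside '#'. A quick check of the new pattern:

import re

assert re.match(r'[;#].*?\n', '; a comment\n')
assert re.match(r'[;#].*?\n', '# still a comment\n')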
@ -198,6 +197,141 @@ class CObjdumpLexer(DelegatingLexer):
|
|||
super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
|
||||
|
||||
|
||||
class HsailLexer(RegexLexer):
|
||||
"""
|
||||
For HSAIL assembly code.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'HSAIL'
|
||||
aliases = ['hsail', 'hsa']
|
||||
filenames = ['*.hsail']
|
||||
mimetypes = ['text/x-hsail']
|
||||
|
||||
string = r'"[^"]*?"'
|
||||
identifier = r'[a-zA-Z_][\w.]*'
|
||||
# Registers
|
||||
register_number = r'[0-9]+'
|
||||
register = r'(\$(c|s|d|q)' + register_number + ')'
|
||||
# Qualifiers
|
||||
alignQual = r'(align\(\d+\))'
|
||||
widthQual = r'(width\((\d+|all)\))'
|
||||
allocQual = r'(alloc\(agent\))'
|
||||
# Instruction Modifiers
|
||||
roundingMod = (r'((_ftz)?(_up|_down|_zero|_near))')
|
||||
datatypeMod = (r'_('
|
||||
# packedTypes
|
||||
r'u8x4|s8x4|u16x2|s16x2|u8x8|s8x8|u16x4|s16x4|u32x2|s32x2|'
|
||||
r'u8x16|s8x16|u16x8|s16x8|u32x4|s32x4|u64x2|s64x2|'
|
||||
r'f16x2|f16x4|f16x8|f32x2|f32x4|f64x2|'
|
||||
# baseTypes
|
||||
r'u8|s8|u16|s16|u32|s32|u64|s64|'
|
||||
r'b128|b8|b16|b32|b64|b1|'
|
||||
r'f16|f32|f64|'
|
||||
# opaqueType
|
||||
r'roimg|woimg|rwimg|samp|sig32|sig64)')
|
||||
|
||||
# Numeric Constant
|
||||
float = r'((\d+\.)|(\d*\.\d+))[eE][+-]?\d+'
|
||||
hexfloat = r'0[xX](([0-9a-fA-F]+\.[0-9a-fA-F]*)|([0-9a-fA-F]*\.[0-9a-fA-F]+))[pP][+-]?\d+'
|
||||
ieeefloat = r'0((h|H)[0-9a-fA-F]{4}|(f|F)[0-9a-fA-F]{8}|(d|D)[0-9a-fA-F]{16})'
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
include('whitespace'),
|
||||
include('comments'),
|
||||
|
||||
(string, String),
|
||||
|
||||
(r'@' + identifier + ':?', Name.Label),
|
||||
|
||||
(register, Name.Variable.Anonymous),
|
||||
|
||||
include('keyword'),
|
||||
|
||||
(r'&' + identifier, Name.Variable.Global),
|
||||
(r'%' + identifier, Name.Variable),
|
||||
|
||||
(hexfloat, Number.Hex),
|
||||
(r'0[xX][a-fA-F0-9]+', Number.Hex),
|
||||
(ieeefloat, Number.Float),
|
||||
(float, Number.Float),
|
||||
(r'\d+', Number.Integer),
|
||||
|
||||
(r'[=<>{}\[\]()*.,:;!]|x\b', Punctuation)
|
||||
],
|
||||
'whitespace': [
|
||||
(r'(\n|\s)+', Text),
|
||||
],
|
||||
'comments': [
|
||||
(r'/\*.*?\*/', Comment.Multiline),
|
||||
(r'//.*?\n', Comment.Single),
|
||||
],
|
||||
'keyword': [
|
||||
# Types
|
||||
(r'kernarg' + datatypeMod, Keyword.Type),
|
||||
|
||||
# Regular keywords
|
||||
(r'\$(full|base|small|large|default|zero|near)', Keyword),
|
||||
(words((
|
||||
'module', 'extension', 'pragma', 'prog', 'indirect', 'signature',
|
||||
'decl', 'kernel', 'function', 'enablebreakexceptions',
|
||||
'enabledetectexceptions', 'maxdynamicgroupsize', 'maxflatgridsize',
|
||||
'maxflatworkgroupsize', 'requireddim', 'requiredgridsize',
|
||||
'requiredworkgroupsize', 'requirenopartialworkgroups'),
|
||||
suffix=r'\b'), Keyword),
|
||||
|
||||
# instructions
|
||||
(roundingMod, Keyword),
|
||||
(datatypeMod, Keyword),
|
||||
(r'_(' + alignQual + '|' + widthQual + ')', Keyword),
|
||||
(r'_kernarg', Keyword),
|
||||
(r'(nop|imagefence)\b', Keyword),
|
||||
(words((
|
||||
'cleardetectexcept', 'clock', 'cuid', 'debugtrap', 'dim',
|
||||
'getdetectexcept', 'groupbaseptr', 'kernargbaseptr', 'laneid',
|
||||
'maxcuid', 'maxwaveid', 'packetid', 'setdetectexcept', 'waveid',
|
||||
'workitemflatabsid', 'workitemflatid', 'nullptr', 'abs', 'bitrev',
|
||||
'currentworkgroupsize', 'currentworkitemflatid', 'fract', 'ncos',
|
||||
'neg', 'nexp2', 'nlog2', 'nrcp', 'nrsqrt', 'nsin', 'nsqrt',
|
||||
'gridgroups', 'gridsize', 'not', 'sqrt', 'workgroupid',
|
||||
'workgroupsize', 'workitemabsid', 'workitemid', 'ceil', 'floor',
|
||||
'rint', 'trunc', 'add', 'bitmask', 'borrow', 'carry', 'copysign',
|
||||
'div', 'rem', 'sub', 'shl', 'shr', 'and', 'or', 'xor', 'unpackhi',
|
||||
'unpacklo', 'max', 'min', 'fma', 'mad', 'bitextract', 'bitselect',
|
||||
'shuffle', 'cmov', 'bitalign', 'bytealign', 'lerp', 'nfma', 'mul',
|
||||
'mulhi', 'mul24hi', 'mul24', 'mad24', 'mad24hi', 'bitinsert',
|
||||
'combine', 'expand', 'lda', 'mov', 'pack', 'unpack', 'packcvt',
|
||||
'unpackcvt', 'sad', 'sementp', 'ftos', 'stof', 'cmp', 'ld', 'st',
|
||||
'_eq', '_ne', '_lt', '_le', '_gt', '_ge', '_equ', '_neu', '_ltu',
|
||||
'_leu', '_gtu', '_geu', '_num', '_nan', '_seq', '_sne', '_slt',
|
||||
'_sle', '_sgt', '_sge', '_snum', '_snan', '_sequ', '_sneu', '_sltu',
|
||||
'_sleu', '_sgtu', '_sgeu', 'atomic', '_ld', '_st', '_cas', '_add',
|
||||
'_and', '_exch', '_max', '_min', '_or', '_sub', '_wrapdec',
|
||||
'_wrapinc', '_xor', 'ret', 'cvt', '_readonly', '_kernarg', '_global',
|
||||
'br', 'cbr', 'sbr', '_scacq', '_screl', '_scar', '_rlx', '_wave',
|
||||
'_wg', '_agent', '_system', 'ldimage', 'stimage', '_v2', '_v3', '_v4',
|
||||
'_1d', '_2d', '_3d', '_1da', '_2da', '_1db', '_2ddepth', '_2dadepth',
|
||||
'_width', '_height', '_depth', '_array', '_channelorder',
|
||||
'_channeltype', 'querysampler', '_coord', '_filter', '_addressing',
|
||||
'barrier', 'wavebarrier', 'initfbar', 'joinfbar', 'waitfbar',
|
||||
'arrivefbar', 'leavefbar', 'releasefbar', 'ldf', 'activelaneid',
|
||||
'activelanecount', 'activelanemask', 'activelanepermute', 'call',
|
||||
'scall', 'icall', 'alloca', 'packetcompletionsig',
|
||||
'addqueuewriteindex', 'casqueuewriteindex', 'ldqueuereadindex',
|
||||
'stqueuereadindex', 'readonly', 'global', 'private', 'group',
|
||||
'spill', 'arg', '_upi', '_downi', '_zeroi', '_neari', '_upi_sat',
|
||||
'_downi_sat', '_zeroi_sat', '_neari_sat', '_supi', '_sdowni',
|
||||
'_szeroi', '_sneari', '_supi_sat', '_sdowni_sat', '_szeroi_sat',
|
||||
'_sneari_sat', '_pp', '_ps', '_sp', '_ss', '_s', '_p', '_pp_sat',
|
||||
'_ps_sat', '_sp_sat', '_ss_sat', '_s_sat', '_p_sat')), Keyword),
|
||||
|
||||
# Integer types
|
||||
(r'i[1-9]\d*', Keyword)
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class LlvmLexer(RegexLexer):
|
||||
"""
|
||||
For LLVM assembly code.
|
||||
|
@ -240,69 +374,60 @@ class LlvmLexer(RegexLexer):
|
|||
],
|
||||
'keyword': [
|
||||
# Regular keywords
|
||||
(r'(begin|end'
|
||||
r'|true|false'
|
||||
r'|declare|define'
|
||||
r'|global|constant'
|
||||
|
||||
r'|private|linker_private|internal|available_externally|linkonce'
|
||||
r'|linkonce_odr|weak|weak_odr|appending|dllimport|dllexport'
|
||||
r'|common|default|hidden|protected|extern_weak|external'
|
||||
r'|thread_local|zeroinitializer|undef|null|to|tail|target|triple'
|
||||
r'|datalayout|volatile|nuw|nsw|nnan|ninf|nsz|arcp|fast|exact|inbounds'
|
||||
r'|align|addrspace|section|alias|module|asm|sideeffect|gc|dbg'
|
||||
r'|linker_private_weak'
|
||||
r'|attributes|blockaddress|initialexec|localdynamic|localexec'
|
||||
r'|prefix|unnamed_addr'
|
||||
|
||||
r'|ccc|fastcc|coldcc|x86_stdcallcc|x86_fastcallcc|arm_apcscc'
|
||||
r'|arm_aapcscc|arm_aapcs_vfpcc|ptx_device|ptx_kernel'
|
||||
r'|intel_ocl_bicc|msp430_intrcc|spir_func|spir_kernel'
|
||||
r'|x86_64_sysvcc|x86_64_win64cc|x86_thiscallcc'
|
||||
|
||||
r'|cc|c'
|
||||
|
||||
r'|signext|zeroext|inreg|sret|nounwind|noreturn|noalias|nocapture'
|
||||
r'|byval|nest|readnone|readonly'
|
||||
r'|inlinehint|noinline|alwaysinline|optsize|ssp|sspreq|noredzone'
|
||||
r'|noimplicitfloat|naked'
|
||||
r'|builtin|cold|nobuiltin|noduplicate|nonlazybind|optnone'
|
||||
r'|returns_twice|sanitize_address|sanitize_memory|sanitize_thread'
|
||||
r'|sspstrong|uwtable|returned'
|
||||
|
||||
r'|type|opaque'
|
||||
|
||||
r'|eq|ne|slt|sgt|sle'
|
||||
r'|sge|ult|ugt|ule|uge'
|
||||
r'|oeq|one|olt|ogt|ole'
|
||||
r'|oge|ord|uno|ueq|une'
|
||||
r'|x'
|
||||
r'|acq_rel|acquire|alignstack|atomic|catch|cleanup|filter'
|
||||
r'|inteldialect|max|min|monotonic|nand|personality|release'
|
||||
r'|seq_cst|singlethread|umax|umin|unordered|xchg'
|
||||
|
||||
# instructions
|
||||
r'|add|fadd|sub|fsub|mul|fmul|udiv|sdiv|fdiv|urem|srem|frem|shl'
|
||||
r'|lshr|ashr|and|or|xor|icmp|fcmp'
|
||||
|
||||
r'|phi|call|trunc|zext|sext|fptrunc|fpext|uitofp|sitofp|fptoui'
|
||||
r'|fptosi|inttoptr|ptrtoint|bitcast|addrspacecast'
|
||||
r'|select|va_arg|ret|br|switch'
|
||||
r'|invoke|unwind|unreachable'
|
||||
r'|indirectbr|landingpad|resume'
|
||||
|
||||
r'|malloc|alloca|free|load|store|getelementptr'
|
||||
|
||||
r'|extractelement|insertelement|shufflevector|getresult'
|
||||
r'|extractvalue|insertvalue'
|
||||
|
||||
r'|atomicrmw|cmpxchg|fence'
|
||||
|
||||
r')\b', Keyword),
|
||||
(words((
|
||||
'begin', 'end', 'true', 'false', 'declare', 'define', 'global',
|
||||
'constant', 'private', 'linker_private', 'internal',
|
||||
'available_externally', 'linkonce', 'linkonce_odr', 'weak',
|
||||
'weak_odr', 'appending', 'dllimport', 'dllexport', 'common',
|
||||
'default', 'hidden', 'protected', 'extern_weak', 'external',
|
||||
'thread_local', 'zeroinitializer', 'undef', 'null', 'to', 'tail',
|
||||
'target', 'triple', 'datalayout', 'volatile', 'nuw', 'nsw', 'nnan',
|
||||
'ninf', 'nsz', 'arcp', 'fast', 'exact', 'inbounds', 'align',
|
||||
'addrspace', 'section', 'alias', 'module', 'asm', 'sideeffect',
|
||||
'gc', 'dbg', 'linker_private_weak', 'attributes', 'blockaddress',
|
||||
'initialexec', 'localdynamic', 'localexec', 'prefix', 'unnamed_addr',
|
||||
'ccc', 'fastcc', 'coldcc', 'x86_stdcallcc', 'x86_fastcallcc',
|
||||
'arm_apcscc', 'arm_aapcscc', 'arm_aapcs_vfpcc', 'ptx_device',
|
||||
'ptx_kernel', 'intel_ocl_bicc', 'msp430_intrcc', 'spir_func',
|
||||
'spir_kernel', 'x86_64_sysvcc', 'x86_64_win64cc', 'x86_thiscallcc',
|
||||
'cc', 'c', 'signext', 'zeroext', 'inreg', 'sret', 'nounwind',
|
||||
'noreturn', 'noalias', 'nocapture', 'byval', 'nest', 'readnone',
|
||||
'readonly', 'inlinehint', 'noinline', 'alwaysinline', 'optsize', 'ssp',
|
||||
'sspreq', 'noredzone', 'noimplicitfloat', 'naked', 'builtin', 'cold',
|
||||
'nobuiltin', 'noduplicate', 'nonlazybind', 'optnone', 'returns_twice',
|
||||
'sanitize_address', 'sanitize_memory', 'sanitize_thread', 'sspstrong',
|
||||
'uwtable', 'returned', 'type', 'opaque', 'eq', 'ne', 'slt', 'sgt',
|
||||
'sle', 'sge', 'ult', 'ugt', 'ule', 'uge', 'oeq', 'one', 'olt', 'ogt',
|
||||
'ole', 'oge', 'ord', 'uno', 'ueq', 'une', 'x', 'acq_rel', 'acquire',
|
||||
'alignstack', 'atomic', 'catch', 'cleanup', 'filter', 'inteldialect',
|
||||
'max', 'min', 'monotonic', 'nand', 'personality', 'release', 'seq_cst',
|
||||
'singlethread', 'umax', 'umin', 'unordered', 'xchg', 'add', 'fadd',
|
||||
'sub', 'fsub', 'mul', 'fmul', 'udiv', 'sdiv', 'fdiv', 'urem', 'srem',
|
||||
'frem', 'shl', 'lshr', 'ashr', 'and', 'or', 'xor', 'icmp', 'fcmp',
|
||||
'phi', 'call', 'trunc', 'zext', 'sext', 'fptrunc', 'fpext', 'uitofp',
|
||||
'sitofp', 'fptoui', 'fptosi', 'inttoptr', 'ptrtoint', 'bitcast',
|
||||
'addrspacecast', 'select', 'va_arg', 'ret', 'br', 'switch', 'invoke',
|
||||
'unwind', 'unreachable', 'indirectbr', 'landingpad', 'resume',
|
||||
'malloc', 'alloca', 'free', 'load', 'store', 'getelementptr',
|
||||
'extractelement', 'insertelement', 'shufflevector', 'getresult',
|
||||
'extractvalue', 'insertvalue', 'atomicrmw', 'cmpxchg', 'fence',
|
||||
'allocsize', 'amdgpu_cs', 'amdgpu_gs', 'amdgpu_kernel', 'amdgpu_ps',
|
||||
'amdgpu_vs', 'any', 'anyregcc', 'argmemonly', 'avr_intrcc',
|
||||
'avr_signalcc', 'caller', 'catchpad', 'catchret', 'catchswitch',
|
||||
'cleanuppad', 'cleanupret', 'comdat', 'convergent', 'cxx_fast_tlscc',
|
||||
'deplibs', 'dereferenceable', 'dereferenceable_or_null', 'distinct',
|
||||
'exactmatch', 'externally_initialized', 'from', 'ghccc', 'hhvm_ccc',
|
||||
'hhvmcc', 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly',
|
||||
'inalloca', 'jumptable', 'largest', 'local_unnamed_addr', 'minsize',
|
||||
'musttail', 'noduplicates', 'none', 'nonnull', 'norecurse', 'notail',
|
||||
'preserve_allcc', 'preserve_mostcc', 'prologue', 'safestack', 'samesize',
|
||||
'source_filename', 'swiftcc', 'swifterror', 'swiftself', 'webkit_jscc',
|
||||
'within', 'writeonly', 'x86_intrcc', 'x86_vectorcallcc'),
|
||||
suffix=r'\b'), Keyword),
|
||||
|
||||
# Types
|
||||
(r'void|half|float|double|x86_fp80|fp128|ppc_fp128|label|metadata',
|
||||
Keyword.Type),
|
||||
(words(('void', 'half', 'float', 'double', 'x86_fp80', 'fp128',
|
||||
'ppc_fp128', 'label', 'metadata', 'token')), Keyword.Type),
|
||||
|
||||
# Integer types
|
||||
(r'i[1-9]\d*', Keyword)
|
||||
|
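Not part of the diff: words() builds its pattern with pygments.regexopt.regex_opt, which is why the long hand-written alternation above could be replaced by a plain tuple of literals. A minimal check, assuming Pygments 2.2.0:

import re
from pygments.regexopt import regex_opt

pat = regex_opt(('add', 'fadd', 'sub', 'fsub'), suffix=r'\b')
assert re.match(pat, 'fadd')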
@@ -397,6 +522,86 @@ class NasmObjdumpLexer(ObjdumpLexer):
tokens = _objdump_lexer_tokens(NasmLexer)


class TasmLexer(RegexLexer):
"""
For Tasm (Turbo Assembler) assembly code.
"""
name = 'TASM'
aliases = ['tasm']
filenames = ['*.asm', '*.ASM', '*.tasm']
mimetypes = ['text/x-tasm']

identifier = r'[@a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
binn = r'[01]+b'
decn = r'[0-9]+'
floatn = decn + r'\.e?' + decn
string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
declkw = r'(?:res|d)[bwdqt]|times'
register = (r'r[0-9][0-5]?[bwd]|'
r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
wordop = r'seg|wrt|strict'
type = r'byte|[dq]?word'
directives = (r'BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
r'ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|'
r'EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|'
r'P386|MODEL|ASSUME|CODESEG|SIZE')
# T[A-Z][a-z] is more of a convention. Lexer should filter out STRUC definitions
# and then 'add' them to datatype somehow.
datatype = (r'db|dd|dw|T[A-Z][a-z]+')

flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'^\s*%', Comment.Preproc, 'preproc'),
include('whitespace'),
(identifier + ':', Name.Label),
(directives, Keyword, 'instruction-args'),
(r'(%s)(\s+)(%s)' % (identifier, datatype),
bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
'instruction-args'),
(declkw, Keyword.Declaration, 'instruction-args'),
(identifier, Name.Function, 'instruction-args'),
(r'[\r\n]+', Text)
],
'instruction-args': [
(string, String),
(hexn, Number.Hex),
(octn, Number.Oct),
(binn, Number.Bin),
(floatn, Number.Float),
(decn, Number.Integer),
include('punctuation'),
(register, Name.Builtin),
(identifier, Name.Variable),
# Do not match newline when it's preceded by a backslash
(r'(\\\s*)(;.*)([\r\n])', bygroups(Text, Comment.Single, Text)),
(r'[\r\n]+', Text, '#pop'),
include('whitespace')
],
'preproc': [
(r'[^;\n]+', Comment.Preproc),
(r';.*?\n', Comment.Single, '#pop'),
(r'\n', Comment.Preproc, '#pop'),
],
'whitespace': [
(r'[\n\r]', Text),
(r'\\[\n\r]', Text),
(r'[ \t]+', Text),
(r';.*', Comment.Single)
],
'punctuation': [
(r'[,():\[\]]+', Punctuation),
(r'[&|^<>+*=/%~-]+', Operator),
(r'[$]+', Keyword.Constant),
(wordop, Operator.Word),
(type, Keyword.Type)
],
}
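
The new TasmLexer registers under the 'tasm' alias above, so it can be driven through the
standard Pygments entry points. A minimal sketch, not part of the commit (the assembly
snippet is invented for illustration, and it assumes this vendored copy is importable as
pygments):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    # Toy Turbo Assembler source; enough to exercise directives, labels and numbers.
    source = "IDEAL\nMODEL small\nCODESEG\nstart:\n    mov ax, 4C00h\n    int 21h\nEND start\n"
    print(highlight(source, get_lexer_by_name('tasm'), TerminalFormatter()))
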
class Ca65Lexer(RegexLexer):
|
||||
"""
|
||||
For ca65 assembler sources.
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for automation scripting languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -31,8 +31,8 @@ class AutohotkeyLexer(RegexLexer):
|
|||
'root': [
|
||||
(r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
|
||||
(r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
|
||||
(r'\s+;.*?$', Comment.Singleline),
|
||||
(r'^;.*?$', Comment.Singleline),
|
||||
(r'\s+;.*?$', Comment.Single),
|
||||
(r'^;.*?$', Comment.Single),
|
||||
(r'[]{}(),;[]', Punctuation),
|
||||
(r'(in|is|and|or|not)\b', Operator.Word),
|
||||
(r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for BASIC like languages (other than VB.net).
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
160
wakatime/packages/pygments/lexers/bibtex.py
Normal file
|
@@ -0,0 +1,160 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.bibtex
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for BibTeX bibliography data and styles
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, default, \
|
||||
words
|
||||
from pygments.token import Name, Comment, String, Error, Number, Text, \
|
||||
Keyword, Punctuation
|
||||
|
||||
__all__ = ['BibTeXLexer', 'BSTLexer']
|
||||
|
||||
|
||||
class BibTeXLexer(ExtendedRegexLexer):
|
||||
"""
|
||||
A lexer for BibTeX bibliography data format.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'BibTeX'
|
||||
aliases = ['bib', 'bibtex']
|
||||
filenames = ['*.bib']
|
||||
mimetypes = ["text/x-bibtex"]
|
||||
flags = re.IGNORECASE
|
||||
|
||||
ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
|
||||
IDENTIFIER = '[{0}][{1}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
|
||||
|
||||
def open_brace_callback(self, match, ctx):
|
||||
opening_brace = match.group()
|
||||
ctx.opening_brace = opening_brace
|
||||
yield match.start(), Punctuation, opening_brace
|
||||
ctx.pos = match.end()
|
||||
|
||||
def close_brace_callback(self, match, ctx):
|
||||
closing_brace = match.group()
|
||||
if (
|
||||
ctx.opening_brace == '{' and closing_brace != '}' or
|
||||
ctx.opening_brace == '(' and closing_brace != ')'
|
||||
):
|
||||
yield match.start(), Error, closing_brace
|
||||
else:
|
||||
yield match.start(), Punctuation, closing_brace
|
||||
del ctx.opening_brace
|
||||
ctx.pos = match.end()
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
include('whitespace'),
|
||||
('@comment', Comment),
|
||||
('@preamble', Name.Class, ('closing-brace', 'value', 'opening-brace')),
|
||||
('@string', Name.Class, ('closing-brace', 'field', 'opening-brace')),
|
||||
('@' + IDENTIFIER, Name.Class,
|
||||
('closing-brace', 'command-body', 'opening-brace')),
|
||||
('.+', Comment),
|
||||
],
|
||||
'opening-brace': [
|
||||
include('whitespace'),
|
||||
(r'[{(]', open_brace_callback, '#pop'),
|
||||
],
|
||||
'closing-brace': [
|
||||
include('whitespace'),
|
||||
(r'[})]', close_brace_callback, '#pop'),
|
||||
],
|
||||
'command-body': [
|
||||
include('whitespace'),
|
||||
(r'[^\s\,\}]+', Name.Label, ('#pop', 'fields')),
|
||||
],
|
||||
'fields': [
|
||||
include('whitespace'),
|
||||
(',', Punctuation, 'field'),
|
||||
default('#pop'),
|
||||
],
|
||||
'field': [
|
||||
include('whitespace'),
|
||||
(IDENTIFIER, Name.Attribute, ('value', '=')),
|
||||
default('#pop'),
|
||||
],
|
||||
'=': [
|
||||
include('whitespace'),
|
||||
('=', Punctuation, '#pop'),
|
||||
],
|
||||
'value': [
|
||||
include('whitespace'),
|
||||
(IDENTIFIER, Name.Variable),
|
||||
('"', String, 'quoted-string'),
|
||||
(r'\{', String, 'braced-string'),
|
||||
(r'[\d]+', Number),
|
||||
('#', Punctuation),
|
||||
default('#pop'),
|
||||
],
|
||||
'quoted-string': [
|
||||
(r'\{', String, 'braced-string'),
|
||||
('"', String, '#pop'),
|
||||
('[^\{\"]+', String),
|
||||
],
|
||||
'braced-string': [
|
||||
(r'\{', String, '#push'),
|
||||
(r'\}', String, '#pop'),
|
||||
('[^\{\}]+', String),
|
||||
],
|
||||
'whitespace': [
|
||||
(r'\s+', Text),
|
||||
],
|
||||
}
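
A quick way to see the BibTeX state machine above in action is to feed it a toy entry and
print the token stream. This is only a sketch (the entry text is made up; the import path
follows the new module):

    from pygments.lexers.bibtex import BibTeXLexer

    entry = '@article{key,\n  author = "A. Author",\n  year = 2017,\n}\n'
    for token_type, value in BibTeXLexer().get_tokens(entry):
        # Expect e.g. Name.Class for '@article', Name.Attribute for 'author', Number for 2017.
        print(token_type, repr(value))
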
|
||||
|
||||
|
||||
class BSTLexer(RegexLexer):
|
||||
"""
|
||||
A lexer for BibTeX bibliography styles.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'BST'
|
||||
aliases = ['bst', 'bst-pybtex']
|
||||
filenames = ['*.bst']
|
||||
flags = re.IGNORECASE | re.MULTILINE
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
include('whitespace'),
|
||||
(words(['read', 'sort']), Keyword),
|
||||
(words(['execute', 'integers', 'iterate', 'reverse', 'strings']),
|
||||
Keyword, ('group')),
|
||||
(words(['function', 'macro']), Keyword, ('group', 'group')),
|
||||
(words(['entry']), Keyword, ('group', 'group', 'group')),
|
||||
],
|
||||
'group': [
|
||||
include('whitespace'),
|
||||
(r'\{', Punctuation, ('#pop', 'group-end', 'body')),
|
||||
],
|
||||
'group-end': [
|
||||
include('whitespace'),
|
||||
(r'\}', Punctuation, '#pop'),
|
||||
],
|
||||
'body': [
|
||||
include('whitespace'),
|
||||
(r"\'[^#\"\{\}\s]+", Name.Function),
|
||||
(r'[^#\"\{\}\s]+\$', Name.Builtin),
|
||||
(r'[^#\"\{\}\s]+', Name.Variable),
|
||||
(r'"[^\"]*"', String),
|
||||
(r'#-?\d+', Number),
|
||||
(r'\{', Punctuation, ('group-end', 'body')),
|
||||
default('#pop'),
|
||||
],
|
||||
'whitespace': [
|
||||
('\s+', Text),
|
||||
('%.*?$', Comment.SingleLine),
|
||||
],
|
||||
}
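
Similarly, the companion BSTLexer can be exercised on a fragment of a .bst style file; the
fragment below is invented for illustration and simply shows the lexer wired into the usual
highlight() call:

    from pygments import highlight
    from pygments.lexers.bibtex import BSTLexer
    from pygments.formatters import HtmlFormatter

    fragment = "ENTRY { address author title } {} { label }\n"
    print(highlight(fragment, BSTLexer(), HtmlFormatter()))
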
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for "business-oriented" languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -57,9 +57,9 @@ class CobolLexer(RegexLexer):
|
|||
],
|
||||
'core': [
|
||||
# Figurative constants
|
||||
(r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
|
||||
(r'(^|(?<=[^\w\-]))(ALL\s+)?'
|
||||
r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
|
||||
r'\s*($|(?=[^0-9a-z_\-]))',
|
||||
r'\s*($|(?=[^\w\-]))',
|
||||
Name.Constant),
|
||||
|
||||
# Reserved words STATEMENTS and other bolds
|
||||
|
@ -79,8 +79,8 @@ class CobolLexer(RegexLexer):
|
|||
'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
|
||||
'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
|
||||
'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
|
||||
'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^0-9a-z_\-]))',
|
||||
suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
|
||||
'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^\w\-]))',
|
||||
suffix=r'\s*($|(?=[^\w\-]))'),
|
||||
Keyword.Reserved),
|
||||
|
||||
# Reserved words
|
||||
|
@ -89,33 +89,33 @@ class CobolLexer(RegexLexer):
|
|||
'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
|
||||
'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE'
|
||||
'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
|
||||
'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC', 'AUTOTERMINATE',
|
||||
'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
|
||||
'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC',
|
||||
'AUTOTERMINATE', 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
|
||||
'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
|
||||
'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL', 'COLLATING',
|
||||
'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE', 'COMMIT', 'COMMON',
|
||||
'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
|
||||
'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL',
|
||||
'COLLATING', 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE',
|
||||
'COMMIT', 'COMMON', 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
|
||||
'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
|
||||
'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE', 'DEBUGGING',
|
||||
'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
|
||||
'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE',
|
||||
'DEBUGGING', 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
|
||||
'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
|
||||
'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
|
||||
'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
|
||||
'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
|
||||
'EXCLUSIVE', 'EXTEND', 'EXTERNAL',
|
||||
'FILE-ID', 'FILLER', 'FINAL', 'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
|
||||
'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL', 'FUNCTION',
|
||||
'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
|
||||
'EXCLUSIVE', 'EXTEND', 'EXTERNAL', 'FILE-ID', 'FILLER', 'FINAL',
|
||||
'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
|
||||
'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL',
|
||||
'FUNCTION', 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
|
||||
'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
|
||||
'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
|
||||
'INITIAL', 'INITIALIZED', 'INPUT',
|
||||
'INTO', 'INTRINSIC', 'INVALID', 'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
|
||||
'INITIAL', 'INITIALIZED', 'INPUT', 'INTO', 'INTRINSIC', 'INVALID',
|
||||
'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
|
||||
'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
|
||||
'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
|
||||
'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE',
|
||||
'MULTIPLE', 'NATIONAL', 'NATIONAL-EDITED', 'NATIVE',
|
||||
'NEGATIVE', 'NEXT', 'NO', 'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC',
|
||||
'NUMERIC-EDITED', 'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
|
||||
'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', 'MULTIPLE',
|
||||
'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', 'NEGATIVE', 'NEXT', 'NO',
|
||||
'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', 'NUMERIC-EDITED',
|
||||
'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
|
||||
'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
|
||||
'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
|
||||
'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
|
||||
|
@ -137,40 +137,42 @@ class CobolLexer(RegexLexer):
|
|||
'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
|
||||
'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
|
||||
'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
|
||||
prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
|
||||
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
|
||||
Keyword.Pseudo),
|
||||
|
||||
# inactive reserved words
|
||||
(words((
|
||||
'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE', 'B-AND',
|
||||
'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER', 'CF', 'CH', 'CHAIN', 'CLASS-ID',
|
||||
'CLASSIFICATION', 'COMMUNICATION', 'CONDITION', 'DATA-POINTER',
|
||||
'DESTINATION', 'DISABLE', 'EC', 'EGI', 'EMI', 'ENABLE', 'END-RECEIVE',
|
||||
'ENTRY-CONVENTION', 'EO', 'ESI', 'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY',
|
||||
'FLOAT-BINARY-16', 'FLOAT-BINARY-34', 'FLOAT-BINARY-7',
|
||||
'FLOAT-DECIMAL-16', 'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT',
|
||||
'FUNCTION-POINTER', 'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY',
|
||||
'INHERITS', 'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
|
||||
'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE',
|
||||
'B-AND', 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER',
|
||||
'CF', 'CH', 'CHAIN', 'CLASS-ID', 'CLASSIFICATION', 'COMMUNICATION',
|
||||
'CONDITION', 'DATA-POINTER', 'DESTINATION', 'DISABLE', 'EC', 'EGI',
|
||||
'EMI', 'ENABLE', 'END-RECEIVE', 'ENTRY-CONVENTION', 'EO', 'ESI',
|
||||
'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY', 'FLOAT-BINARY-16',
|
||||
'FLOAT-BINARY-34', 'FLOAT-BINARY-7', 'FLOAT-DECIMAL-16',
|
||||
'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', 'FUNCTION-POINTER',
|
||||
'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', 'INHERITS',
|
||||
'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
|
||||
'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
|
||||
'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE', 'NORMAL',
|
||||
'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE', 'PAGE-COUNTER', 'PF', 'PH',
|
||||
'PROPERTY', 'PROTOTYPE', 'PURGE', 'QUEUE', 'RAISE', 'RAISING', 'RECEIVE',
|
||||
'RELATION', 'REPLACE', 'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY',
|
||||
'RF', 'RH', 'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT', 'STEP',
|
||||
'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3', 'SUPER', 'SYMBOL',
|
||||
'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT', 'TYPEDEF', 'UCS-4', 'UNIVERSAL',
|
||||
'USER-DEFAULT', 'UTF-16', 'UTF-8', 'VAL-STATUS', 'VALID', 'VALIDATE',
|
||||
'VALIDATE-STATUS'),
|
||||
prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
|
||||
'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE',
|
||||
'NORMAL', 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE',
|
||||
'PAGE-COUNTER', 'PF', 'PH', 'PROPERTY', 'PROTOTYPE', 'PURGE',
|
||||
'QUEUE', 'RAISE', 'RAISING', 'RECEIVE', 'RELATION', 'REPLACE',
|
||||
'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY', 'RF', 'RH',
|
||||
'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT',
|
||||
'STEP', 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3',
|
||||
'SUPER', 'SYMBOL', 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT',
|
||||
'TYPEDEF', 'UCS-4', 'UNIVERSAL', 'USER-DEFAULT', 'UTF-16', 'UTF-8',
|
||||
'VAL-STATUS', 'VALID', 'VALIDATE', 'VALIDATE-STATUS'),
|
||||
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
|
||||
Error),
|
||||
|
||||
# Data Types
|
||||
(r'(^|(?<=[^0-9a-z_\-]))'
|
||||
(r'(^|(?<=[^\w\-]))'
|
||||
r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
|
||||
r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
|
||||
r'BINARY-C-LONG|'
|
||||
r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
|
||||
r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),
|
||||
r'BINARY)\s*($|(?=[^\w\-]))', Keyword.Type),
|
||||
|
||||
# Operators
|
||||
(r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
|
||||
|
@ -180,7 +182,7 @@ class CobolLexer(RegexLexer):
|
|||
(r'([(),;:&%.])', Punctuation),
|
||||
|
||||
# Intrinsics
|
||||
(r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
|
||||
(r'(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
|
||||
r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
|
||||
r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
|
||||
r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
|
||||
|
@ -192,13 +194,13 @@ class CobolLexer(RegexLexer):
|
|||
r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
|
||||
r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
|
||||
r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
|
||||
r'($|(?=[^0-9a-z_\-]))', Name.Function),
|
||||
r'($|(?=[^\w\-]))', Name.Function),
|
||||
|
||||
# Booleans
|
||||
(r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
|
||||
(r'(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))', Name.Builtin),
|
||||
# Comparing Operators
|
||||
(r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
|
||||
r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
|
||||
(r'(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|'
|
||||
r'greater|less|than|not|and|or)\s*($|(?=[^\w\-]))', Operator.Word),
|
||||
],
|
||||
|
||||
# \"[^\"\n]*\"|\'[^\'\n]*\'
|
||||
|
@ -254,6 +256,7 @@ class ABAPLexer(RegexLexer):
|
|||
(r'\s+', Text),
|
||||
(r'^\*.*$', Comment.Single),
|
||||
(r'\".*?\n', Comment.Single),
|
||||
(r'##\w+', Comment.Special),
|
||||
],
|
||||
'variable-names': [
|
||||
(r'<\S+>', Name.Variable),
|
||||
|
@ -262,8 +265,8 @@ class ABAPLexer(RegexLexer):
|
|||
'root': [
|
||||
include('common'),
|
||||
# function calls
|
||||
(r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
|
||||
bygroups(Keyword, Text, Name.Function)),
|
||||
(r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
|
||||
Keyword),
|
||||
(r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
|
||||
r'TRANSACTION|TRANSFORMATION))\b',
|
||||
Keyword),
|
||||
|
@ -283,6 +286,12 @@ class ABAPLexer(RegexLexer):
|
|||
# call methodnames returning style
|
||||
(r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
|
||||
|
||||
# text elements
|
||||
(r'(TEXT)(-)(\d{3})',
|
||||
bygroups(Keyword, Punctuation, Number.Integer)),
|
||||
(r'(TEXT)(-)(\w{3})',
|
||||
bygroups(Keyword, Punctuation, Name.Variable)),
|
||||
|
||||
# keywords with dashes in them.
|
||||
# these need to be first, because for instance the -ID part
|
||||
# of MESSAGE-ID wouldn't get highlighted if MESSAGE was
|
||||
|
@ -299,13 +308,13 @@ class ABAPLexer(RegexLexer):
|
|||
r'OUTPUT-LENGTH|PRINT-CONTROL|'
|
||||
r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
|
||||
r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
|
||||
r'TYPE-POOL|TYPE-POOLS'
|
||||
r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
|
||||
r')\b', Keyword),
|
||||
|
||||
# keyword kombinations
|
||||
(r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
|
||||
r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
|
||||
r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
|
||||
(r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
|
||||
r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
|
||||
r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
|
||||
r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
|
||||
r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
|
||||
r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
|
||||
|
@ -341,10 +350,16 @@ class ABAPLexer(RegexLexer):
|
|||
r'(BEGIN|END)\s+OF|'
|
||||
r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
|
||||
r'COMPARING(\s+ALL\s+FIELDS)?|'
|
||||
r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
|
||||
r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
|
||||
r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
|
||||
r'END-OF-(DEFINITION|PAGE|SELECTION)|'
|
||||
r'WITH\s+FRAME(\s+TITLE)|'
|
||||
r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
|
||||
r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
|
||||
r'(RESPECTING|IGNORING)\s+CASE|'
|
||||
r'IN\s+UPDATE\s+TASK|'
|
||||
r'(SOURCE|RESULT)\s+(XML)?|'
|
||||
r'REFERENCE\s+INTO|'
|
||||
|
||||
# simple kombinations
|
||||
r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
|
||||
|
@ -353,39 +368,41 @@ class ABAPLexer(RegexLexer):
|
|||
r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
|
||||
r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
|
||||
r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
|
||||
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
|
||||
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
|
||||
|
||||
# single word keywords.
|
||||
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
|
||||
r'ASSIGN(ING)?|AT(\s+FIRST)?|'
|
||||
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
|
||||
r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
|
||||
r'BACK|BLOCK|BREAK-POINT|'
|
||||
r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
|
||||
r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
|
||||
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
|
||||
r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
|
||||
r'DETAIL|DIRECTORY|DIVIDE|DO|'
|
||||
r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
|
||||
r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
|
||||
r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
|
||||
r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
|
||||
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
|
||||
r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
|
||||
r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
|
||||
r'ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
|
||||
r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
|
||||
r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
|
||||
r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
|
||||
r'HIDE|'
|
||||
r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
|
||||
r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
|
||||
r'LENGTH|LINES|LOAD|LOCAL|'
|
||||
r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
|
||||
r'JOIN|'
|
||||
r'KEY|'
|
||||
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
|
||||
r'NODES|'
|
||||
r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
|
||||
r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
|
||||
r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
|
||||
r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
|
||||
r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
|
||||
r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
|
||||
r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
|
||||
r'NEXT|'
|
||||
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
|
||||
r'NODES|NUMBER|'
|
||||
r'OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|'
|
||||
r'PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|'
|
||||
r'RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|'
|
||||
r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|'
|
||||
r'SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|'
|
||||
r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|'
|
||||
r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
|
||||
r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
|
||||
r'ULINE|UNDER|UNPACK|UPDATE|USING|'
|
||||
r'VALUE|VALUES|VIA|'
|
||||
r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
|
||||
r'VALUE|VALUES|VIA|VARYING|VARY|'
|
||||
r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
|
||||
|
||||
# builtins
|
||||
(r'(abs|acos|asin|atan|'
|
||||
|
@ -411,18 +428,21 @@ class ABAPLexer(RegexLexer):
|
|||
|
||||
# operators which look like variable names before
|
||||
# parsing variable names.
|
||||
(r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
|
||||
(r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
|
||||
r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
|
||||
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
|
||||
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
|
||||
|
||||
include('variable-names'),
|
||||
|
||||
# standard oparators after variable names,
|
||||
# standard operators after variable names,
|
||||
# because < and > are part of field symbols.
|
||||
(r'[?*<>=\-+]', Operator),
|
||||
(r'[?*<>=\-+&]', Operator),
|
||||
(r"'(''|[^'])*'", String.Single),
|
||||
(r"`([^`])*`", String.Single),
|
||||
(r'[/;:()\[\],.]', Punctuation)
|
||||
(r"([|}])([^{}|]*?)([|{])",
|
||||
bygroups(Punctuation, String.Single, Punctuation)),
|
||||
(r'[/;:()\[\],.]', Punctuation),
|
||||
(r'(!)(\w+)', bygroups(Operator, Name)),
|
||||
],
|
||||
}
|
||||
|
||||
|
@ -439,15 +459,15 @@ class OpenEdgeLexer(RegexLexer):
|
|||
filenames = ['*.p', '*.cls']
|
||||
mimetypes = ['text/x-openedge', 'application/x-openedge']
|
||||
|
||||
types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
|
||||
types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
|
||||
r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
|
||||
r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
|
||||
r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
|
||||
r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
|
||||
r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))')
|
||||
|
||||
keywords = words(OPENEDGEKEYWORDS,
|
||||
prefix=r'(?i)(^|(?<=[^0-9a-z_\-]))',
|
||||
suffix=r'\s*($|(?=[^0-9a-z_\-]))')
|
||||
prefix=r'(?i)(^|(?<=[^\w\-]))',
|
||||
suffix=r'\s*($|(?=[^\w\-]))')
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for C/C++ languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -46,12 +46,15 @@ class CFamilyLexer(RegexLexer):
|
|||
(r'\n', Text),
|
||||
(r'\s+', Text),
|
||||
(r'\\\n', Text), # line continuation
|
||||
(r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
|
||||
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
|
||||
(r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
|
||||
(r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
|
||||
# Open until EOF, so no ending delimiter
|
||||
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
|
||||
],
|
||||
'statements': [
|
||||
(r'L?"', String, 'string'),
|
||||
(r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
|
||||
(r'(L?)(")', bygroups(String.Affix, String), 'string'),
|
||||
(r"(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
|
||||
bygroups(String.Affix, String.Char, String.Char, String.Char)),
|
||||
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
|
||||
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
|
||||
(r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
|
||||
|
@ -60,10 +63,11 @@ class CFamilyLexer(RegexLexer):
|
|||
(r'\*/', Error),
|
||||
(r'[~!%^&*+=|?:<>/-]', Operator),
|
||||
(r'[()\[\],.]', Punctuation),
|
||||
(words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do',
|
||||
'else', 'enum', 'extern', 'for', 'goto', 'if', 'register',
|
||||
'restricted', 'return', 'sizeof', 'static', 'struct',
|
||||
'switch', 'typedef', 'union', 'volatile', 'while'),
|
||||
(words(('asm', 'auto', 'break', 'case', 'const', 'continue',
|
||||
'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
|
||||
'if', 'register', 'restricted', 'return', 'sizeof',
|
||||
'static', 'struct', 'switch', 'typedef', 'union',
|
||||
'volatile', 'while'),
|
||||
suffix=r'\b'), Keyword),
|
||||
(r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
|
||||
Keyword.Type),
|
||||
|
@ -123,7 +127,8 @@ class CFamilyLexer(RegexLexer):
|
|||
(r'\\', String), # stray backslash
|
||||
],
|
||||
'macro': [
|
||||
(r'(include)(' + _ws1 + ')([^\n]+)', bygroups(Comment.Preproc, Text, Comment.PreprocFile)),
|
||||
(r'(include)(' + _ws1 + r')([^\n]+)',
|
||||
bygroups(Comment.Preproc, Text, Comment.PreprocFile)),
|
||||
(r'[^/\n]+', Comment.Preproc),
|
||||
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
|
||||
(r'//.*?\n', Comment.Single, '#pop'),
|
||||
|
@ -206,7 +211,7 @@ class CppLexer(CFamilyLexer):
|
|||
tokens = {
|
||||
'statements': [
|
||||
(words((
|
||||
'asm', 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
|
||||
'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
|
||||
'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
|
||||
'private', 'protected', 'public', 'reinterpret_cast',
|
||||
'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
|
||||
|
@ -217,7 +222,11 @@ class CppLexer(CFamilyLexer):
|
|||
(r'char(16_t|32_t)\b', Keyword.Type),
|
||||
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
|
||||
# C++11 raw strings
|
||||
(r'R"\(', String, 'rawstring'),
|
||||
(r'(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
|
||||
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
|
||||
String, String.Delimiter, String)),
|
||||
# C++11 UTF-8/16/32 strings
|
||||
(r'(u8|u|U)(")', bygroups(String.Affix, String), 'string'),
|
||||
inherit,
|
||||
],
|
||||
'root': [
|
||||
|
@ -234,11 +243,6 @@ class CppLexer(CFamilyLexer):
|
|||
# template specification
|
||||
(r'\s*(?=>)', Text, '#pop'),
|
||||
],
|
||||
'rawstring': [
|
||||
(r'\)"', String, '#pop'),
|
||||
(r'[^)]+', String),
|
||||
(r'\)', String),
|
||||
],
|
||||
}
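
The delimiter-aware raw-string rule added above replaces the old 'rawstring' state that is
removed further down. A minimal sketch of what the lexer now tokenizes (the C++ snippet is
mine, and it assumes the package is importable as pygments):

    from pygments.lexers import CppLexer

    snippet = 'auto s = R"tag(no \\n escapes in here)tag"; auto u = u8"text";'
    for token_type, value in CppLexer().get_tokens(snippet):
        print(token_type, repr(value))
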
|
||||
|
||||
def analyse_text(text):
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for other C-like languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -105,7 +105,7 @@ class ClayLexer(RegexLexer):
|
|||
tokens = {
|
||||
'root': [
|
||||
(r'\s', Text),
|
||||
(r'//.*?$', Comment.Singleline),
|
||||
(r'//.*?$', Comment.Single),
|
||||
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
|
||||
(r'\b(public|private|import|as|record|variant|instance'
|
||||
r'|define|overload|default|external|alias'
|
||||
|
@ -427,115 +427,115 @@ class ArduinoLexer(CppLexer):
|
|||
filenames = ['*.ino']
|
||||
mimetypes = ['text/x-arduino']
|
||||
|
||||
# Language constants
|
||||
constants = set(('DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE',
|
||||
'REPORT_DIGITAL', 'REPORT_ANALOG', 'INPUT_PULLUP',
|
||||
'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET', 'LED_BUILTIN',
|
||||
'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL',
|
||||
'DEFAULT', 'OUTPUT', 'INPUT', 'HIGH', 'LOW'))
|
||||
|
||||
# Language sketch main structure functions
|
||||
structure = set(('setup', 'loop'))
|
||||
|
||||
# Language variable types
|
||||
storage = set(('boolean', 'const', 'byte', 'word', 'string', 'String', 'array'))
|
||||
# Language operators
|
||||
operators = set(('not', 'or', 'and', 'xor'))
|
||||
|
||||
# Language 'variables'
|
||||
variables = set((
|
||||
'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
|
||||
'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET',
|
||||
'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH',
|
||||
'LOW', 'INPUT', 'OUTPUT', 'INPUT_PULLUP', 'LED_BUILTIN', 'true', 'false',
|
||||
'void', 'boolean', 'char', 'unsigned char', 'byte', 'int', 'unsigned int',
|
||||
'word', 'long', 'unsigned long', 'short', 'float', 'double', 'string', 'String',
|
||||
'array', 'static', 'volatile', 'const', 'boolean', 'byte', 'word', 'string',
|
||||
'String', 'array', 'int', 'float', 'private', 'char', 'virtual', 'operator',
|
||||
'sizeof', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int8_t', 'int16_t',
|
||||
'int32_t', 'int64_t', 'dynamic_cast', 'typedef', 'const_cast', 'const',
|
||||
'struct', 'static_cast', 'union', 'unsigned', 'long', 'volatile', 'static',
|
||||
'protected', 'bool', 'public', 'friend', 'auto', 'void', 'enum', 'extern',
|
||||
'class', 'short', 'reinterpret_cast', 'double', 'register', 'explicit',
|
||||
'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary',
|
||||
'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
|
||||
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
|
||||
'atomic_llong', 'atomic_ullong', 'PROGMEM'))
|
||||
|
||||
# Language shipped functions and class ( )
|
||||
functions = set(('KeyboardController', 'MouseController', 'SoftwareSerial',
|
||||
'EthernetServer', 'EthernetClient', 'LiquidCrystal',
|
||||
'RobotControl', 'GSMVoiceCall', 'EthernetUDP', 'EsploraTFT',
|
||||
'HttpClient', 'RobotMotor', 'WiFiClient', 'GSMScanner',
|
||||
'FileSystem', 'Scheduler', 'GSMServer', 'YunClient', 'YunServer',
|
||||
'IPAddress', 'GSMClient', 'GSMModem', 'Keyboard', 'Ethernet',
|
||||
'Console', 'GSMBand', 'Esplora', 'Stepper', 'Process',
|
||||
'WiFiUDP', 'GSM_SMS', 'Mailbox', 'USBHost', 'Firmata', 'PImage',
|
||||
'Client', 'Server', 'GSMPIN', 'FileIO', 'Bridge', 'Serial',
|
||||
'EEPROM', 'Stream', 'Mouse', 'Audio', 'Servo', 'File', 'Task',
|
||||
'GPRS', 'WiFi', 'Wire', 'TFT', 'GSM', 'SPI', 'SD',
|
||||
'runShellCommandAsynchronously', 'analogWriteResolution',
|
||||
'retrieveCallingNumber', 'printFirmwareVersion',
|
||||
'analogReadResolution', 'sendDigitalPortPair',
|
||||
'noListenOnLocalhost', 'readJoystickButton', 'setFirmwareVersion',
|
||||
'readJoystickSwitch', 'scrollDisplayRight', 'getVoiceCallStatus',
|
||||
'scrollDisplayLeft', 'writeMicroseconds', 'delayMicroseconds',
|
||||
'beginTransmission', 'getSignalStrength', 'runAsynchronously',
|
||||
'getAsynchronously', 'listenOnLocalhost', 'getCurrentCarrier',
|
||||
'readAccelerometer', 'messageAvailable', 'sendDigitalPorts',
|
||||
'lineFollowConfig', 'countryNameWrite', 'runShellCommand',
|
||||
'readStringUntil', 'rewindDirectory', 'readTemperature',
|
||||
'setClockDivider', 'readLightSensor', 'endTransmission',
|
||||
'analogReference', 'detachInterrupt', 'countryNameRead',
|
||||
'attachInterrupt', 'encryptionType', 'readBytesUntil',
|
||||
'robotNameWrite', 'readMicrophone', 'robotNameRead', 'cityNameWrite',
|
||||
'userNameWrite', 'readJoystickY', 'readJoystickX', 'mouseReleased',
|
||||
'openNextFile', 'scanNetworks', 'noInterrupts', 'digitalWrite',
|
||||
'beginSpeaker', 'mousePressed', 'isActionDone', 'mouseDragged',
|
||||
'displayLogos', 'noAutoscroll', 'addParameter', 'remoteNumber',
|
||||
'getModifiers', 'keyboardRead', 'userNameRead', 'waitContinue',
|
||||
'processInput', 'parseCommand', 'printVersion', 'readNetworks',
|
||||
'writeMessage', 'blinkVersion', 'cityNameRead', 'readMessage',
|
||||
'setDataMode', 'parsePacket', 'isListening', 'setBitOrder',
|
||||
'beginPacket', 'isDirectory', 'motorsWrite', 'drawCompass',
|
||||
'digitalRead', 'clearScreen', 'serialEvent', 'rightToLeft',
|
||||
'setTextSize', 'leftToRight', 'requestFrom', 'keyReleased',
|
||||
'compassRead', 'analogWrite', 'interrupts', 'WiFiServer',
|
||||
'disconnect', 'playMelody', 'parseFloat', 'autoscroll',
|
||||
'getPINUsed', 'setPINUsed', 'setTimeout', 'sendAnalog',
|
||||
'readSlider', 'analogRead', 'beginWrite', 'createChar',
|
||||
'motorsStop', 'keyPressed', 'tempoWrite', 'readButton',
|
||||
'subnetMask', 'debugPrint', 'macAddress', 'writeGreen',
|
||||
'randomSeed', 'attachGPRS', 'readString', 'sendString',
|
||||
'remotePort', 'releaseAll', 'mouseMoved', 'background',
|
||||
'getXChange', 'getYChange', 'answerCall', 'getResult',
|
||||
'voiceCall', 'endPacket', 'constrain', 'getSocket', 'writeJSON',
|
||||
'getButton', 'available', 'connected', 'findUntil', 'readBytes',
|
||||
'exitValue', 'readGreen', 'writeBlue', 'startLoop', 'IPAddress',
|
||||
'isPressed', 'sendSysex', 'pauseMode', 'gatewayIP', 'setCursor',
|
||||
'getOemKey', 'tuneWrite', 'noDisplay', 'loadImage', 'switchPIN',
|
||||
'onRequest', 'onReceive', 'changePIN', 'playFile', 'noBuffer',
|
||||
'parseInt', 'overflow', 'checkPIN', 'knobRead', 'beginTFT',
|
||||
'bitClear', 'updateIR', 'bitWrite', 'position', 'writeRGB',
|
||||
'highByte', 'writeRed', 'setSpeed', 'readBlue', 'noStroke',
|
||||
'remoteIP', 'transfer', 'shutdown', 'hangCall', 'beginSMS',
|
||||
'endWrite', 'attached', 'maintain', 'noCursor', 'checkReg',
|
||||
'checkPUK', 'shiftOut', 'isValid', 'shiftIn', 'pulseIn',
|
||||
'connect', 'println', 'localIP', 'pinMode', 'getIMEI',
|
||||
'display', 'noBlink', 'process', 'getBand', 'running', 'beginSD',
|
||||
'drawBMP', 'lowByte', 'setBand', 'release', 'bitRead', 'prepare',
|
||||
'pointTo', 'readRed', 'setMode', 'noFill', 'remove', 'listen',
|
||||
'stroke', 'detach', 'attach', 'noTone', 'exists', 'buffer',
|
||||
'height', 'bitSet', 'circle', 'config', 'cursor', 'random',
|
||||
'IRread', 'sizeof', 'setDNS', 'endSMS', 'getKey', 'micros',
|
||||
'millis', 'begin', 'print', 'write', 'ready', 'flush', 'width',
|
||||
'isPIN', 'blink', 'clear', 'press', 'mkdir', 'rmdir', 'close',
|
||||
'point', 'yield', 'image', 'float', 'BSSID', 'click', 'delay',
|
||||
'read', 'text', 'move', 'peek', 'beep', 'rect', 'line', 'open',
|
||||
'seek', 'fill', 'size', 'turn', 'stop', 'home', 'find', 'char',
|
||||
'byte', 'step', 'word', 'long', 'tone', 'sqrt', 'RSSI', 'SSID',
|
||||
'end', 'bit', 'tan', 'cos', 'sin', 'pow', 'map', 'abs', 'max',
|
||||
'min', 'int', 'get', 'run', 'put'))
|
||||
|
||||
functions = set((
|
||||
'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
|
||||
'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
|
||||
'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient',
|
||||
'GSMScanner', 'FileSystem', 'Scheduler', 'GSMServer', 'YunClient', 'YunServer',
|
||||
'IPAddress', 'GSMClient', 'GSMModem', 'Keyboard', 'Ethernet', 'Console',
|
||||
'GSMBand', 'Esplora', 'Stepper', 'Process', 'WiFiUDP', 'GSM_SMS', 'Mailbox',
|
||||
'USBHost', 'Firmata', 'PImage', 'Client', 'Server', 'GSMPIN', 'FileIO',
|
||||
'Bridge', 'Serial', 'EEPROM', 'Stream', 'Mouse', 'Audio', 'Servo', 'File',
|
||||
'Task', 'GPRS', 'WiFi', 'Wire', 'TFT', 'GSM', 'SPI', 'SD',
|
||||
'runShellCommandAsynchronously', 'analogWriteResolution',
|
||||
'retrieveCallingNumber', 'printFirmwareVersion', 'analogReadResolution',
|
||||
'sendDigitalPortPair', 'noListenOnLocalhost', 'readJoystickButton',
|
||||
'setFirmwareVersion', 'readJoystickSwitch', 'scrollDisplayRight',
|
||||
'getVoiceCallStatus', 'scrollDisplayLeft', 'writeMicroseconds',
|
||||
'delayMicroseconds', 'beginTransmission', 'getSignalStrength',
|
||||
'runAsynchronously', 'getAsynchronously', 'listenOnLocalhost',
|
||||
'getCurrentCarrier', 'readAccelerometer', 'messageAvailable',
|
||||
'sendDigitalPorts', 'lineFollowConfig', 'countryNameWrite', 'runShellCommand',
|
||||
'readStringUntil', 'rewindDirectory', 'readTemperature', 'setClockDivider',
|
||||
'readLightSensor', 'endTransmission', 'analogReference', 'detachInterrupt',
|
||||
'countryNameRead', 'attachInterrupt', 'encryptionType', 'readBytesUntil',
|
||||
'robotNameWrite', 'readMicrophone', 'robotNameRead', 'cityNameWrite',
|
||||
'userNameWrite', 'readJoystickY', 'readJoystickX', 'mouseReleased',
|
||||
'openNextFile', 'scanNetworks', 'noInterrupts', 'digitalWrite', 'beginSpeaker',
|
||||
'mousePressed', 'isActionDone', 'mouseDragged', 'displayLogos', 'noAutoscroll',
|
||||
'addParameter', 'remoteNumber', 'getModifiers', 'keyboardRead', 'userNameRead',
|
||||
'waitContinue', 'processInput', 'parseCommand', 'printVersion', 'readNetworks',
|
||||
'writeMessage', 'blinkVersion', 'cityNameRead', 'readMessage', 'setDataMode',
|
||||
'parsePacket', 'isListening', 'setBitOrder', 'beginPacket', 'isDirectory',
|
||||
'motorsWrite', 'drawCompass', 'digitalRead', 'clearScreen', 'serialEvent',
|
||||
'rightToLeft', 'setTextSize', 'leftToRight', 'requestFrom', 'keyReleased',
|
||||
'compassRead', 'analogWrite', 'interrupts', 'WiFiServer', 'disconnect',
|
||||
'playMelody', 'parseFloat', 'autoscroll', 'getPINUsed', 'setPINUsed',
|
||||
'setTimeout', 'sendAnalog', 'readSlider', 'analogRead', 'beginWrite',
|
||||
'createChar', 'motorsStop', 'keyPressed', 'tempoWrite', 'readButton',
|
||||
'subnetMask', 'debugPrint', 'macAddress', 'writeGreen', 'randomSeed',
|
||||
'attachGPRS', 'readString', 'sendString', 'remotePort', 'releaseAll',
|
||||
'mouseMoved', 'background', 'getXChange', 'getYChange', 'answerCall',
|
||||
'getResult', 'voiceCall', 'endPacket', 'constrain', 'getSocket', 'writeJSON',
|
||||
'getButton', 'available', 'connected', 'findUntil', 'readBytes', 'exitValue',
|
||||
'readGreen', 'writeBlue', 'startLoop', 'IPAddress', 'isPressed', 'sendSysex',
|
||||
'pauseMode', 'gatewayIP', 'setCursor', 'getOemKey', 'tuneWrite', 'noDisplay',
|
||||
'loadImage', 'switchPIN', 'onRequest', 'onReceive', 'changePIN', 'playFile',
|
||||
'noBuffer', 'parseInt', 'overflow', 'checkPIN', 'knobRead', 'beginTFT',
|
||||
'bitClear', 'updateIR', 'bitWrite', 'position', 'writeRGB', 'highByte',
|
||||
'writeRed', 'setSpeed', 'readBlue', 'noStroke', 'remoteIP', 'transfer',
|
||||
'shutdown', 'hangCall', 'beginSMS', 'endWrite', 'attached', 'maintain',
|
||||
'noCursor', 'checkReg', 'checkPUK', 'shiftOut', 'isValid', 'shiftIn', 'pulseIn',
|
||||
'connect', 'println', 'localIP', 'pinMode', 'getIMEI', 'display', 'noBlink',
|
||||
'process', 'getBand', 'running', 'beginSD', 'drawBMP', 'lowByte', 'setBand',
|
||||
'release', 'bitRead', 'prepare', 'pointTo', 'readRed', 'setMode', 'noFill',
|
||||
'remove', 'listen', 'stroke', 'detach', 'attach', 'noTone', 'exists', 'buffer',
|
||||
'height', 'bitSet', 'circle', 'config', 'cursor', 'random', 'IRread', 'setDNS',
|
||||
'endSMS', 'getKey', 'micros', 'millis', 'begin', 'print', 'write', 'ready',
|
||||
'flush', 'width', 'isPIN', 'blink', 'clear', 'press', 'mkdir', 'rmdir', 'close',
|
||||
'point', 'yield', 'image', 'BSSID', 'click', 'delay', 'read', 'text', 'move',
|
||||
'peek', 'beep', 'rect', 'line', 'open', 'seek', 'fill', 'size', 'turn', 'stop',
|
||||
'home', 'find', 'step', 'tone', 'sqrt', 'RSSI', 'SSID', 'end', 'bit', 'tan',
|
||||
'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put',
|
||||
'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
|
||||
'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
|
||||
'isHexadecimalDigit'))
|
||||
|
||||
# do not highlight
|
||||
suppress_highlight = set((
|
||||
'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
|
||||
'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
|
||||
'static_assert', 'thread_local', 'restrict'))
|
||||
|
||||
|
||||
def get_tokens_unprocessed(self, text):
|
||||
for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
|
||||
if token is Name:
|
||||
if value in self.constants:
|
||||
yield index, Keyword.Constant, value
|
||||
elif value in self.functions:
|
||||
yield index, Name.Function, value
|
||||
elif value in self.storage:
|
||||
yield index, Keyword.Type, value
|
||||
else:
|
||||
yield index, token, value
|
||||
elif token is Name.Function:
|
||||
if value in self.structure:
|
||||
yield index, Name.Other, value
|
||||
else:
|
||||
yield index, token, value
|
||||
elif token is Keyword:
|
||||
if value in self.storage:
|
||||
yield index, Keyword.Type, value
|
||||
else:
|
||||
yield index, token, value
|
||||
if value in self.structure:
|
||||
yield index, Name.Builtin, value
|
||||
elif value in self.operators:
|
||||
yield index, Operator, value
|
||||
elif value in self.variables:
|
||||
yield index, Keyword.Reserved, value
|
||||
elif value in self.suppress_highlight:
|
||||
yield index, Name, value
|
||||
elif value in self.functions:
|
||||
yield index, Name.Function, value
|
||||
else:
|
||||
yield index, token, value
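
The override above post-processes CppLexer's stream and reclassifies names found in the
sets defined earlier (constants, builtin functions, storage types, and the setup/loop
structure functions). A quick, illustrative way to observe the remapping; the sample
sketch code is mine:

    from pygments.lexers import ArduinoLexer

    src = "void setup() { pinMode(LED_BUILTIN, OUTPUT); }\nvoid loop() {}\n"
    # 'pinMode' should come out as Name.Function, 'LED_BUILTIN'/'OUTPUT' as
    # Keyword.Constant, and 'setup'/'loop' as Name.Other, per the mapping above.
    for token_type, value in ArduinoLexer().get_tokens(src):
        print(token_type, repr(value))
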
|
||||
|
|
78
wakatime/packages/pygments/lexers/capnproto.py
Normal file
|
@@ -0,0 +1,78 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.capnproto
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexers for the Cap'n Proto schema language.
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, default
|
||||
from pygments.token import Text, Comment, Keyword, Name, Literal
|
||||
|
||||
__all__ = ['CapnProtoLexer']
|
||||
|
||||
|
||||
class CapnProtoLexer(RegexLexer):
|
||||
"""
|
||||
For `Cap'n Proto <https://capnproto.org>`_ source.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'Cap\'n Proto'
|
||||
filenames = ['*.capnp']
|
||||
aliases = ['capnp']
|
||||
|
||||
flags = re.MULTILINE | re.UNICODE
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'#.*?$', Comment.Single),
|
||||
(r'@[0-9a-zA-Z]*', Name.Decorator),
|
||||
(r'=', Literal, 'expression'),
|
||||
(r':', Name.Class, 'type'),
|
||||
(r'\$', Name.Attribute, 'annotation'),
|
||||
(r'(struct|enum|interface|union|import|using|const|annotation|'
|
||||
r'extends|in|of|on|as|with|from|fixed)\b',
|
||||
Keyword),
|
||||
(r'[\w.]+', Name),
|
||||
(r'[^#@=:$\w]+', Text),
|
||||
],
|
||||
'type': [
|
||||
(r'[^][=;,(){}$]+', Name.Class),
|
||||
(r'[[(]', Name.Class, 'parentype'),
|
||||
default('#pop'),
|
||||
],
|
||||
'parentype': [
|
||||
(r'[^][;()]+', Name.Class),
|
||||
(r'[[(]', Name.Class, '#push'),
|
||||
(r'[])]', Name.Class, '#pop'),
|
||||
default('#pop'),
|
||||
],
|
||||
'expression': [
|
||||
(r'[^][;,(){}$]+', Literal),
|
||||
(r'[[(]', Literal, 'parenexp'),
|
||||
default('#pop'),
|
||||
],
|
||||
'parenexp': [
|
||||
(r'[^][;()]+', Literal),
|
||||
(r'[[(]', Literal, '#push'),
|
||||
(r'[])]', Literal, '#pop'),
|
||||
default('#pop'),
|
||||
],
|
||||
'annotation': [
|
||||
(r'[^][;,(){}=:]+', Name.Attribute),
|
||||
(r'[[(]', Name.Attribute, 'annexp'),
|
||||
default('#pop'),
|
||||
],
|
||||
'annexp': [
|
||||
(r'[^][;()]+', Name.Attribute),
|
||||
(r'[[(]', Name.Attribute, '#push'),
|
||||
(r'[])]', Name.Attribute, '#pop'),
|
||||
default('#pop'),
|
||||
],
|
||||
}
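
For reference, a minimal sketch of the new Cap'n Proto lexer on a toy schema. The schema
text and file id are invented; the *.capnp filename pattern comes from the class above:

    from pygments.lexers import get_lexer_for_filename

    lexer = get_lexer_for_filename('schema.capnp')  # resolves to CapnProtoLexer via *.capnp
    schema = '@0xbf5147cbbecf40c1;\nstruct Person {\n  name @0 :Text;\n  age @1 :UInt8;\n}\n'
    for token_type, value in lexer.get_tokens(schema):
        print(token_type, repr(value))
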
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexer for the Chapel language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -42,17 +42,17 @@ class ChapelLexer(RegexLexer):
|
|||
(r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
|
||||
Keyword.Type),
|
||||
(words((
|
||||
'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall',
|
||||
'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum',
|
||||
'except', 'export', 'extern', 'for', 'forall', 'if', 'index',
|
||||
'inline', 'iter', 'label', 'lambda', 'let', 'local', 'new',
|
||||
'noinit', 'on', 'only', 'otherwise', 'pragma', 'private',
|
||||
'public', 'reduce', 'require', 'return', 'scan', 'select',
|
||||
'serial', 'single', 'sparse', 'subdomain', 'sync', 'then',
|
||||
'use', 'when', 'where', 'while', 'with', 'yield', 'zip'),
|
||||
suffix=r'\b'),
|
||||
'align', 'as', 'atomic', 'begin', 'break', 'by', 'cobegin',
|
||||
'coforall', 'continue', 'delete', 'dmapped', 'do', 'domain',
|
||||
'else', 'enum', 'except', 'export', 'extern', 'for', 'forall',
|
||||
'if', 'index', 'inline', 'iter', 'label', 'lambda', 'let',
|
||||
'local', 'new', 'noinit', 'on', 'only', 'otherwise', 'pragma',
|
||||
'private', 'public', 'reduce', 'require', 'return', 'scan',
|
||||
'select', 'serial', 'single', 'sparse', 'subdomain', 'sync',
|
||||
'then', 'use', 'when', 'where', 'while', 'with', 'yield',
|
||||
'zip'), suffix=r'\b'),
|
||||
Keyword),
|
||||
(r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
|
||||
(r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
|
||||
(r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
|
||||
'classname'),
|
||||
|
||||
|
@ -96,6 +96,7 @@ class ChapelLexer(RegexLexer):
|
|||
(r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
|
||||
],
|
||||
'procname': [
|
||||
(r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
|
||||
(r'([a-zA-Z_][\w$]+|\~[a-zA-Z_][\w$]+|[+*/!~%<>=&^|\-]{1,2})',
|
||||
Name.Function, '#pop'),
|
||||
],
|
||||
}
|
||||
|
|
288
wakatime/packages/pygments/lexers/clean.py
Normal file
|
@@ -0,0 +1,288 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.clean
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexer for the Clean language.
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from pygments.lexer import ExtendedRegexLexer, LexerContext, \
|
||||
bygroups, words, include, default
|
||||
from pygments.token import Comment, Keyword, Literal, Name, Number, Operator, \
|
||||
Punctuation, String, Text, Whitespace
|
||||
|
||||
__all__ = ['CleanLexer']
|
||||
|
||||
|
||||
class CleanLexer(ExtendedRegexLexer):
|
||||
"""
|
||||
Lexer for the general purpose, state-of-the-art, pure and lazy functional
|
||||
programming language Clean (http://clean.cs.ru.nl/Clean).
|
||||
|
||||
.. versionadded: 2.2
|
||||
"""
|
||||
name = 'Clean'
|
||||
aliases = ['clean']
|
||||
filenames = ['*.icl', '*.dcl']
|
||||
|
||||
def get_tokens_unprocessed(self, text=None, context=None):
|
||||
ctx = LexerContext(text, 0)
|
||||
ctx.indent = 0
|
||||
return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context=ctx)
|
||||
|
||||
def check_class_not_import(lexer, match, ctx):
|
||||
if match.group(0) == 'import':
|
||||
yield match.start(), Keyword.Namespace, match.group(0)
|
||||
ctx.stack = ctx.stack[:-1] + ['fromimportfunc']
|
||||
else:
|
||||
yield match.start(), Name.Class, match.group(0)
|
||||
ctx.pos = match.end()
|
||||
|
||||
def check_instance_class(lexer, match, ctx):
|
||||
if match.group(0) == 'instance' or match.group(0) == 'class':
|
||||
yield match.start(), Keyword, match.group(0)
|
||||
else:
|
||||
yield match.start(), Name.Function, match.group(0)
|
||||
ctx.stack = ctx.stack + ['fromimportfunctype']
|
||||
ctx.pos = match.end()
|
||||
|
||||
@staticmethod
|
||||
def indent_len(text):
|
||||
# Tabs are four spaces:
|
||||
# https://svn.cs.ru.nl/repos/clean-platform/trunk/doc/STANDARDS.txt
|
||||
text = text.replace('\n', '')
|
||||
return len(text.replace('\t', ' ')), len(text)
|
||||
|
||||
def store_indent(lexer, match, ctx):
|
||||
ctx.indent, _ = CleanLexer.indent_len(match.group(0))
|
||||
ctx.pos = match.end()
|
||||
yield match.start(), Text, match.group(0)
|
||||
|
||||
def check_indent1(lexer, match, ctx):
|
||||
indent, reallen = CleanLexer.indent_len(match.group(0))
|
||||
if indent > ctx.indent:
|
||||
yield match.start(), Whitespace, match.group(0)
|
||||
ctx.pos = match.start() + reallen + 1
|
||||
else:
|
||||
ctx.indent = 0
|
||||
ctx.pos = match.start()
|
||||
ctx.stack = ctx.stack[:-1]
|
||||
yield match.start(), Whitespace, match.group(0)[1:]
|
||||
|
||||
def check_indent2(lexer, match, ctx):
|
||||
indent, reallen = CleanLexer.indent_len(match.group(0))
|
||||
if indent > ctx.indent:
|
||||
yield match.start(), Whitespace, match.group(0)
|
||||
ctx.pos = match.start() + reallen + 1
|
||||
else:
|
||||
ctx.indent = 0
|
||||
ctx.pos = match.start()
|
||||
ctx.stack = ctx.stack[:-2]
|
||||
|
||||
def check_indent3(lexer, match, ctx):
|
||||
indent, reallen = CleanLexer.indent_len(match.group(0))
|
||||
if indent > ctx.indent:
|
||||
yield match.start(), Whitespace, match.group(0)
|
||||
ctx.pos = match.start() + reallen + 1
|
||||
else:
|
||||
ctx.indent = 0
|
||||
ctx.pos = match.start()
|
||||
ctx.stack = ctx.stack[:-3]
|
||||
yield match.start(), Whitespace, match.group(0)[1:]
|
||||
if match.group(0) == '\n\n':
|
||||
ctx.pos = ctx.pos + 1
|
||||
|
||||
def skip(lexer, match, ctx):
|
||||
ctx.stack = ctx.stack[:-1]
|
||||
ctx.pos = match.end()
|
||||
yield match.start(), Comment, match.group(0)
|
||||
|
||||
keywords = ('class', 'instance', 'where', 'with', 'let', 'let!',
|
||||
'in', 'case', 'of', 'infix', 'infixr', 'infixl', 'generic',
|
||||
'derive', 'otherwise', 'code', 'inline')
|
||||
|
||||
tokens = {
|
||||
'common': [
|
||||
(r';', Punctuation, '#pop'),
|
||||
(r'//', Comment, 'singlecomment'),
|
||||
],
|
||||
'root': [
|
||||
# Comments
|
||||
(r'//.*\n', Comment.Single),
|
||||
(r'(?s)/\*\*.*?\*/', Comment.Special),
|
||||
(r'(?s)/\*.*?\*/', Comment.Multi),
|
||||
|
||||
# Modules, imports, etc.
|
||||
(r'\b((?:implementation|definition|system)\s+)?(module)(\s+)([\w`.]+)',
|
||||
bygroups(Keyword.Namespace, Keyword.Namespace, Text, Name.Class)),
|
||||
(r'(?<=\n)import(?=\s)', Keyword.Namespace, 'import'),
|
||||
(r'(?<=\n)from(?=\s)', Keyword.Namespace, 'fromimport'),
|
||||
|
||||
# Keywords
|
||||
# We cannot use (?s)^|(?<=\s) as prefix, so need to repeat this
|
||||
(words(keywords, prefix=r'(?<=\s)', suffix=r'(?=\s)'), Keyword),
|
||||
(words(keywords, prefix=r'^', suffix=r'(?=\s)'), Keyword),
|
||||
|
||||
# Function definitions
|
||||
(r'(?=\{\|)', Whitespace, 'genericfunction'),
|
||||
(r'(?<=\n)([ \t]*)([\w`$()=\-<>~*\^|+&%]+)((?:\s+\w)*)(\s*)(::)',
|
||||
bygroups(store_indent, Name.Function, Keyword.Type, Whitespace,
|
||||
Punctuation),
|
||||
'functiondefargs'),
|
||||
|
||||
# Type definitions
|
||||
(r'(?<=\n)([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'),
|
||||
(r'^([ \t]*)(::)', bygroups(store_indent, Punctuation), 'typedef'),
|
||||
|
||||
# Literals
|
||||
(r'\'\\?.(?<!\\)\'', String.Char),
|
||||
(r'\'\\\d+\'', String.Char),
|
||||
(r'\'\\\\\'', String.Char), # (special case for '\\')
|
||||
(r'[+\-~]?\s*\d+\.\d+(E[+\-~]?\d+)?\b', Number.Float),
|
||||
(r'[+\-~]?\s*0[0-7]\b', Number.Oct),
|
||||
(r'[+\-~]?\s*0x[0-9a-fA-F]\b', Number.Hex),
|
||||
(r'[+\-~]?\s*\d+\b', Number.Integer),
|
||||
(r'"', String.Double, 'doubleqstring'),
|
||||
(words(('True', 'False'), prefix=r'(?<=\s)', suffix=r'(?=\s)'),
|
||||
Literal),
|
||||
|
||||
# Qualified names
|
||||
(r'(\')([\w.]+)(\'\.)',
|
||||
bygroups(Punctuation, Name.Namespace, Punctuation)),
|
||||
|
||||
# Everything else is some name
|
||||
(r'([\w`$%/?@]+\.?)*[\w`$%/?@]+', Name),
|
||||
|
||||
# Punctuation
|
||||
(r'[{}()\[\],:;.#]', Punctuation),
|
||||
(r'[+\-=!<>|&~*\^/]', Operator),
|
||||
(r'\\\\', Operator),
|
||||
|
||||
# Lambda expressions
|
||||
(r'\\.*?(->|\.|=)', Name.Function),
|
||||
|
||||
# Whitespace
|
||||
(r'\s', Whitespace),
|
||||
|
||||
include('common'),
|
||||
],
|
||||
'fromimport': [
|
||||
include('common'),
|
||||
(r'([\w`.]+)', check_class_not_import),
|
||||
(r'\n', Whitespace, '#pop'),
|
||||
(r'\s', Whitespace),
|
||||
],
|
||||
'fromimportfunc': [
|
||||
include('common'),
|
||||
(r'(::)(\s+)([^,\s]+)', bygroups(Punctuation, Text, Keyword.Type)),
|
||||
(r'([\w`$()=\-<>~*\^|+&%/]+)', check_instance_class),
|
||||
(r',', Punctuation),
|
||||
(r'\n', Whitespace, '#pop'),
|
||||
(r'\s', Whitespace),
|
||||
],
|
||||
'fromimportfunctype': [
|
||||
include('common'),
|
||||
(r'[{(\[]', Punctuation, 'combtype'),
|
||||
(r',', Punctuation, '#pop'),
|
||||
(r'[:;.#]', Punctuation),
|
||||
(r'\n', Whitespace, '#pop:2'),
|
||||
(r'[^\S\n]+', Whitespace),
|
||||
(r'\S+', Keyword.Type),
|
||||
],
|
||||
'combtype': [
|
||||
include('common'),
|
||||
(r'[})\]]', Punctuation, '#pop'),
|
||||
(r'[{(\[]', Punctuation, '#pop'),
|
||||
(r'[,:;.#]', Punctuation),
|
||||
(r'\s+', Whitespace),
|
||||
(r'\S+', Keyword.Type),
|
||||
],
|
||||
'import': [
|
||||
include('common'),
|
||||
(words(('from', 'import', 'as', 'qualified'),
|
||||
prefix='(?<=\s)', suffix='(?=\s)'), Keyword.Namespace),
|
||||
(r'[\w`.]+', Name.Class),
|
||||
(r'\n', Whitespace, '#pop'),
|
||||
(r',', Punctuation),
|
||||
(r'[^\S\n]+', Whitespace),
|
||||
],
|
||||
'singlecomment': [
|
||||
(r'(.)(?=\n)', skip),
|
||||
(r'.+(?!\n)', Comment),
|
||||
],
|
||||
'doubleqstring': [
|
||||
(r'[^\\"]+', String.Double),
|
||||
(r'"', String.Double, '#pop'),
|
||||
(r'\\.', String.Double),
|
||||
],
|
||||
'typedef': [
|
||||
include('common'),
|
||||
(r'[\w`]+', Keyword.Type),
|
||||
(r'[:=|(),\[\]{}!*]', Punctuation),
|
||||
(r'->', Punctuation),
|
||||
(r'\n(?=[^\s|])', Whitespace, '#pop'),
|
||||
(r'\s', Whitespace),
|
||||
(r'.', Keyword.Type),
|
||||
],
|
||||
'genericfunction': [
|
||||
include('common'),
|
||||
(r'\{\|', Punctuation),
|
||||
(r'\|\}', Punctuation, '#pop'),
|
||||
(r',', Punctuation),
|
||||
(r'->', Punctuation),
|
||||
(r'(\s+of\s+)(\{)', bygroups(Keyword, Punctuation), 'genericftypes'),
|
||||
(r'\s', Whitespace),
|
||||
(r'[\w`\[\]{}!]+', Keyword.Type),
|
||||
(r'[*()]', Punctuation),
|
||||
],
|
||||
'genericftypes': [
|
||||
include('common'),
|
||||
(r'[\w`]+', Keyword.Type),
|
||||
(r',', Punctuation),
|
||||
(r'\s', Whitespace),
|
||||
(r'\}', Punctuation, '#pop'),
|
||||
],
|
||||
'functiondefargs': [
|
||||
include('common'),
|
||||
(r'\n(\s*)', check_indent1),
|
||||
(r'[!{}()\[\],:;.#]', Punctuation),
|
||||
(r'->', Punctuation, 'functiondefres'),
|
||||
(r'^(?=\S)', Whitespace, '#pop'),
|
||||
(r'\S', Keyword.Type),
|
||||
(r'\s', Whitespace),
|
||||
],
|
||||
'functiondefres': [
|
||||
include('common'),
|
||||
(r'\n(\s*)', check_indent2),
|
||||
(r'^(?=\S)', Whitespace, '#pop:2'),
|
||||
(r'[!{}()\[\],:;.#]', Punctuation),
|
||||
(r'\|', Punctuation, 'functiondefclasses'),
|
||||
(r'\S', Keyword.Type),
|
||||
(r'\s', Whitespace),
|
||||
],
|
||||
'functiondefclasses': [
|
||||
include('common'),
|
||||
(r'\n(\s*)', check_indent3),
|
||||
(r'^(?=\S)', Whitespace, '#pop:3'),
|
||||
(r'[,&]', Punctuation),
|
||||
(r'\[', Punctuation, 'functiondefuniquneq'),
|
||||
(r'[\w`$()=\-<>~*\^|+&%/{}\[\]@]', Name.Function, 'functionname'),
|
||||
(r'\s+', Whitespace),
|
||||
],
|
||||
'functiondefuniquneq': [
|
||||
include('common'),
|
||||
(r'[a-z]+', Keyword.Type),
|
||||
(r'\s+', Whitespace),
|
||||
(r'<=|,', Punctuation),
|
||||
(r'\]', Punctuation, '#pop')
|
||||
],
|
||||
'functionname': [
|
||||
include('common'),
|
||||
(r'[\w`$()=\-<>~*\^|+&%/]+', Name.Function),
|
||||
(r'(?=\{\|)', Punctuation, 'genericfunction'),
|
||||
default('#pop'),
|
||||
]
|
||||
}
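For readers new to Pygments lexers, the sketch below (a made-up MiniLexer, not part of this commit) shows how state tables like the one above are interpreted: each entry is a (regex, token type[, new state]) tuple, include() splices in a shared state, bygroups() assigns one token per capture group, and '#pop' returns to the previous state.

from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Comment, Keyword, Name, Punctuation, Text

class MiniLexer(RegexLexer):
    """Toy lexer illustrating the (regex, token, new-state) convention."""
    name = 'Mini'
    tokens = {
        'common': [
            (r'//.*?$', Comment.Single),    # shared rule, reused via include()
            (r'\s+', Text),
        ],
        'root': [
            include('common'),
            (r'(module)(\s+)(\w+)',         # one token type per capture group
             bygroups(Keyword.Namespace, Text, Name.Class)),
            (r'\{', Punctuation, 'block'),  # push the 'block' state
            (r'\w+', Name),
        ],
        'block': [
            include('common'),
            (r'\}', Punctuation, '#pop'),   # pop back to 'root'
            (r'[;,]', Punctuation),
            (r'\w+', Name),
        ],
    }

for token, value in MiniLexer().get_tokens('module Demo { x; } // done\n'):
    print(token, repr(value))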
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Just export lexer classes previously contained in this module.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -29,5 +29,6 @@ from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
|
|||
from pygments.lexers.ooc import OocLexer
|
||||
from pygments.lexers.felix import FelixLexer
|
||||
from pygments.lexers.nimrod import NimrodLexer
|
||||
from pygments.lexers.crystal import CrystalLexer
|
||||
|
||||
__all__ = []
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for configuration file formats.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -39,8 +39,10 @@ class IniLexer(RegexLexer):
|
|||
(r'[;#].*', Comment.Single),
|
||||
(r'\[.*?\]$', Keyword),
|
||||
(r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
|
||||
bygroups(Name.Attribute, Text, Operator, Text, String))
|
||||
]
|
||||
bygroups(Name.Attribute, Text, Operator, Text, String)),
|
||||
# standalone option, supported by some INI parsers
|
||||
(r'(.+?)$', Name.Attribute),
|
||||
],
|
||||
}
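A quick, hypothetical check of the new standalone-option fallback added above (the sample config is made up):

from pygments.lexers.configs import IniLexer

sample = '[section]\nbare_flag\nname = value\n'
for token, value in IniLexer().get_tokens(sample):
    if value.strip():
        print(token, repr(value))
# 'bare_flag' has no '=', so it should now come out as a single Name.Attribute
# instead of falling through unhighlighted.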
|
||||
|
||||
def analyse_text(text):
|
||||
|
@@ -98,6 +100,8 @@ class PropertiesLexer(RegexLexer):
|
|||
"""
|
||||
Lexer for configuration files in Java's properties format.
|
||||
|
||||
Note: trailing whitespace counts as part of the value as per spec
|
||||
|
||||
.. versionadded:: 1.4
|
||||
"""
|
||||
|
||||
|
@@ -108,10 +112,14 @@ class PropertiesLexer(RegexLexer):
|
|||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'\s+', Text),
|
||||
(r'(?:[;#]|//).*$', Comment),
|
||||
(r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Text, String)),
|
||||
(r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
|
||||
(r'(^ *)([#!].*)', bygroups(Text, Comment)),
|
||||
# More controversial comments
|
||||
(r'(^ *)((?:;|//).*)', bygroups(Text, Comment)),
|
||||
(r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
|
||||
bygroups(Name.Attribute, Text, Operator, Text, String)),
|
||||
(r'\s', Text),
|
||||
],
|
||||
}
|
||||
|
||||
|
@@ -454,7 +462,7 @@ class NginxConfLexer(RegexLexer):
|
|||
"""
|
||||
name = 'Nginx configuration file'
|
||||
aliases = ['nginx']
|
||||
filenames = []
|
||||
filenames = ['nginx.conf']
|
||||
mimetypes = ['text/x-nginx-conf']
|
||||
|
||||
tokens = {
|
||||
|
@@ -598,7 +606,7 @@ class TerraformLexer(RegexLexer):
|
|||
(r'(".*")', bygroups(String.Double)),
|
||||
],
|
||||
'punctuation': [
|
||||
(r'[\[\]\(\),.]', Punctuation),
|
||||
(r'[\[\](),.]', Punctuation),
|
||||
],
|
||||
# Keep this separate from punctuation - we sometimes want to use different
|
||||
# Tokens for { }
|
||||
|
@@ -629,9 +637,8 @@ class TermcapLexer(RegexLexer):
|
|||
.. versionadded:: 2.1
|
||||
"""
|
||||
name = 'Termcap'
|
||||
aliases = ['termcap',]
|
||||
|
||||
filenames = ['termcap', 'termcap.src',]
|
||||
aliases = ['termcap']
|
||||
filenames = ['termcap', 'termcap.src']
|
||||
mimetypes = []
|
||||
|
||||
# NOTE:
|
||||
|
@@ -642,13 +649,13 @@ class TermcapLexer(RegexLexer):
|
|||
tokens = {
|
||||
'root': [
|
||||
(r'^#.*$', Comment),
|
||||
(r'^[^\s#:\|]+', Name.Tag, 'names'),
|
||||
(r'^[^\s#:|]+', Name.Tag, 'names'),
|
||||
],
|
||||
'names': [
|
||||
(r'\n', Text, '#pop'),
|
||||
(r':', Punctuation, 'defs'),
|
||||
(r'\|', Punctuation),
|
||||
(r'[^:\|]+', Name.Attribute),
|
||||
(r'[^:|]+', Name.Attribute),
|
||||
],
|
||||
'defs': [
|
||||
(r'\\\n[ \t]*', Text),
|
||||
|
@@ -676,9 +683,8 @@ class TerminfoLexer(RegexLexer):
|
|||
.. versionadded:: 2.1
|
||||
"""
|
||||
name = 'Terminfo'
|
||||
aliases = ['terminfo',]
|
||||
|
||||
filenames = ['terminfo', 'terminfo.src',]
|
||||
aliases = ['terminfo']
|
||||
filenames = ['terminfo', 'terminfo.src']
|
||||
mimetypes = []
|
||||
|
||||
# NOTE:
|
||||
|
@@ -689,13 +695,13 @@ class TerminfoLexer(RegexLexer):
|
|||
tokens = {
|
||||
'root': [
|
||||
(r'^#.*$', Comment),
|
||||
(r'^[^\s#,\|]+', Name.Tag, 'names'),
|
||||
(r'^[^\s#,|]+', Name.Tag, 'names'),
|
||||
],
|
||||
'names': [
|
||||
(r'\n', Text, '#pop'),
|
||||
(r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
|
||||
(r'\|', Punctuation),
|
||||
(r'[^,\|]+', Name.Attribute),
|
||||
(r'[^,|]+', Name.Attribute),
|
||||
],
|
||||
'defs': [
|
||||
(r'\n[ \t]+', Text),
|
||||
|
@@ -724,8 +730,8 @@ class PkgConfigLexer(RegexLexer):
|
|||
"""
|
||||
|
||||
name = 'PkgConfig'
|
||||
aliases = ['pkgconfig',]
|
||||
filenames = ['*.pc',]
|
||||
aliases = ['pkgconfig']
|
||||
filenames = ['*.pc']
|
||||
mimetypes = []
|
||||
|
||||
tokens = {
|
||||
|
@@ -791,8 +797,8 @@ class PacmanConfLexer(RegexLexer):
|
|||
"""
|
||||
|
||||
name = 'PacmanConf'
|
||||
aliases = ['pacmanconf',]
|
||||
filenames = ['pacman.conf',]
|
||||
aliases = ['pacmanconf']
|
||||
filenames = ['pacman.conf']
|
||||
mimetypes = []
|
||||
|
||||
tokens = {
|
||||
|
@@ -820,7 +826,7 @@ class PacmanConfLexer(RegexLexer):
|
|||
'%u', # url
|
||||
), suffix=r'\b'),
|
||||
Name.Variable),
|
||||
|
||||
|
||||
# fallback
|
||||
(r'.', Text),
|
||||
],
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for misc console output.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
wakatime/packages/pygments/lexers/crystal.py (new file, 393 lines)
@@ -0,0 +1,393 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.crystal
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lexer for Crystal.
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import ExtendedRegexLexer, include, \
|
||||
bygroups, default, LexerContext, words
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation, Error
|
||||
|
||||
__all__ = ['CrystalLexer']
|
||||
|
||||
line_re = re.compile('.*?\n')
|
||||
|
||||
|
||||
CRYSTAL_OPERATORS = [
|
||||
'!=', '!~', '!', '%', '&&', '&', '**', '*', '+', '-', '/', '<=>', '<<', '<=', '<',
|
||||
'===', '==', '=~', '=', '>=', '>>', '>', '[]=', '[]?', '[]', '^', '||', '|', '~'
|
||||
]
|
||||
|
||||
|
||||
class CrystalLexer(ExtendedRegexLexer):
|
||||
"""
|
||||
For `Crystal <http://crystal-lang.org>`_ source code.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'Crystal'
|
||||
aliases = ['cr', 'crystal']
|
||||
filenames = ['*.cr']
|
||||
mimetypes = ['text/x-crystal']
|
||||
|
||||
flags = re.DOTALL | re.MULTILINE
|
||||
|
||||
def heredoc_callback(self, match, ctx):
|
||||
# okay, this is the hardest part of parsing Crystal...
|
||||
# match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
|
||||
|
||||
start = match.start(1)
|
||||
yield start, Operator, match.group(1) # <<-?
|
||||
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
|
||||
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
|
||||
yield match.start(4), String.Heredoc, match.group(4) # quote again
|
||||
|
||||
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
|
||||
outermost = not bool(heredocstack)
|
||||
heredocstack.append((match.group(1) == '<<-', match.group(3)))
|
||||
|
||||
ctx.pos = match.start(5)
|
||||
ctx.end = match.end(5)
|
||||
# this may find other heredocs
|
||||
for i, t, v in self.get_tokens_unprocessed(context=ctx):
|
||||
yield i, t, v
|
||||
ctx.pos = match.end()
|
||||
|
||||
if outermost:
|
||||
# this is the outer heredoc again, now we can process them all
|
||||
for tolerant, hdname in heredocstack:
|
||||
lines = []
|
||||
for match in line_re.finditer(ctx.text, ctx.pos):
|
||||
if tolerant:
|
||||
check = match.group().strip()
|
||||
else:
|
||||
check = match.group().rstrip()
|
||||
if check == hdname:
|
||||
for amatch in lines:
|
||||
yield amatch.start(), String.Heredoc, amatch.group()
|
||||
yield match.start(), String.Delimiter, match.group()
|
||||
ctx.pos = match.end()
|
||||
break
|
||||
else:
|
||||
lines.append(match)
|
||||
else:
|
||||
# end of heredoc not found -- error!
|
||||
for amatch in lines:
|
||||
yield amatch.start(), Error, amatch.group()
|
||||
ctx.end = len(ctx.text)
|
||||
del heredocstack[:]
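# Illustration only (not part of the commit): the heredoc rule near the end of
# the 'root' state below hands matches like this one to heredoc_callback; the
# groups are <<-?, optional opening quote, delimiter name, closing quote, and
# the rest of the line.
import re
heredoc_start = re.compile(
    r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
    re.DOTALL | re.MULTILINE)
print(heredoc_start.search('text = <<-EOS\n  hello\n  EOS\n').groups())
# -> ('<<-', '', 'EOS', '', '\n')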
|
||||
|
||||
def gen_crystalstrings_rules():
|
||||
def intp_regex_callback(self, match, ctx):
|
||||
yield match.start(1), String.Regex, match.group(1) # begin
|
||||
nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
|
||||
for i, t, v in self.get_tokens_unprocessed(context=nctx):
|
||||
yield match.start(3)+i, t, v
|
||||
yield match.start(4), String.Regex, match.group(4) # end[imsx]*
|
||||
ctx.pos = match.end()
|
||||
|
||||
def intp_string_callback(self, match, ctx):
|
||||
yield match.start(1), String.Other, match.group(1)
|
||||
nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
|
||||
for i, t, v in self.get_tokens_unprocessed(context=nctx):
|
||||
yield match.start(3)+i, t, v
|
||||
yield match.start(4), String.Other, match.group(4) # end
|
||||
ctx.pos = match.end()
|
||||
|
||||
states = {}
|
||||
states['strings'] = [
|
||||
(r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
|
||||
(words(CRYSTAL_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
|
||||
(r":'(\\\\|\\'|[^'])*'", String.Symbol),
|
||||
# This allows arbitrary text after '\ for simplicity
|
||||
(r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char),
|
||||
(r':"', String.Symbol, 'simple-sym'),
|
||||
# Crystal doesn't have "symbol:"s but this simplifies function args
|
||||
(r'([a-zA-Z_]\w*)(:)(?!:)', bygroups(String.Symbol, Punctuation)),
|
||||
(r'"', String.Double, 'simple-string'),
|
||||
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
|
||||
]
|
||||
|
||||
# double-quoted string and symbol
|
||||
for name, ttype, end in ('string', String.Double, '"'), \
|
||||
('sym', String.Symbol, '"'), \
|
||||
('backtick', String.Backtick, '`'):
|
||||
states['simple-'+name] = [
|
||||
include('string-escaped' if name == 'sym' else 'string-intp-escaped'),
|
||||
(r'[^\\%s#]+' % end, ttype),
|
||||
(r'[\\#]', ttype),
|
||||
(end, ttype, '#pop'),
|
||||
]
|
||||
|
||||
# braced quoted strings
|
||||
for lbrace, rbrace, bracecc, name in \
|
||||
('\\{', '\\}', '{}', 'cb'), \
|
||||
('\\[', '\\]', '\\[\\]', 'sb'), \
|
||||
('\\(', '\\)', '()', 'pa'), \
|
||||
('<', '>', '<>', 'ab'):
|
||||
states[name+'-intp-string'] = [
|
||||
(r'\\[' + lbrace + ']', String.Other),
|
||||
(lbrace, String.Other, '#push'),
|
||||
(rbrace, String.Other, '#pop'),
|
||||
include('string-intp-escaped'),
|
||||
(r'[\\#' + bracecc + ']', String.Other),
|
||||
(r'[^\\#' + bracecc + ']+', String.Other),
|
||||
]
|
||||
states['strings'].append((r'%' + lbrace, String.Other,
|
||||
name+'-intp-string'))
|
||||
states[name+'-string'] = [
|
||||
(r'\\[\\' + bracecc + ']', String.Other),
|
||||
(lbrace, String.Other, '#push'),
|
||||
(rbrace, String.Other, '#pop'),
|
||||
(r'[\\#' + bracecc + ']', String.Other),
|
||||
(r'[^\\#' + bracecc + ']+', String.Other),
|
||||
]
|
||||
# http://crystal-lang.org/docs/syntax_and_semantics/literals/array.html
|
||||
states['strings'].append((r'%[wi]' + lbrace, String.Other,
|
||||
name+'-string'))
|
||||
states[name+'-regex'] = [
|
||||
(r'\\[\\' + bracecc + ']', String.Regex),
|
||||
(lbrace, String.Regex, '#push'),
|
||||
(rbrace + '[imsx]*', String.Regex, '#pop'),
|
||||
include('string-intp'),
|
||||
(r'[\\#' + bracecc + ']', String.Regex),
|
||||
(r'[^\\#' + bracecc + ']+', String.Regex),
|
||||
]
|
||||
states['strings'].append((r'%r' + lbrace, String.Regex,
|
||||
name+'-regex'))
|
||||
|
||||
# these must come after %<brace>!
|
||||
states['strings'] += [
|
||||
# %r regex
|
||||
(r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)',
|
||||
intp_regex_callback),
|
||||
# regular fancy strings with qsw
|
||||
(r'(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
|
||||
intp_string_callback),
|
||||
# special forms of fancy strings after operators or
|
||||
# in method calls with braces
|
||||
(r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
|
||||
bygroups(Text, String.Other, None)),
|
||||
# and because of fixed width lookbehinds the whole thing a
|
||||
# second time for line startings...
|
||||
(r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
|
||||
bygroups(Text, String.Other, None)),
|
||||
# all regular fancy strings without qsw
|
||||
(r'(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)',
|
||||
intp_string_callback),
|
||||
]
|
||||
|
||||
return states
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'#.*?$', Comment.Single),
|
||||
# keywords
|
||||
(words('''
|
||||
abstract asm as begin break case do else elsif end ensure extend ifdef if
|
||||
include instance_sizeof next of pointerof private protected rescue return
|
||||
require sizeof super then typeof unless until when while with yield
|
||||
'''.split(), suffix=r'\b'), Keyword),
|
||||
(words(['true', 'false', 'nil'], suffix=r'\b'), Keyword.Constant),
|
||||
# start of function, class and module names
|
||||
(r'(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
|
||||
bygroups(Keyword, Text, Name.Namespace)),
|
||||
(r'(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)',
|
||||
bygroups(Keyword, Text, Name.Namespace), 'funcname'),
|
||||
(r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
|
||||
(r'(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
|
||||
bygroups(Keyword, Text, Name.Namespace), 'classname'),
|
||||
(r'(self|out|uninitialized)\b|(is_a|responds_to)\?', Keyword.Pseudo),
|
||||
# macros
|
||||
(words('''
|
||||
debugger record pp assert_responds_to spawn parallel
|
||||
getter setter property delegate def_hash def_equals def_equals_and_hash
|
||||
forward_missing_to
|
||||
'''.split(), suffix=r'\b'), Name.Builtin.Pseudo),
|
||||
(r'getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b', Name.Builtin.Pseudo),
|
||||
# builtins
|
||||
# http://crystal-lang.org/api/toplevel.html
|
||||
(words('''
|
||||
Object Value Struct Reference Proc Class Nil Symbol Enum Void
|
||||
Bool Number Int Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64
|
||||
Float Float32 Float64 Char String
|
||||
Pointer Slice Range Exception Regex
|
||||
Mutex StaticArray Array Hash Set Tuple Deque Box Process File
|
||||
Dir Time Channel Concurrent Scheduler
|
||||
abort at_exit caller delay exit fork future get_stack_top gets
|
||||
lazy loop main p print printf puts
|
||||
raise rand read_line sleep sprintf system with_color
|
||||
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
|
||||
# normal heredocs
|
||||
(r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
|
||||
heredoc_callback),
|
||||
# empty string heredocs
|
||||
(r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
|
||||
(r'__END__', Comment.Preproc, 'end-part'),
|
||||
# multiline regex (after keywords or assignments)
|
||||
(r'(?:^|(?<=[=<>~!:])|'
|
||||
r'(?<=(?:\s|;)when\s)|'
|
||||
r'(?<=(?:\s|;)or\s)|'
|
||||
r'(?<=(?:\s|;)and\s)|'
|
||||
r'(?<=\.index\s)|'
|
||||
r'(?<=\.scan\s)|'
|
||||
r'(?<=\.sub\s)|'
|
||||
r'(?<=\.sub!\s)|'
|
||||
r'(?<=\.gsub\s)|'
|
||||
r'(?<=\.gsub!\s)|'
|
||||
r'(?<=\.match\s)|'
|
||||
r'(?<=(?:\s|;)if\s)|'
|
||||
r'(?<=(?:\s|;)elsif\s)|'
|
||||
r'(?<=^when\s)|'
|
||||
r'(?<=^index\s)|'
|
||||
r'(?<=^scan\s)|'
|
||||
r'(?<=^sub\s)|'
|
||||
r'(?<=^gsub\s)|'
|
||||
r'(?<=^sub!\s)|'
|
||||
r'(?<=^gsub!\s)|'
|
||||
r'(?<=^match\s)|'
|
||||
r'(?<=^if\s)|'
|
||||
r'(?<=^elsif\s)'
|
||||
r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
|
||||
# multiline regex (in method calls or subscripts)
|
||||
(r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
|
||||
# multiline regex (this time the funny no whitespace rule)
|
||||
(r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
|
||||
'multiline-regex'),
|
||||
# lex numbers and ignore following regular expressions which
|
||||
# are division operators in fact (grrrr. i hate that. any
|
||||
# better ideas?)
|
||||
# since pygments 0.7 we also eat a "?" operator after numbers
|
||||
# so that the char operator does not work. Chars are not allowed
|
||||
# there so that you can use the ternary operator.
|
||||
# stupid example:
|
||||
# x>=0?n[x]:""
|
||||
(r'(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
|
||||
bygroups(Number.Oct, Text, Operator)),
|
||||
(r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
|
||||
bygroups(Number.Hex, Text, Operator)),
|
||||
(r'(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
|
||||
bygroups(Number.Bin, Text, Operator)),
|
||||
# 3 separate expressions for floats because any of the 3 optional
|
||||
# parts makes it a float
|
||||
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?'
|
||||
r'(?:_?f[0-9]+)?)(\s*)([/?])?',
|
||||
bygroups(Number.Float, Text, Operator)),
|
||||
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)'
|
||||
r'(?:_?f[0-9]+)?)(\s*)([/?])?',
|
||||
bygroups(Number.Float, Text, Operator)),
|
||||
(r'((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?'
|
||||
r'(?:_?f[0-9]+))(\s*)([/?])?',
|
||||
bygroups(Number.Float, Text, Operator)),
|
||||
(r'(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?',
|
||||
bygroups(Number.Integer, Text, Operator)),
|
||||
# Names
|
||||
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
|
||||
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
|
||||
(r'\$\w+', Name.Variable.Global),
|
||||
(r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
|
||||
(r'\$-[0adFiIlpvw]', Name.Variable.Global),
|
||||
(r'::', Operator),
|
||||
include('strings'),
|
||||
# chars
|
||||
(r'\?(\\[MC]-)*' # modifiers
|
||||
r'(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
|
||||
r'(?!\w)',
|
||||
String.Char),
|
||||
(r'[A-Z][A-Z_]+\b', Name.Constant),
|
||||
# macro expansion
|
||||
(r'\{%', String.Interpol, 'in-macro-control'),
|
||||
(r'\{\{', String.Interpol, 'in-macro-expr'),
|
||||
# attributes
|
||||
(r'(@\[)(\s*)([A-Z]\w*)',
|
||||
bygroups(Operator, Text, Name.Decorator), 'in-attr'),
|
||||
# this is needed because Crystal attributes can look
|
||||
# like keywords (class) or like this: ` ?!?
|
||||
(words(CRYSTAL_OPERATORS, prefix=r'(\.|::)'),
|
||||
bygroups(Operator, Name.Operator)),
|
||||
(r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
|
||||
bygroups(Operator, Name)),
|
||||
# Names can end with [!?] unless it's "!="
|
||||
(r'[a-zA-Z_]\w*(?:[!?](?!=))?', Name),
|
||||
(r'(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|'
|
||||
r'!~|&&?|\|\||\.{1,3})', Operator),
|
||||
(r'[-+/*%=<>&!^|~]=?', Operator),
|
||||
(r'[(){};,/?:\\]', Punctuation),
|
||||
(r'\s+', Text)
|
||||
],
|
||||
'funcname': [
|
||||
(r'(?:([a-zA-Z_]\w*)(\.))?'
|
||||
r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
|
||||
r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
|
||||
bygroups(Name.Class, Operator, Name.Function), '#pop'),
|
||||
default('#pop')
|
||||
],
|
||||
'classname': [
|
||||
(r'[A-Z_]\w*', Name.Class),
|
||||
(r'(\()(\s*)([A-Z_]\w*)(\s*)(\))',
|
||||
bygroups(Punctuation, Text, Name.Class, Text, Punctuation)),
|
||||
default('#pop')
|
||||
],
|
||||
'in-intp': [
|
||||
(r'\{', String.Interpol, '#push'),
|
||||
(r'\}', String.Interpol, '#pop'),
|
||||
include('root'),
|
||||
],
|
||||
'string-intp': [
|
||||
(r'#\{', String.Interpol, 'in-intp'),
|
||||
],
|
||||
'string-escaped': [
|
||||
(r'\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', String.Escape)
|
||||
],
|
||||
'string-intp-escaped': [
|
||||
include('string-intp'),
|
||||
include('string-escaped'),
|
||||
],
|
||||
'interpolated-regex': [
|
||||
include('string-intp'),
|
||||
(r'[\\#]', String.Regex),
|
||||
(r'[^\\#]+', String.Regex),
|
||||
],
|
||||
'interpolated-string': [
|
||||
include('string-intp'),
|
||||
(r'[\\#]', String.Other),
|
||||
(r'[^\\#]+', String.Other),
|
||||
],
|
||||
'multiline-regex': [
|
||||
include('string-intp'),
|
||||
(r'\\\\', String.Regex),
|
||||
(r'\\/', String.Regex),
|
||||
(r'[\\#]', String.Regex),
|
||||
(r'[^\\/#]+', String.Regex),
|
||||
(r'/[imsx]*', String.Regex, '#pop'),
|
||||
],
|
||||
'end-part': [
|
||||
(r'.+', Comment.Preproc, '#pop')
|
||||
],
|
||||
'in-macro-control': [
|
||||
(r'\{%', String.Interpol, '#push'),
|
||||
(r'%\}', String.Interpol, '#pop'),
|
||||
(r'for\b|in\b', Keyword),
|
||||
include('root'),
|
||||
],
|
||||
'in-macro-expr': [
|
||||
(r'\{\{', String.Interpol, '#push'),
|
||||
(r'\}\}', String.Interpol, '#pop'),
|
||||
include('root'),
|
||||
],
|
||||
'in-attr': [
|
||||
(r'\[', Operator, '#push'),
|
||||
(r'\]', Operator, '#pop'),
|
||||
include('root'),
|
||||
],
|
||||
}
|
||||
tokens.update(gen_crystalstrings_rules())
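To exercise the new lexer end to end, something like the following should work ('crystal' is one of the aliases declared above; the Crystal snippet itself is invented):

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

code = 'def greet(name : String)\n  puts "Hello, #{name}!"\nend\n'
print(highlight(code, get_lexer_by_name('crystal'), TerminalFormatter()))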
|
|
@@ -5,11 +5,11 @@
|
|||
|
||||
Lexers for CSound languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import copy, re
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, default, include, using, words
|
||||
from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
|
||||
|
@@ -21,7 +21,7 @@ from pygments.lexers.scripting import LuaLexer
|
|||
|
||||
__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
|
||||
|
||||
newline = (r'((?:;|//).*)*(\n)', bygroups(Comment.Single, Text))
|
||||
newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text))
|
||||
|
||||
|
||||
class CsoundLexer(RegexLexer):
|
||||
|
@@ -177,7 +177,7 @@ class CsoundOrchestraLexer(CsoundLexer):
|
|||
(r'0[xX][a-fA-F0-9]+', Number.Hex),
|
||||
(r'\d+', Number.Integer),
|
||||
(r'"', String, 'single-line string'),
|
||||
(r'{{', String, 'multi-line string'),
|
||||
(r'\{\{', String, 'multi-line string'),
|
||||
(r'[+\-*/%^!=&|<>#~¬]', Operator),
|
||||
(r'[](),?:[]', Punctuation),
|
||||
(words((
|
||||
|
@@ -273,40 +273,40 @@ class CsoundOrchestraLexer(CsoundLexer):
|
|||
(r'[\\"~$%\^\n]', String)
|
||||
],
|
||||
'multi-line string': [
|
||||
(r'}}', String, '#pop'),
|
||||
(r'[^\}]+|\}(?!\})', String)
|
||||
(r'\}\}', String, '#pop'),
|
||||
(r'[^}]+|\}(?!\})', String)
|
||||
],
|
||||
|
||||
'scoreline opcode': [
|
||||
include('whitespace or macro call'),
|
||||
(r'{{', String, 'scoreline'),
|
||||
(r'\{\{', String, 'scoreline'),
|
||||
default('#pop')
|
||||
],
|
||||
'scoreline': [
|
||||
(r'}}', String, '#pop'),
|
||||
(r'([^\}]+)|\}(?!\})', using(CsoundScoreLexer))
|
||||
(r'\}\}', String, '#pop'),
|
||||
(r'([^}]+)|\}(?!\})', using(CsoundScoreLexer))
|
||||
],
|
||||
|
||||
'python opcode': [
|
||||
include('whitespace or macro call'),
|
||||
(r'{{', String, 'python'),
|
||||
(r'\{\{', String, 'python'),
|
||||
default('#pop')
|
||||
],
|
||||
'python': [
|
||||
(r'}}', String, '#pop'),
|
||||
(r'([^\}]+)|\}(?!\})', using(PythonLexer))
|
||||
(r'\}\}', String, '#pop'),
|
||||
(r'([^}]+)|\}(?!\})', using(PythonLexer))
|
||||
],
|
||||
|
||||
'lua opcode': [
|
||||
include('whitespace or macro call'),
|
||||
(r'"', String, 'single-line string'),
|
||||
(r'{{', String, 'lua'),
|
||||
(r'\{\{', String, 'lua'),
|
||||
(r',', Punctuation),
|
||||
default('#pop')
|
||||
],
|
||||
'lua': [
|
||||
(r'}}', String, '#pop'),
|
||||
(r'([^\}]+)|\}(?!\})', using(LuaLexer))
|
||||
(r'\}\}', String, '#pop'),
|
||||
(r'([^}]+)|\}(?!\})', using(LuaLexer))
|
||||
]
|
||||
}
|
||||
|
||||
|
@@ -315,7 +315,7 @@ class CsoundDocumentLexer(RegexLexer):
|
|||
"""
|
||||
For `Csound <http://csound.github.io>`_ documents.
|
||||
|
||||
|
||||
.. versionadded:: 2.1
|
||||
"""
|
||||
|
||||
name = 'Csound Document'
|
||||
|
|
|
@@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for CSS and related stylesheet formats.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@@ -21,6 +21,251 @@ from pygments.util import iteritems
|
|||
__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
|
||||
|
||||
|
||||
# List of vendor prefixes obtained from:
|
||||
# https://www.w3.org/TR/CSS21/syndata.html#vendor-keyword-history
|
||||
_vendor_prefixes = (
|
||||
'-ms-', 'mso-', '-moz-', '-o-', '-xv-', '-atsc-', '-wap-', '-khtml-',
|
||||
'-webkit-', 'prince-', '-ah-', '-hp-', '-ro-', '-rim-', '-tc-',
|
||||
)
|
||||
|
||||
# List of CSS properties obtained from:
|
||||
# https://www.w3.org/Style/CSS/all-properties.en.html
|
||||
# Note: handle --* separately
|
||||
_css_properties = (
|
||||
'align-content', 'align-items', 'align-self', 'alignment-baseline', 'all',
|
||||
'animation', 'animation-delay', 'animation-direction',
|
||||
'animation-duration', 'animation-fill-mode', 'animation-iteration-count',
|
||||
'animation-name', 'animation-play-state', 'animation-timing-function',
|
||||
'appearance', 'azimuth', 'backface-visibility', 'background',
|
||||
'background-attachment', 'background-blend-mode', 'background-clip',
|
||||
'background-color', 'background-image', 'background-origin',
|
||||
'background-position', 'background-repeat', 'background-size',
|
||||
'baseline-shift', 'bookmark-label', 'bookmark-level', 'bookmark-state',
|
||||
'border', 'border-bottom', 'border-bottom-color',
|
||||
'border-bottom-left-radius', 'border-bottom-right-radius',
|
||||
'border-bottom-style', 'border-bottom-width', 'border-boundary',
|
||||
'border-collapse', 'border-color', 'border-image', 'border-image-outset',
|
||||
'border-image-repeat', 'border-image-slice', 'border-image-source',
|
||||
'border-image-width', 'border-left', 'border-left-color',
|
||||
'border-left-style', 'border-left-width', 'border-radius', 'border-right',
|
||||
'border-right-color', 'border-right-style', 'border-right-width',
|
||||
'border-spacing', 'border-style', 'border-top', 'border-top-color',
|
||||
'border-top-left-radius', 'border-top-right-radius', 'border-top-style',
|
||||
'border-top-width', 'border-width', 'bottom', 'box-decoration-break',
|
||||
'box-shadow', 'box-sizing', 'box-snap', 'box-suppress', 'break-after',
|
||||
'break-before', 'break-inside', 'caption-side', 'caret', 'caret-animation',
|
||||
'caret-color', 'caret-shape', 'chains', 'clear', 'clip', 'clip-path',
|
||||
'clip-rule', 'color', 'color-interpolation-filters', 'column-count',
|
||||
'column-fill', 'column-gap', 'column-rule', 'column-rule-color',
|
||||
'column-rule-style', 'column-rule-width', 'column-span', 'column-width',
|
||||
'columns', 'content', 'counter-increment', 'counter-reset', 'counter-set',
|
||||
'crop', 'cue', 'cue-after', 'cue-before', 'cursor', 'direction', 'display',
|
||||
'dominant-baseline', 'elevation', 'empty-cells', 'filter', 'flex',
|
||||
'flex-basis', 'flex-direction', 'flex-flow', 'flex-grow', 'flex-shrink',
|
||||
'flex-wrap', 'float', 'float-defer', 'float-offset', 'float-reference',
|
||||
'flood-color', 'flood-opacity', 'flow', 'flow-from', 'flow-into', 'font',
|
||||
'font-family', 'font-feature-settings', 'font-kerning',
|
||||
'font-language-override', 'font-size', 'font-size-adjust', 'font-stretch',
|
||||
'font-style', 'font-synthesis', 'font-variant', 'font-variant-alternates',
|
||||
'font-variant-caps', 'font-variant-east-asian', 'font-variant-ligatures',
|
||||
'font-variant-numeric', 'font-variant-position', 'font-weight',
|
||||
'footnote-display', 'footnote-policy', 'glyph-orientation-vertical',
|
||||
'grid', 'grid-area', 'grid-auto-columns', 'grid-auto-flow',
|
||||
'grid-auto-rows', 'grid-column', 'grid-column-end', 'grid-column-gap',
|
||||
'grid-column-start', 'grid-gap', 'grid-row', 'grid-row-end',
|
||||
'grid-row-gap', 'grid-row-start', 'grid-template', 'grid-template-areas',
|
||||
'grid-template-columns', 'grid-template-rows', 'hanging-punctuation',
|
||||
'height', 'hyphenate-character', 'hyphenate-limit-chars',
|
||||
'hyphenate-limit-last', 'hyphenate-limit-lines', 'hyphenate-limit-zone',
|
||||
'hyphens', 'image-orientation', 'image-resolution', 'initial-letter',
|
||||
'initial-letter-align', 'initial-letter-wrap', 'isolation',
|
||||
'justify-content', 'justify-items', 'justify-self', 'left',
|
||||
'letter-spacing', 'lighting-color', 'line-break', 'line-grid',
|
||||
'line-height', 'line-snap', 'list-style', 'list-style-image',
|
||||
'list-style-position', 'list-style-type', 'margin', 'margin-bottom',
|
||||
'margin-left', 'margin-right', 'margin-top', 'marker-side',
|
||||
'marquee-direction', 'marquee-loop', 'marquee-speed', 'marquee-style',
|
||||
'mask', 'mask-border', 'mask-border-mode', 'mask-border-outset',
|
||||
'mask-border-repeat', 'mask-border-slice', 'mask-border-source',
|
||||
'mask-border-width', 'mask-clip', 'mask-composite', 'mask-image',
|
||||
'mask-mode', 'mask-origin', 'mask-position', 'mask-repeat', 'mask-size',
|
||||
'mask-type', 'max-height', 'max-lines', 'max-width', 'min-height',
|
||||
'min-width', 'mix-blend-mode', 'motion', 'motion-offset', 'motion-path',
|
||||
'motion-rotation', 'move-to', 'nav-down', 'nav-left', 'nav-right',
|
||||
'nav-up', 'object-fit', 'object-position', 'offset-after', 'offset-before',
|
||||
'offset-end', 'offset-start', 'opacity', 'order', 'orphans', 'outline',
|
||||
'outline-color', 'outline-offset', 'outline-style', 'outline-width',
|
||||
'overflow', 'overflow-style', 'overflow-wrap', 'overflow-x', 'overflow-y',
|
||||
'padding', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
|
||||
'page', 'page-break-after', 'page-break-before', 'page-break-inside',
|
||||
'page-policy', 'pause', 'pause-after', 'pause-before', 'perspective',
|
||||
'perspective-origin', 'pitch', 'pitch-range', 'play-during', 'polar-angle',
|
||||
'polar-distance', 'position', 'presentation-level', 'quotes',
|
||||
'region-fragment', 'resize', 'rest', 'rest-after', 'rest-before',
|
||||
'richness', 'right', 'rotation', 'rotation-point', 'ruby-align',
|
||||
'ruby-merge', 'ruby-position', 'running', 'scroll-snap-coordinate',
|
||||
'scroll-snap-destination', 'scroll-snap-points-x', 'scroll-snap-points-y',
|
||||
'scroll-snap-type', 'shape-image-threshold', 'shape-inside', 'shape-margin',
|
||||
'shape-outside', 'size', 'speak', 'speak-as', 'speak-header',
|
||||
'speak-numeral', 'speak-punctuation', 'speech-rate', 'stress', 'string-set',
|
||||
'tab-size', 'table-layout', 'text-align', 'text-align-last',
|
||||
'text-combine-upright', 'text-decoration', 'text-decoration-color',
|
||||
'text-decoration-line', 'text-decoration-skip', 'text-decoration-style',
|
||||
'text-emphasis', 'text-emphasis-color', 'text-emphasis-position',
|
||||
'text-emphasis-style', 'text-indent', 'text-justify', 'text-orientation',
|
||||
'text-overflow', 'text-shadow', 'text-space-collapse', 'text-space-trim',
|
||||
'text-spacing', 'text-transform', 'text-underline-position', 'text-wrap',
|
||||
'top', 'transform', 'transform-origin', 'transform-style', 'transition',
|
||||
'transition-delay', 'transition-duration', 'transition-property',
|
||||
'transition-timing-function', 'unicode-bidi', 'user-select',
|
||||
'vertical-align', 'visibility', 'voice-balance', 'voice-duration',
|
||||
'voice-family', 'voice-pitch', 'voice-range', 'voice-rate', 'voice-stress',
|
||||
'voice-volume', 'volume', 'white-space', 'widows', 'width', 'will-change',
|
||||
'word-break', 'word-spacing', 'word-wrap', 'wrap-after', 'wrap-before',
|
||||
'wrap-flow', 'wrap-inside', 'wrap-through', 'writing-mode', 'z-index',
|
||||
)
|
||||
|
||||
# List of keyword values obtained from:
|
||||
# http://cssvalues.com/
|
||||
_keyword_values = (
|
||||
'absolute', 'alias', 'all', 'all-petite-caps', 'all-scroll',
|
||||
'all-small-caps', 'allow-end', 'alpha', 'alternate', 'alternate-reverse',
|
||||
'always', 'armenian', 'auto', 'avoid', 'avoid-column', 'avoid-page',
|
||||
'backwards', 'balance', 'baseline', 'below', 'blink', 'block', 'bold',
|
||||
'bolder', 'border-box', 'both', 'bottom', 'box-decoration', 'break-word',
|
||||
'capitalize', 'cell', 'center', 'circle', 'clip', 'clone', 'close-quote',
|
||||
'col-resize', 'collapse', 'color', 'color-burn', 'color-dodge', 'column',
|
||||
'column-reverse', 'compact', 'condensed', 'contain', 'container',
|
||||
'content-box', 'context-menu', 'copy', 'cover', 'crisp-edges', 'crosshair',
|
||||
'currentColor', 'cursive', 'darken', 'dashed', 'decimal',
|
||||
'decimal-leading-zero', 'default', 'descendants', 'difference', 'digits',
|
||||
'disc', 'distribute', 'dot', 'dotted', 'double', 'double-circle', 'e-resize',
|
||||
'each-line', 'ease', 'ease-in', 'ease-in-out', 'ease-out', 'edges',
|
||||
'ellipsis', 'end', 'ew-resize', 'exclusion', 'expanded', 'extra-condensed',
|
||||
'extra-expanded', 'fantasy', 'fill', 'fill-box', 'filled', 'first', 'fixed',
|
||||
'flat', 'flex', 'flex-end', 'flex-start', 'flip', 'force-end', 'forwards',
|
||||
'from-image', 'full-width', 'geometricPrecision', 'georgian', 'groove',
|
||||
'hanging', 'hard-light', 'help', 'hidden', 'hide', 'horizontal', 'hue',
|
||||
'icon', 'infinite', 'inherit', 'initial', 'ink', 'inline', 'inline-block',
|
||||
'inline-flex', 'inline-table', 'inset', 'inside', 'inter-word', 'invert',
|
||||
'isolate', 'italic', 'justify', 'large', 'larger', 'last', 'left',
|
||||
'lighten', 'lighter', 'line-through', 'linear', 'list-item', 'local',
|
||||
'loose', 'lower-alpha', 'lower-greek', 'lower-latin', 'lower-roman',
|
||||
'lowercase', 'ltr', 'luminance', 'luminosity', 'mandatory', 'manipulation',
|
||||
'manual', 'margin-box', 'match-parent', 'medium', 'mixed', 'monospace',
|
||||
'move', 'multiply', 'n-resize', 'ne-resize', 'nesw-resize',
|
||||
'no-close-quote', 'no-drop', 'no-open-quote', 'no-repeat', 'none', 'normal',
|
||||
'not-allowed', 'nowrap', 'ns-resize', 'nw-resize', 'nwse-resize', 'objects',
|
||||
'oblique', 'off', 'on', 'open', 'open-quote', 'optimizeLegibility',
|
||||
'optimizeSpeed', 'outset', 'outside', 'over', 'overlay', 'overline',
|
||||
'padding-box', 'page', 'pan-down', 'pan-left', 'pan-right', 'pan-up',
|
||||
'pan-x', 'pan-y', 'paused', 'petite-caps', 'pixelated', 'pointer',
|
||||
'preserve-3d', 'progress', 'proximity', 'relative', 'repeat',
|
||||
'repeat no-repeat', 'repeat-x', 'repeat-y', 'reverse', 'ridge', 'right',
|
||||
'round', 'row', 'row-resize', 'row-reverse', 'rtl', 'ruby', 'ruby-base',
|
||||
'ruby-base-container', 'ruby-text', 'ruby-text-container', 'run-in',
|
||||
'running', 's-resize', 'sans-serif', 'saturation', 'scale-down', 'screen',
|
||||
'scroll', 'se-resize', 'semi-condensed', 'semi-expanded', 'separate',
|
||||
'serif', 'sesame', 'show', 'sideways', 'sideways-left', 'sideways-right',
|
||||
'slice', 'small', 'small-caps', 'smaller', 'smooth', 'snap', 'soft-light',
|
||||
'solid', 'space', 'space-around', 'space-between', 'spaces', 'square',
|
||||
'start', 'static', 'step-end', 'step-start', 'sticky', 'stretch', 'strict',
|
||||
'stroke-box', 'style', 'sw-resize', 'table', 'table-caption', 'table-cell',
|
||||
'table-column', 'table-column-group', 'table-footer-group',
|
||||
'table-header-group', 'table-row', 'table-row-group', 'text', 'thick',
|
||||
'thin', 'titling-caps', 'to', 'top', 'triangle', 'ultra-condensed',
|
||||
'ultra-expanded', 'under', 'underline', 'unicase', 'unset', 'upper-alpha',
|
||||
'upper-latin', 'upper-roman', 'uppercase', 'upright', 'use-glyph-orientation',
|
||||
'vertical', 'vertical-text', 'view-box', 'visible', 'w-resize', 'wait',
|
||||
'wavy', 'weight', 'weight style', 'wrap', 'wrap-reverse', 'x-large',
|
||||
'x-small', 'xx-large', 'xx-small', 'zoom-in', 'zoom-out',
|
||||
)
|
||||
|
||||
# List of extended color keywords obtained from:
|
||||
# https://drafts.csswg.org/css-color/#named-colors
|
||||
_color_keywords = (
|
||||
'aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige',
|
||||
'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown',
|
||||
'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral',
|
||||
'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan',
|
||||
'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki',
|
||||
'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred',
|
||||
'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray',
|
||||
'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue',
|
||||
'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite',
|
||||
'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod',
|
||||
'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred',
|
||||
'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen',
|
||||
'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan',
|
||||
'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey',
|
||||
'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue',
|
||||
'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow',
|
||||
'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine',
|
||||
'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen',
|
||||
'mediumslateblue', 'mediumspringgreen', 'mediumturquoise',
|
||||
'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin',
|
||||
'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange',
|
||||
'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise',
|
||||
'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum',
|
||||
'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue',
|
||||
'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna',
|
||||
'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow',
|
||||
'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise',
|
||||
'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen',
|
||||
) + ('transparent',)
|
||||
|
||||
# List of other keyword values from other sources:
|
||||
_other_keyword_values = (
|
||||
'above', 'aural', 'behind', 'bidi-override', 'center-left', 'center-right',
|
||||
'cjk-ideographic', 'continuous', 'crop', 'cross', 'embed', 'far-left',
|
||||
'far-right', 'fast', 'faster', 'hebrew', 'high', 'higher', 'hiragana',
|
||||
'hiragana-iroha', 'katakana', 'katakana-iroha', 'landscape', 'left-side',
|
||||
'leftwards', 'level', 'loud', 'low', 'lower', 'message-box', 'middle',
|
||||
'mix', 'narrower', 'once', 'portrait', 'right-side', 'rightwards', 'silent',
|
||||
'slow', 'slower', 'small-caption', 'soft', 'spell-out', 'status-bar',
|
||||
'super', 'text-bottom', 'text-top', 'wider', 'x-fast', 'x-high', 'x-loud',
|
||||
'x-low', 'x-soft', 'yes', 'pre', 'pre-wrap', 'pre-line',
|
||||
)
|
||||
|
||||
# List of functional notation and function keyword values:
|
||||
_functional_notation_keyword_values = (
|
||||
'attr', 'blackness', 'blend', 'blenda', 'blur', 'brightness', 'calc',
|
||||
'circle', 'color-mod', 'contrast', 'counter', 'cubic-bezier', 'device-cmyk',
|
||||
'drop-shadow', 'ellipse', 'gray', 'grayscale', 'hsl', 'hsla', 'hue',
|
||||
'hue-rotate', 'hwb', 'image', 'inset', 'invert', 'lightness',
|
||||
'linear-gradient', 'matrix', 'matrix3d', 'opacity', 'perspective',
|
||||
'polygon', 'radial-gradient', 'rect', 'repeating-linear-gradient',
|
||||
'repeating-radial-gradient', 'rgb', 'rgba', 'rotate', 'rotate3d', 'rotateX',
|
||||
'rotateY', 'rotateZ', 'saturate', 'saturation', 'scale', 'scale3d',
|
||||
'scaleX', 'scaleY', 'scaleZ', 'sepia', 'shade', 'skewX', 'skewY', 'steps',
|
||||
'tint', 'toggle', 'translate', 'translate3d', 'translateX', 'translateY',
|
||||
'translateZ', 'whiteness',
|
||||
)
|
||||
# Note! Handle url(...) separately.
|
||||
|
||||
# List of units obtained from:
|
||||
# https://www.w3.org/TR/css3-values/
|
||||
_angle_units = (
|
||||
'deg', 'grad', 'rad', 'turn',
|
||||
)
|
||||
_frequency_units = (
|
||||
'Hz', 'kHz',
|
||||
)
|
||||
_length_units = (
|
||||
'em', 'ex', 'ch', 'rem',
|
||||
'vh', 'vw', 'vmin', 'vmax',
|
||||
'px', 'mm', 'cm', 'in', 'pt', 'pc', 'q',
|
||||
)
|
||||
_resolution_units = (
|
||||
'dpi', 'dpcm', 'dppx',
|
||||
)
|
||||
_time_units = (
|
||||
's', 'ms',
|
||||
)
|
||||
_all_units = _angle_units + _frequency_units + _length_units + \
|
||||
_resolution_units + _time_units
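These tuples are fed to pygments.lexer.words() (and, for the property names, to a plain '|'.join) to build one large alternation; a small sketch with a made-up three-property subset:

import re
from pygments.lexer import words

pattern = words(('color', 'column-gap', 'columns'), suffix=r'\b').get()
print(pattern)                                 # the generated regex source
print(bool(re.match(pattern, 'columns: 2')))   # True
print(bool(re.match(pattern, 'colour: red')))  # False: not in the tuple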
|
||||
|
||||
|
||||
class CssLexer(RegexLexer):
|
||||
"""
|
||||
For CSS (Cascading Style Sheets).
|
||||
|
@@ -39,10 +284,10 @@ class CssLexer(RegexLexer):
|
|||
(r'\s+', Text),
|
||||
(r'/\*(?:.|\n)*?\*/', Comment),
|
||||
(r'\{', Punctuation, 'content'),
|
||||
(r'\:[\w-]+', Name.Decorator),
|
||||
(r'\.[\w-]+', Name.Class),
|
||||
(r'\#[\w-]+', Name.Namespace),
|
||||
(r'@[\w-]+', Keyword, 'atrule'),
|
||||
(r'(\:{1,2})([\w-]+)', bygroups(Punctuation, Name.Decorator)),
|
||||
(r'(\.)([\w-]+)', bygroups(Punctuation, Name.Class)),
|
||||
(r'(\#)([\w-]+)', bygroups(Punctuation, Name.Namespace)),
|
||||
(r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
|
||||
(r'[\w-]+', Name.Tag),
|
||||
(r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
|
@@ -60,107 +305,81 @@ class CssLexer(RegexLexer):
|
|||
'content': [
|
||||
(r'\s+', Text),
|
||||
(r'\}', Punctuation, '#pop'),
|
||||
(r'url\(.*?\)', String.Other),
|
||||
(r';', Punctuation),
|
||||
(r'^@.*?$', Comment.Preproc),
|
||||
(words((
|
||||
'azimuth', 'background-attachment', 'background-color',
|
||||
'background-image', 'background-position', 'background-repeat',
|
||||
'background', 'border-bottom-color', 'border-bottom-style',
|
||||
'border-bottom-width', 'border-left-color', 'border-left-style',
|
||||
'border-left-width', 'border-right', 'border-right-color',
|
||||
'border-right-style', 'border-right-width', 'border-top-color',
|
||||
'border-top-style', 'border-top-width', 'border-bottom',
|
||||
'border-collapse', 'border-left', 'border-width', 'border-color',
|
||||
'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
|
||||
'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
|
||||
'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
|
||||
'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
|
||||
'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
|
||||
'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
|
||||
'list-style-type', 'list-style-image', 'list-style-position',
|
||||
'list-style', 'margin-bottom', 'margin-left', 'margin-right',
|
||||
'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
|
||||
'min-height', 'min-width', 'opacity', 'orphans', 'outline-color',
|
||||
'outline-style', 'outline-width', 'outline', 'overflow', 'overflow-x',
|
||||
'overflow-y', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
|
||||
'padding', 'page', 'page-break-after', 'page-break-before', 'page-break-inside',
|
||||
'pause-after', 'pause-before', 'pause', 'pitch-range', 'pitch',
|
||||
'play-during', 'position', 'quotes', 'richness', 'right', 'size',
|
||||
'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
|
||||
'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
|
||||
'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
|
||||
'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
|
||||
'widows', 'width', 'word-spacing', 'z-index', 'bottom',
|
||||
'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
|
||||
'behind', 'below', 'bidi-override', 'blink', 'block', 'bolder', 'bold', 'both',
|
||||
'capitalize', 'center-left', 'center-right', 'center', 'circle',
|
||||
'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
|
||||
'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
|
||||
'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
|
||||
'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
|
||||
'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
|
||||
'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
|
||||
'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
|
||||
'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
|
||||
'left-side', 'leftwards', 'left', 'level', 'lighter', 'line-through', 'list-item',
|
||||
'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
|
||||
'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
|
||||
'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
|
||||
'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
|
||||
'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
|
||||
'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
|
||||
'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
|
||||
'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
|
||||
'slower', 'slow', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
|
||||
'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
|
||||
'table-caption', 'table-cell', 'table-column', 'table-column-group',
|
||||
'table-footer-group', 'table-header-group', 'table-row',
|
||||
'table-row-group', 'text-bottom', 'text-top', 'text', 'thick', 'thin',
|
||||
'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
|
||||
'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
|
||||
'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
|
||||
'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
|
||||
Name.Builtin),
|
||||
(words((
|
||||
'indigo', 'gold', 'firebrick', 'indianred', 'yellow', 'darkolivegreen',
|
||||
'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
|
||||
'mediumslateblue', 'black', 'springgreen', 'crimson', 'lightsalmon', 'brown',
|
||||
'turquoise', 'olivedrab', 'cyan', 'silver', 'skyblue', 'gray', 'darkturquoise',
|
||||
'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink', 'teal',
|
||||
'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
|
||||
'violet', 'navy', 'orchid', 'blue', 'ghostwhite', 'honeydew', 'cornflowerblue',
|
||||
'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'red', 'bisque', 'slategray',
|
||||
'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
|
||||
'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'purple', 'lightgrey',
|
||||
'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
|
||||
'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
|
||||
'lightcoral', 'orangered', 'navajowhite', 'lime', 'palegreen', 'burlywood',
|
||||
'seashell', 'mediumspringgreen', 'fuchsia', 'papayawhip', 'blanchedalmond',
|
||||
'peru', 'aquamarine', 'white', 'darkslategray', 'ivory', 'dodgerblue',
|
||||
'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue', 'olive',
|
||||
'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
|
||||
'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
|
||||
'plum', 'aqua', 'darkgoldenrod', 'maroon', 'sandybrown', 'magenta', 'tan',
|
||||
'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
|
||||
'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
|
||||
'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
|
||||
'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
|
||||
'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine', 'green',
|
||||
'blueviolet', 'peachpuff'), suffix=r'\b'),
|
||||
Name.Builtin),
|
||||
|
||||
(words(_vendor_prefixes,), Keyword.Pseudo),
|
||||
(r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
|
||||
bygroups(Keyword, Text, Punctuation), 'value-start'),
|
||||
(r'([a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name, Text, Punctuation),
|
||||
'value-start'),
|
||||
|
||||
(r'/\*(?:.|\n)*?\*/', Comment),
|
||||
],
|
||||
'value-start': [
|
||||
(r'\s+', Text),
|
||||
(words(_vendor_prefixes,), Name.Builtin.Pseudo),
|
||||
include('urls'),
|
||||
(r'('+r'|'.join(_functional_notation_keyword_values)+r')(\()',
|
||||
bygroups(Name.Builtin, Punctuation), 'function-start'),
|
||||
(r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
|
||||
(words(_keyword_values, suffix=r'\b'), Keyword.Constant),
|
||||
(words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
|
||||
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
|
||||
(words(_css_properties, suffix=r'\b'), Keyword), # for transition-property etc.
|
||||
(r'\!important', Comment.Preproc),
|
||||
(r'/\*(?:.|\n)*?\*/', Comment),
|
||||
(r'\#[a-zA-Z0-9]{1,6}', Number),
|
||||
(r'[.-]?[0-9]*[.]?[0-9]+(em|px|pt|pc|in|mm|cm|ex|s)\b', Number),
|
||||
# Separate regex for percentages, as can't do word boundaries with %
|
||||
(r'[.-]?[0-9]*[.]?[0-9]+%', Number),
|
||||
(r'-?[0-9]+', Number),
|
||||
(r'[~^*!%&<>|+=@:,./?-]+', Operator),
|
||||
(r'[\[\]();]+', Punctuation),
|
||||
|
||||
include('numeric-values'),
|
||||
|
||||
(r'[~^*!%&<>|+=@:./?-]+', Operator),
|
||||
(r'[\[\](),]+', Punctuation),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
(r"'(\\\\|\\'|[^'])*'", String.Single),
|
||||
(r'[a-zA-Z_]\w*', Name)
|
||||
]
|
||||
(r'[a-zA-Z_][\w-]*', Name),
|
||||
(r';', Punctuation, '#pop'),
|
||||
(r'\}', Punctuation, '#pop:2'),
|
||||
],
|
||||
'function-start': [
|
||||
(r'\s+', Text),
|
||||
include('urls'),
|
||||
(words(_vendor_prefixes,), Keyword.Pseudo),
|
||||
(words(_keyword_values, suffix=r'\b'), Keyword.Constant),
|
||||
(words(_other_keyword_values, suffix=r'\b'), Keyword.Constant),
|
||||
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
|
||||
|
||||
# function-start may be entered recursively
|
||||
(r'(' + r'|'.join(_functional_notation_keyword_values) + r')(\()',
|
||||
bygroups(Name.Builtin, Punctuation), 'function-start'),
|
||||
(r'([a-zA-Z_][\w-]+)(\()', bygroups(Name.Function, Punctuation), 'function-start'),
|
||||
|
||||
(r'/\*(?:.|\n)*?\*/', Comment),
|
||||
include('numeric-values'),
|
||||
(r'[*+/-]', Operator),
|
||||
(r'[,]', Punctuation),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
(r"'(\\\\|\\'|[^'])*'", String.Single),
|
||||
(r'[a-zA-Z_-]\w*', Name),
|
||||
(r'\)', Punctuation, '#pop'),
|
||||
],
|
||||
'urls': [
|
||||
(r'(url)(\()(".*?")(\))', bygroups(Name.Builtin, Punctuation,
|
||||
String.Double, Punctuation)),
|
||||
(r"(url)(\()('.*?')(\))", bygroups(Name.Builtin, Punctuation,
|
||||
String.Single, Punctuation)),
|
||||
(r'(url)(\()(.*?)(\))', bygroups(Name.Builtin, Punctuation,
|
||||
String.Other, Punctuation)),
|
||||
],
|
||||
'numeric-values': [
|
||||
(r'\#[a-zA-Z0-9]{1,6}', Number.Hex),
|
||||
(r'[+\-]?[0-9]*[.][0-9]+', Number.Float, 'numeric-end'),
|
||||
(r'[+\-]?[0-9]+', Number.Integer, 'numeric-end'),
|
||||
],
|
||||
'numeric-end': [
|
||||
(words(_all_units, suffix=r'\b'), Keyword.Type),
|
||||
(r'%', Keyword.Type),
|
||||
default('#pop'),
|
||||
],
|
||||
}
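A rough, illustrative check of the reworked value handling (the stylesheet is made up; the expected token types follow from the new 'value-start' and 'numeric-end' states above):

from pygments.lexers.css import CssLexer

css = 'a { color: red; margin: 1.5em auto; }'
for token, value in CssLexer().get_tokens(css):
    if value.strip():
        print(token, repr(value))
# 'red' and 'auto' should come out as Keyword.Constant, '1.5' as Number.Float,
# and 'em' as Keyword.Type.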
|
||||
|
||||
|
||||
|
@@ -170,35 +389,7 @@ common_sass_tokens = {
|
|||
(r'[!$][\w-]+', Name.Variable),
|
||||
(r'url\(', String.Other, 'string-url'),
|
||||
(r'[a-z_-][\w-]*(?=\()', Name.Function),
|
||||
(words((
|
||||
'azimuth', 'background-attachment', 'background-color',
|
||||
'background-image', 'background-position', 'background-repeat',
|
||||
'background', 'border-bottom-color', 'border-bottom-style',
|
||||
'border-bottom-width', 'border-left-color', 'border-left-style',
|
||||
'border-left-width', 'border-right', 'border-right-color',
|
||||
'border-right-style', 'border-right-width', 'border-top-color',
|
||||
'border-top-style', 'border-top-width', 'border-bottom',
|
||||
'border-collapse', 'border-left', 'border-width', 'border-color',
|
||||
'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
|
||||
'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
|
||||
'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
|
||||
'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
|
||||
'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
|
||||
'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
|
||||
'list-style-type', 'list-style-image', 'list-style-position',
|
||||
'list-style', 'margin-bottom', 'margin-left', 'margin-right',
|
||||
'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
|
||||
'min-height', 'min-width', 'opacity', 'orphans', 'outline', 'outline-color',
|
||||
'outline-style', 'outline-width', 'overflow', 'padding-bottom',
|
||||
'padding-left', 'padding-right', 'padding-top', 'padding', 'page',
|
||||
'page-break-after', 'page-break-before', 'page-break-inside',
|
||||
'pause-after', 'pause-before', 'pause', 'pitch', 'pitch-range',
|
||||
'play-during', 'position', 'quotes', 'richness', 'right', 'size',
|
||||
'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
|
||||
'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
|
||||
'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
|
||||
'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
|
||||
'widows', 'width', 'word-spacing', 'z-index', 'bottom', 'left',
|
||||
(words(_css_properties + (
|
||||
'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
|
||||
'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
|
||||
'capitalize', 'center-left', 'center-right', 'center', 'circle',
|
||||
|
@@ -229,33 +420,7 @@ common_sass_tokens = {
|
|||
'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
|
||||
'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
|
||||
Name.Constant),
|
||||
(words((
|
||||
'indigo', 'gold', 'firebrick', 'indianred', 'darkolivegreen',
|
||||
'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
|
||||
'mediumslateblue', 'springgreen', 'crimson', 'lightsalmon', 'brown',
|
||||
'turquoise', 'olivedrab', 'cyan', 'skyblue', 'darkturquoise',
|
||||
'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink',
|
||||
'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
|
||||
'violet', 'orchid', 'ghostwhite', 'honeydew', 'cornflowerblue',
|
||||
'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'bisque', 'slategray',
|
||||
'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
|
||||
'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'lightgrey',
|
||||
'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
|
||||
'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
|
||||
'lightcoral', 'orangered', 'navajowhite', 'palegreen', 'burlywood',
|
||||
'seashell', 'mediumspringgreen', 'papayawhip', 'blanchedalmond',
|
||||
'peru', 'aquamarine', 'darkslategray', 'ivory', 'dodgerblue',
|
||||
'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue',
|
||||
'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
|
||||
'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
|
||||
'plum', 'darkgoldenrod', 'sandybrown', 'magenta', 'tan',
|
||||
'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
|
||||
'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
|
||||
'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
|
||||
'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
|
||||
'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine',
|
||||
'blueviolet', 'peachpuff'), suffix=r'\b'),
|
||||
Name.Entity),
|
||||
(words(_color_keywords, suffix=r'\b'), Name.Entity),
|
||||
(words((
|
||||
'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
|
||||
'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
|
||||
|
@ -476,8 +641,8 @@ class ScssLexer(RegexLexer):
|
|||
(r'@[\w-]+', Keyword, 'selector'),
|
||||
(r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
|
||||
# TODO: broken, and prone to infinite loops.
|
||||
#(r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
|
||||
#(r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
|
||||
# (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
|
||||
# (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
|
||||
default('selector'),
|
||||
],
|
||||
|
||||
|
@ -518,7 +683,7 @@ class LessCssLexer(CssLexer):
|
|||
inherit,
|
||||
],
|
||||
'content': [
|
||||
(r'{', Punctuation, '#push'),
|
||||
(r'\{', Punctuation, '#push'),
|
||||
inherit,
|
||||
],
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for D languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Pygments lexers for Dalvik VM-related languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for data file formats.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -14,9 +14,9 @@ import re
|
|||
from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
|
||||
include, bygroups, inherit
|
||||
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
|
||||
Punctuation, Literal
|
||||
Punctuation, Literal, Error
|
||||
|
||||
__all__ = ['YamlLexer', 'JsonLexer', 'JsonLdLexer']
|
||||
__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
|
||||
|
||||
|
||||
class YamlLexerContext(LexerContext):
|
||||
|
@ -247,10 +247,10 @@ class YamlLexer(ExtendedRegexLexer):
|
|||
# tags, anchors, aliases
|
||||
'descriptors': [
|
||||
# a full-form tag
|
||||
(r'!<[\w;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
|
||||
(r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
|
||||
# a tag in the form '!', '!suffix' or '!handle!suffix'
|
||||
(r'!(?:[\w-]+)?'
|
||||
r'(?:![\w;/?:@&=+$,.!~*\'()\[\]%-]+)?', Keyword.Type),
|
||||
(r'!(?:[\w-]+!)?'
|
||||
r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
|
||||
# an anchor
|
||||
(r'&[\w-]+', Name.Label),
|
||||
# an alias
|
||||
|
@ -476,7 +476,7 @@ class JsonLexer(RegexLexer):
|
|||
# comma terminates the attribute but expects more
|
||||
(r',', Punctuation, '#pop'),
|
||||
# a closing bracket terminates the entire object, so pop twice
|
||||
(r'\}', Punctuation, ('#pop', '#pop')),
|
||||
(r'\}', Punctuation, '#pop:2'),
|
||||
],
|
||||
|
||||
# a json object - { attr, attr, ... }
|
||||
|
@ -508,6 +508,31 @@ class JsonLexer(RegexLexer):
|
|||
],
|
||||
}
|
||||
|
||||
|
||||
class JsonBareObjectLexer(JsonLexer):
|
||||
"""
|
||||
For JSON data structures whose top-level object omits the surrounding curly braces.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'JSONBareObject'
|
||||
aliases = ['json-object']
|
||||
filenames = []
|
||||
mimetypes = ['application/json-object']
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'\}', Error),
|
||||
include('objectvalue'),
|
||||
],
|
||||
'objectattribute': [
|
||||
(r'\}', Error),
|
||||
inherit,
|
||||
],
|
||||
}
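A minimal usage sketch of the new JSONBareObject lexer defined above; the brace-less sample input is invented for illustration.

from pygments import highlight
from pygments.lexers.data import JsonBareObjectLexer
from pygments.formatters import TerminalFormatter

# A JSON object body with the surrounding curly braces omitted, which is
# exactly the input shape this lexer targets.
bare = '"name": "pygments", "version": "2.2.0", "tags": ["lexer", "json"]'
print(highlight(bare, JsonBareObjectLexer(), TerminalFormatter()))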
|
||||
|
||||
|
||||
class JsonLdLexer(JsonLexer):
|
||||
"""
|
||||
For `JSON-LD <http://json-ld.org/>`_ linked data.
|
||||
|
|
|
@ -5,15 +5,17 @@
|
|||
|
||||
Lexers for diff/patch formats.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, include, bygroups
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
|
||||
Literal
|
||||
|
||||
__all__ = ['DiffLexer', 'DarcsPatchLexer']
|
||||
__all__ = ['DiffLexer', 'DarcsPatchLexer', 'WDiffLexer']
|
||||
|
||||
|
||||
class DiffLexer(RegexLexer):
|
||||
|
@ -104,3 +106,60 @@ class DarcsPatchLexer(RegexLexer):
|
|||
(r'[^\n\[]+', Generic.Deleted),
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class WDiffLexer(RegexLexer):
|
||||
"""
|
||||
A `wdiff <https://www.gnu.org/software/wdiff/>`_ lexer.
|
||||
|
||||
Note that:
|
||||
|
||||
* it only handles normal wdiff output (produced without options such as ``-l``);
* if the compared files themselves contain "[-", "-]", "{+" or "+}",
  especially unbalanced ones, this lexer may get confused.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'WDiff'
|
||||
aliases = ['wdiff']
|
||||
filenames = ['*.wdiff']
|
||||
mimetypes = []
|
||||
|
||||
flags = re.MULTILINE | re.DOTALL
|
||||
|
||||
# A "[-" that appears after another "[-" and before its "-]" can only be
# assumed to be nested, e.g. in wdiff-of-wdiff output.  There is no way to
# tell whether such markers come from wdiff itself or from the original text.
|
||||
|
||||
ins_op = r"\{\+"
|
||||
ins_cl = r"\+\}"
|
||||
del_op = r"\[\-"
|
||||
del_cl = r"\-\]"
|
||||
normal = r'[^{}[\]+-]+' # for performance
|
||||
tokens = {
|
||||
'root': [
|
||||
(ins_op, Generic.Inserted, 'inserted'),
|
||||
(del_op, Generic.Deleted, 'deleted'),
|
||||
(normal, Text),
|
||||
(r'.', Text),
|
||||
],
|
||||
'inserted': [
|
||||
(ins_op, Generic.Inserted, '#push'),
|
||||
(del_op, Generic.Inserted, '#push'),
|
||||
(del_cl, Generic.Inserted, '#pop'),
|
||||
|
||||
(ins_cl, Generic.Inserted, '#pop'),
|
||||
(normal, Generic.Inserted),
|
||||
(r'.', Generic.Inserted),
|
||||
],
|
||||
'deleted': [
|
||||
(del_op, Generic.Deleted, '#push'),
|
||||
(ins_op, Generic.Deleted, '#push'),
|
||||
(ins_cl, Generic.Deleted, '#pop'),
|
||||
|
||||
(del_cl, Generic.Deleted, '#pop'),
|
||||
(normal, Generic.Deleted),
|
||||
(r'.', Generic.Deleted),
|
||||
],
|
||||
}
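A rough usage sketch for the WDiff lexer added above; the wdiff-style sample text is made up for illustration.

from pygments import highlight
from pygments.lexers.diff import WDiffLexer
from pygments.formatters import TerminalFormatter

# Typical wdiff output: deletions in [-...-], insertions in {+...+}.
sample = "the quick fox [-jumped-] {+jumps+} over the {+very+} lazy dog\n"
print(highlight(sample, WDiffLexer(), TerminalFormatter()))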
|
||||
|
|
|
@ -5,13 +5,13 @@
|
|||
|
||||
Lexers for .net languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
|
||||
using, this, default
|
||||
using, this, default, words
|
||||
from pygments.token import Punctuation, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
|
||||
from pygments.util import get_choice_opt, iteritems
|
||||
|
@ -375,8 +375,8 @@ class VbNetLexer(RegexLexer):
|
|||
filenames = ['*.vb', '*.bas']
|
||||
mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
|
||||
|
||||
uni_name = '[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + \
|
||||
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
|
||||
uni_name = '[_' + uni.combine('Ll', 'Lt', 'Lm', 'Nl') + ']' + \
|
||||
'[' + uni.combine('Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
|
||||
'Cf', 'Mn', 'Mc') + ']*'
|
||||
|
||||
flags = re.MULTILINE | re.IGNORECASE
|
||||
|
@ -394,25 +394,26 @@ class VbNetLexer(RegexLexer):
|
|||
(r'[(){}!#,.:]', Punctuation),
|
||||
(r'Option\s+(Strict|Explicit|Compare)\s+'
|
||||
r'(On|Off|Binary|Text)', Keyword.Declaration),
|
||||
(r'(?<!\.)(AddHandler|Alias|'
|
||||
r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
|
||||
r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
|
||||
r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
|
||||
r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
|
||||
r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
|
||||
r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
|
||||
r'Implements|Inherits|Interface|'
|
||||
r'Let|Lib|Loop|Me|MustInherit|'
|
||||
r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
|
||||
r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
|
||||
r'Operator|Option|Optional|Overloads|Overridable|'
|
||||
r'Overrides|ParamArray|Partial|Private|Protected|'
|
||||
r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
|
||||
r'Return|Select|Set|Shadows|Shared|Single|'
|
||||
r'Static|Step|Stop|SyncLock|Then|'
|
||||
r'Throw|To|True|Try|TryCast|Wend|'
|
||||
r'Using|When|While|Widening|With|WithEvents|'
|
||||
r'WriteOnly)\b', Keyword),
|
||||
(words((
|
||||
'AddHandler', 'Alias', 'ByRef', 'ByVal', 'Call', 'Case',
|
||||
'Catch', 'CBool', 'CByte', 'CChar', 'CDate', 'CDec', 'CDbl',
|
||||
'CInt', 'CLng', 'CObj', 'Continue', 'CSByte', 'CShort', 'CSng',
|
||||
'CStr', 'CType', 'CUInt', 'CULng', 'CUShort', 'Declare',
|
||||
'Default', 'Delegate', 'DirectCast', 'Do', 'Each', 'Else',
|
||||
'ElseIf', 'EndIf', 'Erase', 'Error', 'Event', 'Exit', 'False',
|
||||
'Finally', 'For', 'Friend', 'Get', 'Global', 'GoSub', 'GoTo',
|
||||
'Handles', 'If', 'Implements', 'Inherits', 'Interface', 'Let',
|
||||
'Lib', 'Loop', 'Me', 'MustInherit', 'MustOverride', 'MyBase',
|
||||
'MyClass', 'Narrowing', 'New', 'Next', 'Not', 'Nothing',
|
||||
'NotInheritable', 'NotOverridable', 'Of', 'On', 'Operator',
|
||||
'Option', 'Optional', 'Overloads', 'Overridable', 'Overrides',
|
||||
'ParamArray', 'Partial', 'Private', 'Protected', 'Public',
|
||||
'RaiseEvent', 'ReadOnly', 'ReDim', 'RemoveHandler', 'Resume',
|
||||
'Return', 'Select', 'Set', 'Shadows', 'Shared', 'Single',
|
||||
'Static', 'Step', 'Stop', 'SyncLock', 'Then', 'Throw', 'To',
|
||||
'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While',
|
||||
'Widening', 'With', 'WithEvents', 'WriteOnly'),
|
||||
prefix='(?<!\.)', suffix=r'\b'), Keyword),
|
||||
(r'(?<!\.)End\b', Keyword, 'end'),
|
||||
(r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
|
||||
(r'(?<!\.)(Function|Sub|Property)(\s+)',
|
||||
|
|
|
@ -5,20 +5,20 @@
|
|||
|
||||
Lexers for various domain-specific languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, words, include, default, \
|
||||
this, using, combined
|
||||
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
|
||||
include, default, this, using, combined
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation, Literal, Whitespace
|
||||
|
||||
__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer',
|
||||
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
|
||||
'CrmshLexer', 'ThriftLexer']
|
||||
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
|
||||
|
||||
|
||||
class ProtoBufLexer(RegexLexer):
|
||||
|
@ -36,7 +36,7 @@ class ProtoBufLexer(RegexLexer):
|
|||
tokens = {
|
||||
'root': [
|
||||
(r'[ \t]+', Text),
|
||||
(r'[,;{}\[\]()]', Punctuation),
|
||||
(r'[,;{}\[\]()<>]', Punctuation),
|
||||
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
|
||||
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
|
||||
(words((
|
||||
|
@ -111,8 +111,8 @@ class ThriftLexer(RegexLexer):
|
|||
include('keywords'),
|
||||
include('numbers'),
|
||||
(r'[&=]', Operator),
|
||||
(r'[:;\,\{\}\(\)\<>\[\]]', Punctuation),
|
||||
(r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name),
|
||||
(r'[:;,{}()<>\[\]]', Punctuation),
|
||||
(r'[a-zA-Z_](\.\w|\w)*', Name),
|
||||
],
|
||||
'whitespace': [
|
||||
(r'\n', Text.Whitespace),
|
||||
|
@ -135,7 +135,7 @@ class ThriftLexer(RegexLexer):
|
|||
(r'[^\\\'\n]+', String.Single),
|
||||
],
|
||||
'namespace': [
|
||||
(r'[a-z\*](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*', Name.Namespace, '#pop'),
|
||||
(r'[a-z*](\.\w|\w)*', Name.Namespace, '#pop'),
|
||||
default('#pop'),
|
||||
],
|
||||
'class': [
|
||||
|
@ -156,7 +156,7 @@ class ThriftLexer(RegexLexer):
|
|||
Keyword.Namespace),
|
||||
(words((
|
||||
'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
|
||||
'string', 'binary', 'void', 'map', 'list', 'set', 'slist',
|
||||
'string', 'binary', 'map', 'list', 'set', 'slist',
|
||||
'senum'), suffix=r'\b'),
|
||||
Keyword.Type),
|
||||
(words((
|
||||
|
@ -581,7 +581,7 @@ class PanLexer(RegexLexer):
|
|||
'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
|
||||
'prefix', 'unique', 'object', 'foreach', 'include', 'template',
|
||||
'function', 'variable', 'structure', 'extensible', 'declaration'),
|
||||
prefix=r'\b', suffix=r'\s*\b'),
|
||||
prefix=r'\b', suffix=r'\s*\b'),
|
||||
Keyword),
|
||||
(words((
|
||||
'file_contents', 'format', 'index', 'length', 'match', 'matches',
|
||||
|
@ -593,7 +593,7 @@ class PanLexer(RegexLexer):
|
|||
'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
|
||||
'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
|
||||
'path_exists', 'if_exists', 'return', 'value'),
|
||||
prefix=r'\b', suffix=r'\s*\b'),
|
||||
prefix=r'\b', suffix=r'\s*\b'),
|
||||
Name.Builtin),
|
||||
(r'#.*', Comment),
|
||||
(r'\\[\w\W]', String.Escape),
|
||||
|
@ -692,3 +692,187 @@ class CrmshLexer(RegexLexer):
|
|||
(r'\s+|\n', Whitespace),
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class FlatlineLexer(RegexLexer):
|
||||
"""
|
||||
Lexer for `Flatline <https://github.com/bigmlcom/flatline>`_ expressions.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'Flatline'
|
||||
aliases = ['flatline']
|
||||
filenames = []
|
||||
mimetypes = ['text/x-flatline']
|
||||
|
||||
special_forms = ('let',)
|
||||
|
||||
builtins = (
|
||||
"!=", "*", "+", "-", "<", "<=", "=", ">", ">=", "abs", "acos", "all",
|
||||
"all-but", "all-with-defaults", "all-with-numeric-default", "and",
|
||||
"asin", "atan", "avg", "avg-window", "bin-center", "bin-count", "call",
|
||||
"category-count", "ceil", "cond", "cond-window", "cons", "cos", "cosh",
|
||||
"count", "diff-window", "div", "ensure-value", "ensure-weighted-value",
|
||||
"epoch", "epoch-day", "epoch-fields", "epoch-hour", "epoch-millisecond",
|
||||
"epoch-minute", "epoch-month", "epoch-second", "epoch-weekday",
|
||||
"epoch-year", "exp", "f", "field", "field-prop", "fields", "filter",
|
||||
"first", "floor", "head", "if", "in", "integer", "language", "length",
|
||||
"levenshtein", "linear-regression", "list", "ln", "log", "log10", "map",
|
||||
"matches", "matches?", "max", "maximum", "md5", "mean", "median", "min",
|
||||
"minimum", "missing", "missing-count", "missing?", "missing_count",
|
||||
"mod", "mode", "normalize", "not", "nth", "occurrences", "or",
|
||||
"percentile", "percentile-label", "population", "population-fraction",
|
||||
"pow", "preferred", "preferred?", "quantile-label", "rand", "rand-int",
|
||||
"random-value", "re-quote", "real", "replace", "replace-first", "rest",
|
||||
"round", "row-number", "segment-label", "sha1", "sha256", "sin", "sinh",
|
||||
"sqrt", "square", "standard-deviation", "standard_deviation", "str",
|
||||
"subs", "sum", "sum-squares", "sum-window", "sum_squares", "summary",
|
||||
"summary-no", "summary-str", "tail", "tan", "tanh", "to-degrees",
|
||||
"to-radians", "variance", "vectorize", "weighted-random-value", "window",
|
||||
"winnow", "within-percentiles?", "z-score",
|
||||
)
|
||||
|
||||
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
# whitespaces - usually not relevant
|
||||
(r'[,\s]+', Text),
|
||||
|
||||
# numbers
|
||||
(r'-?\d+\.\d+', Number.Float),
|
||||
(r'-?\d+', Number.Integer),
|
||||
(r'0x-?[a-f\d]+', Number.Hex),
|
||||
|
||||
# strings, symbols and characters
|
||||
(r'"(\\\\|\\"|[^"])*"', String),
|
||||
(r"\\(.|[a-z]+)", String.Char),
|
||||
|
||||
# expression template placeholder
|
||||
(r'_', String.Symbol),
|
||||
|
||||
# highlight the special forms
|
||||
(words(special_forms, suffix=' '), Keyword),
|
||||
|
||||
# highlight the builtins
|
||||
(words(builtins, suffix=' '), Name.Builtin),
|
||||
|
||||
# the remaining functions
|
||||
(r'(?<=\()' + valid_name, Name.Function),
|
||||
|
||||
# find the remaining variables
|
||||
(valid_name, Name.Variable),
|
||||
|
||||
# parentheses
|
||||
(r'(\(|\))', Punctuation),
|
||||
],
|
||||
}
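A short usage sketch for the Flatline lexer defined above; the s-expression is an invented example in Flatline's Lisp-like syntax.

from pygments import highlight
from pygments.lexers.dsls import FlatlineLexer
from pygments.formatters import TerminalFormatter

# A made-up Flatline expression mixing a builtin (field), a comparison
# and string literals.
expr = '(if (> (field "age") 30) "senior" "junior")'
print(highlight(expr, FlatlineLexer(), TerminalFormatter()))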
|
||||
|
||||
|
||||
class SnowballLexer(ExtendedRegexLexer):
|
||||
"""
|
||||
Lexer for `Snowball <http://snowballstem.org/>`_ source code.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
name = 'Snowball'
|
||||
aliases = ['snowball']
|
||||
filenames = ['*.sbl']
|
||||
|
||||
_ws = r'\n\r\t '
|
||||
|
||||
def __init__(self, **options):
|
||||
self._reset_stringescapes()
|
||||
ExtendedRegexLexer.__init__(self, **options)
|
||||
|
||||
def _reset_stringescapes(self):
|
||||
self._start = "'"
|
||||
self._end = "'"
|
||||
|
||||
def _string(do_string_first):
|
||||
def callback(lexer, match, ctx):
|
||||
s = match.start()
|
||||
text = match.group()
|
||||
string = re.compile(r'([^%s]*)(.)' % re.escape(lexer._start)).match
|
||||
escape = re.compile(r'([^%s]*)(.)' % re.escape(lexer._end)).match
|
||||
pos = 0
|
||||
do_string = do_string_first
|
||||
while pos < len(text):
|
||||
if do_string:
|
||||
match = string(text, pos)
|
||||
yield s + match.start(1), String.Single, match.group(1)
|
||||
if match.group(2) == "'":
|
||||
yield s + match.start(2), String.Single, match.group(2)
|
||||
ctx.stack.pop()
|
||||
break
|
||||
yield s + match.start(2), String.Escape, match.group(2)
|
||||
pos = match.end()
|
||||
match = escape(text, pos)
|
||||
yield s + match.start(), String.Escape, match.group()
|
||||
if match.group(2) != lexer._end:
|
||||
ctx.stack[-1] = 'escape'
|
||||
break
|
||||
pos = match.end()
|
||||
do_string = True
|
||||
ctx.pos = s + match.end()
|
||||
return callback
|
||||
|
||||
def _stringescapes(lexer, match, ctx):
|
||||
lexer._start = match.group(3)
|
||||
lexer._end = match.group(5)
|
||||
return bygroups(Keyword.Reserved, Text, String.Escape, Text,
|
||||
String.Escape)(lexer, match, ctx)
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(words(('len', 'lenof'), suffix=r'\b'), Operator.Word),
|
||||
include('root1'),
|
||||
],
|
||||
'root1': [
|
||||
(r'[%s]+' % _ws, Text),
|
||||
(r'\d+', Number.Integer),
|
||||
(r"'", String.Single, 'string'),
|
||||
(r'[()]', Punctuation),
|
||||
(r'/\*[\w\W]*?\*/', Comment.Multiline),
|
||||
(r'//.*', Comment.Single),
|
||||
(r'[!*+\-/<=>]=|[-=]>|<[+-]|[$*+\-/<=>?\[\]]', Operator),
|
||||
(words(('as', 'get', 'hex', 'among', 'define', 'decimal',
|
||||
'backwardmode'), suffix=r'\b'),
|
||||
Keyword.Reserved),
|
||||
(words(('strings', 'booleans', 'integers', 'routines', 'externals',
|
||||
'groupings'), suffix=r'\b'),
|
||||
Keyword.Reserved, 'declaration'),
|
||||
(words(('do', 'or', 'and', 'for', 'hop', 'non', 'not', 'set', 'try',
|
||||
'fail', 'goto', 'loop', 'next', 'test', 'true',
|
||||
'false', 'unset', 'atmark', 'attach', 'delete', 'gopast',
|
||||
'insert', 'repeat', 'sizeof', 'tomark', 'atleast',
|
||||
'atlimit', 'reverse', 'setmark', 'tolimit', 'setlimit',
|
||||
'backwards', 'substring'), suffix=r'\b'),
|
||||
Operator.Word),
|
||||
(words(('size', 'limit', 'cursor', 'maxint', 'minint'),
|
||||
suffix=r'\b'),
|
||||
Name.Builtin),
|
||||
(r'(stringdef\b)([%s]*)([^%s]+)' % (_ws, _ws),
|
||||
bygroups(Keyword.Reserved, Text, String.Escape)),
|
||||
(r'(stringescapes\b)([%s]*)(.)([%s]*)(.)' % (_ws, _ws),
|
||||
_stringescapes),
|
||||
(r'[A-Za-z]\w*', Name),
|
||||
],
|
||||
'declaration': [
|
||||
(r'\)', Punctuation, '#pop'),
|
||||
(words(('len', 'lenof'), suffix=r'\b'), Name,
|
||||
('root1', 'declaration')),
|
||||
include('root1'),
|
||||
],
|
||||
'string': [
|
||||
(r"[^']*'", _string(True)),
|
||||
],
|
||||
'escape': [
|
||||
(r"[^']*'", _string(False)),
|
||||
],
|
||||
}
|
||||
|
||||
def get_tokens_unprocessed(self, text=None, context=None):
|
||||
self._reset_stringescapes()
|
||||
return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
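A sketch of running the Snowball lexer on a toy fragment; the snippet below only exercises the syntax and is not a real stemming algorithm.

from pygments import highlight
from pygments.lexers.dsls import SnowballLexer
from pygments.formatters import HtmlFormatter

# A toy Snowball-style definition, just to exercise strings and keywords.
code = "define stem as ( backwards ( ['ing'] delete ) )\n"
print(highlight(code, SnowballLexer(), HtmlFormatter()))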
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for the Dylan language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for the ECL language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexer for the Eiffel language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexer for the Elm programming language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -46,7 +46,7 @@ class ElmLexer(RegexLexer):
|
|||
'root': [
|
||||
|
||||
# Comments
|
||||
(r'{-', Comment.Multiline, 'comment'),
|
||||
(r'\{-', Comment.Multiline, 'comment'),
|
||||
(r'--.*', Comment.Single),
|
||||
|
||||
# Whitespace
|
||||
|
@ -86,20 +86,20 @@ class ElmLexer(RegexLexer):
|
|||
(validName, Name.Variable),
|
||||
|
||||
# Parens
|
||||
(r'[,\(\)\[\]{}]', Punctuation),
|
||||
(r'[,()\[\]{}]', Punctuation),
|
||||
|
||||
],
|
||||
|
||||
'comment': [
|
||||
(r'-(?!})', Comment.Multiline),
|
||||
(r'{-', Comment.Multiline, 'comment'),
|
||||
(r'-(?!\})', Comment.Multiline),
|
||||
(r'\{-', Comment.Multiline, 'comment'),
|
||||
(r'[^-}]', Comment.Multiline),
|
||||
(r'-}', Comment.Multiline, '#pop'),
|
||||
(r'-\}', Comment.Multiline, '#pop'),
|
||||
],
|
||||
|
||||
'doublequote': [
|
||||
(r'\\u[0-9a-fA-F]\{4}', String.Escape),
|
||||
(r'\\[nrfvb\\\"]', String.Escape),
|
||||
(r'\\u[0-9a-fA-F]{4}', String.Escape),
|
||||
(r'\\[nrfvb\\"]', String.Escape),
|
||||
(r'[^"]', String),
|
||||
(r'"', String, '#pop'),
|
||||
],
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for Erlang.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -82,7 +82,11 @@ class ErlangLexer(RegexLexer):
|
|||
|
||||
variable_re = r'(?:[A-Z_]\w*)'
|
||||
|
||||
escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
|
||||
esc_char_re = r'[bdefnrstv\'"\\]'
|
||||
esc_octal_re = r'[0-7][0-7]?[0-7]?'
|
||||
esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
|
||||
esc_ctrl_re = r'\^[a-zA-Z]'
|
||||
escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'
|
||||
|
||||
macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
|
||||
|
||||
|
@ -112,11 +116,18 @@ class ErlangLexer(RegexLexer):
|
|||
(r'\?'+macro_re, Name.Constant),
|
||||
(r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
|
||||
(r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
|
||||
|
||||
# Erlang script shebang
|
||||
(r'\A#!.+\n', Comment.Hashbang),
|
||||
|
||||
# EEP 43: Maps
|
||||
# http://www.erlang.org/eeps/eep-0043.html
|
||||
(r'#\{', Punctuation, 'map_key'),
|
||||
],
|
||||
'string': [
|
||||
(escape_re, String.Escape),
|
||||
(r'"', String, '#pop'),
|
||||
(r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
|
||||
(r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
|
||||
(r'[^"\\~]+', String),
|
||||
(r'~', String),
|
||||
],
|
||||
|
@ -127,6 +138,17 @@ class ErlangLexer(RegexLexer):
|
|||
bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
|
||||
(atom_re, Name.Entity, '#pop'),
|
||||
],
|
||||
'map_key': [
|
||||
include('root'),
|
||||
(r'=>', Punctuation, 'map_val'),
|
||||
(r':=', Punctuation, 'map_val'),
|
||||
(r'\}', Punctuation, '#pop'),
|
||||
],
|
||||
'map_val': [
|
||||
include('root'),
|
||||
(r',', Punctuation, '#pop'),
|
||||
(r'(?=\})', Punctuation, '#pop'),
|
||||
],
|
||||
}
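The new map_key/map_val states above cover EEP 43 map literals; a small sketch with a made-up Erlang expression shows the kind of input they target.

from pygments import highlight
from pygments.lexers.erlang import ErlangLexer
from pygments.formatters import TerminalFormatter

# A map literal (#{...}) followed by a maps:get/2 call, invented for the demo.
code = 'M = #{name => "joe", age => 57},\nmaps:get(name, M).\n'
print(highlight(code, ErlangLexer(), TerminalFormatter()))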
|
||||
|
||||
|
||||
|
@ -218,11 +240,11 @@ class ElixirLexer(RegexLexer):
|
|||
KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
|
||||
BUILTIN = (
|
||||
'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
|
||||
'quote', 'unquote', 'unquote_splicing', 'throw', 'super'
|
||||
'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
|
||||
)
|
||||
BUILTIN_DECLARATION = (
|
||||
'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
|
||||
'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback'
|
||||
'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
|
||||
)
|
||||
|
||||
BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
|
||||
|
@ -241,7 +263,7 @@ class ElixirLexer(RegexLexer):
|
|||
OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
|
||||
|
||||
PUNCTUATION = (
|
||||
'\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']'
|
||||
'\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
|
||||
)
|
||||
|
||||
def get_tokens_unprocessed(self, text):
|
||||
|
|
|
@ -5,15 +5,16 @@
|
|||
|
||||
Lexers for esoteric languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from pygments.lexer import RegexLexer, include, words
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation, Error, Whitespace
|
||||
Number, Punctuation, Error
|
||||
|
||||
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'BoogieLexer', 'RedcodeLexer', 'CAmkESLexer']
|
||||
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
|
||||
'CapDLLexer', 'AheuiLexer']
|
||||
|
||||
|
||||
class BrainfuckLexer(RegexLexer):
|
||||
|
@ -90,7 +91,7 @@ class CAmkESLexer(RegexLexer):
|
|||
filenames = ['*.camkes', '*.idl4']
|
||||
|
||||
tokens = {
|
||||
'root':[
|
||||
'root': [
|
||||
# C pre-processor directive
|
||||
(r'^\s*#.*\n', Comment.Preproc),
|
||||
|
||||
|
@ -99,21 +100,25 @@ class CAmkESLexer(RegexLexer):
|
|||
(r'/\*(.|\n)*?\*/', Comment),
|
||||
(r'//.*\n', Comment),
|
||||
|
||||
(r'[\[\(\){},\.;=\]]', Punctuation),
|
||||
(r'[\[(){},.;\]]', Punctuation),
|
||||
(r'[~!%^&*+=|?:<>/-]', Operator),
|
||||
|
||||
(words(('assembly', 'attribute', 'component', 'composition',
|
||||
'configuration', 'connection', 'connector', 'consumes',
|
||||
'control', 'dataport', 'Dataport', 'emits', 'event',
|
||||
'Event', 'from', 'group', 'hardware', 'has', 'interface',
|
||||
'Interface', 'maybe', 'procedure', 'Procedure', 'provides',
|
||||
'template', 'to', 'uses'), suffix=r'\b'), Keyword),
|
||||
'control', 'dataport', 'Dataport', 'Dataports', 'emits',
|
||||
'event', 'Event', 'Events', 'export', 'from', 'group',
|
||||
'hardware', 'has', 'interface', 'Interface', 'maybe',
|
||||
'procedure', 'Procedure', 'Procedures', 'provides',
|
||||
'template', 'thread', 'threads', 'to', 'uses', 'with'),
|
||||
suffix=r'\b'), Keyword),
|
||||
|
||||
(words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
|
||||
'float', 'in', 'inout', 'int', 'int16_6', 'int32_t',
|
||||
'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
|
||||
'refin', 'semaphore', 'signed', 'string', 'uint16_t',
|
||||
'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t', 'unsigned',
|
||||
'void'), suffix=r'\b'), Keyword.Type),
|
||||
'refin', 'semaphore', 'signed', 'string', 'struct',
|
||||
'uint16_t', 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t',
|
||||
'unsigned', 'void'),
|
||||
suffix=r'\b'), Keyword.Type),
|
||||
|
||||
# Recognised attributes
|
||||
(r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
|
||||
|
@ -131,6 +136,7 @@ class CAmkESLexer(RegexLexer):
|
|||
(r'-?[\d]+', Number),
|
||||
(r'-?[\d]+\.[\d]+', Number.Float),
|
||||
(r'"[^"]*"', String),
|
||||
(r'[Tt]rue|[Ff]alse', Name.Builtin),
|
||||
|
||||
# Identifiers
|
||||
(r'[a-zA-Z_]\w*', Name),
|
||||
|
@ -138,6 +144,65 @@ class CAmkESLexer(RegexLexer):
|
|||
}
|
||||
|
||||
|
||||
class CapDLLexer(RegexLexer):
|
||||
"""
|
||||
Basic lexer for
|
||||
`CapDL <https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml>`_.
|
||||
|
||||
The source of the primary tool that reads such specifications is available
|
||||
at https://github.com/seL4/capdl/tree/master/capDL-tool. Note that this
|
||||
lexer only supports a subset of the grammar. For example, identifiers can
|
||||
shadow type names, but these instances are currently incorrectly
|
||||
highlighted as types. Supporting this would need a stateful lexer that is
|
||||
considered unnecessarily complex for now.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'CapDL'
|
||||
aliases = ['capdl']
|
||||
filenames = ['*.cdl']
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
# C pre-processor directive
|
||||
(r'^\s*#.*\n', Comment.Preproc),
|
||||
|
||||
# Whitespace, comments
|
||||
(r'\s+', Text),
|
||||
(r'/\*(.|\n)*?\*/', Comment),
|
||||
(r'(//|--).*\n', Comment),
|
||||
|
||||
(r'[<>\[(){},:;=\]]', Punctuation),
|
||||
(r'\.\.', Punctuation),
|
||||
|
||||
(words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
|
||||
'objects'), suffix=r'\b'), Keyword),
|
||||
|
||||
(words(('aep', 'asid_pool', 'cnode', 'ep', 'frame', 'io_device',
|
||||
'io_ports', 'io_pt', 'notification', 'pd', 'pt', 'tcb',
|
||||
'ut', 'vcpu'), suffix=r'\b'), Keyword.Type),
|
||||
|
||||
# Properties
|
||||
(words(('asid', 'addr', 'badge', 'cached', 'dom', 'domainID', 'elf',
|
||||
'fault_ep', 'G', 'guard', 'guard_size', 'init', 'ip',
|
||||
'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
|
||||
'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
|
||||
'ports', 'reply', 'uncached'), suffix=r'\b'),
|
||||
Keyword.Reserved),
|
||||
|
||||
# Literals
|
||||
(r'0[xX][\da-fA-F]+', Number.Hex),
|
||||
(r'\d+(\.\d+)?(k|M)?', Number),
|
||||
(words(('bits',), suffix=r'\b'), Number),
|
||||
(words(('cspace', 'vspace', 'reply_slot', 'caller_slot',
|
||||
'ipc_buffer_slot'), suffix=r'\b'), Number),
|
||||
|
||||
# Identifiers
|
||||
(r'[a-zA-Z_][-@\.\w]*', Name),
|
||||
],
|
||||
}
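A minimal sketch for the CapDL lexer defined above; the spec fragment is an invented toy example, not a real seL4 system description.

from pygments import highlight
from pygments.lexers.esoteric import CapDLLexer
from pygments.formatters import TerminalFormatter

# Toy CapDL-style spec: an architecture line and a couple of objects.
spec = "arch ia32\n\nobjects {\n  my_ep = ep\n  my_tcb = tcb (ip: 0x1000)\n}\n"
print(highlight(spec, CapDLLexer(), TerminalFormatter()))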
|
||||
|
||||
|
||||
class RedcodeLexer(RegexLexer):
|
||||
"""
|
||||
A simple Redcode lexer based on ICWS'94.
|
||||
|
@ -174,46 +239,39 @@ class RedcodeLexer(RegexLexer):
|
|||
}
|
||||
|
||||
|
||||
class BoogieLexer(RegexLexer):
|
||||
class AheuiLexer(RegexLexer):
|
||||
"""
|
||||
For `Boogie <https://boogie.codeplex.com/>`_ source code.
|
||||
Aheui_ Lexer.
|
||||
|
||||
Aheui_ is an esoteric programming language based on the Korean alphabet (Hangul).
|
||||
|
||||
.. _Aheui: http://aheui.github.io/
|
||||
|
||||
.. versionadded:: 2.1
|
||||
"""
|
||||
name = 'Boogie'
|
||||
aliases = ['boogie']
|
||||
filenames = ['*.bpl']
|
||||
|
||||
name = 'Aheui'
|
||||
aliases = ['aheui']
|
||||
filenames = ['*.aheui']
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
# Whitespace and Comments
|
||||
(r'\n', Whitespace),
|
||||
(r'\s+', Whitespace),
|
||||
(r'//[/!](.*?)\n', Comment.Doc),
|
||||
(r'//(.*?)\n', Comment.Single),
|
||||
(r'/\*', Comment.Multiline, 'comment'),
|
||||
|
||||
(words((
|
||||
'axiom', 'break', 'call', 'ensures', 'else', 'exists', 'function',
|
||||
'forall', 'if', 'invariant', 'modifies', 'procedure', 'requires',
|
||||
'then', 'var', 'while'),
|
||||
suffix=r'\b'), Keyword),
|
||||
(words(('const',), suffix=r'\b'), Keyword.Reserved),
|
||||
|
||||
(words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type),
|
||||
include('numbers'),
|
||||
(r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator),
|
||||
(r"([{}():;,.])", Punctuation),
|
||||
# Identifier
|
||||
(r'[a-zA-Z_]\w*', Name),
|
||||
],
|
||||
'comment': [
|
||||
(r'[^*/]+', Comment.Multiline),
|
||||
(r'/\*', Comment.Multiline, '#push'),
|
||||
(r'\*/', Comment.Multiline, '#pop'),
|
||||
(r'[*/]', Comment.Multiline),
|
||||
],
|
||||
'numbers': [
|
||||
(r'[0-9]+', Number.Integer),
|
||||
(u'['
|
||||
u'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
|
||||
u'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
|
||||
u'따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
|
||||
u'라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
|
||||
u'마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
|
||||
u'바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
|
||||
u'빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
|
||||
u'사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
|
||||
u'싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
|
||||
u'자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
|
||||
u'차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
|
||||
u'카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
|
||||
u'타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
|
||||
u'파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
|
||||
u'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
|
||||
u']', Operator),
|
||||
('.', Comment),
|
||||
],
|
||||
}
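A quick sketch showing the Aheui lexer tokenizing Hangul input; the sample characters are arbitrary and not a meaningful Aheui program.

from pygments import lex
from pygments.lexers.esoteric import AheuiLexer

# Arbitrary Hangul syllables, just to see which tokens the lexer emits.
for token, value in lex(u'반반나', AheuiLexer()):
    print(token, repr(value))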
|
||||
|
|
|
@ -4,8 +4,8 @@
|
|||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Pygments lexers for Ezhil language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -16,6 +16,7 @@ from pygments.token import String, Number, Punctuation, Operator
|
|||
|
||||
__all__ = ['EzhilLexer']
|
||||
|
||||
|
||||
class EzhilLexer(RegexLexer):
|
||||
"""
|
||||
Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
|
||||
|
@ -36,13 +37,13 @@ class EzhilLexer(RegexLexer):
|
|||
(r'#.*\n', Comment.Single),
|
||||
(r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
|
||||
(u'இல்', Operator.Word),
|
||||
(words(('assert', 'max', 'min',
|
||||
'நீளம்','சரம்_இடமாற்று','சரம்_கண்டுபிடி',
|
||||
'பட்டியல்','பின்இணை','வரிசைப்படுத்து',
|
||||
'எடு','தலைகீழ்','நீட்டிக்க','நுழைக்க','வை',
|
||||
'கோப்பை_திற','கோப்பை_எழுது','கோப்பை_மூடு',
|
||||
'pi','sin','cos','tan','sqrt','hypot','pow','exp','log','log10'
|
||||
'min','max','exit',
|
||||
(words((u'assert', u'max', u'min',
|
||||
u'நீளம்', u'சரம்_இடமாற்று', u'சரம்_கண்டுபிடி',
|
||||
u'பட்டியல்', u'பின்இணை', u'வரிசைப்படுத்து',
|
||||
u'எடு', u'தலைகீழ்', u'நீட்டிக்க', u'நுழைக்க', u'வை',
|
||||
u'கோப்பை_திற', u'கோப்பை_எழுது', u'கோப்பை_மூடு',
|
||||
u'pi', u'sin', u'cos', u'tan', u'sqrt', u'hypot', u'pow',
|
||||
u'exp', u'log', u'log10', u'exit',
|
||||
), suffix=r'\b'), Name.Builtin),
|
||||
(r'(True|False)\b', Keyword.Constant),
|
||||
(r'[^\S\n]+', Text),
|
||||
|
@ -62,7 +63,7 @@ class EzhilLexer(RegexLexer):
|
|||
(r'(?u)\d+', Number.Integer),
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def __init__(self, **options):
|
||||
super(EzhilLexer, self).__init__(**options)
|
||||
self.encoding = options.get('encoding', 'utf-8')
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for the Factor language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexer for the Fantom language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexer for the Felix language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -237,7 +237,7 @@ class FelixLexer(RegexLexer):
|
|||
],
|
||||
'strings': [
|
||||
(r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
|
||||
'[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
|
||||
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
|
||||
(r'[^\\\'"%\n]+', String),
|
||||
# quotes, percents and backslashes must be parsed one at a time
|
||||
(r'[\'"\\]', String),
|
||||
|
|
177
wakatime/packages/pygments/lexers/forth.py
Normal file
|
@ -0,0 +1,177 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pygments.lexers.forth
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, include, bygroups
|
||||
from pygments.token import Error, Punctuation, Literal, Token, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Generic
|
||||
|
||||
|
||||
__all__ = ['ForthLexer']
|
||||
|
||||
|
||||
class ForthLexer(RegexLexer):
|
||||
"""
|
||||
Lexer for Forth files.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'Forth'
|
||||
aliases = ['forth']
|
||||
filenames = ['*.frt', '*.fs']
|
||||
mimetypes = ['application/x-forth']
|
||||
|
||||
delimiter = r'\s'
|
||||
delimiter_end = r'(?=[%s])' % delimiter
|
||||
|
||||
valid_name_chars = r'[^%s]' % delimiter
|
||||
valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
|
||||
|
||||
flags = re.IGNORECASE | re.MULTILINE
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'\s+', Text),
|
||||
# All comment types
|
||||
(r'\\.*?\n', Comment.Single),
|
||||
(r'\([\s].*?\)', Comment.Single),
|
||||
# defining words. The next word is a new command name
|
||||
(r'(:|variable|constant|value|buffer:)(\s+)',
|
||||
bygroups(Keyword.Namespace, Text), 'worddef'),
|
||||
# strings are rather simple
|
||||
(r'([.sc]")(\s+?)', bygroups(String, Text), 'stringdef'),
|
||||
# keywords from the various wordsets
|
||||
# *** Wordset BLOCK
|
||||
(r'(blk|block|buffer|evaluate|flush|load|save-buffers|update|'
|
||||
# *** Wordset BLOCK-EXT
|
||||
r'empty-buffers|list|refill|scr|thru|'
|
||||
# *** Wordset CORE
|
||||
r'\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|'
|
||||
r'2\*|2\/|2@|2drop|2dup|2over|2swap|>body|'
|
||||
r'>in|>number|>r|\?dup|abort|abort\"|abs|'
|
||||
r'accept|align|aligned|allot|and|base|begin|'
|
||||
r'bl|c!|c,|c@|cell\+|cells|char|char\+|'
|
||||
r'chars|constant|count|cr|create|decimal|'
|
||||
r'depth|do|does>|drop|dup|else|emit|environment\?|'
|
||||
r'evaluate|execute|exit|fill|find|fm\/mod|'
|
||||
r'here|hold|i|if|immediate|invert|j|key|'
|
||||
r'leave|literal|loop|lshift|m\*|max|min|'
|
||||
r'mod|move|negate|or|over|postpone|quit|'
|
||||
r'r>|r@|recurse|repeat|rot|rshift|s\"|s>d|'
|
||||
r'sign|sm\/rem|source|space|spaces|state|swap|'
|
||||
r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
|
||||
r'variable|while|word|xor|\[char\]|\[\'\]|'
|
||||
r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
|
||||
# *** Wordset CORE-EXT
|
||||
r'\.r|0<>|'
|
||||
r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
|
||||
r'case|compile,|endcase|endof|erase|false|'
|
||||
r'hex|marker|nip|of|pad|parse|pick|refill|'
|
||||
r'restore-input|roll|save-input|source-id|to|'
|
||||
r'true|tuck|u\.r|u>|unused|value|within|'
|
||||
r'\[compile\]|'
|
||||
# *** Wordset CORE-EXT-obsolescent
|
||||
r'\#tib|convert|expect|query|span|'
|
||||
r'tib|'
|
||||
# *** Wordset DOUBLE
|
||||
r'2constant|2literal|2variable|d\+|d-|'
|
||||
r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
|
||||
r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
|
||||
# *** Wordset DOUBLE-EXT
|
||||
r'2rot|du<|'
|
||||
# *** Wordset EXCEPTION
|
||||
r'catch|throw|'
|
||||
# *** Wordset EXCEPTION-EXT
|
||||
r'abort|abort\"|'
|
||||
# *** Wordset FACILITY
|
||||
r'at-xy|key\?|page|'
|
||||
# *** Wordset FACILITY-EXT
|
||||
r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
|
||||
# *** Wordset FILE
|
||||
r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
|
||||
r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
|
||||
r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
|
||||
r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
|
||||
# *** Wordset FILE-EXT
|
||||
r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
|
||||
# *** Wordset FLOAT
|
||||
r'>float|d>f|'
|
||||
r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
|
||||
r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
|
||||
r'fliteral|float\+|floats|floor|fmax|fmin|'
|
||||
r'fnegate|fover|frot|fround|fswap|fvariable|'
|
||||
r'represent|'
|
||||
# *** Wordset FLOAT-EXT
|
||||
r'df!|df@|dfalign|dfaligned|dfloat\+|'
|
||||
r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
|
||||
r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
|
||||
r'fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|'
|
||||
r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
|
||||
r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
|
||||
r'sfloats|'
|
||||
# *** Wordset LOCAL
|
||||
r'\(local\)|to|'
|
||||
# *** Wordset LOCAL-EXT
|
||||
r'locals\||'
|
||||
# *** Wordset MEMORY
|
||||
r'allocate|free|resize|'
|
||||
# *** Wordset SEARCH
|
||||
r'definitions|find|forth-wordlist|get-current|'
|
||||
r'get-order|search-wordlist|set-current|set-order|'
|
||||
r'wordlist|'
|
||||
# *** Wordset SEARCH-EXT
|
||||
r'also|forth|only|order|previous|'
|
||||
# *** Wordset STRING
|
||||
r'-trailing|\/string|blank|cmove|cmove>|compare|'
|
||||
r'search|sliteral|'
|
||||
# *** Wordset TOOLS
|
||||
r'.s|dump|see|words|'
|
||||
# *** Wordset TOOLS-EXT
|
||||
r';code|'
|
||||
r'ahead|assembler|bye|code|cs-pick|cs-roll|'
|
||||
r'editor|state|\[else\]|\[if\]|\[then\]|'
|
||||
# *** Wordset TOOLS-EXT-obsolescent
|
||||
r'forget|'
|
||||
# Forth 2012
|
||||
r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
|
||||
r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
|
||||
r'name>interpret|name>compile|name>string|'
|
||||
r'cfield:|end-structure)'+delimiter, Keyword),
|
||||
|
||||
# Numbers
|
||||
(r'(\$[0-9A-F]+)', Number.Hex),
|
||||
(r'(\#|%|&|\-|\+)?[0-9]+', Number.Integer),
|
||||
(r'(\#|%|&|\-|\+)?[0-9.]+', Keyword.Type),
|
||||
# amforth specific
|
||||
(r'(@i|!i|@e|!e|pause|noop|turnkey|sleep|'
|
||||
r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
|
||||
r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
|
||||
r'find-name|1ms|'
|
||||
r'sp0|rp0|\(evaluate\)|int-trap|int!)' + delimiter,
|
||||
Name.Constant),
|
||||
# a proposal
|
||||
(r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
|
||||
r'set-recognizers|r:float|r>comp|r>int|r>post|'
|
||||
r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
|
||||
r'rec:num|rec:float|rec:word)' + delimiter, Name.Decorator),
|
||||
# defining words. The next word is a new command name
|
||||
(r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
|
||||
bygroups(Keyword.Namespace, Text), 'worddef'),
|
||||
|
||||
(valid_name, Name.Function), # Anything else is executed
|
||||
|
||||
],
|
||||
'worddef': [
|
||||
(r'\S+', Name.Class, '#pop'),
|
||||
],
|
||||
'stringdef': [
|
||||
(r'[^"]+', String, '#pop'),
|
||||
],
|
||||
}
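A usage sketch for the new Forth lexer; the word definition below is a trivial made-up example.

from pygments import highlight
from pygments.lexers.forth import ForthLexer
from pygments.formatters import TerminalFormatter

# Define a word, then use it; ':' switches the lexer into the worddef state.
code = ": square ( n -- n*n ) dup * ;\n5 square .\n"
print(highlight(code, ForthLexer(), TerminalFormatter()))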
|
|
@ -5,13 +5,13 @@
|
|||
|
||||
Lexers for Fortran languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, include, words, using
|
||||
from pygments.lexer import RegexLexer, bygroups, include, words, using, default
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation, Generic
|
||||
|
||||
|
@ -156,8 +156,8 @@ class FortranLexer(RegexLexer):
|
|||
|
||||
'nums': [
|
||||
(r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
|
||||
(r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
|
||||
(r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
|
||||
(r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
|
||||
(r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
|
||||
],
|
||||
}
|
||||
|
||||
|
@ -191,16 +191,15 @@ class FortranFixedLexer(RegexLexer):
|
|||
(r'(.{5})', Name.Label, 'cont-char'),
|
||||
(r'.*\n', using(FortranLexer)),
|
||||
],
|
||||
|
||||
'cont-char': [
|
||||
(' ', Text, 'code'),
|
||||
('0', Comment, 'code'),
|
||||
('.', Generic.Strong, 'code')
|
||||
('.', Generic.Strong, 'code'),
|
||||
],
|
||||
|
||||
'code': [
|
||||
(r'(.{66})(.*)(\n)',
|
||||
bygroups(_lex_fortran, Comment, Text), 'root'),
|
||||
(r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
|
||||
(r'', Text, 'root')]
|
||||
default('root'),
|
||||
]
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Simple lexer for Microsoft Visual FoxPro source code.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Just export lexer classes previously contained in this module.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for the Google Go language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,15 +5,17 @@
|
|||
|
||||
Lexers for grammar notations like BNF.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, words
|
||||
from pygments.token import Punctuation, Text, Comment, Operator, \
|
||||
Keyword, Name, Literal
|
||||
import re
|
||||
|
||||
__all__ = ['BnfLexer', 'AbnfLexer']
|
||||
from pygments.lexer import RegexLexer, bygroups, include, this, using, words
|
||||
from pygments.token import Comment, Keyword, Literal, Name, Number, \
|
||||
Operator, Punctuation, String, Text
|
||||
|
||||
__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer']
|
||||
|
||||
|
||||
class BnfLexer(RegexLexer):
|
||||
|
@ -129,3 +131,83 @@ class AbnfLexer(RegexLexer):
|
|||
(r'.', Text),
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class JsgfLexer(RegexLexer):
|
||||
"""
|
||||
For `JSpeech Grammar Format <https://www.w3.org/TR/jsgf/>`_
|
||||
grammars.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
name = 'JSGF'
|
||||
aliases = ['jsgf']
|
||||
filenames = ['*.jsgf']
|
||||
mimetypes = ['application/jsgf', 'application/x-jsgf', 'text/jsgf']
|
||||
|
||||
flags = re.MULTILINE | re.UNICODE
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
include('comments'),
|
||||
include('non-comments'),
|
||||
],
|
||||
'comments': [
|
||||
(r'/\*\*(?!/)', Comment.Multiline, 'documentation comment'),
|
||||
(r'/\*[\w\W]*?\*/', Comment.Multiline),
|
||||
(r'//.*', Comment.Single),
|
||||
],
|
||||
'non-comments': [
|
||||
('\A#JSGF[^;]*', Comment.Preproc),
|
||||
(r'\s+', Text),
|
||||
(r';', Punctuation),
|
||||
(r'[=|()\[\]*+]', Operator),
|
||||
(r'/[^/]+/', Number.Float),
|
||||
(r'"', String.Double, 'string'),
|
||||
(r'\{', String.Other, 'tag'),
|
||||
(words(('import', 'public'), suffix=r'\b'), Keyword.Reserved),
|
||||
(r'grammar\b', Keyword.Reserved, 'grammar name'),
|
||||
(r'(<)(NULL|VOID)(>)',
|
||||
bygroups(Punctuation, Name.Builtin, Punctuation)),
|
||||
(r'<', Punctuation, 'rulename'),
|
||||
(r'\w+|[^\s;=|()\[\]*+/"{<\w]+', Text),
|
||||
],
|
||||
'string': [
|
||||
(r'"', String.Double, '#pop'),
|
||||
(r'\\.', String.Escape),
|
||||
(r'[^\\"]+', String.Double),
|
||||
],
|
||||
'tag': [
|
||||
(r'\}', String.Other, '#pop'),
|
||||
(r'\\.', String.Escape),
|
||||
(r'[^\\}]+', String.Other),
|
||||
],
|
||||
'grammar name': [
|
||||
(r';', Punctuation, '#pop'),
|
||||
(r'\s+', Text),
|
||||
(r'\.', Punctuation),
|
||||
(r'[^;\s.]+', Name.Namespace),
|
||||
],
|
||||
'rulename': [
|
||||
(r'>', Punctuation, '#pop'),
|
||||
(r'\*', Punctuation),
|
||||
(r'\s+', Text),
|
||||
(r'([^.>]+)(\s*)(\.)', bygroups(Name.Namespace, Text, Punctuation)),
|
||||
(r'[^.>]+', Name.Constant),
|
||||
],
|
||||
'documentation comment': [
|
||||
(r'\*/', Comment.Multiline, '#pop'),
|
||||
(r'(^\s*\*?\s*)(@(?:example|see)\s+)'
|
||||
r'([\w\W]*?(?=(?:^\s*\*?\s*@|\*/)))',
|
||||
bygroups(Comment.Multiline, Comment.Special,
|
||||
using(this, state='example'))),
|
||||
(r'(^\s*\*?\s*)(@\S*)',
|
||||
bygroups(Comment.Multiline, Comment.Special)),
|
||||
(r'[^*\n@]+|\w|\W', Comment.Multiline),
|
||||
],
|
||||
'example': [
|
||||
(r'\n\s*\*', Comment.Multiline),
|
||||
include('non-comments'),
|
||||
(r'.', Comment.Multiline),
|
||||
],
|
||||
}
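A short sketch for the JSGF lexer defined above; the two-rule grammar is invented for illustration.

from pygments import highlight
from pygments.lexers.grammar_notation import JsgfLexer
from pygments.formatters import HtmlFormatter

# A minimal JSpeech Grammar Format file: header, grammar name, two rules.
grammar = ('#JSGF V1.0;\n'
           'grammar example;\n'
           'public <greet> = (hello | hi) <name>;\n'
           '<name> = alice | bob;\n')
print(highlight(grammar, JsgfLexer(), HtmlFormatter()))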
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for graph query languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for computer graphics and plotting related languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for Haskell and related languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -39,7 +39,7 @@ class HaskellLexer(RegexLexer):
|
|||
flags = re.MULTILINE | re.UNICODE
|
||||
|
||||
reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
|
||||
'if', 'in', 'infix[lr]?', 'instance',
|
||||
'family', 'if', 'in', 'infix[lr]?', 'instance',
|
||||
'let', 'newtype', 'of', 'then', 'type', 'where', '_')
|
||||
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
|
||||
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
|
||||
|
@ -63,6 +63,9 @@ class HaskellLexer(RegexLexer):
|
|||
(r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
|
||||
(r"'?[_" + uni.Ll + r"][\w']*", Name),
|
||||
(r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
|
||||
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
|
||||
(r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
|
||||
(r"(')\([^)]*\)", Keyword.Type), # ..
|
||||
# Operators
|
||||
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
|
||||
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
|
||||
|
@ -321,7 +324,7 @@ class AgdaLexer(RegexLexer):
|
|||
'module': [
|
||||
(r'\{-', Comment.Multiline, 'comment'),
|
||||
(r'[a-zA-Z][\w.]*', Name, '#pop'),
|
||||
(r'[^a-zA-Z]+', Text)
|
||||
(r'[\W0-9_]+', Text)
|
||||
],
|
||||
'comment': HaskellLexer.tokens['comment'],
|
||||
'character': HaskellLexer.tokens['character'],
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for Haxe and related stuff.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for hardware descriptor languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
|
|
@ -5,12 +5,10 @@
|
|||
|
||||
Lexers for hexadecimal dumps.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups, include
|
||||
from pygments.token import Text, Name, Number, String, Punctuation
|
||||
|
||||
|
@ -36,7 +34,7 @@ class HexdumpLexer(RegexLexer):
|
|||
* ``od -t x1z FILE``
|
||||
* ``xxd FILE``
|
||||
* ``DEBUG.EXE FILE.COM``, then entering ``d`` at the prompt.
|
||||
|
||||
|
||||
.. versionadded:: 2.1
|
||||
"""
|
||||
name = 'Hexdump'
|
||||
|
@ -48,12 +46,17 @@ class HexdumpLexer(RegexLexer):
|
|||
'root': [
|
||||
(r'\n', Text),
|
||||
include('offset'),
|
||||
(r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
|
||||
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
|
||||
bygroups(Number.Hex, Punctuation, Number.Hex)),
|
||||
(hd+r'{2}', Number.Hex),
|
||||
(r'(\s{2,3})(\>)(.{16})(\<)$', bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
|
||||
(r'(\s{2,3})(\|)(.{16})(\|)$', bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
|
||||
(r'(\s{2,3})(\>)(.{1,15})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(\|)(.{1,15})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(\>)(.{16})(\<)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
|
||||
(r'(\s{2,3})(\|)(.{16})(\|)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation), 'piped-strings'),
|
||||
(r'(\s{2,3})(\>)(.{1,15})(\<)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(\|)(.{1,15})(\|)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(.{1,15})$', bygroups(Text, String)),
|
||||
(r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'),
|
||||
(r'\s', Text),
|
||||
|
@ -72,7 +75,8 @@ class HexdumpLexer(RegexLexer):
|
|||
(r'\n', Text),
|
||||
include('offset'),
|
||||
(hd+r'{2}', Number.Hex),
|
||||
(r'(\s{2,3})(\|)(.{1,16})(\|)$', bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(\|)(.{1,16})(\|)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'\s', Text),
|
||||
(r'^\*', Punctuation),
|
||||
],
|
||||
|
@ -80,14 +84,16 @@ class HexdumpLexer(RegexLexer):
|
|||
(r'\n', Text),
|
||||
include('offset'),
|
||||
(hd+r'{2}', Number.Hex),
|
||||
(r'(\s{2,3})(\>)(.{1,16})(\<)$', bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'(\s{2,3})(\>)(.{1,16})(\<)$',
|
||||
bygroups(Text, Punctuation, String, Punctuation)),
|
||||
(r'\s', Text),
|
||||
(r'^\*', Punctuation),
|
||||
],
|
||||
'nonpiped-strings': [
|
||||
(r'\n', Text),
|
||||
include('offset'),
|
||||
(r'('+hd+r'{2})(\-)('+hd+r'{2})', bygroups(Number.Hex, Punctuation, Number.Hex)),
|
||||
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
|
||||
bygroups(Number.Hex, Punctuation, Number.Hex)),
|
||||
(hd+r'{2}', Number.Hex),
|
||||
(r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)),
|
||||
(r'(\s{2,3})(.{1,20})$', bygroups(Text, String)),
|

@ -5,7 +5,7 @@

Lexers for HTML, XML and related markup.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -23,7 +23,7 @@ from pygments.lexers.css import CssLexer, _indentation, _starts_block
from pygments.lexers.ruby import RubyLexer

__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
'ScamlLexer', 'JadeLexer']
'ScamlLexer', 'PugLexer']


class HtmlLexer(RegexLexer):

@ -492,19 +492,19 @@ class ScamlLexer(ExtendedRegexLexer):
}


class JadeLexer(ExtendedRegexLexer):
class PugLexer(ExtendedRegexLexer):
"""
For Jade markup.
Jade is a variant of Scaml, see:
For Pug markup.
Pug is a variant of Scaml, see:
http://scalate.fusesource.org/documentation/scaml-reference.html

.. versionadded:: 1.4
"""

name = 'Jade'
aliases = ['jade']
filenames = ['*.jade']
mimetypes = ['text/x-jade']
name = 'Pug'
aliases = ['pug', 'jade']
filenames = ['*.pug', '*.jade']
mimetypes = ['text/x-pug', 'text/x-jade']

flags = re.IGNORECASE
_dot = r'.'

@ -599,3 +599,4 @@ class JadeLexer(ExtendedRegexLexer):
(r'\n', Text, 'root'),
],
}
JadeLexer = PugLexer # compat
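The rename keeps `JadeLexer` as a module-level alias and keeps the old `jade` alias and `*.jade` pattern, so existing lookups continue to work. A minimal sketch of the expected behaviour (an illustration, not part of this changeset), assuming the regenerated lexer mapping from this release is installed:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
    from pygments.lexers.html import PugLexer, JadeLexer

    # The compat assignment keeps old imports working.
    assert JadeLexer is PugLexer

    # Both the new and the legacy alias resolve to the same lexer.
    assert isinstance(get_lexer_by_name('pug'), PugLexer)
    assert isinstance(get_lexer_by_name('jade'), PugLexer)

    # Old and new file extensions are both still recognized.
    assert isinstance(get_lexer_for_filename('page.pug'), PugLexer)
    assert isinstance(get_lexer_for_filename('page.jade'), PugLexer)
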
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for IDL.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -249,7 +249,7 @@ class IDLLexer(RegexLexer):
|
|||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'^\s*;.*?\n', Comment.Singleline),
|
||||
(r'^\s*;.*?\n', Comment.Single),
|
||||
(words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
|
||||
(words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
|
||||
(r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
|
||||
|
@ -258,12 +258,13 @@ class IDLLexer(RegexLexer):
|
|||
(r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
|
||||
(r'"[^\"]*"', String.Double),
|
||||
(r"'[^\']*'", String.Single),
|
||||
(r'\b[\+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([\+\-]?[0-9]+)?\b', Number.Float),
|
||||
(r'\b\'[\+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex),
|
||||
(r'\b\'[\+\-]?[0-7]+\'O(U?(S?|L{1,2})|B)\b', Number.Oct),
|
||||
(r'\b[\+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long),
|
||||
(r'\b[\+\-]?[0-9]+U?S?\b', Number.Integer),
|
||||
(r'\b[\+\-]?[0-9]+B\b', Number),
|
||||
(r'\b[+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([+\-]?[0-9]+)?\b',
|
||||
Number.Float),
|
||||
(r'\b\'[+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex),
|
||||
(r'\b\'[+\-]?[0-7]+\'O(U?(S?|L{1,2})|B)\b', Number.Oct),
|
||||
(r'\b[+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long),
|
||||
(r'\b[+\-]?[0-9]+U?S?\b', Number.Integer),
|
||||
(r'\b[+\-]?[0-9]+B\b', Number),
|
||||
(r'.', Text),
|
||||
]
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for Igor Pro.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -40,7 +40,7 @@ class IgorLexer(RegexLexer):
|
|||
types = (
|
||||
'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
|
||||
'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
|
||||
'uint32', 'float', 'double'
|
||||
'uint32', 'int64', 'uint64', 'float', 'double'
|
||||
)
|
||||
keywords = (
|
||||
'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
|
||||
|
@ -48,213 +48,221 @@ class IgorLexer(RegexLexer):
|
|||
'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
|
||||
)
|
||||
operations = (
|
||||
'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
|
||||
'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
|
||||
'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
|
||||
'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
|
||||
'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
|
||||
'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
|
||||
'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
|
||||
'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
|
||||
'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
|
||||
'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
|
||||
'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
|
||||
'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
|
||||
'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
|
||||
'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
|
||||
'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
|
||||
'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
|
||||
'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
|
||||
'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
|
||||
'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
|
||||
'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
|
||||
'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
|
||||
'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
|
||||
'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
|
||||
'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
|
||||
'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
|
||||
'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
|
||||
'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
|
||||
'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
|
||||
'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
|
||||
'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
|
||||
'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
|
||||
'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
|
||||
'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
|
||||
'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
|
||||
'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
|
||||
'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
|
||||
'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
|
||||
'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
|
||||
'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
|
||||
'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
|
||||
'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
|
||||
'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
|
||||
'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
|
||||
'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
|
||||
'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
|
||||
'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
|
||||
'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
|
||||
'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
|
||||
'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
|
||||
'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
|
||||
'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
|
||||
'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
|
||||
'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
|
||||
'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
|
||||
'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
|
||||
'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
|
||||
'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
|
||||
'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
|
||||
'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
|
||||
'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
|
||||
'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
|
||||
'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
|
||||
'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
|
||||
'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
|
||||
'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame',
|
||||
'AdoptFiles', 'APMath', 'Append', 'AppendImage', 'AppendLayoutObject',
|
||||
'AppendMatrixContour', 'AppendText', 'AppendToGizmo', 'AppendToGraph',
|
||||
'AppendToLayout', 'AppendToTable', 'AppendXYZContour', 'AutoPositionWindow',
|
||||
'BackgroundInfo', 'Beep', 'BoundingBall', 'BoxSmooth', 'BrowseURL', 'BuildMenu',
|
||||
'Button', 'cd', 'Chart', 'CheckBox', 'CheckDisplayed', 'ChooseColor', 'Close',
|
||||
'CloseHelp', 'CloseMovie', 'CloseProc', 'ColorScale', 'ColorTab2Wave',
|
||||
'Concatenate', 'ControlBar', 'ControlInfo', 'ControlUpdate',
|
||||
'ConvertGlobalStringTextEncoding', 'ConvexHull', 'Convolve', 'CopyFile',
|
||||
'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'CreateBrowser',
|
||||
'Cross', 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
|
||||
'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions', 'DefaultFont',
|
||||
'DefaultGuiControls', 'DefaultGuiFont', 'DefaultTextEncoding', 'DefineGuide',
|
||||
'DelayUpdate', 'DeleteAnnotations', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
|
||||
'Differentiate', 'dir', 'Display', 'DisplayHelpTopic', 'DisplayProcedure',
|
||||
'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow', 'DoXOPIdle', 'DPSS',
|
||||
'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine', 'DrawOval', 'DrawPICT',
|
||||
'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText', 'DrawUserShape', 'DSPDetrend',
|
||||
'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder', 'DWT', 'EdgeStats', 'Edit',
|
||||
'ErrorBars', 'EstimatePeakSizes', 'Execute', 'ExecuteScriptText',
|
||||
'ExperimentModified', 'ExportGizmo', 'Extract', 'FastGaussTransform', 'FastOp',
|
||||
'FBinRead', 'FBinWrite', 'FFT', 'FIFOStatus', 'FIFO2Wave', 'FilterFIR',
|
||||
'FilterIIR', 'FindAPeak', 'FindContour', 'FindDuplicates', 'FindLevel',
|
||||
'FindLevels', 'FindPeak', 'FindPointsInPoly', 'FindRoots', 'FindSequence',
|
||||
'FindValue', 'FPClustering', 'fprintf', 'FReadLine', 'FSetPos', 'FStatus',
|
||||
'FTPCreateDirectory', 'FTPDelete', 'FTPDownload', 'FTPUpload', 'FuncFit',
|
||||
'FuncFitMD', 'GBLoadWave', 'GetAxis', 'GetCamera', 'GetFileFolderInfo',
|
||||
'GetGizmo', 'GetLastUserMenuInfo', 'GetMarquee', 'GetMouse', 'GetSelection',
|
||||
'GetWindow', 'GPIBReadBinaryWave2', 'GPIBReadBinary2', 'GPIBReadWave2',
|
||||
'GPIBRead2', 'GPIBWriteBinaryWave2', 'GPIBWriteBinary2', 'GPIBWriteWave2',
|
||||
'GPIBWrite2', 'GPIB2', 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep',
|
||||
'GroupBox', 'Hanning', 'HDF5CloseFile', 'HDF5CloseGroup', 'HDF5ConvertColors',
|
||||
'HDF5CreateFile', 'HDF5CreateGroup', 'HDF5CreateLink', 'HDF5Dump',
|
||||
'HDF5DumpErrors', 'HDF5DumpState', 'HDF5ListAttributes', 'HDF5ListGroup',
|
||||
'HDF5LoadData', 'HDF5LoadGroup', 'HDF5LoadImage', 'HDF5OpenFile', 'HDF5OpenGroup',
|
||||
'HDF5SaveData', 'HDF5SaveGroup', 'HDF5SaveImage', 'HDF5TestOperation',
|
||||
'HDF5UnlinkObject', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
|
||||
'HilbertTransform', 'Histogram', 'ICA', 'IFFT', 'ImageAnalyzeParticles',
|
||||
'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection', 'ImageFileInfo',
|
||||
'ImageFilter', 'ImageFocus', 'ImageFromXYZ', 'ImageGenerateROIMask', 'ImageGLCM',
|
||||
'ImageHistModification', 'ImageHistogram', 'ImageInterpolate', 'ImageLineProfile',
|
||||
'ImageLoad', 'ImageMorphology', 'ImageRegistration', 'ImageRemoveBackground',
|
||||
'ImageRestore', 'ImageRotate', 'ImageSave', 'ImageSeedFill', 'ImageSkeleton3d',
|
||||
'ImageSnake', 'ImageStats', 'ImageThreshold', 'ImageTransform',
|
||||
'ImageUnwrapPhase', 'ImageWindow', 'IndexSort', 'InsertPoints', 'Integrate',
|
||||
'IntegrateODE', 'Integrate2D', 'Interpolate2', 'Interpolate3D', 'Interp3DPath',
|
||||
'JCAMPLoadWave', 'JointHistogram', 'KillBackground', 'KillControl',
|
||||
'KillDataFolder', 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs',
|
||||
'KillStrings', 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label',
|
||||
'Layout', 'LayoutPageAction', 'LayoutSlideShow', 'Legend',
|
||||
'LinearFeedbackShiftRegister', 'ListBox', 'LoadData', 'LoadPackagePreferences',
|
||||
'LoadPICT', 'LoadWave', 'Loess', 'LombPeriodogram', 'Make', 'MakeIndex',
|
||||
'MarkPerfTestTime', 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV',
|
||||
'MatrixFilter', 'MatrixGaussJ', 'MatrixGLM', 'MatrixInverse', 'MatrixLinearSolve',
|
||||
'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD', 'MatrixLUDTD',
|
||||
'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve', 'MatrixSVBkSub',
|
||||
'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText', 'MLLoadWave', 'Modify',
|
||||
'ModifyBrowser', 'ModifyCamera', 'ModifyContour', 'ModifyControl',
|
||||
'ModifyControlList', 'ModifyFreeAxis', 'ModifyGizmo', 'ModifyGraph',
|
||||
'ModifyImage', 'ModifyLayout', 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall',
|
||||
'MoveDataFolder', 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow',
|
||||
'MoveVariable', 'MoveWave', 'MoveWindow', 'MultiTaperPSD',
|
||||
'MultiThreadingControl', 'NeuralNetworkRun', 'NeuralNetworkTrain', 'NewCamera',
|
||||
'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewGizmo', 'NewImage',
|
||||
'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath', 'NewWaterfall',
|
||||
'NI4882', 'Note', 'Notebook', 'NotebookAction', 'Open', 'OpenHelp',
|
||||
'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo', 'PauseForUser',
|
||||
'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction', 'PlaySound',
|
||||
'PopupContextualMenu', 'PopupMenu', 'Preferences', 'PrimeFactors', 'Print',
|
||||
'printf', 'PrintGraphs', 'PrintLayout', 'PrintNotebook', 'PrintSettings',
|
||||
'PrintTable', 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
|
||||
'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour', 'RemoveFromGizmo',
|
||||
'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
|
||||
'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
|
||||
'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
|
||||
'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
|
||||
'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
|
||||
'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
|
||||
'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
|
||||
'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
|
||||
'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
|
||||
'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
|
||||
'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
|
||||
'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
|
||||
'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
|
||||
'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
|
||||
'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
|
||||
'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
|
||||
'sprintf', 'sscanf', 'Stack', 'StackWindows',
|
||||
'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder', 'RenamePath',
|
||||
'RenamePICT', 'RenameWindow', 'ReorderImages', 'ReorderTraces', 'ReplaceText',
|
||||
'ReplaceWave', 'Resample', 'ResumeUpdate', 'Reverse', 'Rotate', 'Save',
|
||||
'SaveData', 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
|
||||
'SavePackagePreferences', 'SavePICT', 'SaveTableCopy', 'SetActiveSubwindow',
|
||||
'SetAxis', 'SetBackground', 'SetDashPattern', 'SetDataFolder', 'SetDimLabel',
|
||||
'SetDrawEnv', 'SetDrawLayer', 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook',
|
||||
'SetIgorMenuMode', 'SetIgorOption', 'SetMarquee', 'SetProcessSleep',
|
||||
'SetRandomSeed', 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWaveTextEncoding',
|
||||
'SetWindow', 'ShowIgorMenus', 'ShowInfo', 'ShowTools', 'Silent', 'Sleep',
|
||||
'Slider', 'Smooth', 'SmoothCustom', 'Sort', 'SortColumns', 'SoundInRecord',
|
||||
'SoundInSet', 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
|
||||
'SoundLoadWave', 'SoundSaveWave', 'SphericalInterpolate', 'SphericalTriangulate',
|
||||
'SplitString', 'SplitWave', 'sprintf', 'sscanf', 'Stack', 'StackWindows',
|
||||
'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
|
||||
'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
|
||||
'StatsCircularCorrelationTest', 'StatsCircularMeans',
|
||||
'StatsCircularMoments', 'StatsCircularTwoSampleTest',
|
||||
'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
|
||||
'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
|
||||
'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
|
||||
'StatsCircularCorrelationTest', 'StatsCircularMeans', 'StatsCircularMoments',
|
||||
'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable',
|
||||
'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
|
||||
'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKDE', 'StatsKendallTauTest',
|
||||
'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
|
||||
'StatsLinearRegression', 'StatsMultiCorrelationTest',
|
||||
'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
|
||||
'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
|
||||
'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
|
||||
'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
|
||||
'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
|
||||
'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
|
||||
'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
|
||||
'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
|
||||
'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
|
||||
'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
|
||||
'WindowFunction',
|
||||
'StatsLinearRegression', 'StatsMultiCorrelationTest', 'StatsNPMCTest',
|
||||
'StatsNPNominalSRTest', 'StatsQuantiles', 'StatsRankCorrelationTest',
|
||||
'StatsResample', 'StatsSample', 'StatsScheffeTest', 'StatsShapiroWilkTest',
|
||||
'StatsSignTest', 'StatsSRTest', 'StatsTTest', 'StatsTukeyTest',
|
||||
'StatsVariancesTest', 'StatsWatsonUSquaredTest', 'StatsWatsonWilliamsTest',
|
||||
'StatsWheelerWatsonTest', 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest',
|
||||
'String', 'StructGet', 'StructPut', 'SumDimension', 'SumSeries', 'TabControl',
|
||||
'Tag', 'TextBox', 'ThreadGroupPutDF', 'ThreadStart', 'Tile', 'TileWindows',
|
||||
'TitleBox', 'ToCommandLine', 'ToolsGrid', 'Triangulate3d', 'Unwrap', 'URLRequest',
|
||||
'ValDisplay', 'Variable', 'VDTClosePort2', 'VDTGetPortList2', 'VDTGetStatus2',
|
||||
'VDTOpenPort2', 'VDTOperationsPort2', 'VDTReadBinaryWave2', 'VDTReadBinary2',
|
||||
'VDTReadHexWave2', 'VDTReadHex2', 'VDTReadWave2', 'VDTRead2', 'VDTTerminalPort2',
|
||||
'VDTWriteBinaryWave2', 'VDTWriteBinary2', 'VDTWriteHexWave2', 'VDTWriteHex2',
|
||||
'VDTWriteWave2', 'VDTWrite2', 'VDT2', 'WaveMeanStdv', 'WaveStats',
|
||||
'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave'
|
||||
)
|
||||
functions = (
|
||||
'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
|
||||
'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
|
||||
'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
|
||||
'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
|
||||
'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD',
|
||||
'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh',
|
||||
'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel', 'Besseli',
|
||||
'Besselj', 'Besselk', 'Bessely', 'beta', 'betai', 'BinarySearch',
|
||||
'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
|
||||
'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
|
||||
'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
|
||||
'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
|
||||
'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
|
||||
'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
|
||||
'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
|
||||
'DDERequestWave', 'DDEStatus', 'DDETerminate', 'defined', 'deltax', 'digamma',
|
||||
'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
|
||||
'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
|
||||
'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
|
||||
'CaptureHistory', 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num',
|
||||
'chebyshev', 'chebyshevU', 'CheckName', 'ChildWindowList', 'CleanupName', 'cmplx',
|
||||
'cmpstr', 'conj', 'ContourInfo', 'ContourNameList', 'ContourNameToWaveRef',
|
||||
'ContourZ', 'ControlNameList', 'ConvertTextEncoding', 'cos', 'cosh',
|
||||
'cosIntegral', 'cot', 'coth', 'CountObjects', 'CountObjectsDFR', 'cpowi',
|
||||
'CreationDate', 'csc', 'csch', 'CsrInfo', 'CsrWave', 'CsrWaveRef', 'CsrXWave',
|
||||
'CsrXWaveRef', 'CTabList', 'DataFolderDir', 'DataFolderExists',
|
||||
'DataFolderRefsEqual', 'DataFolderRefStatus', 'date', 'datetime', 'DateToJulian',
|
||||
'date2secs', 'Dawson', 'DDERequestString', 'defined', 'deltax', 'digamma',
|
||||
'dilogarithm', 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves',
|
||||
'erf', 'erfc', 'erfcw', 'exists', 'exp', 'ExpConvExp', 'ExpConvExpFit',
|
||||
'ExpConvExpFitBL', 'ExpConvExpFit1Shape', 'ExpConvExpFit1ShapeBL', 'ExpGauss',
|
||||
'ExpGaussFit', 'ExpGaussFitBL', 'ExpGaussFit1Shape', 'ExpGaussFit1ShapeBL',
|
||||
'expInt', 'expIntegralE1', 'expNoise', 'factorial', 'fakedata', 'faverage',
|
||||
'faverageXY', 'FetchURL', 'FindDimLabel', 'FindListItem', 'floor', 'FontList',
|
||||
'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
|
||||
'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
|
||||
'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
|
||||
'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
|
||||
'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
|
||||
'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
|
||||
'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
|
||||
'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
|
||||
'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
|
||||
'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
|
||||
'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
|
||||
'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
|
||||
'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
|
||||
'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
|
||||
'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
|
||||
'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
|
||||
'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
|
||||
'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
|
||||
'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
|
||||
'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
|
||||
'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
|
||||
'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
|
||||
'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
|
||||
'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
|
||||
'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
|
||||
'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
|
||||
'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
|
||||
'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
|
||||
'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
|
||||
'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
|
||||
'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
|
||||
'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
|
||||
'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
|
||||
'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
|
||||
'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
|
||||
'FuncRefInfo', 'FunctionInfo', 'FunctionList', 'FunctionPath', 'gamma',
|
||||
'gammaEuler', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
|
||||
'GaussFit', 'GaussFitBL', 'GaussFit1Width', 'GaussFit1WidthBL', 'Gauss1D',
|
||||
'Gauss2D', 'gcd', 'GetBrowserLine', 'GetBrowserSelection', 'GetDataFolder',
|
||||
'GetDataFolderDFR', 'GetDefaultFont', 'GetDefaultFontSize', 'GetDefaultFontStyle',
|
||||
'GetDimLabel', 'GetEnvironmentVariable', 'GetErrMessage', 'GetFormula',
|
||||
'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
|
||||
'GetKeyState', 'GetRTErrMessage', 'GetRTError', 'GetRTLocation', 'GetRTLocInfo',
|
||||
'GetRTStackInfo', 'GetScrapText', 'GetUserData', 'GetWavesDataFolder',
|
||||
'GetWavesDataFolderDFR', 'GizmoInfo', 'GizmoScale', 'gnoise', 'GrepList',
|
||||
'GrepString', 'GuideInfo', 'GuideNameList', 'Hash', 'hcsr', 'HDF5AttributeInfo',
|
||||
'HDF5DatasetInfo', 'HDF5LibraryInfo', 'HDF5TypeInfo', 'hermite', 'hermiteGauss',
|
||||
'HyperGNoise', 'HyperGPFQ', 'HyperG0F1', 'HyperG1F1', 'HyperG2F1', 'IgorInfo',
|
||||
'IgorVersion', 'imag', 'ImageInfo', 'ImageNameList', 'ImageNameToWaveRef',
|
||||
'IndependentModuleList', 'IndexedDir', 'IndexedFile', 'Inf', 'Integrate1D',
|
||||
'interp', 'Interp2D', 'Interp3D', 'inverseERF', 'inverseERFC', 'ItemsInList',
|
||||
'JacobiCn', 'JacobiSn', 'JulianToDate', 'Laguerre', 'LaguerreA', 'LaguerreGauss',
|
||||
'LambertW', 'LayoutInfo', 'leftx', 'LegendreA', 'limit', 'ListMatch',
|
||||
'ListToTextWave', 'ListToWaveRefWave', 'ln', 'log', 'logNormalNoise',
|
||||
'LorentzianFit', 'LorentzianFitBL', 'LorentzianFit1Width',
|
||||
'LorentzianFit1WidthBL', 'lorentzianNoise', 'LowerStr', 'MacroList', 'magsqr',
|
||||
'MandelbrotPoint', 'MarcumQ', 'MatrixCondition', 'MatrixDet', 'MatrixDot',
|
||||
'MatrixRank', 'MatrixTrace', 'max', 'mean', 'median', 'min', 'mod', 'ModDate',
|
||||
'MPFXEMGPeak', 'MPFXExpConvExpPeak', 'MPFXGaussPeak', 'MPFXLorenzianPeak',
|
||||
'MPFXVoigtPeak', 'NameOfWave', 'NaN', 'NewFreeDataFolder', 'NewFreeWave', 'norm',
|
||||
'NormalizeUnicode', 'note', 'NumberByKey', 'numpnts', 'numtype',
|
||||
'NumVarOrDefault', 'num2char', 'num2istr', 'num2str', 'NVAR_Exists',
|
||||
'OperationList', 'PadString', 'PanelResolution', 'ParamIsDefault',
|
||||
'ParseFilePath', 'PathList', 'pcsr', 'Pi', 'PICTInfo', 'PICTList',
|
||||
'PixelFromAxisVal', 'pnt2x', 'poissonNoise', 'poly', 'PolygonArea', 'poly2D',
|
||||
'PossiblyQuoteName', 'ProcedureText', 'p2rect', 'qcsr', 'real', 'RemoveByKey',
|
||||
'RemoveEnding', 'RemoveFromList', 'RemoveListItem', 'ReplaceNumberByKey',
|
||||
'ReplaceString', 'ReplaceStringByKey', 'rightx', 'round', 'r2polar', 'sawtooth',
|
||||
'scaleToIndex', 'ScreenResolution', 'sec', 'sech', 'Secs2Date', 'Secs2Time',
|
||||
'SelectNumber', 'SelectString', 'SetEnvironmentVariable', 'sign', 'sin', 'sinc',
|
||||
'sinh', 'sinIntegral', 'SortList', 'SpecialCharacterInfo', 'SpecialCharacterList',
|
||||
'SpecialDirPath', 'SphericalBessJ', 'SphericalBessJD', 'SphericalBessY',
|
||||
'SphericalBessYD', 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
|
||||
'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF', 'StatsCauchyCDF',
|
||||
'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF', 'StatsCMSSDCDF',
|
||||
'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF', 'StatsErlangCDF',
|
||||
'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF', 'StatsEValuePDF',
|
||||
'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF', 'StatsFPDF', 'StatsFriedmanCDF',
|
||||
'StatsGammaCDF', 'StatsGammaPDF', 'StatsGeometricCDF', 'StatsGeometricPDF',
|
||||
'StatsGEVCDF', 'StatsGEVPDF', 'StatsHyperGCDF', 'StatsHyperGPDF',
|
||||
'StatsInvBetaCDF', 'StatsInvBinomialCDF', 'StatsInvCauchyCDF', 'StatsInvChiCDF',
|
||||
'StatsInvCMSSDCDF', 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
|
||||
'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF', 'StatsInvGeometricCDF',
|
||||
'StatsInvKuiperCDF', 'StatsInvLogisticCDF', 'StatsInvLogNormalCDF',
|
||||
'StatsInvMaxwellCDF', 'StatsInvMooreCDF', 'StatsInvNBinomialCDF',
|
||||
'StatsInvNCChiCDF', 'StatsInvNCFCDF', 'StatsInvNormalCDF', 'StatsInvParetoCDF',
|
||||
'StatsInvPoissonCDF', 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
|
||||
'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
|
||||
'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
|
||||
'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
|
||||
'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
|
||||
'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
|
||||
'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
|
||||
'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
|
||||
'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
|
||||
'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
|
||||
'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
|
||||
'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
|
||||
'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
|
||||
'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
|
||||
'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
|
||||
'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF', 'StatsLogNormalCDF',
|
||||
'StatsLogNormalPDF', 'StatsMaxwellCDF', 'StatsMaxwellPDF', 'StatsMedian',
|
||||
'StatsMooreCDF', 'StatsNBinomialCDF', 'StatsNBinomialPDF', 'StatsNCChiCDF',
|
||||
'StatsNCChiPDF', 'StatsNCFCDF', 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF',
|
||||
'StatsNormalCDF', 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF',
|
||||
'StatsPermute', 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
|
||||
'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF', 'StatsRayleighCDF',
|
||||
'StatsRayleighPDF', 'StatsRectangularCDF', 'StatsRectangularPDF', 'StatsRunsCDF',
|
||||
'StatsSpearmanRhoCDF', 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
|
||||
'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
|
||||
'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
|
||||
'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
|
||||
'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
|
||||
'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
|
||||
'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
|
||||
'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
|
||||
'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
|
||||
'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
|
||||
'WinType', 'WNoise', 'x2pnt', 'xcsr', 'zcsr', 'ZernikeR',
|
||||
)
|
||||
functions += (
|
||||
'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
|
||||
'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
|
||||
'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
|
||||
'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
|
||||
'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
|
||||
'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
|
||||
'GetDimLabel', 'GetErrMessage', 'GetFormula',
|
||||
'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
|
||||
'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
|
||||
'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
|
||||
'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
|
||||
'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
|
||||
'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
|
||||
'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
|
||||
'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
|
||||
'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
|
||||
'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
|
||||
'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
|
||||
'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
|
||||
'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
|
||||
'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
|
||||
'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
|
||||
'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
|
||||
'WinList', 'WinName', 'WinRecreation', 'XWaveName',
|
||||
'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
|
||||
'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
|
||||
'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
|
||||
'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
|
||||
'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise', 'StatsVonMisesPDF',
|
||||
'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF', 'StatsWeibullPDF',
|
||||
'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList',
|
||||
'stringmatch', 'strlen', 'strsearch', 'StrVarOrDefault', 'str2num', 'StudentA',
|
||||
'StudentT', 'sum', 'SVAR_Exists', 'TableInfo', 'TagVal', 'TagWaveRef', 'tan',
|
||||
'tanh', 'TextEncodingCode', 'TextEncodingName', 'TextFile', 'ThreadGroupCreate',
|
||||
'ThreadGroupGetDF', 'ThreadGroupGetDFR', 'ThreadGroupRelease', 'ThreadGroupWait',
|
||||
'ThreadProcessorCount', 'ThreadReturnValue', 'ticks', 'time', 'TraceFromPixel',
|
||||
'TraceInfo', 'TraceNameList', 'TraceNameToWaveRef', 'trunc', 'UniqueName',
|
||||
'UnPadString', 'UnsetEnvironmentVariable', 'UpperStr', 'URLDecode', 'URLEncode',
|
||||
'VariableList', 'Variance', 'vcsr', 'Voigt', 'VoigtFit', 'VoigtFitBL',
|
||||
'VoigtFit1Shape', 'VoigtFit1ShapeBL', 'VoigtFit1Shape1Width',
|
||||
'VoigtFit1Shape1WidthBL', 'VoigtFunc', 'WaveCRC', 'WaveDims', 'WaveExists',
|
||||
'WaveInfo', 'WaveList', 'WaveMax', 'WaveMin', 'WaveName', 'WaveRefIndexed',
|
||||
'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding',
|
||||
'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation',
|
||||
'WinType', 'WMFindWholeWord', 'WNoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace',
|
||||
'x2pnt', 'zcsr', 'ZernikeR', 'zeta'
|
||||
)
|
||||
|
||||
tokens = {
|
||||
|
@ -272,7 +280,7 @@ class IgorLexer(RegexLexer):
|
|||
# Built-in functions.
|
||||
(words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
|
||||
# Compiler directives.
|
||||
(r'^#(include|pragma|define|ifdef|ifndef|endif)',
|
||||
(r'^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)',
|
||||
Name.Decorator),
|
||||
(r'[^a-z"/]+$', Text),
|
||||
(r'.', Text),
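The widened directive pattern now also covers `#if`, `#elif`, `#else` and `#undef`. A small sketch of how one of the new alternatives is tokenized (illustrative only, not part of this changeset); the Igor snippet is invented for the example:

    from pygments.lexers.igor import IgorLexer
    from pygments.token import Name

    code = '#if exists("foo")\n'

    # '#if' is one of the alternatives added to the directive rule, so it is
    # now emitted as Name.Decorator instead of falling through to plain text.
    directives = [v for t, v in IgorLexer().get_tokens(code) if t is Name.Decorator]
    print(directives)  # expected: ['#if']
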

@ -5,7 +5,7 @@

Lexers for Inferno os and all the related stuff.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -5,7 +5,7 @@

Lexers for installer/packager DSLs and formats.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -5,7 +5,7 @@

Lexers for interactive fiction languages.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -5,7 +5,7 @@

Lexers for the Io language.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -5,7 +5,7 @@

Lexer for the J programming language.

:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

@ -48,7 +48,7 @@ class JLexer(RegexLexer):
|
|||
|
||||
# Definitions
|
||||
(r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
|
||||
(r'\b(([1-4]|13)\s+:\s*0)|((adverb|conjunction|dyad|monad|verb)\s+define)\b',
|
||||
(r'(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b',
|
||||
Name.Function, 'explicitDefinition'),
|
||||
|
||||
# Flow Control
|
||||
|
@ -75,8 +75,8 @@ class JLexer(RegexLexer):
|
|||
'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
|
||||
'getenv', 'hfd', 'inv', 'inverse', 'iospath',
|
||||
'isatty', 'isutf8', 'items', 'leaf', 'list',
|
||||
'nameclass', 'namelist', 'namelist', 'names', 'nc',
|
||||
'nl', 'on', 'pick', 'pick', 'rows',
|
||||
'nameclass', 'namelist', 'names', 'nc',
|
||||
'nl', 'on', 'pick', 'rows',
|
||||
'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
|
||||
'sort', 'split', 'stderr', 'stdin', 'stdout',
|
||||
'table', 'take', 'timespacex', 'timex', 'tmoutput',
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
Lexers for JavaScript and related languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -20,7 +20,7 @@ import pygments.unistring as uni
|
|||
|
||||
__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
|
||||
'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
|
||||
'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer']
|
||||
'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer']
|
||||
|
||||
JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
|
||||
']|\\\\u[a-fA-F0-9]{4})')
|
||||
|
@ -53,7 +53,7 @@ class JavascriptLexer(RegexLexer):
|
|||
'slashstartsregex': [
|
||||
include('commentsandwhitespace'),
|
||||
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
|
||||
r'([gim]+\b|\B)', String.Regex, '#pop'),
|
||||
r'([gimuy]+\b|\B)', String.Regex, '#pop'),
|
||||
(r'(?=/)', Text, ('#pop', 'badregex')),
|
||||
default('#pop')
|
||||
],
|
||||
|
@ -64,9 +64,14 @@ class JavascriptLexer(RegexLexer):
|
|||
(r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
|
||||
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
|
||||
include('commentsandwhitespace'),
|
||||
(r'(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?', Number.Float),
|
||||
(r'0[bB][01]+', Number.Bin),
|
||||
(r'0[oO][0-7]+', Number.Oct),
|
||||
(r'0[xX][0-9a-fA-F]+', Number.Hex),
|
||||
(r'[0-9]+', Number.Integer),
|
||||
(r'\.\.\.|=>', Punctuation),
|
||||
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
|
||||
r'(<<|>>>?|=>|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
|
||||
(r'\.\.\.', Punctuation),
|
||||
r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
|
||||
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
|
||||
(r'[})\].]', Punctuation),
|
||||
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
|
||||
|
@ -84,11 +89,6 @@ class JavascriptLexer(RegexLexer):
|
|||
r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
|
||||
r'document|this|window)\b', Name.Builtin),
|
||||
(JS_IDENT, Name.Other),
|
||||
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
|
||||
(r'0b[01]+', Number.Bin),
|
||||
(r'0o[0-7]+', Number.Oct),
|
||||
(r'0x[0-9a-fA-F]+', Number.Hex),
|
||||
(r'[0-9]+', Number.Integer),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
(r"'(\\\\|\\'|[^'])*'", String.Single),
|
||||
(r'`', String.Backtick, 'interp'),
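Together, these two hunks move the numeric-literal rules ahead of the identifier rule and add the ES2015 binary and octal forms. A quick way to see the effect (illustrative only, not part of this changeset); the snippet is invented for the example:

    from pygments.lexers.javascript import JavascriptLexer
    from pygments.token import Number

    code = "let n = 0b1010 + 0o17 + 0xFF + 1.5e3;\n"

    # With the reordered rules, 0b/0o/0x literals are tokenized as
    # Number.Bin / Number.Oct / Number.Hex before the identifier rule runs.
    numbers = [(t, v) for t, v in JavascriptLexer().get_tokens(code) if t in Number]
    print(numbers)
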
@ -97,13 +97,13 @@ class JavascriptLexer(RegexLexer):
|
|||
(r'`', String.Backtick, '#pop'),
|
||||
(r'\\\\', String.Backtick),
|
||||
(r'\\`', String.Backtick),
|
||||
(r'\${', String.Interpol, 'interp-inside'),
|
||||
(r'\$\{', String.Interpol, 'interp-inside'),
|
||||
(r'\$', String.Backtick),
|
||||
(r'[^`\\$]+', String.Backtick),
|
||||
],
|
||||
'interp-inside': [
|
||||
# TODO: should this include single-line comments and allow nesting strings?
|
||||
(r'}', String.Interpol, '#pop'),
|
||||
(r'\}', String.Interpol, '#pop'),
|
||||
include('root'),
|
||||
],
|
||||
# (\\\\|\\`|[^`])*`', String.Backtick),
|
||||
|
@ -366,9 +366,10 @@ class DartLexer(RegexLexer):
|
|||
(r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
|
||||
r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
|
||||
Keyword),
|
||||
(r'\b(abstract|const|extends|factory|final|get|implements|'
|
||||
r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
|
||||
(r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
|
||||
(r'\b(abstract|async|await|const|extends|factory|final|get|'
|
||||
r'implements|native|operator|set|static|sync|typedef|var|with|'
|
||||
r'yield)\b', Keyword.Declaration),
|
||||
(r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
|
||||
(r'\b(false|null|true)\b', Keyword.Constant),
|
||||
(r'[~!%^&*+=|?:<>/-]|as\b', Operator),
|
||||
(r'[a-zA-Z_$]\w*:', Name.Label),
|
||||
|
@ -447,7 +448,7 @@ class TypeScriptLexer(RegexLexer):
|
|||
|
||||
name = 'TypeScript'
|
||||
aliases = ['ts', 'typescript']
|
||||
filenames = ['*.ts']
|
||||
filenames = ['*.ts', '*.tsx']
|
||||
mimetypes = ['text/x-typescript']
|
||||
|
||||
flags = re.DOTALL | re.MULTILINE
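With `*.tsx` added to the filename patterns, filename-based lookup now resolves TSX files to this lexer. Illustrative only, not part of this changeset; the filename is invented:

    from pygments.lexers import get_lexer_for_filename

    # '*.tsx' is now listed in TypeScriptLexer.filenames, so no explicit
    # lexer name is needed for TSX files.
    print(get_lexer_for_filename('App.tsx'))  # e.g. <pygments.lexers.TypeScriptLexer>
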
@ -511,11 +512,34 @@ class TypeScriptLexer(RegexLexer):
|
|||
(r'[0-9]+', Number.Integer),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
(r"'(\\\\|\\'|[^'])*'", String.Single),
|
||||
(r'`', String.Backtick, 'interp'),
|
||||
# Match stuff like: Decorators
|
||||
(r'@\w+', Keyword.Declaration),
|
||||
]
|
||||
],
|
||||
|
||||
# The 'interp*' rules match those in JavascriptLexer. Changes made
|
||||
# there should be reflected here as well.
|
||||
'interp': [
|
||||
(r'`', String.Backtick, '#pop'),
|
||||
(r'\\\\', String.Backtick),
|
||||
(r'\\`', String.Backtick),
|
||||
(r'\$\{', String.Interpol, 'interp-inside'),
|
||||
(r'\$', String.Backtick),
|
||||
(r'[^`\\$]+', String.Backtick),
|
||||
],
|
||||
'interp-inside': [
|
||||
# TODO: should this include single-line comments and allow nesting strings?
|
||||
(r'\}', String.Interpol, '#pop'),
|
||||
include('root'),
|
||||
],
|
||||
}
|
||||
|
||||
def analyse_text(text):
|
||||
if re.search('^(import.+(from\s+)?["\']|'
|
||||
'(export\s*)?(interface|class|function)\s+)',
|
||||
text, re.MULTILINE):
|
||||
return 1.0
|
||||
|
||||
|
||||
class LassoLexer(RegexLexer):
|
||||
"""
|
||||
|
@ -545,12 +569,7 @@ class LassoLexer(RegexLexer):
|
|||
tokens = {
|
||||
'root': [
|
||||
(r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
|
||||
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
|
||||
(r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
|
||||
(r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
|
||||
(r'<\?(LassoScript|lasso|=)', Comment.Preproc,
|
||||
('delimiters', 'anglebrackets')),
|
||||
(r'<(!--.*?-->)?', Other, 'delimiters'),
|
||||
(r'(?=\[|<)', Other, 'delimiters'),
|
||||
(r'\s+', Other),
|
||||
default(('delimiters', 'lassofile')),
|
||||
],
|
||||
|
@ -558,14 +577,14 @@ class LassoLexer(RegexLexer):
|
|||
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
|
||||
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
|
||||
(r'\[', Comment.Preproc, 'squarebrackets'),
|
||||
(r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
|
||||
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
|
||||
(r'<(!--.*?-->)?', Other),
|
||||
(r'[^[<]+', Other),
|
||||
],
|
||||
'nosquarebrackets': [
|
||||
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
|
||||
(r'\[', Other),
|
||||
(r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
|
||||
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
|
||||
(r'<(!--.*?-->)?', Other),
|
||||
(r'[^[<]+', Other),
|
||||
],
|
||||
|
@ -607,7 +626,7 @@ class LassoLexer(RegexLexer):
|
|||
|
||||
# names
|
||||
(r'\$[a-z_][\w.]*', Name.Variable),
|
||||
(r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
|
||||
(r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
|
||||
(r"(\.\s*)('[a-z_][\w.]*')",
|
||||
bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
|
||||
(r"(self)(\s*->\s*)('[a-z_][\w.]*')",
|
||||
|
@ -658,20 +677,20 @@ class LassoLexer(RegexLexer):
|
|||
r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
|
||||
r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
|
||||
r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
|
||||
r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
|
||||
r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
|
||||
r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
|
||||
r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
|
||||
r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
|
||||
r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
|
||||
r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
|
||||
r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
|
||||
r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
|
||||
r'Tag_Name|ascending|average|by|define|descending|do|equals|'
|
||||
r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
|
||||
r'min|on|order|parent|protected|provide|public|require|returnhome|'
|
||||
r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
|
||||
r'yield|yieldhome)\b',
|
||||
r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
|
||||
r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
|
||||
r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
|
||||
r'Private|Protect|Records|Referer|Referrer|Repeating|ResultSet|'
|
||||
r'Rows|Search_Args|Search_Arguments|Select|Sort_Args|'
|
||||
r'Sort_Arguments|Thread_Atomic|Value_List|While|Abort|Case|Else|'
|
||||
r'Fail_If|Fail_IfNot|Fail|If_Empty|If_False|If_Null|If_True|'
|
||||
r'Loop_Abort|Loop_Continue|Loop_Count|Params|Params_Up|Return|'
|
||||
r'Return_Value|Run_Children|SOAP_DefineTag|SOAP_LastRequest|'
|
||||
r'SOAP_LastResponse|Tag_Name|ascending|average|by|define|'
|
||||
r'descending|do|equals|frozen|group|handle_failure|import|in|into|'
|
||||
r'join|let|match|max|min|on|order|parent|protected|provide|public|'
|
||||
r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
|
||||
r'type|where|with|yield|yieldhome)\b',
|
||||
bygroups(Punctuation, Keyword)),
|
||||
|
||||
# other
|
||||
|
@ -1016,6 +1035,12 @@ class CoffeeScriptLexer(RegexLexer):
|
|||
filenames = ['*.coffee']
|
||||
mimetypes = ['text/coffeescript']
|
||||
|
||||
|
||||
_operator_re = (
|
||||
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
|
||||
r'\|\||\\(?=\n)|'
|
||||
r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?')
|
||||
|
||||
flags = re.DOTALL
|
||||
tokens = {
|
||||
'commentsandwhitespace': [
|
||||
|
@ -1034,17 +1059,17 @@ class CoffeeScriptLexer(RegexLexer):
|
|||
(r'///', String.Regex, ('#pop', 'multilineregex')),
|
||||
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
|
||||
r'([gim]+\b|\B)', String.Regex, '#pop'),
|
||||
# This isn't really guarding against mishighlighting well-formed
|
||||
# code, just the ability to infinite-loop between root and
|
||||
# slashstartsregex.
|
||||
(r'/', Operator),
|
||||
default('#pop'),
|
||||
],
|
||||
'root': [
|
||||
# this next expr leads to infinite loops root -> slashstartsregex
|
||||
# (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
|
||||
include('commentsandwhitespace'),
|
||||
(r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
|
||||
r'\|\||\\(?=\n)|'
|
||||
r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|^/])=?',
|
||||
Operator, 'slashstartsregex'),
|
||||
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function),
|
||||
(r'^(?=\s|/)', Text, 'slashstartsregex'),
|
||||
(_operator_re, Operator, 'slashstartsregex'),
|
||||
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
|
||||
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
|
||||
(r'[})\].]', Punctuation),
|
||||
(r'(?<![.$])(for|own|in|of|while|until|'
|
||||
|
@ -1065,7 +1090,7 @@ class CoffeeScriptLexer(RegexLexer):
|
|||
(r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
|
||||
'slashstartsregex'),
|
||||
(r'@', Name.Other, 'slashstartsregex'),
|
||||
(r'@?[$a-zA-Z_][\w$]*', Name.Other, 'slashstartsregex'),
|
||||
(r'@?[$a-zA-Z_][\w$]*', Name.Other),
|
||||
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
|
||||
(r'0x[0-9a-fA-F]+', Number.Hex),
|
||||
(r'[0-9]+', Number.Integer),
|
||||
|
@ -1245,32 +1270,32 @@ class EarlGreyLexer(RegexLexer):
|
|||
include('control'),
|
||||
(r'[^\S\n]+', Text),
|
||||
(r';;.*\n', Comment),
|
||||
(r'[\[\]\{\}\:\(\)\,\;]', Punctuation),
|
||||
(r'[\[\]{}:(),;]', Punctuation),
|
||||
(r'\\\n', Text),
|
||||
(r'\\', Text),
|
||||
include('errors'),
|
||||
(words((
|
||||
'with', 'where', 'when', 'and', 'not', 'or', 'in',
|
||||
'as', 'of', 'is'),
|
||||
prefix=r'(?<=\s|\[)', suffix=r'(?![\w\$\-])'),
|
||||
prefix=r'(?<=\s|\[)', suffix=r'(?![\w$\-])'),
|
||||
Operator.Word),
|
||||
(r'[\*@]?->', Name.Function),
|
||||
(r'[*@]?->', Name.Function),
|
||||
(r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
|
||||
(r'\.{2,3}', Operator.Word), # Range Operator
|
||||
(r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
|
||||
(r'(?<![\w\$\-])(var|let)(?:[^\w\$])', Keyword.Declaration),
|
||||
(r'(?<![\w$\-])(var|let)(?:[^\w$])', Keyword.Declaration),
|
||||
include('keywords'),
|
||||
include('builtins'),
|
||||
include('assignment'),
|
||||
(r'''(?x)
|
||||
(?:()([a-zA-Z$_](?:[a-zA-Z$0-9_\-]*[a-zA-Z$0-9_])?)|
|
||||
(?<=[\s\{\[\(])(\.)([a-zA-Z$_](?:[a-zA-Z$0-9_\-]*[a-zA-Z$0-9_])?))
|
||||
(?:()([a-zA-Z$_](?:[\w$\-]*[\w$])?)|
|
||||
(?<=[\s{\[(])(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?))
|
||||
(?=.*%)''',
|
||||
bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
|
||||
(r'[rR]?`', String.Backtick, 'bt'),
|
||||
(r'[rR]?```', String.Backtick, 'tbt'),
|
||||
(r'(?<=[\s\[\{\(,;])\.([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)'
|
||||
r'(?=[\s\]\}\),;])', String.Symbol),
|
||||
(r'(?<=[\s\[{(,;])\.([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
|
||||
r'(?=[\s\]}),;])', String.Symbol),
|
||||
include('nested'),
|
||||
(r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
|
||||
(r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
|
||||
|
@ -1281,9 +1306,9 @@ class EarlGreyLexer(RegexLexer):
|
|||
include('numbers'),
|
||||
],
|
||||
'dbs': [
|
||||
(r'(\.)([a-zA-Z$_](?:[a-zA-Z$0-9_\-]*[a-zA-Z$0-9_])?)(?=[\[\.\s])',
|
||||
(r'(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?)(?=[.\[\s])',
|
||||
bygroups(Punctuation, Name.Class.DBS)),
|
||||
(r'(\[)([\^#][a-zA-Z$_](?:[a-zA-Z$0-9_\-]*[a-zA-Z$0-9_])?)(\])',
|
||||
(r'(\[)([\^#][a-zA-Z$_](?:[\w$\-]*[\w$])?)(\])',
|
||||
bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
|
||||
(r'\s+', Text),
|
||||
(r'%', Operator.DBS, '#pop'),
|
||||
|
@ -1293,29 +1318,29 @@ class EarlGreyLexer(RegexLexer):
|
|||
bygroups(Text.Whitespace, Text)),
|
||||
],
|
||||
'assignment': [
|
||||
(r'(\.)?([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)'
|
||||
(r'(\.)?([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
|
||||
r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
|
||||
bygroups(Punctuation, Name.Variable))
|
||||
],
|
||||
'errors': [
|
||||
(words(('Error', 'TypeError', 'ReferenceError'),
|
||||
prefix=r'(?<![\w\$\-\.])', suffix=r'(?![\w\$\-\.])'),
|
||||
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
|
||||
Name.Exception),
|
||||
(r'''(?x)
|
||||
(?<![\w\$])
|
||||
E\.[\w\$](?:[\w\$\-]*[\w\$])?
|
||||
(?:\.[\w\$](?:[\w\$\-]*[\w\$])?)*
|
||||
(?=[\(\{\[\?\!\s])''',
|
||||
(?<![\w$])
|
||||
E\.[\w$](?:[\w$\-]*[\w$])?
|
||||
(?:\.[\w$](?:[\w$\-]*[\w$])?)*
|
||||
(?=[({\[?!\s])''',
|
||||
Name.Exception),
|
||||
],
|
||||
'control': [
|
||||
(r'''(?x)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)
|
||||
(?!\n)\s+
|
||||
(?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
|
||||
(?=(?:[+\-*/~^<>%&|?!@#.])?[a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)''',
|
||||
(?=(?:[+\-*/~^<>%&|?!@#.])?[a-zA-Z$_](?:[\w$-]*[\w$])?)''',
|
||||
Keyword.Control),
|
||||
(r'([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)(?!\n)\s+(?=[\'"\d\{\[\(])',
|
||||
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(?!\n)\s+(?=[\'"\d{\[(])',
|
||||
Keyword.Control),
|
||||
(r'''(?x)
|
||||
(?:
|
||||
|
@ -1324,28 +1349,28 @@ class EarlGreyLexer(RegexLexer):
|
|||
(?<=with|each|with)|
|
||||
(?<=each\*|where)
|
||||
)(\s+)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_\-]*[a-zA-Z$0-9_])?)(:)''',
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
|
||||
bygroups(Text, Keyword.Control, Punctuation)),
|
||||
(r'''(?x)
|
||||
(?<![+\-*/~^<>%&|?!@#.])(\s+)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)(:)''',
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
|
||||
bygroups(Text, Keyword.Control, Punctuation)),
|
||||
],
|
||||
'nested': [
|
||||
(r'''(?x)
|
||||
(?<=[a-zA-Z$0-9_\]\}\)])(\.)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)
|
||||
(?<=[\w$\]})])(\.)
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)
|
||||
(?=\s+with(?:\s|\n))''',
|
||||
bygroups(Punctuation, Name.Function)),
|
||||
(r'''(?x)
|
||||
(?<!\s)(\.)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)
|
||||
(?=[\}\]\)\.,;:\s])''',
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)
|
||||
(?=[}\]).,;:\s])''',
|
||||
bygroups(Punctuation, Name.Field)),
|
||||
(r'''(?x)
|
||||
(?<=[a-zA-Z$0-9_\]\}\)])(\.)
|
||||
([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)
|
||||
(?=[\[\{\(:])''',
|
||||
(?<=[\w$\]})])(\.)
|
||||
([a-zA-Z$_](?:[\w$-]*[\w$])?)
|
||||
(?=[\[{(:])''',
|
||||
bygroups(Punctuation, Name.Function)),
|
||||
],
|
||||
'keywords': [
|
||||
|
@ -1354,15 +1379,15 @@ class EarlGreyLexer(RegexLexer):
|
|||
'continue', 'elif', 'expr-value', 'if', 'match',
|
||||
'return', 'yield', 'pass', 'else', 'require', 'var',
|
||||
'let', 'async', 'method', 'gen'),
|
||||
prefix=r'(?<![\w\$\-\.])', suffix=r'(?![\w\$\-\.])'),
|
||||
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
|
||||
Keyword.Pseudo),
|
||||
(words(('this', 'self', '@'),
|
||||
prefix=r'(?<![\w\$\-\.])', suffix=r'(?![\w\$\-])'),
|
||||
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
|
||||
Keyword.Constant),
|
||||
(words((
|
||||
'Function', 'Object', 'Array', 'String', 'Number',
|
||||
'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
|
||||
prefix=r'(?<![\w\$\-\.])', suffix=r'(?![\w\$\-])'),
|
||||
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
|
||||
Keyword.Type),
|
||||
],
|
||||
'builtins': [
|
||||
|
@ -1373,20 +1398,20 @@ class EarlGreyLexer(RegexLexer):
|
|||
'getChecker', 'get-checker', 'getProperty', 'get-property',
|
||||
'getProjector', 'get-projector', 'consume', 'take',
|
||||
'promisify', 'spawn', 'constructor'),
|
||||
prefix=r'(?<![\w\-#\.])', suffix=r'(?![\w\-\.])'),
|
||||
prefix=r'(?<![\w\-#.])', suffix=r'(?![\w\-.])'),
|
||||
Name.Builtin),
|
||||
(words((
|
||||
'true', 'false', 'null', 'undefined'),
|
||||
prefix=r'(?<![\w\$\-\.])', suffix=r'(?![\w\$\-\.])'),
|
||||
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
|
||||
Name.Constant),
|
||||
],
|
||||
'name': [
|
||||
(r'@([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)', Name.Variable.Instance),
|
||||
(r'([a-zA-Z$_](?:[a-zA-Z$0-9_-]*[a-zA-Z$0-9_])?)(\+\+|\-\-)?',
|
||||
(r'@([a-zA-Z$_](?:[\w$-]*[\w$])?)', Name.Variable.Instance),
|
||||
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(\+\+|\-\-)?',
|
||||
bygroups(Name.Symbol, Operator.Word))
|
||||
],
|
||||
'tuple': [
|
||||
(r'#[a-zA-Z_][a-zA-Z_\-0-9]*(?=[\s\{\(,;\n])', Name.Namespace)
|
||||
(r'#[a-zA-Z_][\w\-]*(?=[\s{(,;])', Name.Namespace)
|
||||
],
|
||||
'interpoling_string': [
|
||||
(r'\}', String.Interpol, '#pop'),
|
||||
|
@ -1426,7 +1451,7 @@ class EarlGreyLexer(RegexLexer):
|
|||
(r'```', String.Backtick, '#pop'),
|
||||
(r'\n', String.Backtick),
|
||||
(r'\^=?', String.Escape),
|
||||
(r'[^\`]+', String.Backtick),
|
||||
(r'[^`]+', String.Backtick),
|
||||
],
|
||||
'numbers': [
|
||||
(r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
|
||||
|
@ -1434,7 +1459,67 @@ class EarlGreyLexer(RegexLexer):
|
|||
(r'8r[0-7]+', Number.Oct),
|
||||
(r'2r[01]+', Number.Bin),
|
||||
(r'16r[a-fA-F0-9]+', Number.Hex),
|
||||
(r'([3-79]|[1-2][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?', Number.Radix),
|
||||
(r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?', Number.Radix),
|
||||
(r'\d+', Number.Integer)
|
||||
],
|
||||
}
|
||||
|
||||
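The EarlGrey 'numbers' rules above keep dedicated token types for octal (8r…), binary (2r…), hex (16r…) and general radix (NrXXX) literals. A small token-inspection sketch, assuming a Pygments install that ships EarlGreyLexer (the sample literals are made up):

    from pygments import lex
    from pygments.lexers import EarlGreyLexer

    # Each literal should hit one of the radix rules shown in the hunk above.
    for tok, value in lex('8r17 2r1010 16rFF 36rZZ', EarlGreyLexer()):
        print(tok, repr(value))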
class JuttleLexer(RegexLexer):
|
||||
"""
|
||||
For `Juttle`_ source code.
|
||||
|
||||
.. _Juttle: https://github.com/juttle/juttle
|
||||
|
||||
"""
|
||||
|
||||
name = 'Juttle'
|
||||
aliases = ['juttle']
|
||||
filenames = ['*.juttle']
|
||||
mimetypes = ['application/juttle', 'application/x-juttle',
|
||||
'text/x-juttle', 'text/juttle']
|
||||
|
||||
flags = re.DOTALL | re.UNICODE | re.MULTILINE
|
||||
|
||||
tokens = {
|
||||
'commentsandwhitespace': [
|
||||
(r'\s+', Text),
|
||||
(r'//.*?\n', Comment.Single),
|
||||
(r'/\*.*?\*/', Comment.Multiline)
|
||||
],
|
||||
'slashstartsregex': [
|
||||
include('commentsandwhitespace'),
|
||||
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
|
||||
r'([gim]+\b|\B)', String.Regex, '#pop'),
|
||||
(r'(?=/)', Text, ('#pop', 'badregex')),
|
||||
default('#pop')
|
||||
],
|
||||
'badregex': [
|
||||
(r'\n', Text, '#pop')
|
||||
],
|
||||
'root': [
|
||||
(r'^(?=\s|/)', Text, 'slashstartsregex'),
|
||||
include('commentsandwhitespace'),
|
||||
(r':\d{2}:\d{2}:\d{2}(\.\d*)?:', String.Moment),
|
||||
(r':(now|beginning|end|forever|yesterday|today|tomorrow|(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
|
||||
(r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
|
||||
(r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?'
|
||||
r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)'
|
||||
r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
|
||||
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
|
||||
r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
|
||||
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
|
||||
(r'[})\].]', Punctuation),
|
||||
(r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
|
||||
(r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration, 'slashstartsregex'),
|
||||
(r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
|
||||
r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b', Keyword.Reserved),
|
||||
(r'(true|false|null|Infinity)\b', Keyword.Constant),
|
||||
(r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b', Name.Builtin),
|
||||
(JS_IDENT, Name.Other),
|
||||
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
|
||||
(r'[0-9]+', Number.Integer),
|
||||
(r'"(\\\\|\\"|[^"])*"', String.Double),
|
||||
(r"'(\\\\|\\'|[^'])*'", String.Single)
|
||||
]
|
||||
|
||||
}
|
||||
|
|
|
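The JuttleLexer added above registers the 'juttle' alias and '*.juttle' filename pattern and tokenizes Juttle moments, flowgraph keywords and operators. A minimal sketch of driving it through the public Pygments API (the Juttle snippet itself is made up for illustration):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    code = 'read -from :1 hour ago: | filter level = "error" | view table'
    # ':1 hour ago:' should come out as String.Moment, and 'read'/'filter'/'view'
    # as Keyword.Reserved, per the rules in the hunk above.
    print(highlight(code, get_lexer_by_name('juttle'), TerminalFormatter()))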
@ -5,19 +5,24 @@
|
|||
|
||||
Lexers for the Julia language.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from pygments.lexer import Lexer, RegexLexer, bygroups, combined, do_insertions
|
||||
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
|
||||
words, include
|
||||
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
||||
Number, Punctuation, Generic
|
||||
from pygments.util import shebang_matches, unirange
|
||||
|
||||
__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
|
||||
|
||||
allowed_variable = (
|
||||
u'(?:[a-zA-Z_\u00A1-\uffff]|%s)(?:[a-zA-Z_0-9\u00A1-\uffff]|%s)*!*' %
|
||||
((unirange(0x10000, 0x10ffff),) * 2))
|
||||
|
||||
|
||||
class JuliaLexer(RegexLexer):
|
||||
"""
|
||||
|
@ -25,6 +30,7 @@ class JuliaLexer(RegexLexer):
|
|||
|
||||
.. versionadded:: 1.6
|
||||
"""
|
||||
|
||||
name = 'Julia'
|
||||
aliases = ['julia', 'jl']
|
||||
filenames = ['*.jl']
|
||||
|
@ -32,51 +38,151 @@ class JuliaLexer(RegexLexer):
|
|||
|
||||
flags = re.MULTILINE | re.UNICODE
|
||||
|
||||
builtins = [
|
||||
'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
|
||||
'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
|
||||
'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type',
|
||||
'method_exists', 'applicable', 'invoke', 'dlopen', 'dlsym', 'system',
|
||||
'error', 'throw', 'assert', 'new', 'Inf', 'Nan', 'pi', 'im',
|
||||
]
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'\n', Text),
|
||||
(r'[^\S\n]+', Text),
|
||||
(r'#=', Comment.Multiline, "blockcomment"),
|
||||
(r'#.*$', Comment),
|
||||
(r'[]{}:(),;[@]', Punctuation),
|
||||
(r'\\\n', Text),
|
||||
(r'\\', Text),
|
||||
(r'[\[\]{}(),;]', Punctuation),
|
||||
|
||||
# keywords
|
||||
(r'(begin|while|for|in|return|break|continue|'
|
||||
r'macro|quote|let|if|elseif|else|try|catch|end|'
|
||||
r'bitstype|ccall|do|using|module|import|export|'
|
||||
r'importall|baremodule|immutable)\b', Keyword),
|
||||
(r'in\b', Keyword.Pseudo),
|
||||
(r'(true|false)\b', Keyword.Constant),
|
||||
(r'(local|global|const)\b', Keyword.Declaration),
|
||||
(r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
|
||||
r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
|
||||
(words([
|
||||
'function', 'type', 'typealias', 'abstract', 'immutable',
|
||||
'baremodule', 'begin', 'bitstype', 'break', 'catch', 'ccall',
|
||||
'continue', 'do', 'else', 'elseif', 'end', 'export', 'finally',
|
||||
'for', 'if', 'import', 'importall', 'let', 'macro', 'module',
|
||||
'quote', 'return', 'try', 'using', 'while'],
|
||||
suffix=r'\b'), Keyword),
|
||||
|
||||
# NOTE
|
||||
# Patterns below work only for definition sites and thus hardly reliable.
|
||||
#
|
||||
# functions
|
||||
# (r'(function)(\s+)(' + allowed_variable + ')',
|
||||
# bygroups(Keyword, Text, Name.Function)),
|
||||
#
|
||||
# types
|
||||
# (r'(type|typealias|abstract|immutable)(\s+)(' + allowed_variable + ')',
|
||||
# bygroups(Keyword, Text, Name.Class)),
|
||||
|
||||
# type names
|
||||
(words([
|
||||
'ANY', 'ASCIIString', 'AbstractArray', 'AbstractChannel',
|
||||
'AbstractFloat', 'AbstractMatrix', 'AbstractRNG',
|
||||
'AbstractSparseArray', 'AbstractSparseMatrix',
|
||||
'AbstractSparseVector', 'AbstractString', 'AbstractVecOrMat',
|
||||
'AbstractVector', 'Any', 'ArgumentError', 'Array',
|
||||
'AssertionError', 'Associative', 'Base64DecodePipe',
|
||||
'Base64EncodePipe', 'Bidiagonal', 'BigFloat', 'BigInt',
|
||||
'BitArray', 'BitMatrix', 'BitVector', 'Bool', 'BoundsError',
|
||||
'Box', 'BufferStream', 'CapturedException', 'CartesianIndex',
|
||||
'CartesianRange', 'Cchar', 'Cdouble', 'Cfloat', 'Channel',
|
||||
'Char', 'Cint', 'Cintmax_t', 'Clong', 'Clonglong',
|
||||
'ClusterManager', 'Cmd', 'Coff_t', 'Colon', 'Complex',
|
||||
'Complex128', 'Complex32', 'Complex64', 'CompositeException',
|
||||
'Condition', 'Cptrdiff_t', 'Cshort', 'Csize_t', 'Cssize_t',
|
||||
'Cstring', 'Cuchar', 'Cuint', 'Cuintmax_t', 'Culong',
|
||||
'Culonglong', 'Cushort', 'Cwchar_t', 'Cwstring', 'DataType',
|
||||
'Date', 'DateTime', 'DenseArray', 'DenseMatrix',
|
||||
'DenseVecOrMat', 'DenseVector', 'Diagonal', 'Dict',
|
||||
'DimensionMismatch', 'Dims', 'DirectIndexString', 'Display',
|
||||
'DivideError', 'DomainError', 'EOFError', 'EachLine', 'Enum',
|
||||
'Enumerate', 'ErrorException', 'Exception', 'Expr',
|
||||
'Factorization', 'FileMonitor', 'FileOffset', 'Filter',
|
||||
'Float16', 'Float32', 'Float64', 'FloatRange', 'Function',
|
||||
'GenSym', 'GlobalRef', 'GotoNode', 'HTML', 'Hermitian', 'IO',
|
||||
'IOBuffer', 'IOStream', 'IPv4', 'IPv6', 'InexactError',
|
||||
'InitError', 'Int', 'Int128', 'Int16', 'Int32', 'Int64', 'Int8',
|
||||
'IntSet', 'Integer', 'InterruptException', 'IntrinsicFunction',
|
||||
'InvalidStateException', 'Irrational', 'KeyError', 'LabelNode',
|
||||
'LambdaStaticData', 'LinSpace', 'LineNumberNode', 'LoadError',
|
||||
'LocalProcess', 'LowerTriangular', 'MIME', 'Matrix',
|
||||
'MersenneTwister', 'Method', 'MethodError', 'MethodTable',
|
||||
'Module', 'NTuple', 'NewvarNode', 'NullException', 'Nullable',
|
||||
'Number', 'ObjectIdDict', 'OrdinalRange', 'OutOfMemoryError',
|
||||
'OverflowError', 'Pair', 'ParseError', 'PartialQuickSort',
|
||||
'Pipe', 'PollingFileWatcher', 'ProcessExitedException',
|
||||
'ProcessGroup', 'Ptr', 'QuoteNode', 'RandomDevice', 'Range',
|
||||
'Rational', 'RawFD', 'ReadOnlyMemoryError', 'Real',
|
||||
'ReentrantLock', 'Ref', 'Regex', 'RegexMatch',
|
||||
'RemoteException', 'RemoteRef', 'RepString', 'RevString',
|
||||
'RopeString', 'RoundingMode', 'SegmentationFault',
|
||||
'SerializationState', 'Set', 'SharedArray', 'SharedMatrix',
|
||||
'SharedVector', 'Signed', 'SimpleVector', 'SparseMatrixCSC',
|
||||
'StackOverflowError', 'StatStruct', 'StepRange', 'StridedArray',
|
||||
'StridedMatrix', 'StridedVecOrMat', 'StridedVector', 'SubArray',
|
||||
'SubString', 'SymTridiagonal', 'Symbol', 'SymbolNode',
|
||||
'Symmetric', 'SystemError', 'TCPSocket', 'Task', 'Text',
|
||||
'TextDisplay', 'Timer', 'TopNode', 'Tridiagonal', 'Tuple',
|
||||
'Type', 'TypeConstructor', 'TypeError', 'TypeName', 'TypeVar',
|
||||
'UDPSocket', 'UInt', 'UInt128', 'UInt16', 'UInt32', 'UInt64',
|
||||
'UInt8', 'UTF16String', 'UTF32String', 'UTF8String',
|
||||
'UndefRefError', 'UndefVarError', 'UnicodeError', 'UniformScaling',
|
||||
'Union', 'UnitRange', 'Unsigned', 'UpperTriangular', 'Val',
|
||||
'Vararg', 'VecOrMat', 'Vector', 'VersionNumber', 'Void', 'WString',
|
||||
'WeakKeyDict', 'WeakRef', 'WorkerConfig', 'Zip'], suffix=r'\b'),
|
||||
Keyword.Type),
|
||||
|
||||
# functions
|
||||
(r'(function)((?:\s|\\\s)+)',
|
||||
bygroups(Keyword, Name.Function), 'funcname'),
|
||||
|
||||
# types
|
||||
(r'(type|typealias|abstract|immutable)((?:\s|\\\s)+)',
|
||||
bygroups(Keyword, Name.Class), 'typename'),
|
||||
# builtins
|
||||
(words([
|
||||
u'ARGS', u'CPU_CORES', u'C_NULL', u'DevNull', u'ENDIAN_BOM',
|
||||
u'ENV', u'I', u'Inf', u'Inf16', u'Inf32', u'Inf64',
|
||||
u'InsertionSort', u'JULIA_HOME', u'LOAD_PATH', u'MergeSort',
|
||||
u'NaN', u'NaN16', u'NaN32', u'NaN64', u'OS_NAME',
|
||||
u'QuickSort', u'RoundDown', u'RoundFromZero', u'RoundNearest',
|
||||
u'RoundNearestTiesAway', u'RoundNearestTiesUp',
|
||||
u'RoundToZero', u'RoundUp', u'STDERR', u'STDIN', u'STDOUT',
|
||||
u'VERSION', u'WORD_SIZE', u'catalan', u'e', u'eu',
|
||||
u'eulergamma', u'golden', u'im', u'nothing', u'pi', u'γ',
|
||||
u'π', u'φ'],
|
||||
suffix=r'\b'), Name.Builtin),
|
||||
|
||||
# operators
|
||||
(r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
|
||||
(r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
|
||||
|
||||
# builtins
|
||||
('(' + '|'.join(builtins) + r')\b', Name.Builtin),
|
||||
|
||||
# backticks
|
||||
(r'`(?s).*?`', String.Backtick),
|
||||
# see: https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
|
||||
(words([
|
||||
# prec-assignment
|
||||
u'=', u':=', u'+=', u'-=', u'*=', u'/=', u'//=', u'.//=', u'.*=', u'./=',
|
||||
u'\=', u'.\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=',
|
||||
u'$=', u'=>', u'<<=', u'>>=', u'>>>=', u'~', u'.+=', u'.-=',
|
||||
# prec-conditional
|
||||
u'?',
|
||||
# prec-arrow
|
||||
u'--', u'-->',
|
||||
# prec-lazy-or
|
||||
u'||',
|
||||
# prec-lazy-and
|
||||
u'&&',
|
||||
# prec-comparison
|
||||
u'>', u'<', u'>=', u'≥', u'<=', u'≤', u'==', u'===', u'≡', u'!=', u'≠',
|
||||
u'!==', u'≢', u'.>', u'.<', u'.>=', u'.≥', u'.<=', u'.≤', u'.==', u'.!=',
|
||||
u'.≠', u'.=', u'.!', u'<:', u'>:', u'∈', u'∉', u'∋', u'∌', u'⊆',
|
||||
u'⊈', u'⊂',
|
||||
u'⊄', u'⊊',
|
||||
# prec-pipe
|
||||
u'|>', u'<|',
|
||||
# prec-colon
|
||||
u':',
|
||||
# prec-plus
|
||||
u'+', u'-', u'.+', u'.-', u'|', u'∪', u'$',
|
||||
# prec-bitshift
|
||||
u'<<', u'>>', u'>>>', u'.<<', u'.>>', u'.>>>',
|
||||
# prec-times
|
||||
u'*', u'/', u'./', u'÷', u'.÷', u'%', u'⋅', u'.%', u'.*', u'\\', u'.\\', u'&', u'∩',
|
||||
# prec-rational
|
||||
u'//', u'.//',
|
||||
# prec-power
|
||||
u'^', u'.^',
|
||||
# prec-decl
|
||||
u'::',
|
||||
# prec-dot
|
||||
u'.',
|
||||
# unary op
|
||||
u'+', u'-', u'!', u'~', u'√', u'∛', u'∜'
|
||||
]), Operator),
|
||||
|
||||
# chars
|
||||
(r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
|
||||
|
@ -86,13 +192,19 @@ class JuliaLexer(RegexLexer):
|
|||
(r'(?<=[.\w)\]])\'+', Operator),
|
||||
|
||||
# strings
|
||||
(r'(?:[IL])"', String, 'string'),
|
||||
(r'[E]?"', String, combined('stringescape', 'string')),
|
||||
(r'"""', String, 'tqstring'),
|
||||
(r'"', String, 'string'),
|
||||
|
||||
# regular expressions
|
||||
(r'r"""', String.Regex, 'tqregex'),
|
||||
(r'r"', String.Regex, 'regex'),
|
||||
|
||||
# backticks
|
||||
(r'`', String.Backtick, 'command'),
|
||||
|
||||
# names
|
||||
(r'@[\w.]+', Name.Decorator),
|
||||
(u'(?:[a-zA-Z_\u00A1-\uffff]|%s)(?:[a-zA-Z_0-9\u00A1-\uffff]|%s)*!*' %
|
||||
((unirange(0x10000, 0x10ffff),)*2), Name),
|
||||
(allowed_variable, Name),
|
||||
(r'@' + allowed_variable, Name.Decorator),
|
||||
|
||||
# numbers
|
||||
(r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
|
||||
|
@ -109,45 +221,59 @@ class JuliaLexer(RegexLexer):
|
|||
(r'\d+', Number.Integer)
|
||||
],
|
||||
|
||||
'funcname': [
|
||||
('[a-zA-Z_]\w*', Name.Function, '#pop'),
|
||||
('\([^\s\w{]{1,2}\)', Operator, '#pop'),
|
||||
('[^\s\w{]{1,2}', Operator, '#pop'),
|
||||
],
|
||||
|
||||
'typename': [
|
||||
('[a-zA-Z_]\w*', Name.Class, '#pop')
|
||||
],
|
||||
|
||||
'stringescape': [
|
||||
(r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
|
||||
r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
|
||||
],
|
||||
"blockcomment": [
|
||||
(r'[^=#]', Comment.Multiline),
|
||||
(r'#=', Comment.Multiline, '#push'),
|
||||
(r'=#', Comment.Multiline, '#pop'),
|
||||
(r'[=#]', Comment.Multiline),
|
||||
],
|
||||
|
||||
'string': [
|
||||
(r'"', String, '#pop'),
|
||||
(r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
|
||||
# FIXME: This escape pattern is not perfect.
|
||||
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
|
||||
# Interpolation is defined as "$" followed by the shortest full
|
||||
# expression, which is something we can't parse.
|
||||
# Include the most common cases here: $word, and $(paren'd expr).
|
||||
(r'\$[a-zA-Z_]+', String.Interpol),
|
||||
(r'\$\(', String.Interpol, 'in-intp'),
|
||||
(r'\$' + allowed_variable, String.Interpol),
|
||||
# (r'\$[a-zA-Z_]+', String.Interpol),
|
||||
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
|
||||
# @printf and @sprintf formats
|
||||
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[diouxXeEfFgGcrs%]',
|
||||
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
|
||||
String.Interpol),
|
||||
(r'[^$%"\\]+', String),
|
||||
# unhandled special signs
|
||||
(r'[$%"\\]', String),
|
||||
(r'.|\s', String),
|
||||
],
|
||||
|
||||
'tqstring': [
|
||||
(r'"""', String, '#pop'),
|
||||
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
|
||||
(r'\$' + allowed_variable, String.Interpol),
|
||||
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
|
||||
(r'.|\s', String),
|
||||
],
|
||||
|
||||
'regex': [
|
||||
(r'"', String.Regex, '#pop'),
|
||||
(r'\\"', String.Regex),
|
||||
(r'.|\s', String.Regex),
|
||||
],
|
||||
|
||||
'tqregex': [
|
||||
(r'"""', String.Regex, '#pop'),
|
||||
(r'.|\s', String.Regex),
|
||||
],
|
||||
|
||||
'command': [
|
||||
(r'`', String.Backtick, '#pop'),
|
||||
(r'\$' + allowed_variable, String.Interpol),
|
||||
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
|
||||
(r'.|\s', String.Backtick)
|
||||
],
|
||||
|
||||
'in-intp': [
|
||||
(r'[^()]+', String.Interpol),
|
||||
(r'\(', String.Interpol, '#push'),
|
||||
(r'\)', String.Interpol, '#pop'),
|
||||
(r'\(', Punctuation, '#push'),
|
||||
(r'\)', Punctuation, '#pop'),
|
||||
include('root'),
|
||||
]
|
||||
}
|
||||
|
||||
|
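With the rules above, Julia string interpolation is tokenized via the shared allowed_variable pattern: `$name` becomes String.Interpol, and `$(` opens the 'in-intp' state with separate String.Interpol/Punctuation groups. A quick way to inspect the resulting token stream (sketch, assuming a Pygments build that includes this lexer):

    from pygments import lex
    from pygments.lexers import JuliaLexer

    for tok, value in lex('s = "pi is $pi, twice $(2x)"', JuliaLexer()):
        print(tok, repr(value))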
@ -155,9 +281,6 @@ class JuliaLexer(RegexLexer):
|
|||
return shebang_matches(text, r'julia')
|
||||
|
||||
|
||||
line_re = re.compile('.*?\n')
|
||||
|
||||
|
||||
class JuliaConsoleLexer(Lexer):
|
||||
"""
|
||||
For Julia console sessions. Modeled after MatlabSessionLexer.
|
||||
|
@ -169,27 +292,26 @@ class JuliaConsoleLexer(Lexer):
|
|||
|
||||
def get_tokens_unprocessed(self, text):
|
||||
jllexer = JuliaLexer(**self.options)
|
||||
|
||||
start = 0
|
||||
curcode = ''
|
||||
insertions = []
|
||||
output = False
|
||||
error = False
|
||||
|
||||
for match in line_re.finditer(text):
|
||||
line = match.group()
|
||||
|
||||
for line in text.splitlines(True):
|
||||
if line.startswith('julia>'):
|
||||
insertions.append((len(curcode),
|
||||
[(0, Generic.Prompt, line[:6])]))
|
||||
insertions.append((len(curcode), [(0, Generic.Prompt, line[:6])]))
|
||||
curcode += line[6:]
|
||||
output = False
|
||||
error = False
|
||||
elif line.startswith('help?>') or line.startswith('shell>'):
|
||||
yield start, Generic.Prompt, line[:6]
|
||||
yield start + 6, Text, line[6:]
|
||||
output = False
|
||||
error = False
|
||||
elif line.startswith(' ') and not output:
|
||||
insertions.append((len(curcode), [(0, Text, line[:6])]))
|
||||
curcode += line[6:]
|
||||
|
||||
elif line.startswith(' '):
|
||||
|
||||
idx = len(curcode)
|
||||
|
||||
# without is showing error on same line as before...?
|
||||
line = "\n" + line
|
||||
token = (0, Generic.Traceback, line)
|
||||
insertions.append((idx, [token]))
|
||||
|
||||
else:
|
||||
if curcode:
|
||||
for item in do_insertions(
|
||||
|
@ -197,10 +319,15 @@ class JuliaConsoleLexer(Lexer):
|
|||
yield item
|
||||
curcode = ''
|
||||
insertions = []
|
||||
if line.startswith('ERROR: ') or error:
|
||||
yield start, Generic.Error, line
|
||||
error = True
|
||||
else:
|
||||
yield start, Generic.Output, line
|
||||
output = True
|
||||
start += len(line)
|
||||
|
||||
yield match.start(), Generic.Output, line
|
||||
|
||||
if curcode: # or item:
|
||||
if curcode:
|
||||
for item in do_insertions(
|
||||
insertions, jllexer.get_tokens_unprocessed(curcode)):
|
||||
yield item
|
||||
|
|
|
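The rewritten JuliaConsoleLexer walks the session line by line instead of using line_re, emitting Generic.Prompt for 'julia>'/'help?>'/'shell>' prompts, feeding prompt lines and their continuations back through JuliaLexer, and marking 'ERROR: ' blocks as Generic.Error. A short transcript is enough to see the split (sketch; the session text is invented):

    from pygments import lex
    from pygments.lexers import JuliaConsoleLexer

    session = (
        'julia> 1 + 1\n'
        '2\n'
        '\n'
        'julia> sqrt(-1)\n'
        'ERROR: DomainError\n'
    )
    for tok, value in lex(session, JuliaConsoleLexer()):
        print(tok, repr(value))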
@ -5,7 +5,7 @@
|
|||
|
||||
Pygments lexers for JVM languages.
|
||||
|
||||
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
|
||||
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
|
@ -59,25 +59,27 @@ class JavaLexer(RegexLexer):
|
|||
Keyword.Type),
|
||||
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
|
||||
(r'(true|false|null)\b', Keyword.Constant),
|
||||
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
|
||||
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
|
||||
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
|
||||
'class'),
|
||||
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
|
||||
'import'),
|
||||
(r'"(\\\\|\\"|[^"])*"', String),
|
||||
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
|
||||
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
|
||||
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
|
||||
(r'([^\W\d]|\$)[\w$]*', Name),
|
||||
(r'([0-9](_*[0-9]+)*\.([0-9](_*[0-9]+)*)?|'
|
||||
r'([0-9](_*[0-9]+)*)?\.[0-9](_*[0-9]+)*)'
|
||||
r'([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]?|'
|
||||
r'[0-9][eE][+\-]?[0-9](_*[0-9]+)*[fFdD]?|'
|
||||
r'[0-9]([eE][+\-]?[0-9](_*[0-9]+)*)?[fFdD]|'
|
||||
r'0[xX]([0-9a-fA-F](_*[0-9a-fA-F]+)*\.?|'
|
||||
r'([0-9a-fA-F](_*[0-9a-fA-F]+)*)?\.[0-9a-fA-F](_*[0-9a-fA-F]+)*)'
|
||||
r'[pP][+\-]?[0-9](_*[0-9]+)*[fFdD]?', Number.Float),
|
||||
(r'0[xX][0-9a-fA-F](_*[0-9a-fA-F]+)*[lL]?', Number.Hex),
|
||||
(r'0[bB][01](_*[01]+)*[lL]?', Number.Bin),
|
||||
(r'0(_*[0-7]+)+[lL]?', Number.Oct),
|
||||
(r'0|[1-9](_*[0-9]+)*[lL]?', Number.Integer),
|
||||
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
|
||||
r'\.[0-9][0-9_]*)'
|
||||
r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
|
||||
r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
|
||||
r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
|
||||
r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
|
||||
r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
|
||||
r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
|
||||
(r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
|
||||
(r'0[bB][01][01_]*[lL]?', Number.Bin),
|
||||
(r'0[0-7_]+[lL]?', Number.Oct),
|
||||
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
|
||||
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
|
||||
(r'\n', Text)
|
||||
],
|
||||
|
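In the Java hunk above, the number-literal rules are simplified from the nested `[0-9](_*[0-9]+)*` form to flat `[0-9][0-9_]*`-style character classes, and the import rule gains optional `static`. Token assignment for underscore-separated literals can be checked with a short sketch (assuming Pygments 2.2+ is installed):

    from pygments import lex
    from pygments.lexers import JavaLexer

    for literal in ('1_000_000', '0xCAFE_BABE', '0b1010_1010', '017_777'):
        print(literal, list(lex(literal, JavaLexer())))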
@ -564,14 +566,14 @@ class IokeLexer(RegexLexer):
|
|||
],
|
||||
|
||||
'slashRegexp': [
|
||||
(r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
|
||||
(r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
|
||||
include('interpolatableText'),
|
||||
(r'\\/', String.Regex),
|
||||
(r'[^/]', String.Regex)
|
||||
],
|
||||
|
||||
'squareRegexp': [
|
||||
(r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
|
||||
(r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
|
||||
include('interpolatableText'),
|
||||
(r'\\]', String.Regex),
|
||||
(r'[^\]]', String.Regex)
|
||||
|
|
Some files were not shown because too many files have changed in this diff.